repo_name    stringlengths    6..100
path         stringlengths    4..294
copies       stringlengths    1..5
size         stringlengths    4..6
content      stringlengths    606..896k
license      stringclasses    15 values
onitake/ansible
lib/ansible/modules/network/netscaler/netscaler_gslb_site.py
72
14171
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright (c) 2017 Citrix Systems
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: netscaler_gslb_site
short_description: Manage gslb site entities in Netscaler.
description:
    - Manage gslb site entities in Netscaler.

version_added: "2.4.0"

author: George Nikolopoulos (@giorgos-nikolopoulos)

options:

    sitename:
        description:
            - >-
                Name for the GSLB site. Must begin with an ASCII alphanumeric or underscore C(_) character, and must
                contain only ASCII alphanumeric, underscore C(_), hash C(#), period C(.), space C( ), colon C(:), at
                C(@), equals C(=), and hyphen C(-) characters. Cannot be changed after the virtual server is created.
            - "Minimum length = 1"

    sitetype:
        choices:
            - 'REMOTE'
            - 'LOCAL'
        description:
            - >-
                Type of site to create. If the type is not specified, the appliance automatically detects and sets
                the type on the basis of the IP address being assigned to the site. If the specified site IP address
                is owned by the appliance (for example, a MIP address or SNIP address), the site is a local site.
                Otherwise, it is a remote site.

    siteipaddress:
        description:
            - >-
                IP address for the GSLB site. The GSLB site uses this IP address to communicate with other GSLB
                sites. For a local site, use any IP address that is owned by the appliance (for example, a SNIP or
                MIP address, or the IP address of the ADNS service).
            - "Minimum length = 1"

    publicip:
        description:
            - >-
                Public IP address for the local site. Required only if the appliance is deployed in a private address
                space and the site has a public IP address hosted on an external firewall or a NAT device.
            - "Minimum length = 1"

    metricexchange:
        choices:
            - 'enabled'
            - 'disabled'
        description:
            - >-
                Exchange metrics with other sites. Metrics are exchanged by using Metric Exchange Protocol (MEP). The
                appliances in the GSLB setup exchange health information once every second.
            - >-
                If you disable metrics exchange, you can use only static load balancing methods (such as round robin,
                static proximity, or the hash-based methods), and if you disable metrics exchange when a dynamic load
                balancing method (such as least connection) is in operation, the appliance falls back to round robin.
                Also, if you disable metrics exchange, you must use a monitor to determine the state of GSLB
                services. Otherwise, the service is marked as DOWN.

    nwmetricexchange:
        choices:
            - 'enabled'
            - 'disabled'
        description:
            - >-
                Exchange, with other GSLB sites, network metrics such as round-trip time (RTT), learned from
                communications with various local DNS (LDNS) servers used by clients. RTT information is used in the
                dynamic RTT load balancing method, and is exchanged every 5 seconds.

    sessionexchange:
        choices:
            - 'enabled'
            - 'disabled'
        description:
            - "Exchange persistent session entries with other GSLB sites every five seconds."

    triggermonitor:
        choices:
            - 'ALWAYS'
            - 'MEPDOWN'
            - 'MEPDOWN_SVCDOWN'
        description:
            - >-
                Specify the conditions under which the GSLB service must be monitored by a monitor, if one is bound.
                Available settings function as follows:
            - "* C(ALWAYS) - Monitor the GSLB service at all times."
            - >-
                * C(MEPDOWN) - Monitor the GSLB service only when the exchange of metrics through the Metrics
                Exchange Protocol (MEP) is disabled.
            - "* C(MEPDOWN_SVCDOWN) - Monitor the service in either of the following situations:"
            - "* The exchange of metrics through MEP is disabled."
            - >-
                * The exchange of metrics through MEP is enabled but the status of the service, learned through
                metrics exchange, is DOWN.

    parentsite:
        description:
            - "Parent site of the GSLB site, in a parent-child topology."

    clip:
        description:
            - >-
                Cluster IP address. Specify this parameter to connect to the remote cluster site for GSLB auto-sync.
                Note: The cluster IP address is defined when creating the cluster.

    publicclip:
        description:
            - >-
                IP address to be used to globally access the remote cluster when it is deployed behind a NAT. It can
                be same as the normal cluster IP address.

    naptrreplacementsuffix:
        description:
            - >-
                The naptr replacement suffix configured here will be used to construct the naptr replacement field
                in NAPTR record.
            - "Minimum length = 1"

extends_documentation_fragment: netscaler

requirements:
    - nitro python sdk
'''

EXAMPLES = '''
- name: Setup gslb site
  delegate_to: localhost
  netscaler_gslb_site:
    nsip: 172.18.0.2
    nitro_user: nsroot
    nitro_pass: nsroot

    sitename: gslb-site-1
    siteipaddress: 192.168.1.1
    sitetype: LOCAL
    publicip: 192.168.1.1
    metricexchange: enabled
    nwmetricexchange: enabled
    sessionexchange: enabled
    triggermonitor: ALWAYS
'''

RETURN = '''
loglines:
    description: list of logged messages by the module
    returned: always
    type: list
    sample: "['message 1', 'message 2']"

msg:
    description: Message detailing the failure reason
    returned: failure
    type: string
    sample: "Action does not exist"

diff:
    description: List of differences between the actual configured object and the configuration specified in the module
    returned: failure
    type: dictionary
    sample: "{ 'targetlbvserver': 'difference. ours: (str) server1 other: (str) server2' }"
'''

try:
    from nssrc.com.citrix.netscaler.nitro.resource.config.gslb.gslbsite import gslbsite
    from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
    PYTHON_SDK_IMPORTED = True
except ImportError as e:
    PYTHON_SDK_IMPORTED = False

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.netscaler.netscaler import (
    ConfigProxy,
    get_nitro_client,
    netscaler_common_arguments,
    log,
    loglines,
    ensure_feature_is_enabled,
    get_immutables_intersection,
)


def gslb_site_exists(client, module):
    if gslbsite.count_filtered(client, 'sitename:%s' % module.params['sitename']) > 0:
        return True
    else:
        return False


def gslb_site_identical(client, module, gslb_site_proxy):
    gslb_site_list = gslbsite.get_filtered(client, 'sitename:%s' % module.params['sitename'])
    diff_dict = gslb_site_proxy.diff_object(gslb_site_list[0])
    if len(diff_dict) == 0:
        return True
    else:
        return False


def diff_list(client, module, gslb_site_proxy):
    gslb_site_list = gslbsite.get_filtered(client, 'sitename:%s' % module.params['sitename'])
    return gslb_site_proxy.diff_object(gslb_site_list[0])


def main():

    module_specific_arguments = dict(
        sitename=dict(type='str'),
        sitetype=dict(
            type='str',
            choices=[
                'REMOTE',
                'LOCAL',
            ]
        ),
        siteipaddress=dict(type='str'),
        publicip=dict(type='str'),
        metricexchange=dict(
            type='str',
            choices=[
                'enabled',
                'disabled',
            ]
        ),
        nwmetricexchange=dict(
            type='str',
            choices=[
                'enabled',
                'disabled',
            ]
        ),
        sessionexchange=dict(
            type='str',
            choices=[
                'enabled',
                'disabled',
            ]
        ),
        triggermonitor=dict(
            type='str',
            choices=[
                'ALWAYS',
                'MEPDOWN',
                'MEPDOWN_SVCDOWN',
            ]
        ),
        parentsite=dict(type='str'),
        clip=dict(type='str'),
        publicclip=dict(type='str'),
        naptrreplacementsuffix=dict(type='str'),
    )

    hand_inserted_arguments = dict()

    argument_spec = dict()

    argument_spec.update(netscaler_common_arguments)
    argument_spec.update(module_specific_arguments)
    argument_spec.update(hand_inserted_arguments)

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    module_result = dict(
        changed=False,
        failed=False,
        loglines=loglines,
    )

    # Fail the module if imports failed
    if not PYTHON_SDK_IMPORTED:
        module.fail_json(msg='Could not load nitro python sdk')

    # Fallthrough to rest of execution
    client = get_nitro_client(module)

    try:
        client.login()
    except nitro_exception as e:
        msg = "nitro exception during login. errorcode=%s, message=%s" % (str(e.errorcode), e.message)
        module.fail_json(msg=msg)
    except Exception as e:
        if str(type(e)) == "<class 'requests.exceptions.ConnectionError'>":
            module.fail_json(msg='Connection error %s' % str(e))
        elif str(type(e)) == "<class 'requests.exceptions.SSLError'>":
            module.fail_json(msg='SSL Error %s' % str(e))
        else:
            module.fail_json(msg='Unexpected error during login %s' % str(e))

    readwrite_attrs = [
        'sitename',
        'sitetype',
        'siteipaddress',
        'publicip',
        'metricexchange',
        'nwmetricexchange',
        'sessionexchange',
        'triggermonitor',
        'parentsite',
        'clip',
        'publicclip',
        'naptrreplacementsuffix',
    ]

    readonly_attrs = [
        'status',
        'persistencemepstatus',
        'version',
        '__count',
    ]

    immutable_attrs = [
        'sitename',
        'sitetype',
        'siteipaddress',
        'publicip',
        'parentsite',
        'clip',
        'publicclip',
    ]

    transforms = {
        'metricexchange': [lambda v: v.upper()],
        'nwmetricexchange': [lambda v: v.upper()],
        'sessionexchange': [lambda v: v.upper()],
    }

    # Instantiate config proxy
    gslb_site_proxy = ConfigProxy(
        actual=gslbsite(),
        client=client,
        attribute_values_dict=module.params,
        readwrite_attrs=readwrite_attrs,
        readonly_attrs=readonly_attrs,
        immutable_attrs=immutable_attrs,
        transforms=transforms,
    )

    try:
        ensure_feature_is_enabled(client, 'GSLB')

        # Apply appropriate state
        if module.params['state'] == 'present':
            log('Applying actions for state present')
            if not gslb_site_exists(client, module):
                if not module.check_mode:
                    gslb_site_proxy.add()
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            elif not gslb_site_identical(client, module, gslb_site_proxy):

                # Check if we try to change value of immutable attributes
                immutables_changed = get_immutables_intersection(gslb_site_proxy, diff_list(client, module, gslb_site_proxy).keys())
                if immutables_changed != []:
                    module.fail_json(
                        msg='Cannot update immutable attributes %s' % (immutables_changed,),
                        diff=diff_list(client, module, gslb_site_proxy),
                        **module_result
                    )

                if not module.check_mode:
                    gslb_site_proxy.update()
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            else:
                module_result['changed'] = False

            # Sanity check for state
            if not module.check_mode:
                log('Sanity checks for state present')
                if not gslb_site_exists(client, module):
                    module.fail_json(msg='GSLB site does not exist', **module_result)
                if not gslb_site_identical(client, module, gslb_site_proxy):
                    module.fail_json(msg='GSLB site differs from configured', diff=diff_list(client, module, gslb_site_proxy), **module_result)

        elif module.params['state'] == 'absent':
            log('Applying actions for state absent')
            if gslb_site_exists(client, module):
                if not module.check_mode:
                    gslb_site_proxy.delete()
                    if module.params['save_config']:
                        client.save_config()
                module_result['changed'] = True
            else:
                module_result['changed'] = False

            # Sanity check for state
            if not module.check_mode:
                log('Sanity checks for state absent')
                if gslb_site_exists(client, module):
                    module.fail_json(msg='GSLB site still exists', **module_result)

    except nitro_exception as e:
        msg = "nitro exception errorcode=%s, message=%s" % (str(e.errorcode), e.message)
        module.fail_json(msg=msg, **module_result)

    client.logout()
    module.exit_json(**module_result)


if __name__ == "__main__":
    main()
gpl-3.0
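Editor's note: the state=present branch above follows the usual idempotence pattern (add when the object is missing, update when it has drifted, otherwise no-op). A minimal standalone sketch of that decision logic, independent of the nitro SDK; the function and names below are hypothetical, not part of the module:

def reconcile(exists, identical):
    # mirrors the state=present branch: add when missing, update when drifted
    if not exists:
        return 'add', True
    if not identical:
        return 'update', True
    return 'noop', False

print(reconcile(False, False))  # ('add', True)
print(reconcile(True, False))   # ('update', True)
print(reconcile(True, True))    # ('noop', False)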
teleological/camxes-py
transformers/raw.py
1
1215
# pylint: disable=I0011, C0111, too-few-public-methods, no-self-use

from collections import OrderedDict

from parsimonious.nodes import NodeVisitor

KEY_ORDER = {
    "name": 1,
    "type": 2,
    "description": 3,
    "text": 4,
    "pos": 5,
    "children": 6
}


def _sorted_node_dictionary(dct):
    return OrderedDict(sorted(dct.items(), key=lambda t: KEY_ORDER[t[0]]))


def generic_node(node, visited_children):
    node_dictionary = {"type": node.node_type()}
    if node.expr_name:
        node_dictionary["name"] = node.expr_name
    else:
        node_dictionary["description"] = node.description()
    start = node_dictionary["pos"] = node.start
    length = node.end - start
    if length > 0 and node.text:
        node_dictionary["text"] = node.text
    if visited_children:
        node_dictionary["children"] = visited_children
    return _sorted_node_dictionary(node_dictionary)


class Transformer(object):

    def transform(self, parsed):
        return Visitor().visit(parsed)


class Visitor(NodeVisitor):

    def generic_visit(self, node, visited_children):
        return generic_node(node, visited_children)
mit
kingmotley/SickRage
sickbeard/providers/__init__.py
1
3649
# coding=utf-8
# Author: Nic Wolfe <[email protected]>
#
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.

from os import sys
from random import shuffle

import sickbeard
from sickbeard.providers import btn, womble, thepiratebay, torrentleech, kat, iptorrents, torrentz, \
    omgwtfnzbs, scc, hdtorrents, torrentday, hdbits, hounddawgs, speedcd, nyaatorrents, bluetigers, xthor, abnormal, torrentbytes, cpasbien, \
    freshontv, morethantv, t411, tokyotoshokan, shazbat, rarbg, alpharatio, tntvillage, binsearch, torrentproject, extratorrent, \
    scenetime, btdigg, transmitthenet, tvchaosuk, bitcannon, pretome, gftracker, hdspace, newpct, elitetorrent, bitsnoop, danishbits, hd4free, limetorrents, \
    norbits, ilovetorrents

__all__ = [
    'womble', 'btn', 'thepiratebay', 'kat', 'torrentleech', 'scc', 'hdtorrents', 'torrentday',
    'hdbits', 'hounddawgs', 'iptorrents', 'omgwtfnzbs', 'speedcd', 'nyaatorrents', 'torrentbytes',
    'freshontv', 'cpasbien', 'morethantv', 't411', 'tokyotoshokan', 'alpharatio', 'shazbat', 'rarbg',
    'tntvillage', 'binsearch', 'bluetigers', 'xthor', 'abnormal', 'scenetime', 'btdigg',
    'transmitthenet', 'tvchaosuk', 'torrentproject', 'extratorrent', 'bitcannon', 'torrentz',
    'pretome', 'gftracker', 'hdspace', 'newpct', 'elitetorrent', 'bitsnoop', 'danishbits', 'hd4free',
    'limetorrents', 'norbits', 'ilovetorrents'
]


def sortedProviderList(randomize=False):
    initialList = sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList
    providerDict = dict(zip([x.get_id() for x in initialList], initialList))

    newList = []

    # add all modules in the priority list, in order
    for curModule in sickbeard.PROVIDER_ORDER:
        if curModule in providerDict:
            newList.append(providerDict[curModule])

    # add all enabled providers first
    for curModule in providerDict:
        if providerDict[curModule] not in newList and providerDict[curModule].is_enabled():
            newList.append(providerDict[curModule])

    # add any modules that are missing from that list
    for curModule in providerDict:
        if providerDict[curModule] not in newList:
            newList.append(providerDict[curModule])

    if randomize:
        shuffle(newList)

    return newList


def makeProviderList():
    return [x.provider for x in (getProviderModule(y) for y in __all__) if x]


def getProviderModule(name):
    name = name.lower()
    prefix = "sickbeard.providers."
    if name in __all__ and prefix + name in sys.modules:
        return sys.modules[prefix + name]
    else:
        raise Exception("Can't find " + prefix + name + " in " + "Providers")


def getProviderClass(provider_id):
    providerMatch = [
        x for x in sickbeard.providerList + sickbeard.newznabProviderList + sickbeard.torrentRssProviderList
        if x and x.get_id() == provider_id
    ]

    if len(providerMatch) != 1:
        return None
    else:
        return providerMatch[0]
gpl-3.0
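Editor's note: the three-pass ordering in sortedProviderList (explicit priority order first, then any remaining enabled providers, then everything else) can be shown in isolation. A minimal sketch with hypothetical provider ids, not part of SickRage:

def ordered(priority, enabled, everything):
    # same shape as sortedProviderList: priority order, then enabled, then the rest
    out = [p for p in priority if p in everything]
    out += [p for p in everything if p not in out and p in enabled]
    out += [p for p in everything if p not in out]
    return out

print(ordered(['btn'], {'kat', 'btn'}, ['womble', 'btn', 'kat']))
# ['btn', 'kat', 'womble']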
kursitet/edx-platform
openedx/core/djangoapps/credit/email_utils.py
32
6890
""" This file contains utility functions which will responsible for sending emails. """ import os import logging import pynliner import urlparse import uuid import HTMLParser from django.conf import settings from django.contrib.auth.models import User from django.contrib.staticfiles import finders from django.core.cache import cache from django.core.mail import EmailMessage, SafeMIMEText from django.core.urlresolvers import reverse from django.utils.translation import ugettext as _ from email.mime.image import MIMEImage from email.mime.multipart import MIMEMultipart from eventtracking import tracker from edxmako.shortcuts import render_to_string from edxmako.template import Template from microsite_configuration import microsite from xmodule.modulestore.django import modulestore log = logging.getLogger(__name__) def send_credit_notifications(username, course_key): """Sends email notification to user on different phases during credit course e.g., credit eligibility, credit payment etc. """ try: user = User.objects.get(username=username) except User.DoesNotExist: log.error('No user with %s exist', username) return course = modulestore().get_course(course_key, depth=0) course_display_name = course.display_name tracking_context = tracker.get_tracker().resolve_context() tracking_id = str(tracking_context.get('user_id')) client_id = str(tracking_context.get('client_id')) events = '&t=event&ec=email&ea=open' tracking_pixel = 'https://www.google-analytics.com/collect?v=1&tid' + tracking_id + '&cid' + client_id + events dashboard_link = _email_url_parser('dashboard') credit_course_link = _email_url_parser('courses', '?type=credit') # get attached branded logo logo_image = cache.get('credit.email.attached-logo') if logo_image is None: branded_logo = { 'title': 'Logo', 'path': settings.NOTIFICATION_EMAIL_EDX_LOGO, 'cid': str(uuid.uuid4()) } logo_image_id = branded_logo['cid'] logo_image = attach_image(branded_logo, 'Header Logo') if logo_image: cache.set('credit.email.attached-logo', logo_image, settings.CREDIT_NOTIFICATION_CACHE_TIMEOUT) else: # strip enclosing angle brackets from 'logo_image' cache 'Content-ID' logo_image_id = logo_image.get('Content-ID', '')[1:-1] context = { 'full_name': user.get_full_name(), 'platform_name': settings.PLATFORM_NAME, 'course_name': course_display_name, 'branded_logo': logo_image_id, 'dashboard_link': dashboard_link, 'credit_course_link': credit_course_link, 'tracking_pixel': tracking_pixel, } # create the root email message notification_msg = MIMEMultipart('related') # add 'alternative' part to root email message to encapsulate the plain and # HTML versions, so message agents can decide which they want to display. 
msg_alternative = MIMEMultipart('alternative') notification_msg.attach(msg_alternative) # render the credit notification templates subject = _(u'Course Credit Eligibility') # add alternative plain text message email_body_plain = render_to_string('credit_notifications/credit_eligibility_email.txt', context) msg_alternative.attach(SafeMIMEText(email_body_plain, _subtype='plain', _charset='utf-8')) # add alternative html message email_body_content = cache.get('credit.email.css-email-body') if email_body_content is None: html_file_path = file_path_finder('templates/credit_notifications/credit_eligibility_email.html') if html_file_path: with open(html_file_path, 'r') as cur_file: cur_text = cur_file.read() # use html parser to unescape html characters which are changed # by the 'pynliner' while adding inline css to html content html_parser = HTMLParser.HTMLParser() email_body_content = html_parser.unescape(with_inline_css(cur_text)) # cache the email body content before rendering it since the # email context will change for each user e.g., 'full_name' cache.set('credit.email.css-email-body', email_body_content, settings.CREDIT_NOTIFICATION_CACHE_TIMEOUT) else: email_body_content = '' email_body = Template(email_body_content).render([context]) msg_alternative.attach(SafeMIMEText(email_body, _subtype='html', _charset='utf-8')) # attach logo image if logo_image: notification_msg.attach(logo_image) # add email addresses of sender and receiver from_address = microsite.get_value('default_from_email', settings.DEFAULT_FROM_EMAIL) to_address = user.email # send the root email message msg = EmailMessage(subject, None, from_address, [to_address]) msg.attach(notification_msg) msg.send() def with_inline_css(html_without_css): """Returns html with inline css if the css file path exists else returns html with out the inline css. """ css_filepath = settings.NOTIFICATION_EMAIL_CSS if not css_filepath.startswith('/'): css_filepath = file_path_finder(settings.NOTIFICATION_EMAIL_CSS) if css_filepath: with open(css_filepath, "r") as _file: css_content = _file.read() # insert style tag in the html and run pyliner. html_with_inline_css = pynliner.fromString('<style>' + css_content + '</style>' + html_without_css) return html_with_inline_css return html_without_css def attach_image(img_dict, filename): """ Attach images in the email headers. """ img_path = img_dict['path'] if not img_path.startswith('/'): img_path = file_path_finder(img_path) if img_path: with open(img_path, 'rb') as img: msg_image = MIMEImage(img.read(), name=os.path.basename(img_path)) msg_image.add_header('Content-ID', '<{}>'.format(img_dict['cid'])) msg_image.add_header("Content-Disposition", "inline", filename=filename) return msg_image def file_path_finder(path): """ Return physical path of file if found. """ return finders.FileSystemFinder().find(path) def _email_url_parser(url_name, extra_param=None): """Parse url according to 'SITE_NAME' which will be used in the mail. Args: url_name(str): Name of the url to be parsed extra_param(str): Any extra parameters to be added with url if any Returns: str """ site_name = microsite.get_value('SITE_NAME', settings.SITE_NAME) dashboard_url_path = reverse(url_name) + extra_param if extra_param else reverse(url_name) dashboard_link_parts = ("https", site_name, dashboard_url_path, '', '', '') return urlparse.urlunparse(dashboard_link_parts)
agpl-3.0
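Editor's note: the related/alternative MIME nesting that send_credit_notifications builds can be reproduced with just the standard library. A minimal sketch with placeholder bodies:

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

root = MIMEMultipart('related')     # carries the HTML plus any inline images
alt = MIMEMultipart('alternative')  # lets mail clients pick plain text or HTML
root.attach(alt)
alt.attach(MIMEText('plain body', 'plain', 'utf-8'))
alt.attach(MIMEText('<b>html body</b>', 'html', 'utf-8'))
print(root.as_string()[:60])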
gnowxilef/youtube-dl
youtube_dl/extractor/nfb.py
74
4486
from __future__ import unicode_literals

from .common import InfoExtractor
from ..utils import (
    clean_html,
    determine_ext,
    int_or_none,
    qualities,
    urlencode_postdata,
    xpath_text,
)


class NFBIE(InfoExtractor):
    IE_NAME = 'nfb'
    IE_DESC = 'National Film Board of Canada'
    _VALID_URL = r'https?://(?:www\.)?(?:nfb|onf)\.ca/film/(?P<id>[\da-z_-]+)'

    _TEST = {
        'url': 'https://www.nfb.ca/film/qallunaat_why_white_people_are_funny',
        'info_dict': {
            'id': 'qallunaat_why_white_people_are_funny',
            'ext': 'flv',
            'title': 'Qallunaat! Why White People Are Funny ',
            'description': 'md5:6b8e32dde3abf91e58857b174916620c',
            'duration': 3128,
            'creator': 'Mark Sandiford',
            'uploader': 'Mark Sandiford',
        },
        'params': {
            # rtmp download
            'skip_download': True,
        }
    }

    def _real_extract(self, url):
        video_id = self._match_id(url)

        config = self._download_xml(
            'https://www.nfb.ca/film/%s/player_config' % video_id, video_id,
            'Downloading player config XML',
            data=urlencode_postdata({'getConfig': 'true'}),
            headers={
                'Content-Type': 'application/x-www-form-urlencoded',
                'X-NFB-Referer': 'http://www.nfb.ca/medias/flash/NFBVideoPlayer.swf'
            })

        title, description, thumbnail, duration, uploader, author = [None] * 6
        thumbnails, formats = [[]] * 2
        subtitles = {}

        for media in config.findall('./player/stream/media'):
            if media.get('type') == 'posterImage':
                quality_key = qualities(('low', 'high'))
                thumbnails = []
                for asset in media.findall('assets/asset'):
                    asset_url = xpath_text(asset, 'default/url', default=None)
                    if not asset_url:
                        continue
                    quality = asset.get('quality')
                    thumbnails.append({
                        'url': asset_url,
                        'id': quality,
                        'preference': quality_key(quality),
                    })
            elif media.get('type') == 'video':
                title = xpath_text(media, 'title', fatal=True)
                for asset in media.findall('assets/asset'):
                    quality = asset.get('quality')
                    height = int_or_none(self._search_regex(
                        r'^(\d+)[pP]$', quality or '', 'height', default=None))
                    for node in asset:
                        streamer = xpath_text(node, 'streamerURI', default=None)
                        if not streamer:
                            continue
                        play_path = xpath_text(node, 'url', default=None)
                        if not play_path:
                            continue
                        formats.append({
                            'url': streamer,
                            'app': streamer.split('/', 3)[3],
                            'play_path': play_path,
                            'rtmp_live': False,
                            'ext': 'flv',
                            'format_id': '%s-%s' % (node.tag, quality) if quality else node.tag,
                            'height': height,
                        })
                self._sort_formats(formats)
                description = clean_html(xpath_text(media, 'description'))
                uploader = xpath_text(media, 'author')
                duration = int_or_none(media.get('duration'))
                for subtitle in media.findall('./subtitles/subtitle'):
                    subtitle_url = xpath_text(subtitle, 'url', default=None)
                    if not subtitle_url:
                        continue
                    lang = xpath_text(subtitle, 'lang', default='en')
                    subtitles.setdefault(lang, []).append({
                        'url': subtitle_url,
                        'ext': (subtitle.get('format') or determine_ext(subtitle_url)).lower(),
                    })

        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnails': thumbnails,
            'duration': duration,
            'creator': uploader,
            'uploader': uploader,
            'formats': formats,
            'subtitles': subtitles,
        }
unlicense
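Editor's note: a hedged invocation sketch using youtube-dl's public API; the NFB extractor above is selected automatically from the URL, and download=False returns only the metadata dict:

from youtube_dl import YoutubeDL

with YoutubeDL() as ydl:
    info = ydl.extract_info(
        'https://www.nfb.ca/film/qallunaat_why_white_people_are_funny',
        download=False)
    print(info.get('title'), info.get('duration'))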
bkirui/odoo
addons/membership/report/report_membership.py
313
5267
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv
from openerp import tools
import openerp.addons.decimal_precision as dp

STATE = [
    ('none', 'Non Member'),
    ('canceled', 'Cancelled Member'),
    ('old', 'Old Member'),
    ('waiting', 'Waiting Member'),
    ('invoiced', 'Invoiced Member'),
    ('free', 'Free Member'),
    ('paid', 'Paid Member'),
]


class report_membership(osv.osv):
    '''Membership Analysis'''

    _name = 'report.membership'
    _description = __doc__
    _auto = False
    _rec_name = 'start_date'
    _columns = {
        'start_date': fields.date('Start Date', readonly=True),
        'date_to': fields.date('End Date', readonly=True, help="End membership date"),
        'num_waiting': fields.integer('# Waiting', readonly=True),
        'num_invoiced': fields.integer('# Invoiced', readonly=True),
        'num_paid': fields.integer('# Paid', readonly=True),
        'tot_pending': fields.float('Pending Amount', digits_compute=dp.get_precision('Account'), readonly=True),
        'tot_earned': fields.float('Earned Amount', digits_compute=dp.get_precision('Account'), readonly=True),
        'partner_id': fields.many2one('res.partner', 'Member', readonly=True),
        'associate_member_id': fields.many2one('res.partner', 'Associate Member', readonly=True),
        'membership_id': fields.many2one('product.product', 'Membership Product', readonly=True),
        'membership_state': fields.selection(STATE, 'Current Membership State', readonly=True),
        'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'quantity': fields.integer("Quantity", readonly=True),
    }

    def init(self, cr):
        '''Create the view'''
        tools.drop_view_if_exists(cr, 'report_membership')
        cr.execute("""
        CREATE OR REPLACE VIEW report_membership AS (
        SELECT
            MIN(id) AS id,
            partner_id,
            count(membership_id) as quantity,
            user_id,
            membership_state,
            associate_member_id,
            membership_amount,
            date_to,
            start_date,
            COUNT(num_waiting) AS num_waiting,
            COUNT(num_invoiced) AS num_invoiced,
            COUNT(num_paid) AS num_paid,
            SUM(tot_pending) AS tot_pending,
            SUM(tot_earned) AS tot_earned,
            membership_id,
            company_id
        FROM
            (SELECT
                MIN(p.id) AS id,
                p.id AS partner_id,
                p.user_id AS user_id,
                p.membership_state AS membership_state,
                p.associate_member AS associate_member_id,
                p.membership_amount AS membership_amount,
                p.membership_stop AS date_to,
                p.membership_start AS start_date,
                CASE WHEN ml.state = 'waiting'  THEN ml.id END AS num_waiting,
                CASE WHEN ml.state = 'invoiced' THEN ml.id END AS num_invoiced,
                CASE WHEN ml.state = 'paid'     THEN ml.id END AS num_paid,
                CASE WHEN ml.state IN ('waiting', 'invoiced') THEN SUM(il.price_subtotal) ELSE 0 END AS tot_pending,
                CASE WHEN ml.state = 'paid' OR p.membership_state = 'old' THEN SUM(il.price_subtotal) ELSE 0 END AS tot_earned,
                ml.membership_id AS membership_id,
                p.company_id AS company_id
                FROM res_partner p
                LEFT JOIN membership_membership_line ml ON (ml.partner = p.id)
                LEFT JOIN account_invoice_line il ON (ml.account_invoice_line = il.id)
                LEFT JOIN account_invoice ai ON (il.invoice_id = ai.id)
                WHERE p.membership_state != 'none' and p.active = 'true'
                GROUP BY
                    p.id,
                    p.user_id,
                    p.membership_state,
                    p.associate_member,
                    p.membership_amount,
                    p.membership_start,
                    ml.membership_id,
                    p.company_id,
                    ml.state,
                    ml.id
            ) AS foo
        GROUP BY
            start_date,
            date_to,
            partner_id,
            user_id,
            membership_id,
            company_id,
            membership_state,
            associate_member_id,
            membership_amount
        )""")

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
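Editor's note: the inner SELECT above counts membership lines per state by wrapping the line id in a CASE expression and letting COUNT skip the NULLs. A standalone sqlite3 sketch of that trick, with hypothetical data:

import sqlite3

con = sqlite3.connect(':memory:')
con.execute("CREATE TABLE line (state TEXT)")
con.executemany("INSERT INTO line VALUES (?)", [('waiting',), ('paid',), ('paid',)])
# COUNT ignores NULLs, so each CASE column counts only rows in that state
row = con.execute("""
    SELECT COUNT(CASE WHEN state='waiting' THEN 1 END),
           COUNT(CASE WHEN state='paid' THEN 1 END)
    FROM line
""").fetchone()
print(row)  # (1, 2)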
srluge/SickRage
lib/sqlalchemy/dialects/firebird/base.py
78
28412
# firebird/base.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php

"""

.. dialect:: firebird
    :name: Firebird

Firebird Dialects
-----------------

Firebird offers two distinct dialects_ (not to be confused with a
SQLAlchemy ``Dialect``):

dialect 1
  This is the old syntax and behaviour, inherited from Interbase pre-6.0.

dialect 3
  This is the newer and supported syntax, introduced in Interbase 6.0.

The SQLAlchemy Firebird dialect detects these versions and
adjusts its representation of SQL accordingly.  However,
support for dialect 1 is not well tested and probably has
incompatibilities.

Locking Behavior
----------------

Firebird locks tables aggressively.  For this reason, a DROP TABLE may
hang until other transactions are released.  SQLAlchemy does its best
to release transactions as quickly as possible.  The most common cause
of hanging transactions is a non-fully consumed result set, i.e.::

    result = engine.execute("select * from table")
    row = result.fetchone()
    return

Where above, the ``ResultProxy`` has not been fully consumed.  The
connection will be returned to the pool and the transactional state
rolled back once the Python garbage collector reclaims the objects
which hold onto the connection, which often occurs asynchronously.
The above use case can be alleviated by calling ``first()`` on the
``ResultProxy`` which will fetch the first row and immediately close
all remaining cursor/connection resources.

RETURNING support
-----------------

Firebird 2.0 supports returning a result set from inserts, and 2.1
extends that to deletes and updates. This is generically exposed by
the SQLAlchemy ``returning()`` method, such as::

    # INSERT..RETURNING
    result = table.insert().returning(table.c.col1, table.c.col2).\\
                   values(name='foo')
    print result.fetchall()

    # UPDATE..RETURNING
    raises = empl.update().returning(empl.c.id, empl.c.salary).\\
                  where(empl.c.sales>100).\\
                  values(dict(salary=empl.c.salary * 1.1))
    print raises.fetchall()

.. _dialects: http://mc-computing.com/Databases/Firebird/SQL_Dialect.html

"""

import datetime

from sqlalchemy import schema as sa_schema
from sqlalchemy import exc, types as sqltypes, sql, util
from sqlalchemy.sql import expression
from sqlalchemy.engine import base, default, reflection
from sqlalchemy.sql import compiler
from sqlalchemy.types import (BIGINT, BLOB, DATE, FLOAT, INTEGER, NUMERIC,
                              SMALLINT, TEXT, TIME, TIMESTAMP, Integer)


RESERVED_WORDS = set([
    "active", "add", "admin", "after", "all", "alter", "and", "any", "as",
    "asc", "ascending", "at", "auto", "avg", "before", "begin", "between",
    "bigint", "bit_length", "blob", "both", "by", "case", "cast", "char",
    "character", "character_length", "char_length", "check", "close",
    "collate", "column", "commit", "committed", "computed", "conditional",
    "connect", "constraint", "containing", "count", "create", "cross",
    "cstring", "current", "current_connection", "current_date",
    "current_role", "current_time", "current_timestamp",
    "current_transaction", "current_user", "cursor", "database", "date",
    "day", "dec", "decimal", "declare", "default", "delete", "desc",
    "descending", "disconnect", "distinct", "do", "domain", "double",
    "drop", "else", "end", "entry_point", "escape", "exception",
    "execute", "exists", "exit", "external", "extract", "fetch", "file",
    "filter", "float", "for", "foreign", "from", "full", "function",
    "gdscode", "generator", "gen_id", "global", "grant", "group",
    "having", "hour", "if", "in", "inactive", "index", "inner",
    "input_type", "insensitive", "insert", "int", "integer", "into", "is",
    "isolation", "join", "key", "leading", "left", "length", "level",
    "like", "long", "lower", "manual", "max", "maximum_segment", "merge",
    "min", "minute", "module_name", "month", "names", "national",
    "natural", "nchar", "no", "not", "null", "numeric", "octet_length",
    "of", "on", "only", "open", "option", "or", "order", "outer",
    "output_type", "overflow", "page", "pages", "page_size", "parameter",
    "password", "plan", "position", "post_event", "precision", "primary",
    "privileges", "procedure", "protected", "rdb$db_key", "read", "real",
    "record_version", "recreate", "recursive", "references", "release",
    "reserv", "reserving", "retain", "returning_values", "returns",
    "revoke", "right", "rollback", "rows", "row_count", "savepoint",
    "schema", "second", "segment", "select", "sensitive", "set", "shadow",
    "shared", "singular", "size", "smallint", "snapshot", "some", "sort",
    "sqlcode", "stability", "start", "starting", "starts", "statistics",
    "sub_type", "sum", "suspend", "table", "then", "time", "timestamp",
    "to", "trailing", "transaction", "trigger", "trim", "uncommitted",
    "union", "unique", "update", "upper", "user", "using", "value",
    "values", "varchar", "variable", "varying", "view", "wait", "when",
    "where", "while", "with", "work", "write", "year",
])


class _StringType(sqltypes.String):
    """Base for Firebird string types."""

    def __init__(self, charset=None, **kw):
        self.charset = charset
        super(_StringType, self).__init__(**kw)


class VARCHAR(_StringType, sqltypes.VARCHAR):
    """Firebird VARCHAR type"""
    __visit_name__ = 'VARCHAR'

    def __init__(self, length=None, **kwargs):
        super(VARCHAR, self).__init__(length=length, **kwargs)


class CHAR(_StringType, sqltypes.CHAR):
    """Firebird CHAR type"""
    __visit_name__ = 'CHAR'

    def __init__(self, length=None, **kwargs):
        super(CHAR, self).__init__(length=length, **kwargs)


class _FBDateTime(sqltypes.DateTime):
    def bind_processor(self, dialect):
        def process(value):
            if type(value) == datetime.date:
                return datetime.datetime(value.year, value.month, value.day)
            else:
                return value
        return process

colspecs = {
    sqltypes.DateTime: _FBDateTime
}

ischema_names = {
    'SHORT': SMALLINT,
    'LONG': INTEGER,
    'QUAD': FLOAT,
    'FLOAT': FLOAT,
    'DATE': DATE,
    'TIME': TIME,
    'TEXT': TEXT,
    'INT64': BIGINT,
    'DOUBLE': FLOAT,
    'TIMESTAMP': TIMESTAMP,
    'VARYING': VARCHAR,
    'CSTRING': CHAR,
    'BLOB': BLOB,
}


# TODO: date conversion types (should be implemented as _FBDateTime,
# _FBDate, etc. as bind/result functionality is required)

class FBTypeCompiler(compiler.GenericTypeCompiler):
    def visit_boolean(self, type_):
        return self.visit_SMALLINT(type_)

    def visit_datetime(self, type_):
        return self.visit_TIMESTAMP(type_)

    def visit_TEXT(self, type_):
        return "BLOB SUB_TYPE 1"

    def visit_BLOB(self, type_):
        return "BLOB SUB_TYPE 0"

    def _extend_string(self, type_, basic):
        charset = getattr(type_, 'charset', None)
        if charset is None:
            return basic
        else:
            return '%s CHARACTER SET %s' % (basic, charset)

    def visit_CHAR(self, type_):
        basic = super(FBTypeCompiler, self).visit_CHAR(type_)
        return self._extend_string(type_, basic)

    def visit_VARCHAR(self, type_):
        if not type_.length:
            raise exc.CompileError(
                "VARCHAR requires a length on dialect %s" %
                self.dialect.name)
        basic = super(FBTypeCompiler, self).visit_VARCHAR(type_)
        return self._extend_string(type_, basic)


class FBCompiler(sql.compiler.SQLCompiler):
    """Firebird specific idiosyncrasies"""

    ansi_bind_rules = True

    #def visit_contains_op_binary(self, binary, operator, **kw):
    # cant use CONTAINING b.c. it's case insensitive.

    #def visit_notcontains_op_binary(self, binary, operator, **kw):
    # cant use NOT CONTAINING b.c. it's case insensitive.

    def visit_now_func(self, fn, **kw):
        return "CURRENT_TIMESTAMP"

    def visit_startswith_op_binary(self, binary, operator, **kw):
        return '%s STARTING WITH %s' % (
            binary.left._compiler_dispatch(self, **kw),
            binary.right._compiler_dispatch(self, **kw))

    def visit_notstartswith_op_binary(self, binary, operator, **kw):
        return '%s NOT STARTING WITH %s' % (
            binary.left._compiler_dispatch(self, **kw),
            binary.right._compiler_dispatch(self, **kw))

    def visit_mod_binary(self, binary, operator, **kw):
        return "mod(%s, %s)" % (
            self.process(binary.left, **kw),
            self.process(binary.right, **kw))

    def visit_alias(self, alias, asfrom=False, **kwargs):
        if self.dialect._version_two:
            return super(FBCompiler, self).\
                visit_alias(alias, asfrom=asfrom, **kwargs)
        else:
            # Override to not use the AS keyword which FB 1.5 does not like
            if asfrom:
                alias_name = isinstance(alias.name,
                                        expression._truncated_label) and \
                    self._truncated_identifier("alias",
                                               alias.name) or alias.name

                return self.process(
                    alias.original, asfrom=asfrom, **kwargs) + \
                    " " + \
                    self.preparer.format_alias(alias, alias_name)
            else:
                return self.process(alias.original, **kwargs)

    def visit_substring_func(self, func, **kw):
        s = self.process(func.clauses.clauses[0])
        start = self.process(func.clauses.clauses[1])
        if len(func.clauses.clauses) > 2:
            length = self.process(func.clauses.clauses[2])
            return "SUBSTRING(%s FROM %s FOR %s)" % (s, start, length)
        else:
            return "SUBSTRING(%s FROM %s)" % (s, start)

    def visit_length_func(self, function, **kw):
        if self.dialect._version_two:
            return "char_length" + self.function_argspec(function)
        else:
            return "strlen" + self.function_argspec(function)

    visit_char_length_func = visit_length_func

    def function_argspec(self, func, **kw):
        # TODO: this probably will need to be
        # narrowed to a fixed list, some no-arg functions
        # may require parens - see similar example in the oracle
        # dialect
        if func.clauses is not None and len(func.clauses):
            return self.process(func.clause_expr, **kw)
        else:
            return ""

    def default_from(self):
        return " FROM rdb$database"

    def visit_sequence(self, seq):
        return "gen_id(%s, 1)" % self.preparer.format_sequence(seq)

    def get_select_precolumns(self, select):
        """Called when building a ``SELECT`` statement, position is just
        before column list.

        Firebird puts the limit and offset right after the ``SELECT``...
        """

        result = ""
        if select._limit:
            result += "FIRST %s " % self.process(sql.literal(select._limit))
        if select._offset:
            result += "SKIP %s " % self.process(sql.literal(select._offset))
        if select._distinct:
            result += "DISTINCT "
        return result

    def limit_clause(self, select):
        """Already taken care of in the `get_select_precolumns` method."""

        return ""

    def returning_clause(self, stmt, returning_cols):
        columns = [
            self._label_select_column(None, c, True, False, {})
            for c in expression._select_iterables(returning_cols)
        ]

        return 'RETURNING ' + ', '.join(columns)


class FBDDLCompiler(sql.compiler.DDLCompiler):
    """Firebird syntactic idiosyncrasies"""

    def visit_create_sequence(self, create):
        """Generate a ``CREATE GENERATOR`` statement for the sequence."""

        # no syntax for these
        # http://www.firebirdsql.org/manual/generatorguide-sqlsyntax.html
        # note: the original raised the non-callable NotImplemented constant;
        # NotImplementedError is the intended exception
        if create.element.start is not None:
            raise NotImplementedError(
                "Firebird SEQUENCE doesn't support START WITH")
        if create.element.increment is not None:
            raise NotImplementedError(
                "Firebird SEQUENCE doesn't support INCREMENT BY")

        if self.dialect._version_two:
            return "CREATE SEQUENCE %s" % \
                self.preparer.format_sequence(create.element)
        else:
            return "CREATE GENERATOR %s" % \
                self.preparer.format_sequence(create.element)

    def visit_drop_sequence(self, drop):
        """Generate a ``DROP GENERATOR`` statement for the sequence."""

        if self.dialect._version_two:
            return "DROP SEQUENCE %s" % \
                self.preparer.format_sequence(drop.element)
        else:
            return "DROP GENERATOR %s" % \
                self.preparer.format_sequence(drop.element)


class FBIdentifierPreparer(sql.compiler.IdentifierPreparer):
    """Install Firebird specific reserved words."""

    reserved_words = RESERVED_WORDS
    illegal_initial_characters = compiler.ILLEGAL_INITIAL_CHARACTERS.union(
        ['_'])

    def __init__(self, dialect):
        super(FBIdentifierPreparer, self).__init__(dialect, omit_schema=True)


class FBExecutionContext(default.DefaultExecutionContext):
    def fire_sequence(self, seq, type_):
        """Get the next value from the sequence using ``gen_id()``."""

        return self._execute_scalar(
            "SELECT gen_id(%s, 1) FROM rdb$database" %
            self.dialect.identifier_preparer.format_sequence(seq),
            type_
        )


class FBDialect(default.DefaultDialect):
    """Firebird dialect"""

    name = 'firebird'

    max_identifier_length = 31

    supports_sequences = True
    sequences_optional = False
    supports_default_values = True
    postfetch_lastrowid = False

    supports_native_boolean = False

    requires_name_normalize = True
    supports_empty_insert = False

    statement_compiler = FBCompiler
    ddl_compiler = FBDDLCompiler
    preparer = FBIdentifierPreparer
    type_compiler = FBTypeCompiler
    execution_ctx_cls = FBExecutionContext

    colspecs = colspecs
    ischema_names = ischema_names
    construct_arguments = []

    # defaults to dialect ver. 3,
    # will be autodetected off upon
    # first connect
    _version_two = True

    def initialize(self, connection):
        super(FBDialect, self).initialize(connection)
        self._version_two = ('firebird' in self.server_version_info and
                             self.server_version_info >= (2, )) or \
                            ('interbase' in self.server_version_info and
                             self.server_version_info >= (6, ))

        if not self._version_two:
            # TODO: whatever other pre < 2.0 stuff goes here
            self.ischema_names = ischema_names.copy()
            self.ischema_names['TIMESTAMP'] = sqltypes.DATE
            self.colspecs = {
                sqltypes.DateTime: sqltypes.DATE
            }

        self.implicit_returning = self._version_two and \
            self.__dict__.get('implicit_returning', True)

    def normalize_name(self, name):
        # Remove trailing spaces: FB uses a CHAR() type,
        # that is padded with spaces
        name = name and name.rstrip()
        if name is None:
            return None
        elif name.upper() == name and \
                not self.identifier_preparer._requires_quotes(name.lower()):
            return name.lower()
        else:
            return name

    def denormalize_name(self, name):
        if name is None:
            return None
        elif name.lower() == name and \
                not self.identifier_preparer._requires_quotes(name.lower()):
            return name.upper()
        else:
            return name

    def has_table(self, connection, table_name, schema=None):
        """Return ``True`` if the given table exists, ignoring
        the `schema`."""

        tblqry = """
        SELECT 1 AS has_table FROM rdb$database
        WHERE EXISTS (SELECT rdb$relation_name
                      FROM rdb$relations
                      WHERE rdb$relation_name=?)
        """
        c = connection.execute(tblqry, [self.denormalize_name(table_name)])
        return c.first() is not None

    def has_sequence(self, connection, sequence_name, schema=None):
        """Return ``True`` if the given sequence (generator) exists."""

        genqry = """
        SELECT 1 AS has_sequence FROM rdb$database
        WHERE EXISTS (SELECT rdb$generator_name
                      FROM rdb$generators
                      WHERE rdb$generator_name=?)
        """
        c = connection.execute(genqry, [self.denormalize_name(sequence_name)])
        return c.first() is not None

    @reflection.cache
    def get_table_names(self, connection, schema=None, **kw):
        # there are two queries commonly mentioned for this.
        # this one, using view_blr, is at the Firebird FAQ among other places:
        # http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        """

        # the other query is this one.  It's not clear if there's really
        # any difference between these two.  This link:
        # http://www.alberton.info/firebird_sql_meta_info.html#.Ur3vXfZGni8
        # states them as interchangeable.  Some discussion at [ticket:2898]
        # SELECT DISTINCT rdb$relation_name
        # FROM rdb$relation_fields
        # WHERE rdb$system_flag=0 AND rdb$view_context IS NULL

        return [self.normalize_name(row[0]) for row in connection.execute(s)]

    @reflection.cache
    def get_view_names(self, connection, schema=None, **kw):
        # see http://www.firebirdfaq.org/faq174/
        s = """
        select rdb$relation_name
        from rdb$relations
        where rdb$view_blr is not null
        and (rdb$system_flag is null or rdb$system_flag = 0);
        """
        return [self.normalize_name(row[0]) for row in connection.execute(s)]

    @reflection.cache
    def get_view_definition(self, connection, view_name, schema=None, **kw):
        qry = """
        SELECT rdb$view_source AS view_source
        FROM rdb$relations
        WHERE rdb$relation_name=?
        """
        rp = connection.execute(qry, [self.denormalize_name(view_name)])
        row = rp.first()
        if row:
            return row['view_source']
        else:
            return None

    @reflection.cache
    def get_pk_constraint(self, connection, table_name, schema=None, **kw):
        # Query to extract the PK/FK constrained fields of the given table
        keyqry = """
        SELECT se.rdb$field_name AS fname
        FROM rdb$relation_constraints rc
             JOIN rdb$index_segments se ON rc.rdb$index_name=se.rdb$index_name
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        """
        tablename = self.denormalize_name(table_name)
        # get primary key fields
        c = connection.execute(keyqry, ["PRIMARY KEY", tablename])
        pkfields = [self.normalize_name(r['fname']) for r in c.fetchall()]
        return {'constrained_columns': pkfields, 'name': None}

    @reflection.cache
    def get_column_sequence(self, connection,
                            table_name, column_name,
                            schema=None, **kw):
        tablename = self.denormalize_name(table_name)
        colname = self.denormalize_name(column_name)

        # Heuristic-query to determine the generator associated to a PK field
        genqry = """
        SELECT trigdep.rdb$depended_on_name AS fgenerator
        FROM rdb$dependencies tabdep
             JOIN rdb$dependencies trigdep
                  ON tabdep.rdb$dependent_name=trigdep.rdb$dependent_name
                     AND trigdep.rdb$depended_on_type=14
                     AND trigdep.rdb$dependent_type=2
             JOIN rdb$triggers trig ON
                    trig.rdb$trigger_name=tabdep.rdb$dependent_name
        WHERE tabdep.rdb$depended_on_name=?
          AND tabdep.rdb$depended_on_type=0
          AND trig.rdb$trigger_type=1
          AND tabdep.rdb$field_name=?
          AND (SELECT count(*)
               FROM rdb$dependencies trigdep2
               WHERE trigdep2.rdb$dependent_name = trigdep.rdb$dependent_name) = 2
        """
        genr = connection.execute(genqry, [tablename, colname]).first()
        if genr is not None:
            return dict(name=self.normalize_name(genr['fgenerator']))

    @reflection.cache
    def get_columns(self, connection, table_name, schema=None, **kw):
        # Query to extract the details of all the fields of the given table
        tblqry = """
        SELECT r.rdb$field_name AS fname,
               r.rdb$null_flag AS null_flag,
               t.rdb$type_name AS ftype,
               f.rdb$field_sub_type AS stype,
               f.rdb$field_length/
                    COALESCE(cs.rdb$bytes_per_character,1) AS flen,
               f.rdb$field_precision AS fprec,
               f.rdb$field_scale AS fscale,
               COALESCE(r.rdb$default_source,
                        f.rdb$default_source) AS fdefault
        FROM rdb$relation_fields r
             JOIN rdb$fields f ON r.rdb$field_source=f.rdb$field_name
             JOIN rdb$types t
              ON t.rdb$type=f.rdb$field_type AND
                    t.rdb$field_name='RDB$FIELD_TYPE'
             LEFT JOIN rdb$character_sets cs ON
                    f.rdb$character_set_id=cs.rdb$character_set_id
        WHERE f.rdb$system_flag=0 AND r.rdb$relation_name=?
        ORDER BY r.rdb$field_position
        """
        # get the PK, used to determine the eventual associated sequence
        pk_constraint = self.get_pk_constraint(connection, table_name)
        pkey_cols = pk_constraint['constrained_columns']

        tablename = self.denormalize_name(table_name)
        # get all of the fields for this table
        c = connection.execute(tblqry, [tablename])
        cols = []
        while True:
            row = c.fetchone()
            if row is None:
                break
            name = self.normalize_name(row['fname'])
            orig_colname = row['fname']

            # get the data type
            colspec = row['ftype'].rstrip()
            coltype = self.ischema_names.get(colspec)
            if coltype is None:
                util.warn("Did not recognize type '%s' of column '%s'" %
                          (colspec, name))
                coltype = sqltypes.NULLTYPE
            elif issubclass(coltype, Integer) and row['fprec'] != 0:
                coltype = NUMERIC(
                    precision=row['fprec'],
                    scale=row['fscale'] * -1)
            elif colspec in ('VARYING', 'CSTRING'):
                coltype = coltype(row['flen'])
            elif colspec == 'TEXT':
                coltype = TEXT(row['flen'])
            elif colspec == 'BLOB':
                if row['stype'] == 1:
                    coltype = TEXT()
                else:
                    coltype = BLOB()
            else:
                coltype = coltype()

            # does it have a default value?
            defvalue = None
            if row['fdefault'] is not None:
                # the value comes down as "DEFAULT 'value'": there may be
                # more than one whitespace around the "DEFAULT" keyword
                # and it may also be lower case
                # (see also http://tracker.firebirdsql.org/browse/CORE-356)
                defexpr = row['fdefault'].lstrip()
                assert defexpr[:8].rstrip().upper() == \
                    'DEFAULT', "Unrecognized default value: %s" % \
                    defexpr
                defvalue = defexpr[8:].strip()
                if defvalue == 'NULL':
                    # Redundant
                    defvalue = None
            col_d = {
                'name': name,
                'type': coltype,
                'nullable': not bool(row['null_flag']),
                'default': defvalue,
                'autoincrement': defvalue is None
            }

            if orig_colname.lower() == orig_colname:
                col_d['quote'] = True

            # if the PK is a single field, try to see if its linked to
            # a sequence thru a trigger
            if len(pkey_cols) == 1 and name == pkey_cols[0]:
                seq_d = self.get_column_sequence(connection, tablename, name)
                if seq_d is not None:
                    col_d['sequence'] = seq_d

            cols.append(col_d)
        return cols

    @reflection.cache
    def get_foreign_keys(self, connection, table_name, schema=None, **kw):
        # Query to extract the details of each UK/FK of the given table
        fkqry = """
        SELECT rc.rdb$constraint_name AS cname,
               cse.rdb$field_name AS fname,
               ix2.rdb$relation_name AS targetrname,
               se.rdb$field_name AS targetfname
        FROM rdb$relation_constraints rc
             JOIN rdb$indices ix1 ON ix1.rdb$index_name=rc.rdb$index_name
             JOIN rdb$indices ix2 ON ix2.rdb$index_name=ix1.rdb$foreign_key
             JOIN rdb$index_segments cse ON
                        cse.rdb$index_name=ix1.rdb$index_name
             JOIN rdb$index_segments se
                  ON se.rdb$index_name=ix2.rdb$index_name
                     AND se.rdb$field_position=cse.rdb$field_position
        WHERE rc.rdb$constraint_type=? AND rc.rdb$relation_name=?
        ORDER BY se.rdb$index_name, se.rdb$field_position
        """
        tablename = self.denormalize_name(table_name)

        c = connection.execute(fkqry, ["FOREIGN KEY", tablename])
        fks = util.defaultdict(lambda: {
            'name': None,
            'constrained_columns': [],
            'referred_schema': None,
            'referred_table': None,
            'referred_columns': []
        })

        for row in c:
            cname = self.normalize_name(row['cname'])
            fk = fks[cname]
            if not fk['name']:
                fk['name'] = cname
                fk['referred_table'] = self.normalize_name(row['targetrname'])
            fk['constrained_columns'].append(
                self.normalize_name(row['fname']))
            fk['referred_columns'].append(
                self.normalize_name(row['targetfname']))
        return list(fks.values())

    @reflection.cache
    def get_indexes(self, connection, table_name, schema=None, **kw):
        qry = """
        SELECT ix.rdb$index_name AS index_name,
               ix.rdb$unique_flag AS unique_flag,
               ic.rdb$field_name AS field_name
        FROM rdb$indices ix
             JOIN rdb$index_segments ic
                  ON ix.rdb$index_name=ic.rdb$index_name
             LEFT OUTER JOIN rdb$relation_constraints
                  ON rdb$relation_constraints.rdb$index_name =
                        ic.rdb$index_name
        WHERE ix.rdb$relation_name=? AND ix.rdb$foreign_key IS NULL
          AND rdb$relation_constraints.rdb$constraint_type IS NULL
        ORDER BY index_name, ic.rdb$field_position
        """
        c = connection.execute(qry, [self.denormalize_name(table_name)])

        indexes = util.defaultdict(dict)
        for row in c:
            indexrec = indexes[row['index_name']]
            if 'name' not in indexrec:
                indexrec['name'] = self.normalize_name(row['index_name'])
                indexrec['column_names'] = []
                indexrec['unique'] = bool(row['unique_flag'])

            indexrec['column_names'].append(
                self.normalize_name(row['field_name']))

        return list(indexes.values())
gpl-3.0
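Editor's note: a hedged connection sketch for this dialect. The standard SQLAlchemy URL form selects it; the host, credentials, and database path below are placeholders, and a Firebird DBAPI such as fdb must be installed for create_engine to load the dialect:

from sqlalchemy import create_engine

# 'firebird+fdb://user:password@host/path-to-database' selects this dialect
engine = create_engine('firebird+fdb://sysdba:masterkey@localhost//var/db/test.fdb')
print(engine.dialect.name)  # 'firebird'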
tylertian/Openstack
openstack F/nova/nova/tests/api/openstack/volume/test_router.py
7
3825
# Copyright 2011 Denali Systems, Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from nova.api.openstack import volume
from nova.api.openstack.volume import snapshots
from nova.api.openstack.volume import versions
from nova.api.openstack.volume import volumes
from nova.api.openstack import wsgi
from nova import flags
from nova.openstack.common import log as logging
from nova import test
from nova.tests.api.openstack import fakes

FLAGS = flags.FLAGS
LOG = logging.getLogger(__name__)


class FakeController(object):
    def __init__(self, ext_mgr=None):
        self.ext_mgr = ext_mgr

    def index(self, req):
        return {}

    def detail(self, req):
        return {}


def create_resource(ext_mgr):
    return wsgi.Resource(FakeController(ext_mgr))


def create_volume_resource(ext_mgr):
    return wsgi.Resource(FakeController(ext_mgr))


class VolumeRouterTestCase(test.TestCase):
    def setUp(self):
        super(VolumeRouterTestCase, self).setUp()
        # NOTE(vish): versions is just returning text so, no need to stub.
        self.stubs.Set(snapshots, 'create_resource', create_resource)
        self.stubs.Set(volumes, 'create_resource', create_volume_resource)
        self.app = volume.APIRouter()

    def test_versions(self):
        req = fakes.HTTPRequest.blank('')
        req.method = 'GET'
        req.content_type = 'application/json'
        response = req.get_response(self.app)
        self.assertEqual(302, response.status_int)
        req = fakes.HTTPRequest.blank('/')
        req.method = 'GET'
        req.content_type = 'application/json'
        response = req.get_response(self.app)
        self.assertEqual(200, response.status_int)

    def test_versions_dispatch(self):
        req = fakes.HTTPRequest.blank('/')
        req.method = 'GET'
        req.content_type = 'application/json'
        resource = versions.Versions()
        result = resource.dispatch(resource.index, req, {})
        self.assertTrue(result)

    def test_volumes(self):
        req = fakes.HTTPRequest.blank('/fake/volumes')
        req.method = 'GET'
        req.content_type = 'application/json'
        response = req.get_response(self.app)
        self.assertEqual(200, response.status_int)

    def test_volumes_detail(self):
        req = fakes.HTTPRequest.blank('/fake/volumes/detail')
        req.method = 'GET'
        req.content_type = 'application/json'
        response = req.get_response(self.app)
        self.assertEqual(200, response.status_int)

    def test_types(self):
        req = fakes.HTTPRequest.blank('/fake/types')
        req.method = 'GET'
        req.content_type = 'application/json'
        response = req.get_response(self.app)
        self.assertEqual(200, response.status_int)

    def test_snapshots(self):
        req = fakes.HTTPRequest.blank('/fake/snapshots')
        req.method = 'GET'
        req.content_type = 'application/json'
        response = req.get_response(self.app)
        self.assertEqual(200, response.status_int)

    def test_snapshots_detail(self):
        req = fakes.HTTPRequest.blank('/fake/snapshots/detail')
        req.method = 'GET'
        req.content_type = 'application/json'
        response = req.get_response(self.app)
        self.assertEqual(200, response.status_int)
apache-2.0
tumbl3w33d/ansible
test/units/modules/net_tools/nios/test_nios_host_record.py
21
5386
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.modules.net_tools.nios import nios_host_record
from ansible.module_utils.net_tools.nios import api
from units.compat.mock import patch, MagicMock, Mock
from .test_nios_module import TestNiosModule, load_fixture


class TestNiosHostRecordModule(TestNiosModule):

    module = nios_host_record

    def setUp(self):
        super(TestNiosHostRecordModule, self).setUp()
        self.module = MagicMock(name='ansible.modules.net_tools.nios.nios_host_record.WapiModule')
        self.module.check_mode = False
        self.module.params = {'provider': None}
        self.mock_wapi = patch('ansible.modules.net_tools.nios.nios_host_record.WapiModule')
        self.exec_command = self.mock_wapi.start()
        self.mock_wapi_run = patch('ansible.modules.net_tools.nios.nios_host_record.WapiModule.run')
        self.mock_wapi_run.start()
        self.load_config = self.mock_wapi_run.start()

    def tearDown(self):
        super(TestNiosHostRecordModule, self).tearDown()
        self.mock_wapi.stop()

    def _get_wapi(self, test_object):
        wapi = api.WapiModule(self.module)
        wapi.get_object = Mock(name='get_object', return_value=test_object)
        wapi.create_object = Mock(name='create_object')
        wapi.update_object = Mock(name='update_object')
        wapi.delete_object = Mock(name='delete_object')
        return wapi

    def load_fixtures(self, commands=None):
        self.exec_command.return_value = (0, load_fixture('nios_result.txt').strip(), None)
        self.load_config.return_value = dict(diff=None, session='session')

    def test_nios_host_record_create(self):
        self.module.params = {'provider': None, 'state': 'present', 'name': 'ansible',
                              'comment': None, 'extattrs': None}

        test_object = None
        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }

        wapi = self._get_wapi(test_object)
        print("WAPI: ", wapi)
        res = wapi.run('testobject', test_spec)

        self.assertTrue(res['changed'])
        wapi.create_object.assert_called_once_with('testobject', {'name': self.module._check_type_dict().__getitem__()})

    def test_nios_host_record_remove(self):
        self.module.params = {'provider': None, 'state': 'absent', 'name': 'ansible',
                              'comment': None, 'extattrs': None}

        ref = "record:host/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"

        test_object = [{
            "comment": "test comment",
            "_ref": ref,
            "name": "ansible",
            "extattrs": {'Site': {'value': 'test'}}
        }]

        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }

        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)

        self.assertTrue(res['changed'])
        wapi.delete_object.assert_called_once_with(ref)

    def test_nios_host_record_update_comment(self):
        self.module.params = {'provider': None, 'state': 'present', 'name': 'default',
                              'comment': 'updated comment', 'extattrs': None}

        test_object = [
            {
                "comment": "test comment",
                "_ref": "record:host/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
                "name": "default",
                "extattrs": {}
            }
        ]

        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }

        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)

        self.assertTrue(res['changed'])
        wapi.update_object.called_once_with(test_object)

    def test_nios_host_record_update_record_name(self):
        self.module.params = {'provider': None, 'state': 'present',
                              'name': {'new_name': 'default', 'old_name': 'old_default'},
                              'comment': 'comment', 'extattrs': None}

        test_object = [
            {
                "comment": "test comment",
                "_ref": "record:host/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
                "name": "default",
                "old_name": "old_default",
                "extattrs": {}
            }
        ]

        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }

        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)

        self.assertTrue(res['changed'])
        wapi.update_object.called_once_with(test_object)
gpl-3.0
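The called_once_with pitfall fixed in the record above is easy to reproduce in isolation. A minimal standalone sketch using only the standard-library unittest.mock, showing why the misspelled "assertion" can never fail:

from unittest.mock import Mock

m = Mock()

# Misspelled "assertion": attribute access on a Mock just returns another
# Mock, so this line passes silently even though m was never called.
m.called_once_with("anything")

# The real assertion raises AssertionError because m was never called.
try:
    m.assert_called_once_with("anything")
except AssertionError as e:
    print("caught:", e)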
adviti/melange
thirdparty/google_appengine/lib/django_1_2/tests/modeltests/m2o_recursive/tests.py
92
1679
from django.test import TestCase

from models import Category, Person


class ManyToOneRecursiveTests(TestCase):

    def setUp(self):
        self.r = Category(id=None, name='Root category', parent=None)
        self.r.save()
        self.c = Category(id=None, name='Child category', parent=self.r)
        self.c.save()

    def test_m2o_recursive(self):
        self.assertQuerysetEqual(self.r.child_set.all(),
                                 ['<Category: Child category>'])
        self.assertEqual(self.r.child_set.get(name__startswith='Child').id, self.c.id)
        self.assertEqual(self.r.parent, None)
        self.assertQuerysetEqual(self.c.child_set.all(), [])
        self.assertEqual(self.c.parent.id, self.r.id)


class MultipleManyToOneRecursiveTests(TestCase):

    def setUp(self):
        self.dad = Person(full_name='John Smith Senior', mother=None, father=None)
        self.dad.save()
        self.mom = Person(full_name='Jane Smith', mother=None, father=None)
        self.mom.save()
        self.kid = Person(full_name='John Smith Junior', mother=self.mom, father=self.dad)
        self.kid.save()

    def test_m2o_recursive2(self):
        self.assertEqual(self.kid.mother.id, self.mom.id)
        self.assertEqual(self.kid.father.id, self.dad.id)
        self.assertQuerysetEqual(self.dad.fathers_child_set.all(),
                                 ['<Person: John Smith Junior>'])
        self.assertQuerysetEqual(self.mom.mothers_child_set.all(),
                                 ['<Person: John Smith Junior>'])
        self.assertQuerysetEqual(self.kid.mothers_child_set.all(), [])
        self.assertQuerysetEqual(self.kid.fathers_child_set.all(), [])
apache-2.0
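The tests above import Category and Person from a models module that is not included in this record. A minimal sketch of the model definitions the tests assume, with field names and related_name values inferred from the queries (child_set, mothers_child_set, fathers_child_set) and the expected reprs:

from django.db import models


class Category(models.Model):
    name = models.CharField(max_length=20)
    # related_name='child_set' inferred from r.child_set in the tests
    parent = models.ForeignKey('self', blank=True, null=True,
                               related_name='child_set')

    def __unicode__(self):
        return self.name


class Person(models.Model):
    full_name = models.CharField(max_length=20)
    # related_name values inferred from mothers_child_set / fathers_child_set
    mother = models.ForeignKey('self', null=True,
                               related_name='mothers_child_set')
    father = models.ForeignKey('self', null=True,
                               related_name='fathers_child_set')

    def __unicode__(self):
        return self.full_name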
gantsign/ansible-role-java
molecule/java-max-lts/tests/test_role.py
1
1422
import pytest
import re


def test_java(host):
    cmd = host.run('. /etc/profile && java -version')
    assert cmd.rc == 0

    m = re.search('(?:java|openjdk) version "([0-9]+)', cmd.stderr)
    assert m is not None
    java_version = m.group(1)

    assert '11' == java_version


def test_javac(host):
    cmd = host.run('. /etc/profile && javac -version')
    assert cmd.rc == 0

    m = re.search('javac ([0-9]+)', cmd.stdout)
    assert m is not None
    java_version = m.group(1)

    assert '11' == java_version


@pytest.mark.parametrize('version_dir_pattern', [
    'jdk-11(\\.[0-9]+){,3}(\\+[0-9]+)?$'
])
def test_java_installed(host, version_dir_pattern):
    java_home = host.check_output('find %s | grep --color=never -E %s',
                                  '/opt/java/',
                                  version_dir_pattern)

    java_exe = host.file(java_home + '/bin/java')

    assert java_exe.exists
    assert java_exe.is_file
    assert java_exe.user == 'root'
    assert java_exe.group == 'root'
    assert oct(java_exe.mode) == '0o755'


@pytest.mark.parametrize('fact_group_name', [
    'java'
])
def test_facts_installed(host, fact_group_name):
    fact_file = host.file('/etc/ansible/facts.d/' + fact_group_name + '.fact')

    assert fact_file.exists
    assert fact_file.is_file
    assert fact_file.user == 'root'
    assert fact_file.group == 'root'
    assert oct(fact_file.mode) == '0o644'
mit
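The version regex in the record above is worth seeing against concrete banner lines. A small standalone sketch (the sample strings are illustrative only; real java -version output varies by vendor and release):

import re

samples = [
    'openjdk version "11.0.4" 2019-07-16',   # JDK 9+ scheme -> major is "11"
    'java version "1.8.0_212"',              # legacy scheme -> capture is "1"
]

for line in samples:
    m = re.search('(?:java|openjdk) version "([0-9]+)', line)
    print(m.group(1) if m else None)

# The legacy "1.8" naming yields "1", which is why the role's assertion
# ('11' == java_version) only makes sense for the JDK 9+ version scheme.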
hyperized/ansible
test/units/modules/network/fortios/test_fortios_switch_controller_qos_ip_dscp_map.py
21
8181
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <https://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler

try:
    from ansible.modules.network.fortios import fortios_switch_controller_qos_ip_dscp_map
except ImportError:
    pytest.skip("Could not load required modules for testing", allow_module_level=True)


@pytest.fixture(autouse=True)
def connection_mock(mocker):
    connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_switch_controller_qos_ip_dscp_map.Connection')
    return connection_class_mock


fos_instance = FortiOSHandler(connection_mock)


def test_switch_controller_qos_ip_dscp_map_creation(mocker):
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_qos_ip_dscp_map': {
            'description': 'test_value_3',
            'name': 'default_name_4'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_qos_ip_dscp_map.fortios_switch_controller_qos(input_data, fos_instance)

    expected_data = {
        'description': 'test_value_3',
        'name': 'default_name_4'
    }

    set_method_mock.assert_called_with('switch-controller.qos', 'ip-dscp-map', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200


def test_switch_controller_qos_ip_dscp_map_creation_fails(mocker):
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_qos_ip_dscp_map': {
            'description': 'test_value_3',
            'name': 'default_name_4'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_qos_ip_dscp_map.fortios_switch_controller_qos(input_data, fos_instance)

    expected_data = {
        'description': 'test_value_3',
        'name': 'default_name_4'
    }

    set_method_mock.assert_called_with('switch-controller.qos', 'ip-dscp-map', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500


def test_switch_controller_qos_ip_dscp_map_removal(mocker):
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'switch_controller_qos_ip_dscp_map': {
            'description': 'test_value_3',
            'name': 'default_name_4'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_qos_ip_dscp_map.fortios_switch_controller_qos(input_data, fos_instance)

    delete_method_mock.assert_called_with('switch-controller.qos', 'ip-dscp-map', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200


def test_switch_controller_qos_ip_dscp_map_deletion_fails(mocker):
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
    delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)

    input_data = {
        'username': 'admin',
        'state': 'absent',
        'switch_controller_qos_ip_dscp_map': {
            'description': 'test_value_3',
            'name': 'default_name_4'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_qos_ip_dscp_map.fortios_switch_controller_qos(input_data, fos_instance)

    delete_method_mock.assert_called_with('switch-controller.qos', 'ip-dscp-map', mkey=ANY, vdom='root')
    schema_method_mock.assert_not_called()
    assert is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 500


def test_switch_controller_qos_ip_dscp_map_idempotent(mocker):
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_qos_ip_dscp_map': {
            'description': 'test_value_3',
            'name': 'default_name_4'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_qos_ip_dscp_map.fortios_switch_controller_qos(input_data, fos_instance)

    expected_data = {
        'description': 'test_value_3',
        'name': 'default_name_4'
    }

    set_method_mock.assert_called_with('switch-controller.qos', 'ip-dscp-map', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert not changed
    assert response['status'] == 'error'
    assert response['http_status'] == 404


def test_switch_controller_qos_ip_dscp_map_filter_foreign_attributes(mocker):
    schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')

    set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
    set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)

    input_data = {
        'username': 'admin',
        'state': 'present',
        'switch_controller_qos_ip_dscp_map': {
            'random_attribute_not_valid': 'tag',
            'description': 'test_value_3',
            'name': 'default_name_4'
        },
        'vdom': 'root'}

    is_error, changed, response = fortios_switch_controller_qos_ip_dscp_map.fortios_switch_controller_qos(input_data, fos_instance)

    expected_data = {
        'description': 'test_value_3',
        'name': 'default_name_4'
    }

    set_method_mock.assert_called_with('switch-controller.qos', 'ip-dscp-map', data=expected_data, vdom='root')
    schema_method_mock.assert_not_called()
    assert not is_error
    assert changed
    assert response['status'] == 'success'
    assert response['http_status'] == 200
gpl-3.0
montoyjh/pymatgen
pymatgen/analysis/structure_prediction/tests/test_substitutor.py
3
1823
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.

import unittest
import os
import json

from pymatgen.core.periodic_table import Specie
from pymatgen.core.composition import Composition
from pymatgen.analysis.structure_prediction.substitutor import Substitutor
from pymatgen.util.testing import PymatgenTest


def get_table():
    """
    Loads a lightweight lambda table for use in unit tests to reduce
    initialization time, and make unit tests insensitive to changes in the
    default lambda table.
    """
    data_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..", "..",
                            'test_files', "struct_predictor")
    json_file = os.path.join(data_dir, 'test_lambda.json')
    with open(json_file) as f:
        lambda_table = json.load(f)
    return lambda_table


class SubstitutorTest(PymatgenTest):

    def setUp(self):
        self.s = Substitutor(threshold=1e-3, lambda_table=get_table(),
                             alpha=-5.)

    def test_substitutor(self):
        s_list = [Specie('O', -2), Specie('Li', 1)]
        subs = self.s.pred_from_list(s_list)
        self.assertEqual(len(subs), 4, 'incorrect number of substitutions')
        c = Composition({'O2-': 1, 'Li1+': 2})
        subs = self.s.pred_from_comp(c)
        self.assertEqual(len(subs), 4, 'incorrect number of substitutions')

        structures = [{"structure": PymatgenTest.get_structure("Li2O"),
                       "id": "pmgtest"}]
        subs = self.s.pred_from_structures(["Na+", "O2-"], structures)
        self.assertEqual(subs[0].formula, "Na2 O1")

    def test_as_dict(self):
        Substitutor.from_dict(self.s.as_dict())


if __name__ == "__main__":
    unittest.main()
mit
kikocorreoso/brython
www/tests/compression/test_lz77.py
2
2225
import os

from lz77 import LZ77

texts = [
    "veridique ! dominique pique nique en tunique.",
    """Pleurez, doux alcyons, ô vous, oiseaux sacrés,
Oiseaux chers à Thétis, doux alcyons, pleurez.
Elle a vécu, Myrto, la jeune Tarentine.
Un vaisseau la portait aux bords de Camarine.
Là l'hymen, les chansons, les flûtes, lentement,
Devaient la reconduire au seuil de son amant.
Une clef vigilante a pour cette journée
Dans le cèdre enfermé sa robe d'hyménée
Et l'or dont au festin ses bras seraient parés
Et pour ses blonds cheveux les parfums préparés.
Mais, seule sur la proue, invoquant les étoiles,
Le vent impétueux qui soufflait dans les voiles
L'enveloppe. Étonnée, et loin des matelots,
Elle crie, elle tombe, elle est au sein des flots.
Elle est au sein des flots, la jeune Tarentine.
Son beau corps a roulé sous la vague marine.
Thétis, les yeux en pleurs, dans le creux d'un rocher
Aux monstres dévorants eut soin de la cacher.
Par ses ordres bientôt les belles Néréides
L'élèvent au-dessus des demeures humides,
Le portent au rivage, et dans ce monument
L'ont, au cap du Zéphir, déposé mollement.
Puis de loin à grands cris appelant leurs compagnes,
Et les Nymphes des bois, des sources, des montagnes,
Toutes frappant leur sein et traînant un long deuil,
Répétèrent : « hélas ! » autour de son cercueil.
Hélas ! chez ton amant tu n'es point ramenée.
Tu n'as point revêtu ta robe d'hyménée.
L'or autour de tes bras n'a point serré de nœuds.
Les doux parfums n'ont point coulé sur tes cheveux."""
]


def up_dir(n):
    res = os.getcwd()
    for _ in range(n):
        res = os.path.dirname(res)
    return res

this_dir = up_dir(3)

files = [
    os.path.join(this_dir, "www", "src", "unicode.txt"),
    "du cote de chez swann.txt"
]

for file in files:
    with open(file, "rb") as f:
        text = f.read()
    texts.append(text)

lz = LZ77()

for text in texts:
    if isinstance(text, str):
        size = 15
        text = text.encode("utf-8")
    else:
        size = 32 * 1024
    data = list(lz.compress(text, size, 3))
    print("compress ok", len(data))
    decomp = lz.decompress(data)
    print(len(decomp), len(text))
    assert decomp == text
bsd-3-clause
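The LZ77 class under test lives in a separate lz77 module that is not part of this record. As a rough illustration of the technique being exercised (back-references into a sliding window), here is a self-contained sketch; it is not Brython's implementation, whose three-argument compress the test calls, and it uses simple (offset, length, next_byte) triples:

def compress(data, window=15):
    """Greedy LZ77: emit (offset, length, next_byte) triples over `data` (bytes)."""
    i, out = 0, []
    while i < len(data):
        best_off, best_len = 0, 0
        # search the sliding window for the longest match at the cursor;
        # matches are allowed to overlap the cursor (classic LZ77 trick)
        for j in range(max(0, i - window), i):
            k = 0
            while i + k < len(data) and data[j + k] == data[i + k]:
                k += 1
            if k > best_len:
                best_off, best_len = i - j, k
        # always leave one literal byte so the triple has a real next_byte
        if i + best_len >= len(data):
            best_len = len(data) - i - 1
        out.append((best_off, best_len, data[i + best_len]))
        i += best_len + 1
    return out

def decompress(tokens):
    out = bytearray()
    for off, length, nxt in tokens:
        for _ in range(length):
            out.append(out[-off])   # byte-by-byte copy handles overlaps
        out.append(nxt)
    return bytes(out)

sample = b"veridique ! dominique pique nique en tunique."
assert decompress(compress(sample)) == sample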
debsankha/bedtime-programming
ls222/visual-lv.py
1
3375
from math import *
import thread
import random
import time

import visual

cat_catch_rate = 5 * 10 ** -4   # parameter
cat_efficiency = 0.8            # parameter
a = 0.2                         # will get from slider
c = 0.2                         # will get from slider

mouse_no = 1000
cat_no = 1000

t = 0
tmax = 400
dt = 1


class rodent:
    def __init__(self):
        self.time_from_last_childbirth = 0


class felix:
    def __init__(self):
        self.size = 0
        self.is_virgin = 1
        self.reproduction_gap = 0
        self.time_from_last_childbirth = 0
        self.age = 0


def visualize(catn, mousen):
    # while True:
    num = 40
    size = 10
    catno = catn * num ** 2 / (catn + mousen)
    cats = random.sample(range(num ** 2), catno)
    for i in range(num ** 2):
        if i in cats:
            dic[i].color = visual.color.red
        else:
            dic[i].color = visual.color.green


mouse_size = 20        # ind parameter
cat_mature_size = 60   # ind parameter

timeli = []
miceli = []
catli = []

mice = [rodent() for i in range(mouse_no)]
cats = [felix() for i in range(cat_no)]

catn = len(cats)
mousen = len(mice)

dic = {}
num = 40
size = 10
catno = catn * num ** 2 / (catn + mousen)
disp_cats = random.sample(range(num ** 2), catno)

#if self.wTree.get_widget("anim").get_active()==1:
#print 'yay!'
for i in range(num ** 2):
    coords = ((i % num) * size * 2 - num * size,
              (i / num) * size * 2 - num * size)
    if i in disp_cats:
        dic[i] = visual.sphere(pos=coords, radius=size, color=visual.color.red)
    else:
        dic[i] = visual.sphere(pos=coords, radius=size, color=visual.color.green)
#print self.dic

catn = len(cats)
mousen = len(mice)

data = open('tempdata.dat', 'w')
timestart = time.time()

while (len(mice) > 0 or len(cats) > 0) and t < tmax and (time.time() - timestart) < 60:
    #print time.time()-timestart
    catn = len(cats)
    mousen = len(mice)
    #if self.wTree.get_widget("anim").get_active()==1:
    #    print 'yay!'
    visualize(catn, mousen)
    # thread.start_new_thread(self.visualize,(catn,mousen))

    for mouse in mice:
        if mouse.time_from_last_childbirth >= 1 / a:
            mouse.time_from_last_childbirth = 0
            mice.append(rodent())
        mouse.time_from_last_childbirth += dt

    ind = 0
    while ind < len(cats):
        cat = cats[ind]
        cat.age += dt
        num = cat_catch_rate * dt * len(mice)
        for i in range(int(num)):
            caught = random.randint(0, len(mice) - 1)
            cat.size += mouse_size * cat_efficiency  # size increases
            mice.pop(caught)
        if (num - int(num)) > random.uniform(0, 1):
            caught = random.randint(0, len(mice) - 1)
            cat.size += mouse_size * cat_efficiency  # size increases
            mice.pop(caught)

        if cat.size > cat_mature_size:
            if cat.is_virgin:
                cat.is_virgin = 0
                cat.reproduction_gap = cat.age
                cats.append(felix())
            else:
                if cat.time_from_last_childbirth > cat.reproduction_gap:
                    cats.append(felix())
                    cat.time_from_last_childbirth = 0

        if cat.is_virgin == 0:
            cat.time_from_last_childbirth += dt

        if len(cats) > 0:
            if c * dt * 2 * atan(0.05 * len(cats)) / pi > random.uniform(0, 1):
                cats.pop(ind)
            else:
                ind += 1
        else:
            ind += 1

    timeli.append(t)
    miceli.append(len(mice))
    catli.append(len(cats))
    print t, '\t', len(mice), '\t', len(cats)
    print >> data, t, '\t', len(mice), '\t', len(cats)
    t += dt
gpl-3.0
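The script above is an agent-based take on predator-prey dynamics. For comparison, a minimal sketch of the classical Lotka-Volterra ODEs it loosely approximates, dx/dt = a*x - b*x*y and dy/dt = d*x*y - c*y, integrated with forward Euler; the coefficient values here are illustrative only, not taken from the script:

a, b, c, d = 0.2, 0.001, 0.2, 0.0005   # illustrative coefficients
x, y = 1000.0, 1000.0                  # prey (mice), predators (cats)
dt, tmax = 0.01, 400.0

t = 0.0
while t < tmax:
    dx = (a * x - b * x * y) * dt      # prey grow, get eaten
    dy = (d * x * y - c * y) * dt      # predators grow by eating, die off
    x, y = max(x + dx, 0.0), max(y + dy, 0.0)
    t += dt

print(x, y)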
jpmpentwater/cvxpy
examples/expr_trees/1D_convolution.py
12
1453
#!/usr/bin/env python

from cvxpy import *
import numpy as np
import random

from math import pi, sqrt, exp


def gauss(n=11, sigma=1):
    r = range(-int(n / 2), int(n / 2) + 1)
    return [1 / (sigma * sqrt(2 * pi)) * exp(-float(x) ** 2 / (2 * sigma ** 2)) for x in r]

np.random.seed(5)
random.seed(5)
DENSITY = 0.008
n = 1000
x = Variable(n)

# Create sparse signal.
signal = np.zeros(n)
nnz = 0
for i in range(n):
    if random.random() < DENSITY:
        signal[i] = random.uniform(0, 100)
        nnz += 1

# Gaussian kernel.
m = 1001
kernel = gauss(m, m / 10)

# Noisy signal.
std = 1
noise = np.random.normal(scale=std, size=n + m - 1)
noisy_signal = conv(kernel, signal)  # + noise

gamma = Parameter(sign="positive")
fit = norm(conv(kernel, x) - noisy_signal, 2)
regularization = norm(x, 1)
constraints = [x >= 0]
gamma.value = 0.06
prob = Problem(Minimize(fit), constraints)
solver_options = {"NORMALIZE": True, "MAX_ITERS": 2500, "EPS": 1e-3}
result = prob.solve(solver=SCS, verbose=True, NORMALIZE=True, MAX_ITERS=2500)

# Get problem matrix.
data, dims = prob.get_problem_data(solver=SCS)

# Plot result and fit.
import matplotlib.pyplot as plt
plt.plot(range(n), signal, label="true signal")
plt.plot(range(n), np.asarray(noisy_signal.value[:n, 0]), label="noisy convolution")
plt.plot(range(n), np.asarray(x.value[:, 0]), label="recovered signal")
plt.legend(loc='upper right')
plt.show()
gpl-3.0
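Note that the script defines gamma and an L1 regularization term but then solves the un-regularized fit. A sketch of the hypothetical variant that would put them to use, trading data fit against sparsity of x (written against the same old cvxpy API the script uses; in modern cvxpy, Parameter(sign="positive") is spelled Parameter(nonneg=True)):

# Hypothetical variant, reusing fit, gamma, regularization, constraints
# from the script above.
prob = Problem(Minimize(fit + gamma * regularization), constraints)
prob.solve(solver=SCS)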
mpeuster/estate
experiments/scale-down-hack/pox/pox/forwarding/l3_learning.py
36
12330
# Copyright 2012-2013 James McCauley # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ A stupid L3 switch For each switch: 1) Keep a table that maps IP addresses to MAC addresses and switch ports. Stock this table using information from ARP and IP packets. 2) When you see an ARP query, try to answer it using information in the table from step 1. If the info in the table is old, just flood the query. 3) Flood all other ARPs. 4) When you see an IP packet, if you know the destination port (because it's in the table from step 1), install a flow for it. """ from pox.core import core import pox log = core.getLogger() from pox.lib.packet.ethernet import ethernet, ETHER_BROADCAST from pox.lib.packet.ipv4 import ipv4 from pox.lib.packet.arp import arp from pox.lib.addresses import IPAddr, EthAddr from pox.lib.util import str_to_bool, dpid_to_str from pox.lib.recoco import Timer import pox.openflow.libopenflow_01 as of from pox.lib.revent import * import time # Timeout for flows FLOW_IDLE_TIMEOUT = 10 # Timeout for ARP entries ARP_TIMEOUT = 60 * 2 # Maximum number of packet to buffer on a switch for an unknown IP MAX_BUFFERED_PER_IP = 5 # Maximum time to hang on to a buffer for an unknown IP in seconds MAX_BUFFER_TIME = 5 class Entry (object): """ Not strictly an ARP entry. We use the port to determine which port to forward traffic out of. We use the MAC to answer ARP replies. We use the timeout so that if an entry is older than ARP_TIMEOUT, we flood the ARP request rather than try to answer it ourselves. """ def __init__ (self, port, mac): self.timeout = time.time() + ARP_TIMEOUT self.port = port self.mac = mac def __eq__ (self, other): if type(other) == tuple: return (self.port,self.mac)==other else: return (self.port,self.mac)==(other.port,other.mac) def __ne__ (self, other): return not self.__eq__(other) def isExpired (self): if self.port == of.OFPP_NONE: return False return time.time() > self.timeout def dpid_to_mac (dpid): return EthAddr("%012x" % (dpid & 0xffFFffFFffFF,)) class l3_switch (EventMixin): def __init__ (self, fakeways = [], arp_for_unknowns = False): # These are "fake gateways" -- we'll answer ARPs for them with MAC # of the switch they're connected to. self.fakeways = set(fakeways) # If this is true and we see a packet for an unknown # host, we'll ARP for it. self.arp_for_unknowns = arp_for_unknowns # (dpid,IP) -> expire_time # We use this to keep from spamming ARPs self.outstanding_arps = {} # (dpid,IP) -> [(expire_time,buffer_id,in_port), ...] # These are buffers we've gotten at this datapath for this IP which # we can't deliver because we don't know where they go. self.lost_buffers = {} # For each switch, we map IP addresses to Entries self.arpTable = {} # This timer handles expiring stuff self._expire_timer = Timer(5, self._handle_expiration, recurring=True) self.listenTo(core) def _handle_expiration (self): # Called by a timer so that we can remove old items. 
empty = [] for k,v in self.lost_buffers.iteritems(): dpid,ip = k for item in list(v): expires_at,buffer_id,in_port = item if expires_at < time.time(): # This packet is old. Tell this switch to drop it. v.remove(item) po = of.ofp_packet_out(buffer_id = buffer_id, in_port = in_port) core.openflow.sendToDPID(dpid, po) if len(v) == 0: empty.append(k) # Remove empty buffer bins for k in empty: del self.lost_buffers[k] def _send_lost_buffers (self, dpid, ipaddr, macaddr, port): """ We may have "lost" buffers -- packets we got but didn't know where to send at the time. We may know now. Try and see. """ if (dpid,ipaddr) in self.lost_buffers: # Yup! bucket = self.lost_buffers[(dpid,ipaddr)] del self.lost_buffers[(dpid,ipaddr)] log.debug("Sending %i buffered packets to %s from %s" % (len(bucket),ipaddr,dpid_to_str(dpid))) for _,buffer_id,in_port in bucket: po = of.ofp_packet_out(buffer_id=buffer_id,in_port=in_port) po.actions.append(of.ofp_action_dl_addr.set_dst(macaddr)) po.actions.append(of.ofp_action_output(port = port)) core.openflow.sendToDPID(dpid, po) def _handle_GoingUpEvent (self, event): self.listenTo(core.openflow) log.debug("Up...") def _handle_PacketIn (self, event): dpid = event.connection.dpid inport = event.port packet = event.parsed if not packet.parsed: log.warning("%i %i ignoring unparsed packet", dpid, inport) return if dpid not in self.arpTable: # New switch -- create an empty table self.arpTable[dpid] = {} for fake in self.fakeways: self.arpTable[dpid][IPAddr(fake)] = Entry(of.OFPP_NONE, dpid_to_mac(dpid)) if packet.type == ethernet.LLDP_TYPE: # Ignore LLDP packets return if isinstance(packet.next, ipv4): log.debug("%i %i IP %s => %s", dpid,inport, packet.next.srcip,packet.next.dstip) # Send any waiting packets... self._send_lost_buffers(dpid, packet.next.srcip, packet.src, inport) # Learn or update port/MAC info if packet.next.srcip in self.arpTable[dpid]: if self.arpTable[dpid][packet.next.srcip] != (inport, packet.src): log.info("%i %i RE-learned %s", dpid,inport,packet.next.srcip) else: log.debug("%i %i learned %s", dpid,inport,str(packet.next.srcip)) self.arpTable[dpid][packet.next.srcip] = Entry(inport, packet.src) # Try to forward dstaddr = packet.next.dstip if dstaddr in self.arpTable[dpid]: # We have info about what port to send it out on... prt = self.arpTable[dpid][dstaddr].port mac = self.arpTable[dpid][dstaddr].mac if prt == inport: log.warning("%i %i not sending packet for %s back out of the " + "input port" % (dpid, inport, str(dstaddr))) else: log.debug("%i %i installing flow for %s => %s out port %i" % (dpid, inport, packet.next.srcip, dstaddr, prt)) actions = [] actions.append(of.ofp_action_dl_addr.set_dst(mac)) actions.append(of.ofp_action_output(port = prt)) match = of.ofp_match.from_packet(packet, inport) match.dl_src = None # Wildcard source MAC msg = of.ofp_flow_mod(command=of.OFPFC_ADD, idle_timeout=FLOW_IDLE_TIMEOUT, hard_timeout=of.OFP_FLOW_PERMANENT, buffer_id=event.ofp.buffer_id, actions=actions, match=of.ofp_match.from_packet(packet, inport)) event.connection.send(msg.pack()) elif self.arp_for_unknowns: # We don't know this destination. 
# First, we track this buffer so that we can try to resend it later # if we learn the destination, second we ARP for the destination, # which should ultimately result in it responding and us learning # where it is # Add to tracked buffers if (dpid,dstaddr) not in self.lost_buffers: self.lost_buffers[(dpid,dstaddr)] = [] bucket = self.lost_buffers[(dpid,dstaddr)] entry = (time.time() + MAX_BUFFER_TIME,event.ofp.buffer_id,inport) bucket.append(entry) while len(bucket) > MAX_BUFFERED_PER_IP: del bucket[0] # Expire things from our outstanding ARP list... self.outstanding_arps = {k:v for k,v in self.outstanding_arps.iteritems() if v > time.time()} # Check if we've already ARPed recently if (dpid,dstaddr) in self.outstanding_arps: # Oop, we've already done this one recently. return # And ARP... self.outstanding_arps[(dpid,dstaddr)] = time.time() + 4 r = arp() r.hwtype = r.HW_TYPE_ETHERNET r.prototype = r.PROTO_TYPE_IP r.hwlen = 6 r.protolen = r.protolen r.opcode = r.REQUEST r.hwdst = ETHER_BROADCAST r.protodst = dstaddr r.hwsrc = packet.src r.protosrc = packet.next.srcip e = ethernet(type=ethernet.ARP_TYPE, src=packet.src, dst=ETHER_BROADCAST) e.set_payload(r) log.debug("%i %i ARPing for %s on behalf of %s" % (dpid, inport, str(r.protodst), str(r.protosrc))) msg = of.ofp_packet_out() msg.data = e.pack() msg.actions.append(of.ofp_action_output(port = of.OFPP_FLOOD)) msg.in_port = inport event.connection.send(msg) elif isinstance(packet.next, arp): a = packet.next log.debug("%i %i ARP %s %s => %s", dpid, inport, {arp.REQUEST:"request",arp.REPLY:"reply"}.get(a.opcode, 'op:%i' % (a.opcode,)), str(a.protosrc), str(a.protodst)) if a.prototype == arp.PROTO_TYPE_IP: if a.hwtype == arp.HW_TYPE_ETHERNET: if a.protosrc != 0: # Learn or update port/MAC info if a.protosrc in self.arpTable[dpid]: if self.arpTable[dpid][a.protosrc] != (inport, packet.src): log.info("%i %i RE-learned %s", dpid,inport,str(a.protosrc)) else: log.debug("%i %i learned %s", dpid,inport,str(a.protosrc)) self.arpTable[dpid][a.protosrc] = Entry(inport, packet.src) # Send any waiting packets... self._send_lost_buffers(dpid, a.protosrc, packet.src, inport) if a.opcode == arp.REQUEST: # Maybe we can answer if a.protodst in self.arpTable[dpid]: # We have an answer... if not self.arpTable[dpid][a.protodst].isExpired(): # .. 
and it's relatively current, so we'll reply ourselves r = arp() r.hwtype = a.hwtype r.prototype = a.prototype r.hwlen = a.hwlen r.protolen = a.protolen r.opcode = arp.REPLY r.hwdst = a.hwsrc r.protodst = a.protosrc r.protosrc = a.protodst r.hwsrc = self.arpTable[dpid][a.protodst].mac e = ethernet(type=packet.type, src=dpid_to_mac(dpid), dst=a.hwsrc) e.set_payload(r) log.debug("%i %i answering ARP for %s" % (dpid, inport, str(r.protosrc))) msg = of.ofp_packet_out() msg.data = e.pack() msg.actions.append(of.ofp_action_output(port = of.OFPP_IN_PORT)) msg.in_port = inport event.connection.send(msg) return # Didn't know how to answer or otherwise handle this ARP, so just flood it log.debug("%i %i flooding ARP %s %s => %s" % (dpid, inport, {arp.REQUEST:"request",arp.REPLY:"reply"}.get(a.opcode, 'op:%i' % (a.opcode,)), str(a.protosrc), str(a.protodst))) msg = of.ofp_packet_out(in_port = inport, data = event.ofp, action = of.ofp_action_output(port = of.OFPP_FLOOD)) event.connection.send(msg) def launch (fakeways="", arp_for_unknowns=None): fakeways = fakeways.replace(","," ").split() fakeways = [IPAddr(x) for x in fakeways] if arp_for_unknowns is None: arp_for_unknowns = len(fakeways) > 0 else: arp_for_unknowns = str_to_bool(arp_for_unknowns) core.registerNew(l3_switch, fakeways, arp_for_unknowns)
apache-2.0
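The L3 switch above keys its learning on an IP -> Entry(port, mac) table with timestamp-based expiry, flooding or re-ARPing when an entry is stale. The core of that pattern in isolation, as a simplified standalone sketch rather than POX code:

import time

ARP_TIMEOUT = 60 * 2

class Entry(object):
    def __init__(self, port, mac):
        self.timeout = time.time() + ARP_TIMEOUT
        self.port = port
        self.mac = mac

    def is_expired(self):
        return time.time() > self.timeout

arp_table = {}                            # ip -> Entry, per switch
arp_table['10.0.0.1'] = Entry(1, '00:00:00:00:00:01')

entry = arp_table.get('10.0.0.1')
if entry and not entry.is_expired():
    print('forward out port', entry.port)  # install a flow toward the host
else:
    print('flood / re-ARP')                # stale or unknown -> fall back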
jos4uke/getSeqFlankBlatHit
lib/python2.7/site-packages/Cython/Compiler/ParseTreeTransforms.py
2
117197
from __future__ import absolute_import import copy import cython cython.declare(PyrexTypes=object, Naming=object, ExprNodes=object, Nodes=object, Options=object, UtilNodes=object, LetNode=object, LetRefNode=object, TreeFragment=object, EncodedString=object, error=object, warning=object, copy=object) from . import PyrexTypes from . import Naming from . import ExprNodes from . import Nodes from . import Options from . import Builtin from .Visitor import VisitorTransform, TreeVisitor from .Visitor import CythonTransform, EnvTransform, ScopeTrackingTransform from .UtilNodes import LetNode, LetRefNode, ResultRefNode from .TreeFragment import TreeFragment from .StringEncoding import EncodedString from .Errors import error, warning, CompileError, InternalError from .Code import UtilityCode class NameNodeCollector(TreeVisitor): """Collect all NameNodes of a (sub-)tree in the ``name_nodes`` attribute. """ def __init__(self): super(NameNodeCollector, self).__init__() self.name_nodes = [] def visit_NameNode(self, node): self.name_nodes.append(node) def visit_Node(self, node): self._visitchildren(node, None) class SkipDeclarations(object): """ Variable and function declarations can often have a deep tree structure, and yet most transformations don't need to descend to this depth. Declaration nodes are removed after AnalyseDeclarationsTransform, so there is no need to use this for transformations after that point. """ def visit_CTypeDefNode(self, node): return node def visit_CVarDefNode(self, node): return node def visit_CDeclaratorNode(self, node): return node def visit_CBaseTypeNode(self, node): return node def visit_CEnumDefNode(self, node): return node def visit_CStructOrUnionDefNode(self, node): return node class NormalizeTree(CythonTransform): """ This transform fixes up a few things after parsing in order to make the parse tree more suitable for transforms. a) After parsing, blocks with only one statement will be represented by that statement, not by a StatListNode. When doing transforms this is annoying and inconsistent, as one cannot in general remove a statement in a consistent way and so on. This transform wraps any single statements in a StatListNode containing a single statement. b) The PassStatNode is a noop and serves no purpose beyond plugging such one-statement blocks; i.e., once parsed a ` "pass" can just as well be represented using an empty StatListNode. This means less special cases to worry about in subsequent transforms (one always checks to see if a StatListNode has no children to see if the block is empty). 
""" def __init__(self, context): super(NormalizeTree, self).__init__(context) self.is_in_statlist = False self.is_in_expr = False def visit_ExprNode(self, node): stacktmp = self.is_in_expr self.is_in_expr = True self.visitchildren(node) self.is_in_expr = stacktmp return node def visit_StatNode(self, node, is_listcontainer=False): stacktmp = self.is_in_statlist self.is_in_statlist = is_listcontainer self.visitchildren(node) self.is_in_statlist = stacktmp if not self.is_in_statlist and not self.is_in_expr: return Nodes.StatListNode(pos=node.pos, stats=[node]) else: return node def visit_StatListNode(self, node): self.is_in_statlist = True self.visitchildren(node) self.is_in_statlist = False return node def visit_ParallelAssignmentNode(self, node): return self.visit_StatNode(node, True) def visit_CEnumDefNode(self, node): return self.visit_StatNode(node, True) def visit_CStructOrUnionDefNode(self, node): return self.visit_StatNode(node, True) def visit_PassStatNode(self, node): """Eliminate PassStatNode""" if not self.is_in_statlist: return Nodes.StatListNode(pos=node.pos, stats=[]) else: return [] def visit_ExprStatNode(self, node): """Eliminate useless string literals""" if node.expr.is_string_literal: return self.visit_PassStatNode(node) else: return self.visit_StatNode(node) def visit_CDeclaratorNode(self, node): return node class PostParseError(CompileError): pass # error strings checked by unit tests, so define them ERR_CDEF_INCLASS = 'Cannot assign default value to fields in cdef classes, structs or unions' ERR_BUF_DEFAULTS = 'Invalid buffer defaults specification (see docs)' ERR_INVALID_SPECIALATTR_TYPE = 'Special attributes must not have a type declared' class PostParse(ScopeTrackingTransform): """ Basic interpretation of the parse tree, as well as validity checking that can be done on a very basic level on the parse tree (while still not being a problem with the basic syntax, as such). Specifically: - Default values to cdef assignments are turned into single assignments following the declaration (everywhere but in class bodies, where they raise a compile error) - Interpret some node structures into Python runtime values. Some nodes take compile-time arguments (currently: TemplatedTypeNode[args] and __cythonbufferdefaults__ = {args}), which should be interpreted. This happens in a general way and other steps should be taken to ensure validity. Type arguments cannot be interpreted in this way. - For __cythonbufferdefaults__ the arguments are checked for validity. TemplatedTypeNode has its directives interpreted: Any first positional argument goes into the "dtype" attribute, any "ndim" keyword argument goes into the "ndim" attribute and so on. Also it is checked that the directive combination is valid. - __cythonbufferdefaults__ attributes are parsed and put into the type information. Note: Currently Parsing.py does a lot of interpretation and reorganization that can be refactored into this transform if a more pure Abstract Syntax Tree is wanted. 
""" def __init__(self, context): super(PostParse, self).__init__(context) self.specialattribute_handlers = { '__cythonbufferdefaults__' : self.handle_bufferdefaults } def visit_ModuleNode(self, node): self.lambda_counter = 1 self.genexpr_counter = 1 return super(PostParse, self).visit_ModuleNode(node) def visit_LambdaNode(self, node): # unpack a lambda expression into the corresponding DefNode lambda_id = self.lambda_counter self.lambda_counter += 1 node.lambda_name = EncodedString(u'lambda%d' % lambda_id) collector = YieldNodeCollector() collector.visitchildren(node.result_expr) if collector.yields or isinstance(node.result_expr, ExprNodes.YieldExprNode): body = Nodes.ExprStatNode( node.result_expr.pos, expr=node.result_expr) else: body = Nodes.ReturnStatNode( node.result_expr.pos, value=node.result_expr) node.def_node = Nodes.DefNode( node.pos, name=node.name, lambda_name=node.lambda_name, args=node.args, star_arg=node.star_arg, starstar_arg=node.starstar_arg, body=body, doc=None) self.visitchildren(node) return node def visit_GeneratorExpressionNode(self, node): # unpack a generator expression into the corresponding DefNode genexpr_id = self.genexpr_counter self.genexpr_counter += 1 node.genexpr_name = EncodedString(u'genexpr%d' % genexpr_id) node.def_node = Nodes.DefNode(node.pos, name=node.name, doc=None, args=[], star_arg=None, starstar_arg=None, body=node.loop) self.visitchildren(node) return node # cdef variables def handle_bufferdefaults(self, decl): if not isinstance(decl.default, ExprNodes.DictNode): raise PostParseError(decl.pos, ERR_BUF_DEFAULTS) self.scope_node.buffer_defaults_node = decl.default self.scope_node.buffer_defaults_pos = decl.pos def visit_CVarDefNode(self, node): # This assumes only plain names and pointers are assignable on # declaration. Also, it makes use of the fact that a cdef decl # must appear before the first use, so we don't have to deal with # "i = 3; cdef int i = i" and can simply move the nodes around. try: self.visitchildren(node) stats = [node] newdecls = [] for decl in node.declarators: declbase = decl while isinstance(declbase, Nodes.CPtrDeclaratorNode): declbase = declbase.base if isinstance(declbase, Nodes.CNameDeclaratorNode): if declbase.default is not None: if self.scope_type in ('cclass', 'pyclass', 'struct'): if isinstance(self.scope_node, Nodes.CClassDefNode): handler = self.specialattribute_handlers.get(decl.name) if handler: if decl is not declbase: raise PostParseError(decl.pos, ERR_INVALID_SPECIALATTR_TYPE) handler(decl) continue # Remove declaration raise PostParseError(decl.pos, ERR_CDEF_INCLASS) first_assignment = self.scope_type != 'module' stats.append(Nodes.SingleAssignmentNode(node.pos, lhs=ExprNodes.NameNode(node.pos, name=declbase.name), rhs=declbase.default, first=first_assignment)) declbase.default = None newdecls.append(decl) node.declarators = newdecls return stats except PostParseError, e: # An error in a cdef clause is ok, simply remove the declaration # and try to move on to report more errors self.context.nonfatal_error(e) return None # Split parallel assignments (a,b = b,a) into separate partial # assignments that are executed rhs-first using temps. This # restructuring must be applied before type analysis so that known # types on rhs and lhs can be matched directly. It is required in # the case that the types cannot be coerced to a Python type in # order to assign from a tuple. 
def visit_SingleAssignmentNode(self, node): self.visitchildren(node) return self._visit_assignment_node(node, [node.lhs, node.rhs]) def visit_CascadedAssignmentNode(self, node): self.visitchildren(node) return self._visit_assignment_node(node, node.lhs_list + [node.rhs]) def _visit_assignment_node(self, node, expr_list): """Flatten parallel assignments into separate single assignments or cascaded assignments. """ if sum([ 1 for expr in expr_list if expr.is_sequence_constructor or expr.is_string_literal ]) < 2: # no parallel assignments => nothing to do return node expr_list_list = [] flatten_parallel_assignments(expr_list, expr_list_list) temp_refs = [] eliminate_rhs_duplicates(expr_list_list, temp_refs) nodes = [] for expr_list in expr_list_list: lhs_list = expr_list[:-1] rhs = expr_list[-1] if len(lhs_list) == 1: node = Nodes.SingleAssignmentNode(rhs.pos, lhs = lhs_list[0], rhs = rhs) else: node = Nodes.CascadedAssignmentNode(rhs.pos, lhs_list = lhs_list, rhs = rhs) nodes.append(node) if len(nodes) == 1: assign_node = nodes[0] else: assign_node = Nodes.ParallelAssignmentNode(nodes[0].pos, stats = nodes) if temp_refs: duplicates_and_temps = [ (temp.expression, temp) for temp in temp_refs ] sort_common_subsequences(duplicates_and_temps) for _, temp_ref in duplicates_and_temps[::-1]: assign_node = LetNode(temp_ref, assign_node) return assign_node def _flatten_sequence(self, seq, result): for arg in seq.args: if arg.is_sequence_constructor: self._flatten_sequence(arg, result) else: result.append(arg) return result def visit_DelStatNode(self, node): self.visitchildren(node) node.args = self._flatten_sequence(node, []) return node def visit_ExceptClauseNode(self, node): if node.is_except_as: # except-as must delete NameNode target at the end del_target = Nodes.DelStatNode( node.pos, args=[ExprNodes.NameNode( node.target.pos, name=node.target.name)], ignore_nonexisting=True) node.body = Nodes.StatListNode( node.pos, stats=[Nodes.TryFinallyStatNode( node.pos, body=node.body, finally_clause=Nodes.StatListNode( node.pos, stats=[del_target]))]) self.visitchildren(node) return node def eliminate_rhs_duplicates(expr_list_list, ref_node_sequence): """Replace rhs items by LetRefNodes if they appear more than once. Creates a sequence of LetRefNodes that set up the required temps and appends them to ref_node_sequence. The input list is modified in-place. 
""" seen_nodes = set() ref_nodes = {} def find_duplicates(node): if node.is_literal or node.is_name: # no need to replace those; can't include attributes here # as their access is not necessarily side-effect free return if node in seen_nodes: if node not in ref_nodes: ref_node = LetRefNode(node) ref_nodes[node] = ref_node ref_node_sequence.append(ref_node) else: seen_nodes.add(node) if node.is_sequence_constructor: for item in node.args: find_duplicates(item) for expr_list in expr_list_list: rhs = expr_list[-1] find_duplicates(rhs) if not ref_nodes: return def substitute_nodes(node): if node in ref_nodes: return ref_nodes[node] elif node.is_sequence_constructor: node.args = list(map(substitute_nodes, node.args)) return node # replace nodes inside of the common subexpressions for node in ref_nodes: if node.is_sequence_constructor: node.args = list(map(substitute_nodes, node.args)) # replace common subexpressions on all rhs items for expr_list in expr_list_list: expr_list[-1] = substitute_nodes(expr_list[-1]) def sort_common_subsequences(items): """Sort items/subsequences so that all items and subsequences that an item contains appear before the item itself. This is needed because each rhs item must only be evaluated once, so its value must be evaluated first and then reused when packing sequences that contain it. This implies a partial order, and the sort must be stable to preserve the original order as much as possible, so we use a simple insertion sort (which is very fast for short sequences, the normal case in practice). """ def contains(seq, x): for item in seq: if item is x: return True elif item.is_sequence_constructor and contains(item.args, x): return True return False def lower_than(a,b): return b.is_sequence_constructor and contains(b.args, a) for pos, item in enumerate(items): key = item[1] # the ResultRefNode which has already been injected into the sequences new_pos = pos for i in xrange(pos-1, -1, -1): if lower_than(key, items[i][0]): new_pos = i if new_pos != pos: for i in xrange(pos, new_pos, -1): items[i] = items[i-1] items[new_pos] = item def unpack_string_to_character_literals(literal): chars = [] pos = literal.pos stype = literal.__class__ sval = literal.value sval_type = sval.__class__ for char in sval: cval = sval_type(char) chars.append(stype(pos, value=cval, constant_result=cval)) return chars def flatten_parallel_assignments(input, output): # The input is a list of expression nodes, representing the LHSs # and RHS of one (possibly cascaded) assignment statement. For # sequence constructors, rearranges the matching parts of both # sides into a list of equivalent assignments between the # individual elements. This transformation is applied # recursively, so that nested structures get matched as well. 
rhs = input[-1] if (not (rhs.is_sequence_constructor or isinstance(rhs, ExprNodes.UnicodeNode)) or not sum([lhs.is_sequence_constructor for lhs in input[:-1]])): output.append(input) return complete_assignments = [] if rhs.is_sequence_constructor: rhs_args = rhs.args elif rhs.is_string_literal: rhs_args = unpack_string_to_character_literals(rhs) rhs_size = len(rhs_args) lhs_targets = [ [] for _ in xrange(rhs_size) ] starred_assignments = [] for lhs in input[:-1]: if not lhs.is_sequence_constructor: if lhs.is_starred: error(lhs.pos, "starred assignment target must be in a list or tuple") complete_assignments.append(lhs) continue lhs_size = len(lhs.args) starred_targets = sum([1 for expr in lhs.args if expr.is_starred]) if starred_targets > 1: error(lhs.pos, "more than 1 starred expression in assignment") output.append([lhs,rhs]) continue elif lhs_size - starred_targets > rhs_size: error(lhs.pos, "need more than %d value%s to unpack" % (rhs_size, (rhs_size != 1) and 's' or '')) output.append([lhs,rhs]) continue elif starred_targets: map_starred_assignment(lhs_targets, starred_assignments, lhs.args, rhs_args) elif lhs_size < rhs_size: error(lhs.pos, "too many values to unpack (expected %d, got %d)" % (lhs_size, rhs_size)) output.append([lhs,rhs]) continue else: for targets, expr in zip(lhs_targets, lhs.args): targets.append(expr) if complete_assignments: complete_assignments.append(rhs) output.append(complete_assignments) # recursively flatten partial assignments for cascade, rhs in zip(lhs_targets, rhs_args): if cascade: cascade.append(rhs) flatten_parallel_assignments(cascade, output) # recursively flatten starred assignments for cascade in starred_assignments: if cascade[0].is_sequence_constructor: flatten_parallel_assignments(cascade, output) else: output.append(cascade) def map_starred_assignment(lhs_targets, starred_assignments, lhs_args, rhs_args): # Appends the fixed-position LHS targets to the target list that # appear left and right of the starred argument. # # The starred_assignments list receives a new tuple # (lhs_target, rhs_values_list) that maps the remaining arguments # (those that match the starred target) to a list. # left side of the starred target for i, (targets, expr) in enumerate(zip(lhs_targets, lhs_args)): if expr.is_starred: starred = i lhs_remaining = len(lhs_args) - i - 1 break targets.append(expr) else: raise InternalError("no starred arg found when splitting starred assignment") # right side of the starred target for i, (targets, expr) in enumerate(zip(lhs_targets[-lhs_remaining:], lhs_args[starred + 1:])): targets.append(expr) # the starred target itself, must be assigned a (potentially empty) list target = lhs_args[starred].target # unpack starred node starred_rhs = rhs_args[starred:] if lhs_remaining: starred_rhs = starred_rhs[:-lhs_remaining] if starred_rhs: pos = starred_rhs[0].pos else: pos = target.pos starred_assignments.append([ target, ExprNodes.ListNode(pos=pos, args=starred_rhs)]) class PxdPostParse(CythonTransform, SkipDeclarations): """ Basic interpretation/validity checking that should only be done on pxd trees. A lot of this checking currently happens in the parser; but what is listed below happens here. 
- "def" functions are let through only if they fill the getbuffer/releasebuffer slots - cdef functions are let through only if they are on the top level and are declared "inline" """ ERR_INLINE_ONLY = "function definition in pxd file must be declared 'cdef inline'" ERR_NOGO_WITH_INLINE = "inline function definition in pxd file cannot be '%s'" def __call__(self, node): self.scope_type = 'pxd' return super(PxdPostParse, self).__call__(node) def visit_CClassDefNode(self, node): old = self.scope_type self.scope_type = 'cclass' self.visitchildren(node) self.scope_type = old return node def visit_FuncDefNode(self, node): # FuncDefNode always come with an implementation (without # an imp they are CVarDefNodes..) err = self.ERR_INLINE_ONLY if (isinstance(node, Nodes.DefNode) and self.scope_type == 'cclass' and node.name in ('__getbuffer__', '__releasebuffer__')): err = None # allow these slots if isinstance(node, Nodes.CFuncDefNode): if (u'inline' in node.modifiers and self.scope_type in ('pxd', 'cclass')): node.inline_in_pxd = True if node.visibility != 'private': err = self.ERR_NOGO_WITH_INLINE % node.visibility elif node.api: err = self.ERR_NOGO_WITH_INLINE % 'api' else: err = None # allow inline function else: err = self.ERR_INLINE_ONLY if err: self.context.nonfatal_error(PostParseError(node.pos, err)) return None else: return node class InterpretCompilerDirectives(CythonTransform, SkipDeclarations): """ After parsing, directives can be stored in a number of places: - #cython-comments at the top of the file (stored in ModuleNode) - Command-line arguments overriding these - @cython.directivename decorators - with cython.directivename: statements This transform is responsible for interpreting these various sources and store the directive in two ways: - Set the directives attribute of the ModuleNode for global directives. - Use a CompilerDirectivesNode to override directives for a subtree. (The first one is primarily to not have to modify with the tree structure, so that ModuleNode stay on top.) The directives are stored in dictionaries from name to value in effect. Each such dictionary is always filled in for all possible directives, using default values where no value is given by the user. The available directives are controlled in Options.py. Note that we have to run this prior to analysis, and so some minor duplication of functionality has to occur: We manually track cimports and which names the "cython" module may have been imported to. """ unop_method_nodes = { 'typeof': ExprNodes.TypeofNode, 'operator.address': ExprNodes.AmpersandNode, 'operator.dereference': ExprNodes.DereferenceNode, 'operator.preincrement' : ExprNodes.inc_dec_constructor(True, '++'), 'operator.predecrement' : ExprNodes.inc_dec_constructor(True, '--'), 'operator.postincrement': ExprNodes.inc_dec_constructor(False, '++'), 'operator.postdecrement': ExprNodes.inc_dec_constructor(False, '--'), # For backwards compatability. 
'address': ExprNodes.AmpersandNode, } binop_method_nodes = { 'operator.comma' : ExprNodes.c_binop_constructor(','), } special_methods = set(['declare', 'union', 'struct', 'typedef', 'sizeof', 'cast', 'pointer', 'compiled', 'NULL', 'fused_type', 'parallel']) special_methods.update(unop_method_nodes.keys()) valid_parallel_directives = set([ "parallel", "prange", "threadid", # "threadsavailable", ]) def __init__(self, context, compilation_directive_defaults): super(InterpretCompilerDirectives, self).__init__(context) self.cython_module_names = set() self.directive_names = {'staticmethod': 'staticmethod'} self.parallel_directives = {} directives = copy.deepcopy(Options.directive_defaults) for key, value in compilation_directive_defaults.items(): directives[unicode(key)] = copy.deepcopy(value) self.directives = directives def check_directive_scope(self, pos, directive, scope): legal_scopes = Options.directive_scopes.get(directive, None) if legal_scopes and scope not in legal_scopes: self.context.nonfatal_error(PostParseError(pos, 'The %s compiler directive ' 'is not allowed in %s scope' % (directive, scope))) return False else: if (directive not in Options.directive_defaults and directive not in Options.directive_types): error(pos, "Invalid directive: '%s'." % (directive,)) return True # Set up processing and handle the cython: comments. def visit_ModuleNode(self, node): for key, value in node.directive_comments.items(): if not self.check_directive_scope(node.pos, key, 'module'): self.wrong_scope_error(node.pos, key, 'module') del node.directive_comments[key] self.module_scope = node.scope self.directives.update(node.directive_comments) node.directives = self.directives node.parallel_directives = self.parallel_directives self.visitchildren(node) node.cython_module_names = self.cython_module_names return node # The following four functions track imports and cimports that # begin with "cython" def is_cython_directive(self, name): return (name in Options.directive_types or name in self.special_methods or PyrexTypes.parse_basic_type(name)) def is_parallel_directive(self, full_name, pos): """ Checks to see if fullname (e.g. cython.parallel.prange) is a valid parallel directive. If it is a star import it also updates the parallel_directives. 
""" result = (full_name + ".").startswith("cython.parallel.") if result: directive = full_name.split('.') if full_name == u"cython.parallel": self.parallel_directives[u"parallel"] = u"cython.parallel" elif full_name == u"cython.parallel.*": for name in self.valid_parallel_directives: self.parallel_directives[name] = u"cython.parallel.%s" % name elif (len(directive) != 3 or directive[-1] not in self.valid_parallel_directives): error(pos, "No such directive: %s" % full_name) self.module_scope.use_utility_code( UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c")) return result def visit_CImportStatNode(self, node): if node.module_name == u"cython": self.cython_module_names.add(node.as_name or u"cython") elif node.module_name.startswith(u"cython."): if node.module_name.startswith(u"cython.parallel."): error(node.pos, node.module_name + " is not a module") if node.module_name == u"cython.parallel": if node.as_name and node.as_name != u"cython": self.parallel_directives[node.as_name] = node.module_name else: self.cython_module_names.add(u"cython") self.parallel_directives[ u"cython.parallel"] = node.module_name self.module_scope.use_utility_code( UtilityCode.load_cached("InitThreads", "ModuleSetupCode.c")) elif node.as_name: self.directive_names[node.as_name] = node.module_name[7:] else: self.cython_module_names.add(u"cython") # if this cimport was a compiler directive, we don't # want to leave the cimport node sitting in the tree return None return node def visit_FromCImportStatNode(self, node): if not node.relative_level and ( node.module_name == u"cython" or node.module_name.startswith(u"cython.")): submodule = (node.module_name + u".")[7:] newimp = [] for pos, name, as_name, kind in node.imported_names: full_name = submodule + name qualified_name = u"cython." + full_name if self.is_parallel_directive(qualified_name, node.pos): # from cython cimport parallel, or # from cython.parallel cimport parallel, prange, ... self.parallel_directives[as_name or name] = qualified_name elif self.is_cython_directive(full_name): self.directive_names[as_name or name] = full_name if kind is not None: self.context.nonfatal_error(PostParseError(pos, "Compiler directive imports must be plain imports")) else: newimp.append((pos, name, as_name, kind)) if not newimp: return None node.imported_names = newimp return node def visit_FromImportStatNode(self, node): if (node.module.module_name.value == u"cython") or \ node.module.module_name.value.startswith(u"cython."): submodule = (node.module.module_name.value + u".")[7:] newimp = [] for name, name_node in node.items: full_name = submodule + name qualified_name = u"cython." 
+ full_name if self.is_parallel_directive(qualified_name, node.pos): self.parallel_directives[name_node.name] = qualified_name elif self.is_cython_directive(full_name): self.directive_names[name_node.name] = full_name else: newimp.append((name, name_node)) if not newimp: return None node.items = newimp return node def visit_SingleAssignmentNode(self, node): if isinstance(node.rhs, ExprNodes.ImportNode): module_name = node.rhs.module_name.value is_parallel = (module_name + u".").startswith(u"cython.parallel.") if module_name != u"cython" and not is_parallel: return node module_name = node.rhs.module_name.value as_name = node.lhs.name node = Nodes.CImportStatNode(node.pos, module_name = module_name, as_name = as_name) node = self.visit_CImportStatNode(node) else: self.visitchildren(node) return node def visit_NameNode(self, node): if node.name in self.cython_module_names: node.is_cython_module = True else: node.cython_attribute = self.directive_names.get(node.name) return node def try_to_parse_directives(self, node): # If node is the contents of an directive (in a with statement or # decorator), returns a list of (directivename, value) pairs. # Otherwise, returns None if isinstance(node, ExprNodes.CallNode): self.visit(node.function) optname = node.function.as_cython_attribute() if optname: directivetype = Options.directive_types.get(optname) if directivetype: args, kwds = node.explicit_args_kwds() directives = [] key_value_pairs = [] if kwds is not None and directivetype is not dict: for keyvalue in kwds.key_value_pairs: key, value = keyvalue sub_optname = "%s.%s" % (optname, key.value) if Options.directive_types.get(sub_optname): directives.append(self.try_to_parse_directive(sub_optname, [value], None, keyvalue.pos)) else: key_value_pairs.append(keyvalue) if not key_value_pairs: kwds = None else: kwds.key_value_pairs = key_value_pairs if directives and not kwds and not args: return directives directives.append(self.try_to_parse_directive(optname, args, kwds, node.function.pos)) return directives elif isinstance(node, (ExprNodes.AttributeNode, ExprNodes.NameNode)): self.visit(node) optname = node.as_cython_attribute() if optname: directivetype = Options.directive_types.get(optname) if directivetype is bool: return [(optname, True)] elif directivetype is None: return [(optname, None)] else: raise PostParseError( node.pos, "The '%s' directive should be used as a function call." 
% optname) return None def try_to_parse_directive(self, optname, args, kwds, pos): directivetype = Options.directive_types.get(optname) if len(args) == 1 and isinstance(args[0], ExprNodes.NoneNode): return optname, Options.directive_defaults[optname] elif directivetype is bool: if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.BoolNode): raise PostParseError(pos, 'The %s directive takes one compile-time boolean argument' % optname) return (optname, args[0].value) elif directivetype is int: if kwds is not None or len(args) != 1 or not isinstance(args[0], ExprNodes.IntNode): raise PostParseError(pos, 'The %s directive takes one compile-time integer argument' % optname) return (optname, int(args[0].value)) elif directivetype is str: if kwds is not None or len(args) != 1 or not isinstance( args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)): raise PostParseError(pos, 'The %s directive takes one compile-time string argument' % optname) return (optname, str(args[0].value)) elif directivetype is type: if kwds is not None or len(args) != 1: raise PostParseError(pos, 'The %s directive takes one type argument' % optname) return (optname, args[0]) elif directivetype is dict: if len(args) != 0: raise PostParseError(pos, 'The %s directive takes no prepositional arguments' % optname) return optname, dict([(key.value, value) for key, value in kwds.key_value_pairs]) elif directivetype is list: if kwds and len(kwds) != 0: raise PostParseError(pos, 'The %s directive takes no keyword arguments' % optname) return optname, [ str(arg.value) for arg in args ] elif callable(directivetype): if kwds is not None or len(args) != 1 or not isinstance( args[0], (ExprNodes.StringNode, ExprNodes.UnicodeNode)): raise PostParseError(pos, 'The %s directive takes one compile-time string argument' % optname) return (optname, directivetype(optname, str(args[0].value))) else: assert False def visit_with_directives(self, body, directives): olddirectives = self.directives newdirectives = copy.copy(olddirectives) newdirectives.update(directives) self.directives = newdirectives assert isinstance(body, Nodes.StatListNode), body retbody = self.visit_Node(body) directive = Nodes.CompilerDirectivesNode(pos=retbody.pos, body=retbody, directives=newdirectives) self.directives = olddirectives return directive # Handle decorators def visit_FuncDefNode(self, node): directives = self._extract_directives(node, 'function') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_CVarDefNode(self, node): directives = self._extract_directives(node, 'function') if not directives: return node for name, value in directives.iteritems(): if name == 'locals': node.directive_locals = value elif name not in ('final', 'staticmethod'): self.context.nonfatal_error(PostParseError( node.pos, "Cdef functions can only take cython.locals(), " "staticmethod, or final decorators, got %s." 
% name)) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_CClassDefNode(self, node): directives = self._extract_directives(node, 'cclass') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_CppClassNode(self, node): directives = self._extract_directives(node, 'cppclass') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def visit_PyClassDefNode(self, node): directives = self._extract_directives(node, 'class') if not directives: return self.visit_Node(node) body = Nodes.StatListNode(node.pos, stats=[node]) return self.visit_with_directives(body, directives) def _extract_directives(self, node, scope_name): if not node.decorators: return {} # Split the decorators into two lists -- real decorators and directives directives = [] realdecs = [] both = [] for dec in node.decorators: new_directives = self.try_to_parse_directives(dec.decorator) if new_directives is not None: for directive in new_directives: if self.check_directive_scope(node.pos, directive[0], scope_name): name, value = directive if self.directives.get(name, object()) != value: directives.append(directive) if directive[0] == 'staticmethod': both.append(dec) else: realdecs.append(dec) if realdecs and isinstance(node, (Nodes.CFuncDefNode, Nodes.CClassDefNode, Nodes.CVarDefNode)): raise PostParseError(realdecs[0].pos, "Cdef functions/classes cannot take arbitrary decorators.") else: node.decorators = realdecs + both # merge or override repeated directives optdict = {} directives.reverse() # Decorators coming first take precedence for directive in directives: name, value = directive if name in optdict: old_value = optdict[name] # keywords and arg lists can be merged, everything # else overrides completely if isinstance(old_value, dict): old_value.update(value) elif isinstance(old_value, list): old_value.extend(value) else: optdict[name] = value else: optdict[name] = value return optdict # Handle with statements def visit_WithStatNode(self, node): directive_dict = {} for directive in self.try_to_parse_directives(node.manager) or []: if directive is not None: if node.target is not None: self.context.nonfatal_error( PostParseError(node.pos, "Compiler directive with statements cannot contain 'as'")) else: name, value = directive if name in ('nogil', 'gil'): # special case: in pure mode, "with nogil" spells "with cython.nogil" node = Nodes.GILStatNode(node.pos, state = name, body = node.body) return self.visit_Node(node) if self.check_directive_scope(node.pos, name, 'with statement'): directive_dict[name] = value if directive_dict: return self.visit_with_directives(node.body, directive_dict) return self.visit_Node(node) class ParallelRangeTransform(CythonTransform, SkipDeclarations): """ Transform cython.parallel stuff. The parallel_directives come from the module node, set there by InterpretCompilerDirectives. x = cython.parallel.threadavailable() -> ParallelThreadAvailableNode with nogil, cython.parallel.parallel(): -> ParallelWithBlockNode print cython.parallel.threadid() -> ParallelThreadIdNode for i in cython.parallel.prange(...): -> ParallelRangeNode ... 
""" # a list of names, maps 'cython.parallel.prange' in the code to # ['cython', 'parallel', 'prange'] parallel_directive = None # Indicates whether a namenode in an expression is the cython module namenode_is_cython_module = False # Keep track of whether we are the context manager of a 'with' statement in_context_manager_section = False # One of 'prange' or 'with parallel'. This is used to disallow closely # nested 'with parallel:' blocks state = None directive_to_node = { u"cython.parallel.parallel": Nodes.ParallelWithBlockNode, # u"cython.parallel.threadsavailable": ExprNodes.ParallelThreadsAvailableNode, u"cython.parallel.threadid": ExprNodes.ParallelThreadIdNode, u"cython.parallel.prange": Nodes.ParallelRangeNode, } def node_is_parallel_directive(self, node): return node.name in self.parallel_directives or node.is_cython_module def get_directive_class_node(self, node): """ Figure out which parallel directive was used and return the associated Node class. E.g. for a cython.parallel.prange() call we return ParallelRangeNode """ if self.namenode_is_cython_module: directive = '.'.join(self.parallel_directive) else: directive = self.parallel_directives[self.parallel_directive[0]] directive = '%s.%s' % (directive, '.'.join(self.parallel_directive[1:])) directive = directive.rstrip('.') cls = self.directive_to_node.get(directive) if cls is None and not (self.namenode_is_cython_module and self.parallel_directive[0] != 'parallel'): error(node.pos, "Invalid directive: %s" % directive) self.namenode_is_cython_module = False self.parallel_directive = None return cls def visit_ModuleNode(self, node): """ If any parallel directives were imported, copy them over and visit the AST """ if node.parallel_directives: self.parallel_directives = node.parallel_directives return self.visit_Node(node) # No parallel directives were imported, so they can't be used :) return node def visit_NameNode(self, node): if self.node_is_parallel_directive(node): self.parallel_directive = [node.name] self.namenode_is_cython_module = node.is_cython_module return node def visit_AttributeNode(self, node): self.visitchildren(node) if self.parallel_directive: self.parallel_directive.append(node.attribute) return node def visit_CallNode(self, node): self.visit(node.function) if not self.parallel_directive: return node # We are a parallel directive, replace this node with the # corresponding ParallelSomethingSomething node if isinstance(node, ExprNodes.GeneralCallNode): args = node.positional_args.args kwargs = node.keyword_args else: args = node.args kwargs = {} parallel_directive_class = self.get_directive_class_node(node) if parallel_directive_class: # Note: in case of a parallel() the body is set by # visit_WithStatNode node = parallel_directive_class(node.pos, args=args, kwargs=kwargs) return node def visit_WithStatNode(self, node): "Rewrite with cython.parallel.parallel() blocks" newnode = self.visit(node.manager) if isinstance(newnode, Nodes.ParallelWithBlockNode): if self.state == 'parallel with': error(node.manager.pos, "Nested parallel with blocks are disallowed") self.state = 'parallel with' body = self.visit(node.body) self.state = None newnode.body = body return newnode elif self.parallel_directive: parallel_directive_class = self.get_directive_class_node(node) if not parallel_directive_class: # There was an error, stop here and now return None if parallel_directive_class is Nodes.ParallelWithBlockNode: error(node.pos, "The parallel directive must be called") return None node.body = self.visit(node.body) return node 
    def visit_ForInStatNode(self, node):
        "Rewrite 'for i in cython.parallel.prange(...):'"
        self.visit(node.iterator)
        self.visit(node.target)

        in_prange = isinstance(node.iterator.sequence,
                               Nodes.ParallelRangeNode)
        previous_state = self.state

        if in_prange:
            # This will replace the entire ForInStatNode, so copy the
            # attributes
            parallel_range_node = node.iterator.sequence

            parallel_range_node.target = node.target
            parallel_range_node.body = node.body
            parallel_range_node.else_clause = node.else_clause

            node = parallel_range_node

            if not isinstance(node.target, ExprNodes.NameNode):
                error(node.target.pos,
                      "Can only iterate over an iteration variable")

            self.state = 'prange'

        self.visit(node.body)
        self.state = previous_state
        self.visit(node.else_clause)
        return node

    def visit(self, node):
        "Visit a node that may be None"
        if node is not None:
            return super(ParallelRangeTransform, self).visit(node)


class WithTransform(CythonTransform, SkipDeclarations):
    def visit_WithStatNode(self, node):
        self.visitchildren(node, 'body')
        pos = node.pos
        body, target, manager = node.body, node.target, node.manager
        node.enter_call = ExprNodes.SimpleCallNode(
            pos, function=ExprNodes.AttributeNode(
                pos, obj=ExprNodes.CloneNode(manager),
                attribute=EncodedString('__enter__'),
                is_special_lookup=True),
            args=[],
            is_temp=True)
        if target is not None:
            body = Nodes.StatListNode(
                pos, stats=[
                    Nodes.WithTargetAssignmentStatNode(
                        pos, lhs=target, with_node=node),
                    body])

        excinfo_target = ExprNodes.TupleNode(pos, slow=True, args=[
            ExprNodes.ExcValueNode(pos) for _ in range(3)])
        except_clause = Nodes.ExceptClauseNode(
            pos, body=Nodes.IfStatNode(
                pos, if_clauses=[
                    Nodes.IfClauseNode(
                        pos, condition=ExprNodes.NotNode(
                            pos, operand=ExprNodes.WithExitCallNode(
                                pos, with_stat=node,
                                test_if_run=False,
                                args=excinfo_target)),
                        body=Nodes.ReraiseStatNode(pos),
                    ),
                ],
                else_clause=None),
            pattern=None,
            target=None,
            excinfo_target=excinfo_target,
        )

        node.body = Nodes.TryFinallyStatNode(
            pos, body=Nodes.TryExceptStatNode(
                pos, body=body,
                except_clauses=[except_clause],
                else_clause=None,
            ),
            finally_clause=Nodes.ExprStatNode(
                pos, expr=ExprNodes.WithExitCallNode(
                    pos, with_stat=node,
                    test_if_run=True,
                    args=ExprNodes.TupleNode(
                        pos, args=[ExprNodes.NoneNode(pos) for _ in range(3)]
                    ))),
            handle_error_case=False,
        )
        return node

    def visit_ExprNode(self, node):
        # With statements are never inside expressions.
        return node


class DecoratorTransform(ScopeTrackingTransform, SkipDeclarations):
    """Originally, this was the only place where decorators were
    transformed into the corresponding calling code.  Now, this is
    done directly in DefNode and PyClassDefNode to avoid reassignments
    to the function/class name - except for cdef class methods.  For
    those, the reassignment is required as methods are originally
    defined in the PyMethodDef struct.

    The IndirectionNode allows DefNode to override the decorator
    """

    def visit_DefNode(self, func_node):
        scope_type = self.scope_type
        func_node = self.visit_FuncDefNode(func_node)
        if scope_type != 'cclass' or not func_node.decorators:
            return func_node
        return self.handle_decorators(func_node, func_node.decorators,
                                      func_node.name)

    def handle_decorators(self, node, decorators, name):
        decorator_result = ExprNodes.NameNode(node.pos, name=name)
        for decorator in decorators[::-1]:
            decorator_result = ExprNodes.SimpleCallNode(
                decorator.pos,
                function=decorator.decorator,
                args=[decorator_result])

        name_node = ExprNodes.NameNode(node.pos, name=name)
        reassignment = Nodes.SingleAssignmentNode(
            node.pos,
            lhs=name_node,
            rhs=decorator_result)

        reassignment = Nodes.IndirectionNode([reassignment])
        node.decorator_indirection = reassignment
        return [node, reassignment]


class CnameDirectivesTransform(CythonTransform, SkipDeclarations):
    """
    Only part of the CythonUtilityCode pipeline. Must be run before
    DecoratorTransform in case this is a decorator for a cdef class.
    It filters out @cname('my_cname') decorators and rewrites them to
    CnameDecoratorNodes.
    """

    def handle_function(self, node):
        if not getattr(node, 'decorators', None):
            return self.visit_Node(node)

        for i, decorator in enumerate(node.decorators):
            decorator = decorator.decorator

            if (isinstance(decorator, ExprNodes.CallNode) and
                    decorator.function.is_name and
                    decorator.function.name == 'cname'):
                args, kwargs = decorator.explicit_args_kwds()

                if kwargs:
                    raise AssertionError(
                        "cname decorator does not take keyword arguments")

                if len(args) != 1:
                    raise AssertionError(
                        "cname decorator takes exactly one argument")

                if not (args[0].is_literal and
                        args[0].type == Builtin.str_type):
                    raise AssertionError(
                        "argument to cname decorator must be a string literal")

                cname = args[0].compile_time_value(None).decode('UTF-8')
                del node.decorators[i]
                node = Nodes.CnameDecoratorNode(pos=node.pos, node=node,
                                                cname=cname)
                break

        return self.visit_Node(node)

    visit_FuncDefNode = handle_function
    visit_CClassDefNode = handle_function
    visit_CEnumDefNode = handle_function
    visit_CStructOrUnionDefNode = handle_function


class ForwardDeclareTypes(CythonTransform):

    def visit_CompilerDirectivesNode(self, node):
        env = self.module_scope
        old = env.directives
        env.directives = node.directives
        self.visitchildren(node)
        env.directives = old
        return node

    def visit_ModuleNode(self, node):
        self.module_scope = node.scope
        self.module_scope.directives = node.directives
        self.visitchildren(node)
        return node

    def visit_CDefExternNode(self, node):
        old_cinclude_flag = self.module_scope.in_cinclude
        self.module_scope.in_cinclude = 1
        self.visitchildren(node)
        self.module_scope.in_cinclude = old_cinclude_flag
        return node

    def visit_CEnumDefNode(self, node):
        node.declare(self.module_scope)
        return node

    def visit_CStructOrUnionDefNode(self, node):
        if node.name not in self.module_scope.entries:
            node.declare(self.module_scope)
        return node

    def visit_CClassDefNode(self, node):
        if node.class_name not in self.module_scope.entries:
            node.declare(self.module_scope)
        return node


class AnalyseDeclarationsTransform(EnvTransform):

    basic_property = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    def __set__(self, value):
        ATTR = value
    """, level='c_class', pipeline=[NormalizeTree(None)])
    basic_pyobject_property = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    def __set__(self, value):
        ATTR = value
    def __del__(self):
        ATTR = None
    """, level='c_class', pipeline=[NormalizeTree(None)])
    basic_property_ro = TreeFragment(u"""
property NAME:
    def __get__(self):
        return ATTR
    """, level='c_class', pipeline=[NormalizeTree(None)])

    struct_or_union_wrapper = TreeFragment(u"""
cdef class NAME:
    cdef TYPE value
    def __init__(self, MEMBER=None):
        cdef int count
        count = 0
        INIT_ASSIGNMENTS
        if IS_UNION and count > 1:
            raise ValueError, "At most one union member should be specified."
    def __str__(self):
        return STR_FORMAT % MEMBER_TUPLE
    def __repr__(self):
        return REPR_FORMAT % MEMBER_TUPLE
    """, pipeline=[NormalizeTree(None)])

    init_assignment = TreeFragment(u"""
if VALUE is not None:
    ATTR = VALUE
    count += 1
    """, pipeline=[NormalizeTree(None)])

    fused_function = None
    in_lambda = 0

    def __call__(self, root):
        # needed to determine if a cdef var is declared after it's used.
        self.seen_vars_stack = []
        self.fused_error_funcs = set()
        super_class = super(AnalyseDeclarationsTransform, self)
        self._super_visit_FuncDefNode = super_class.visit_FuncDefNode
        return super_class.__call__(root)

    def visit_NameNode(self, node):
        self.seen_vars_stack[-1].add(node.name)
        return node

    def visit_ModuleNode(self, node):
        self.seen_vars_stack.append(set())
        node.analyse_declarations(self.current_env())
        self.visitchildren(node)
        self.seen_vars_stack.pop()
        return node

    def visit_LambdaNode(self, node):
        self.in_lambda += 1
        node.analyse_declarations(self.current_env())
        self.visitchildren(node)
        self.in_lambda -= 1
        return node

    def visit_CClassDefNode(self, node):
        node = self.visit_ClassDefNode(node)
        if node.scope and node.scope.implemented and node.body:
            stats = []
            for entry in node.scope.var_entries:
                if entry.needs_property:
                    property = self.create_Property(entry)
                    property.analyse_declarations(node.scope)
                    self.visit(property)
                    stats.append(property)
            if stats:
                node.body.stats += stats
        return node

    def _handle_fused_def_decorators(self, old_decorators, env, node):
        """
        Create function calls to the decorators and reassignments to
        the function.
        """
        # Delete staticmethod and classmethod decorators, this is
        # handled directly by the fused function object.
        decorators = []
        for decorator in old_decorators:
            func = decorator.decorator
            if (not func.is_name or
                    func.name not in ('staticmethod', 'classmethod') or
                    env.lookup_here(func.name)):
                # not a static or classmethod
                decorators.append(decorator)

        if decorators:
            transform = DecoratorTransform(self.context)
            def_node = node.node
            _, reassignments = transform.handle_decorators(
                def_node, decorators, def_node.name)
            reassignments.analyse_declarations(env)
            node = [node, reassignments]

        return node

    def _handle_def(self, decorators, env, node):
        "Handle def or cpdef fused functions"
        # Create PyCFunction nodes for each specialization
        node.stats.insert(0, node.py_func)
        node.py_func = self.visit(node.py_func)
        node.update_fused_defnode_entry(env)
        pycfunc = ExprNodes.PyCFunctionNode.from_defnode(node.py_func, True)
        pycfunc = ExprNodes.ProxyNode(pycfunc.coerce_to_temp(env))
        node.resulting_fused_function = pycfunc
        # Create assignment node for our def function
        node.fused_func_assignment = self._create_assignment(
            node.py_func, ExprNodes.CloneNode(pycfunc), env)

        if decorators:
            node = self._handle_fused_def_decorators(decorators, env, node)

        return node

    def _create_fused_function(self, env, node):
        "Create a fused function for a DefNode with fused arguments"
        from . import FusedNode

        if self.fused_function or self.in_lambda:
            if self.fused_function not in self.fused_error_funcs:
                if self.in_lambda:
                    error(node.pos, "Fused lambdas not allowed")
                else:
                    error(node.pos, "Cannot nest fused functions")

            self.fused_error_funcs.add(self.fused_function)

            node.body = Nodes.PassStatNode(node.pos)
            for arg in node.args:
                if arg.type.is_fused:
                    arg.type = arg.type.get_fused_types()[0]

            return node

        decorators = getattr(node, 'decorators', None)
        node = FusedNode.FusedCFuncDefNode(node, env)
        self.fused_function = node
        self.visitchildren(node)
        self.fused_function = None

        if node.py_func:
            node = self._handle_def(decorators, env, node)

        return node

    def _handle_nogil_cleanup(self, lenv, node):
        "Handle cleanup for 'with gil' blocks in nogil functions."
        if lenv.nogil and lenv.has_with_gil_block:
            # Acquire the GIL for cleanup in 'nogil' functions, by wrapping
            # the entire function body in try/finally.
            # The corresponding release will be taken care of by
            # Nodes.FuncDefNode.generate_function_definitions()
            node.body = Nodes.NogilTryFinallyStatNode(
                node.body.pos,
                body=node.body,
                finally_clause=Nodes.EnsureGILNode(node.body.pos))

    def _handle_fused(self, node):
        if node.is_generator and node.has_fused_arguments:
            node.has_fused_arguments = False
            error(node.pos, "Fused generators not supported")
            node.gbody = Nodes.StatListNode(node.pos,
                                            stats=[],
                                            body=Nodes.PassStatNode(node.pos))

        return node.has_fused_arguments

    def visit_FuncDefNode(self, node):
        """
        Analyse a function and its body, as that hasn't happened yet.  Also
        analyse the directive_locals set by @cython.locals().

        Then, if we are a function with fused arguments, replace the function
        (after it has declared itself in the symbol table!) with a
        FusedCFuncDefNode, and analyse its children (which are in turn
        normal functions). If we're a normal function, just analyse the body
        of the function.
        """
        env = self.current_env()

        self.seen_vars_stack.append(set())
        lenv = node.local_scope
        node.declare_arguments(lenv)

        # @cython.locals(...)
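        # A hedged example of the pure-mode declaration handled here (the
        # function and variable names below are illustrative only):
        #
        #     @cython.locals(x=cython.double, n=cython.int)
        #     def integrate(a, b, n):
        #         ...
        #
        # Each name/type pair from the decorator is declared into the
        # function's local scope, unless it names an already declared
        # argument.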
        for var, type_node in node.directive_locals.items():
            if not lenv.lookup_here(var):   # don't redeclare args
                type = type_node.analyse_as_type(lenv)
                if type:
                    lenv.declare_var(var, type, type_node.pos)
                else:
                    error(type_node.pos, "Not a type")

        if self._handle_fused(node):
            node = self._create_fused_function(env, node)
        else:
            node.body.analyse_declarations(lenv)
            self._handle_nogil_cleanup(lenv, node)
            self._super_visit_FuncDefNode(node)

        self.seen_vars_stack.pop()
        return node

    def visit_DefNode(self, node):
        node = self.visit_FuncDefNode(node)
        env = self.current_env()
        if (not isinstance(node, Nodes.DefNode) or
                node.fused_py_func or node.is_generator_body or
                not node.needs_assignment_synthesis(env)):
            return node
        return [node, self._synthesize_assignment(node, env)]

    def visit_GeneratorBodyDefNode(self, node):
        return self.visit_FuncDefNode(node)

    def _synthesize_assignment(self, node, env):
        # Synthesize assignment node and put it right after defnode
        genv = env
        while genv.is_py_class_scope or genv.is_c_class_scope:
            genv = genv.outer_scope

        if genv.is_closure_scope:
            rhs = node.py_cfunc_node = ExprNodes.InnerFunctionNode(
                node.pos, def_node=node,
                pymethdef_cname=node.entry.pymethdef_cname,
                code_object=ExprNodes.CodeObjectNode(node))
        else:
            binding = self.current_directives.get('binding')
            rhs = ExprNodes.PyCFunctionNode.from_defnode(node, binding)
            node.code_object = rhs.code_object

        if env.is_py_class_scope:
            rhs.binding = True

        node.is_cyfunction = rhs.binding
        return self._create_assignment(node, rhs, env)

    def _create_assignment(self, def_node, rhs, env):
        if def_node.decorators:
            for decorator in def_node.decorators[::-1]:
                rhs = ExprNodes.SimpleCallNode(
                    decorator.pos,
                    function=decorator.decorator,
                    args=[rhs])
            def_node.decorators = None

        assmt = Nodes.SingleAssignmentNode(
            def_node.pos,
            lhs=ExprNodes.NameNode(def_node.pos, name=def_node.name),
            rhs=rhs)
        assmt.analyse_declarations(env)
        return assmt

    def visit_ScopedExprNode(self, node):
        env = self.current_env()
        node.analyse_declarations(env)
        # the node may or may not have a local scope
        if node.has_local_scope:
            self.seen_vars_stack.append(set(self.seen_vars_stack[-1]))
            self.enter_scope(node, node.expr_scope)
            node.analyse_scoped_declarations(node.expr_scope)
            self.visitchildren(node)
            self.exit_scope()
            self.seen_vars_stack.pop()
        else:
            node.analyse_scoped_declarations(env)
            self.visitchildren(node)
        return node

    def visit_TempResultFromStatNode(self, node):
        self.visitchildren(node)
        node.analyse_declarations(self.current_env())
        return node

    def visit_CppClassNode(self, node):
        if node.visibility == 'extern':
            return None
        else:
            return self.visit_ClassDefNode(node)

    def visit_CStructOrUnionDefNode(self, node):
        # Create a wrapper node if needed.
        # We want to use the struct type information (so it can't happen
        # before this phase) but also create new objects to be declared
        # (so it can't happen later).
        # Note that we don't return the original node, as it is
        # never used after this phase.
        if True:  # private (default)
            return None

        self_value = ExprNodes.AttributeNode(
            pos=node.pos,
            obj=ExprNodes.NameNode(pos=node.pos, name=u"self"),
            attribute=EncodedString(u"value"))
        var_entries = node.entry.type.scope.var_entries
        attributes = []
        for entry in var_entries:
            attributes.append(ExprNodes.AttributeNode(pos=entry.pos,
                                                      obj=self_value,
                                                      attribute=entry.name))
        # __init__ assignments
        init_assignments = []
        for entry, attr in zip(var_entries, attributes):
            # TODO: branch on visibility
            init_assignments.append(self.init_assignment.substitute({
                u"VALUE": ExprNodes.NameNode(entry.pos, name=entry.name),
                u"ATTR": attr,
            }, pos=entry.pos))

        # create the class
        str_format = u"%s(%s)" % (node.entry.type.name,
                                  ("%s, " * len(attributes))[:-2])
        wrapper_class = self.struct_or_union_wrapper.substitute({
            u"INIT_ASSIGNMENTS": Nodes.StatListNode(node.pos, stats=init_assignments),
            u"IS_UNION": ExprNodes.BoolNode(node.pos, value=not node.entry.type.is_struct),
            u"MEMBER_TUPLE": ExprNodes.TupleNode(node.pos, args=attributes),
            u"STR_FORMAT": ExprNodes.StringNode(node.pos, value=EncodedString(str_format)),
            u"REPR_FORMAT": ExprNodes.StringNode(node.pos, value=EncodedString(str_format.replace("%s", "%r"))),
        }, pos=node.pos).stats[0]
        wrapper_class.class_name = node.name
        wrapper_class.shadow = True
        class_body = wrapper_class.body.stats

        # fix value type
        assert isinstance(class_body[0].base_type, Nodes.CSimpleBaseTypeNode)
        class_body[0].base_type.name = node.name

        # fix __init__ arguments
        init_method = class_body[1]
        assert isinstance(init_method, Nodes.DefNode) and init_method.name == '__init__'
        arg_template = init_method.args[1]
        if not node.entry.type.is_struct:
            arg_template.kw_only = True
        del init_method.args[1]
        for entry, attr in zip(var_entries, attributes):
            arg = copy.deepcopy(arg_template)
            arg.declarator.name = entry.name
            init_method.args.append(arg)

        # setters/getters
        for entry, attr in zip(var_entries, attributes):
            # TODO: branch on visibility
            if entry.type.is_pyobject:
                template = self.basic_pyobject_property
            else:
                template = self.basic_property
            property = template.substitute({
                u"ATTR": attr,
            }, pos=entry.pos).stats[0]
            property.name = entry.name
            wrapper_class.body.stats.append(property)

        wrapper_class.analyse_declarations(self.current_env())
        return self.visit_CClassDefNode(wrapper_class)

    # Some nodes are no longer needed after declaration
    # analysis and can be dropped. The analysis was performed
    # on these nodes in a separate recursive process from the
    # enclosing function or module, so we can simply drop them.
    def visit_CDeclaratorNode(self, node):
        # necessary to ensure that all CNameDeclaratorNodes are visited.
        self.visitchildren(node)
        return node

    def visit_CTypeDefNode(self, node):
        return node

    def visit_CBaseTypeNode(self, node):
        return None

    def visit_CEnumDefNode(self, node):
        if node.visibility == 'public':
            return node
        else:
            return None

    def visit_CNameDeclaratorNode(self, node):
        if node.name in self.seen_vars_stack[-1]:
            entry = self.current_env().lookup(node.name)
            if (entry is None or entry.visibility != 'extern'
                    and not entry.scope.is_c_class_scope):
                warning(node.pos,
                        "cdef variable '%s' declared after it is used" % node.name, 2)
        self.visitchildren(node)
        return node

    def visit_CVarDefNode(self, node):
        # to ensure all CNameDeclaratorNodes are visited.
        self.visitchildren(node)
        return None

    def visit_CnameDecoratorNode(self, node):
        child_node = self.visit(node.node)
        if not child_node:
            return None
        if type(child_node) is list:  # Assignment synthesized
            node.child_node = child_node[0]
            return [node] + child_node[1:]
        node.node = child_node
        return node

    def create_Property(self, entry):
        if entry.visibility == 'public':
            if entry.type.is_pyobject:
                template = self.basic_pyobject_property
            else:
                template = self.basic_property
        elif entry.visibility == 'readonly':
            template = self.basic_property_ro
        property = template.substitute({
            u"ATTR": ExprNodes.AttributeNode(pos=entry.pos,
                                             obj=ExprNodes.NameNode(pos=entry.pos, name="self"),
                                             attribute=entry.name),
        }, pos=entry.pos).stats[0]
        property.name = entry.name
        property.doc = entry.doc
        return property


class CalculateQualifiedNamesTransform(EnvTransform):
    """
    Calculate and store the '__qualname__' and the global
    module name on some nodes.
    """

    def visit_ModuleNode(self, node):
        self.module_name = self.global_scope().qualified_name
        self.qualified_name = []
        _super = super(CalculateQualifiedNamesTransform, self)
        self._super_visit_FuncDefNode = _super.visit_FuncDefNode
        self._super_visit_ClassDefNode = _super.visit_ClassDefNode
        self.visitchildren(node)
        return node

    def _set_qualname(self, node, name=None):
        if name:
            qualname = self.qualified_name[:]
            qualname.append(name)
        else:
            qualname = self.qualified_name
        node.qualname = EncodedString('.'.join(qualname))
        node.module_name = self.module_name

    def _append_entry(self, entry):
        if entry.is_pyglobal and not entry.is_pyclass_attr:
            self.qualified_name = [entry.name]
        else:
            self.qualified_name.append(entry.name)

    def visit_ClassNode(self, node):
        self._set_qualname(node, node.name)
        self.visitchildren(node)
        return node

    def visit_PyClassNamespaceNode(self, node):
        # class name was already added by parent node
        self._set_qualname(node)
        self.visitchildren(node)
        return node

    def visit_PyCFunctionNode(self, node):
        self._set_qualname(node, node.def_node.name)
        self.visitchildren(node)
        return node

    def visit_DefNode(self, node):
        self._set_qualname(node, node.name)
        return self.visit_FuncDefNode(node)

    def visit_FuncDefNode(self, node):
        orig_qualified_name = self.qualified_name[:]
        if getattr(node, 'name', None) == '<lambda>':
            self.qualified_name.append('<lambda>')
        else:
            self._append_entry(node.entry)
        self.qualified_name.append('<locals>')
        self._super_visit_FuncDefNode(node)
        self.qualified_name = orig_qualified_name
        return node

    def visit_ClassDefNode(self, node):
        orig_qualified_name = self.qualified_name[:]
        entry = (getattr(node, 'entry', None) or             # PyClass
                 self.current_env().lookup_here(node.name))  # CClass
        self._append_entry(entry)
        self._super_visit_ClassDefNode(node)
        self.qualified_name = orig_qualified_name
        return node


class AnalyseExpressionsTransform(CythonTransform):

    def visit_ModuleNode(self, node):
        node.scope.infer_types()
        node.body = node.body.analyse_expressions(node.scope)
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        node.local_scope.infer_types()
        node.body = node.body.analyse_expressions(node.local_scope)
        self.visitchildren(node)
        return node

    def visit_ScopedExprNode(self, node):
        if node.has_local_scope:
            node.expr_scope.infer_types()
            node = node.analyse_scoped_expressions(node.expr_scope)
        self.visitchildren(node)
        return node

    def visit_IndexNode(self, node):
        """
        Replace index nodes used to specialize cdef functions with fused
        argument types with the Attribute- or NameNode referring to the
        function. We then need to copy over the specialization properties
        to the attribute or name node.

        Because the indexing might be a Python indexing operation on a fused
        function, or (usually) a Cython indexing operation, we need to
        re-analyse the types.
        """
        self.visit_Node(node)
        if node.is_fused_index and not node.type.is_error:
            node = node.base
        elif node.memslice_ellipsis_noop:
            # memoryviewslice[...] expression, drop the IndexNode
            node = node.base
        return node


class FindInvalidUseOfFusedTypes(CythonTransform):

    def visit_FuncDefNode(self, node):
        # Errors related to use in functions with fused args will already
        # have been detected
        if not node.has_fused_arguments:
            if not node.is_generator_body and node.return_type.is_fused:
                error(node.pos, "Return type is not specified as argument type")
            else:
                self.visitchildren(node)

        return node

    def visit_ExprNode(self, node):
        if node.type and node.type.is_fused:
            error(node.pos, "Invalid use of fused types, type cannot be specialized")
        else:
            self.visitchildren(node)

        return node


class ExpandInplaceOperators(EnvTransform):

    def visit_InPlaceAssignmentNode(self, node):
        lhs = node.lhs
        rhs = node.rhs
        if lhs.type.is_cpp_class:
            # No getting around this exact operator here.
            return node
        if isinstance(lhs, ExprNodes.IndexNode) and lhs.is_buffer_access:
            # There is code to handle this case.
            return node

        env = self.current_env()

        def side_effect_free_reference(node, setting=False):
            if isinstance(node, ExprNodes.NameNode):
                return node, []
            elif node.type.is_pyobject and not setting:
                node = LetRefNode(node)
                return node, [node]
            elif isinstance(node, ExprNodes.IndexNode):
                if node.is_buffer_access:
                    raise ValueError("Buffer access")
                base, temps = side_effect_free_reference(node.base)
                index = LetRefNode(node.index)
                return ExprNodes.IndexNode(node.pos, base=base, index=index), temps + [index]
            elif isinstance(node, ExprNodes.AttributeNode):
                obj, temps = side_effect_free_reference(node.obj)
                return ExprNodes.AttributeNode(node.pos, obj=obj, attribute=node.attribute), temps
            else:
                node = LetRefNode(node)
                return node, [node]

        try:
            lhs, let_ref_nodes = side_effect_free_reference(lhs, setting=True)
        except ValueError:
            return node
        dup = lhs.__class__(**lhs.__dict__)
        binop = ExprNodes.binop_node(node.pos,
                                     operator=node.operator,
                                     operand1=dup,
                                     operand2=rhs,
                                     inplace=True)
        # Manually analyse types for new node.
        lhs.analyse_target_types(env)
        dup.analyse_types(env)
        binop.analyse_operation(env)
        node = Nodes.SingleAssignmentNode(
            node.pos,
            lhs=lhs,
            rhs=binop.coerce_to(lhs.type, env))
        # Use LetRefNode to avoid side effects.
        let_ref_nodes.reverse()
        for t in let_ref_nodes:
            node = LetNode(t, node)
        return node

    def visit_ExprNode(self, node):
        # In-place assignments can't happen within an expression.
        return node


class AdjustDefByDirectives(CythonTransform, SkipDeclarations):
    """
    Adjust function and class definitions by the decorator directives:

    @cython.cfunc
    @cython.cclass
    @cython.ccall
    @cython.inline
    """

    def visit_ModuleNode(self, node):
        self.directives = node.directives
        self.in_py_class = False
        self.visitchildren(node)
        return node

    def visit_CompilerDirectivesNode(self, node):
        old_directives = self.directives
        self.directives = node.directives
        self.visitchildren(node)
        self.directives = old_directives
        return node

    def visit_DefNode(self, node):
        modifiers = []
        if 'inline' in self.directives:
            modifiers.append('inline')
        if 'ccall' in self.directives:
            node = node.as_cfunction(
                overridable=True, returns=self.directives.get('returns'),
                modifiers=modifiers)
            return self.visit(node)
        if 'cfunc' in self.directives:
            if self.in_py_class:
                error(node.pos, "cfunc directive is not allowed here")
            else:
                node = node.as_cfunction(
                    overridable=False, returns=self.directives.get('returns'),
                    modifiers=modifiers)
                return self.visit(node)
        if 'inline' in modifiers:
            error(node.pos, "Python functions cannot be declared 'inline'")
        self.visitchildren(node)
        return node

    def visit_PyClassDefNode(self, node):
        if 'cclass' in self.directives:
            node = node.as_cclass()
            return self.visit(node)
        else:
            old_in_pyclass = self.in_py_class
            self.in_py_class = True
            self.visitchildren(node)
            self.in_py_class = old_in_pyclass
            return node

    def visit_CClassDefNode(self, node):
        old_in_pyclass = self.in_py_class
        self.in_py_class = False
        self.visitchildren(node)
        self.in_py_class = old_in_pyclass
        return node


class AlignFunctionDefinitions(CythonTransform):
    """
    This class takes the signatures from a .pxd file and applies them to
    the def methods in a .py file.
    """

    def visit_ModuleNode(self, node):
        self.scope = node.scope
        self.directives = node.directives
        self.imported_names = set()  # hack, see visit_FromImportStatNode()
        self.visitchildren(node)
        return node

    def visit_PyClassDefNode(self, node):
        pxd_def = self.scope.lookup(node.name)
        if pxd_def:
            if pxd_def.is_cclass:
                return self.visit_CClassDefNode(node.as_cclass(), pxd_def)
            elif not pxd_def.scope or not pxd_def.scope.is_builtin_scope:
                error(node.pos, "'%s' redeclared" % node.name)
                if pxd_def.pos:
                    error(pxd_def.pos, "previous declaration here")
                return None
        return node

    def visit_CClassDefNode(self, node, pxd_def=None):
        if pxd_def is None:
            pxd_def = self.scope.lookup(node.class_name)
        if pxd_def:
            outer_scope = self.scope
            self.scope = pxd_def.type.scope
        self.visitchildren(node)
        if pxd_def:
            self.scope = outer_scope
        return node

    def visit_DefNode(self, node):
        pxd_def = self.scope.lookup(node.name)
        if pxd_def and (not pxd_def.scope or not pxd_def.scope.is_builtin_scope):
            if not pxd_def.is_cfunction:
                error(node.pos, "'%s' redeclared" % node.name)
                if pxd_def.pos:
                    error(pxd_def.pos, "previous declaration here")
                return None
            node = node.as_cfunction(pxd_def)
        elif (self.scope.is_module_scope and self.directives['auto_cpdef']
              and not node.name in self.imported_names
              and node.is_cdef_func_compatible()):
            # FIXME: cpdef-ing should be done in analyse_declarations()
            node = node.as_cfunction(scope=self.scope)
        # Enable this when nested cdef functions are allowed.
        # self.visitchildren(node)
        return node

    def visit_FromImportStatNode(self, node):
        # hack to prevent conditional import fallback functions from
        # being cpdef-ed (global Python variables currently conflict
        # with imports)
        if self.scope.is_module_scope:
            for name, _ in node.items:
                self.imported_names.add(name)
        return node

    def visit_ExprNode(self, node):
        # ignore lambdas and everything else that appears in expressions
        return node


class RemoveUnreachableCode(CythonTransform):

    def visit_StatListNode(self, node):
        if not self.current_directives['remove_unreachable']:
            return node
        self.visitchildren(node)
        for idx, stat in enumerate(node.stats):
            idx += 1
            if stat.is_terminator:
                if idx < len(node.stats):
                    if self.current_directives['warn.unreachable']:
                        warning(node.stats[idx].pos, "Unreachable code", 2)
                    node.stats = node.stats[:idx]
                node.is_terminator = True
                break
        return node

    def visit_IfClauseNode(self, node):
        self.visitchildren(node)
        if node.body.is_terminator:
            node.is_terminator = True
        return node

    def visit_IfStatNode(self, node):
        self.visitchildren(node)
        if node.else_clause and node.else_clause.is_terminator:
            for clause in node.if_clauses:
                if not clause.is_terminator:
                    break
            else:
                node.is_terminator = True
        return node

    def visit_TryExceptStatNode(self, node):
        self.visitchildren(node)
        if node.body.is_terminator and node.else_clause:
            if self.current_directives['warn.unreachable']:
                warning(node.else_clause.pos, "Unreachable code", 2)
            node.else_clause = None
        return node


class YieldNodeCollector(TreeVisitor):

    def __init__(self):
        super(YieldNodeCollector, self).__init__()
        self.yields = []
        self.returns = []
        self.has_return_value = False

    def visit_Node(self, node):
        self.visitchildren(node)

    def visit_YieldExprNode(self, node):
        self.yields.append(node)
        self.visitchildren(node)

    def visit_ReturnStatNode(self, node):
        self.visitchildren(node)
        if node.value:
            self.has_return_value = True
        self.returns.append(node)

    def visit_ClassDefNode(self, node):
        pass

    def visit_FuncDefNode(self, node):
        pass

    def visit_LambdaNode(self, node):
        pass

    def visit_GeneratorExpressionNode(self, node):
        pass


class MarkClosureVisitor(CythonTransform):

    def visit_ModuleNode(self, node):
        self.needs_closure = False
        self.visitchildren(node)
        return node

    def visit_FuncDefNode(self, node):
        self.needs_closure = False
        self.visitchildren(node)
        node.needs_closure = self.needs_closure
        self.needs_closure = True

        collector = YieldNodeCollector()
        collector.visitchildren(node)

        if collector.yields:
            if isinstance(node, Nodes.CFuncDefNode):
                # Will report error later
                return node

            for i, yield_expr in enumerate(collector.yields, 1):
                yield_expr.label_num = i
            for retnode in collector.returns:
                retnode.in_generator = True

            gbody = Nodes.GeneratorBodyDefNode(
                pos=node.pos, name=node.name, body=node.body)
            generator = Nodes.GeneratorDefNode(
                pos=node.pos, name=node.name, args=node.args,
                star_arg=node.star_arg, starstar_arg=node.starstar_arg,
                doc=node.doc, decorators=node.decorators,
                gbody=gbody, lambda_name=node.lambda_name)
            return generator
        return node

    def visit_CFuncDefNode(self, node):
        self.visit_FuncDefNode(node)
        if node.needs_closure and node.overridable:
            error(node.pos, "closures inside cpdef functions not yet supported")
        return node

    def visit_LambdaNode(self, node):
        self.needs_closure = False
        self.visitchildren(node)
        node.needs_closure = self.needs_closure
        self.needs_closure = True
        return node

    def visit_ClassDefNode(self, node):
        self.visitchildren(node)
        self.needs_closure = True
        return node


class CreateClosureClasses(CythonTransform):
    # Output closure classes in module scope for all functions
    # that really need it.

    def __init__(self, context):
        super(CreateClosureClasses, self).__init__(context)
        self.path = []
        self.in_lambda = False

    def visit_ModuleNode(self, node):
        self.module_scope = node.scope
        self.visitchildren(node)
        return node

    def find_entries_used_in_closures(self, node):
        from_closure = []
        in_closure = []
        for name, entry in node.local_scope.entries.items():
            if entry.from_closure:
                from_closure.append((name, entry))
            elif entry.in_closure:
                in_closure.append((name, entry))
        return from_closure, in_closure

    def create_class_from_scope(self, node, target_module_scope, inner_node=None):
        # move local variables into closure
        if node.is_generator:
            for entry in node.local_scope.entries.values():
                if not entry.from_closure:
                    entry.in_closure = True

        from_closure, in_closure = self.find_entries_used_in_closures(node)
        in_closure.sort()

        # Now from the beginning
        node.needs_closure = False
        node.needs_outer_scope = False

        func_scope = node.local_scope
        cscope = node.entry.scope
        while cscope.is_py_class_scope or cscope.is_c_class_scope:
            cscope = cscope.outer_scope

        if not from_closure and (self.path or inner_node):
            if not inner_node:
                if not node.py_cfunc_node:
                    raise InternalError("DefNode does not have assignment node")
                inner_node = node.py_cfunc_node
            inner_node.needs_self_code = False
            node.needs_outer_scope = False

        if node.is_generator:
            pass
        elif not in_closure and not from_closure:
            return
        elif not in_closure:
            func_scope.is_passthrough = True
            func_scope.scope_class = cscope.scope_class
            node.needs_outer_scope = True
            return

        as_name = '%s_%s' % (
            target_module_scope.next_id(Naming.closure_class_prefix),
            node.entry.cname)

        entry = target_module_scope.declare_c_class(
            name=as_name, pos=node.pos, defining=True,
            implementing=True)
        entry.type.is_final_type = True

        func_scope.scope_class = entry
        class_scope = entry.type.scope
        class_scope.is_internal = True
        if Options.closure_freelist_size:
            class_scope.directives['freelist'] = Options.closure_freelist_size

        if from_closure:
            assert cscope.is_closure_scope
            class_scope.declare_var(pos=node.pos,
                                    name=Naming.outer_scope_cname,
                                    cname=Naming.outer_scope_cname,
                                    type=cscope.scope_class.type,
                                    is_cdef=True)
            node.needs_outer_scope = True
        for name, entry in in_closure:
            closure_entry = class_scope.declare_var(pos=entry.pos,
                                                    name=entry.name,
                                                    cname=entry.cname,
                                                    type=entry.type,
                                                    is_cdef=True)
            if entry.is_declared_generic:
                closure_entry.is_declared_generic = 1
        node.needs_closure = True
        # Do it here because other classes are already checked
        target_module_scope.check_c_class(func_scope.scope_class)

    def visit_LambdaNode(self, node):
        if not isinstance(node.def_node, Nodes.DefNode):
            # fused function, an error has been previously issued
            return node

        was_in_lambda = self.in_lambda
        self.in_lambda = True
        self.create_class_from_scope(node.def_node, self.module_scope, node)
        self.visitchildren(node)
        self.in_lambda = was_in_lambda
        return node

    def visit_FuncDefNode(self, node):
        if self.in_lambda:
            self.visitchildren(node)
            return node
        if node.needs_closure or self.path:
            self.create_class_from_scope(node, self.module_scope)
            self.path.append(node)
            self.visitchildren(node)
            self.path.pop()
        return node

    def visit_GeneratorBodyDefNode(self, node):
        self.visitchildren(node)
        return node

    def visit_CFuncDefNode(self, node):
        if not node.overridable:
            return self.visit_FuncDefNode(node)
        else:
            self.visitchildren(node)
            return node


class GilCheck(VisitorTransform):
    """
    Call `node.gil_check(env)` on each node to make sure we hold the
    GIL when we need it. Raise an error on Python operations inside a
    `nogil` environment.

    Additionally, raise exceptions for closely nested with gil or with nogil
    statements. The latter would abort Python.
    """

    def __call__(self, root):
        self.env_stack = [root.scope]
        self.nogil = False

        # True for 'cdef func() nogil:' functions, as the GIL may be held while
        # calling this function (thus contained 'nogil' blocks may be valid).
        self.nogil_declarator_only = False
        return super(GilCheck, self).__call__(root)

    def visit_FuncDefNode(self, node):
        self.env_stack.append(node.local_scope)
        was_nogil = self.nogil
        self.nogil = node.local_scope.nogil

        if self.nogil:
            self.nogil_declarator_only = True

        if self.nogil and node.nogil_check:
            node.nogil_check(node.local_scope)

        self.visitchildren(node)

        # This cannot be nested, so it doesn't need backup/restore
        self.nogil_declarator_only = False

        self.env_stack.pop()
        self.nogil = was_nogil
        return node

    def visit_GILStatNode(self, node):
        if self.nogil and node.nogil_check:
            node.nogil_check()

        was_nogil = self.nogil
        self.nogil = (node.state == 'nogil')

        if was_nogil == self.nogil and not self.nogil_declarator_only:
            if not was_nogil:
                error(node.pos, "Trying to acquire the GIL while it is "
                                "already held.")
            else:
                error(node.pos, "Trying to release the GIL while it was "
                                "previously released.")

        if isinstance(node.finally_clause, Nodes.StatListNode):
            # The finally clause of the GILStatNode is a GILExitNode,
            # which is wrapped in a StatListNode. Just unpack that.
            node.finally_clause, = node.finally_clause.stats

        self.visitchildren(node)
        self.nogil = was_nogil
        return node

    def visit_ParallelRangeNode(self, node):
        if node.nogil:
            node.nogil = False
            node = Nodes.GILStatNode(node.pos, state='nogil', body=node)
            return self.visit_GILStatNode(node)

        if not self.nogil:
            error(node.pos, "prange() can only be used without the GIL")
            # Forget about any GIL-related errors that may occur in the body
            return None

        node.nogil_check(self.env_stack[-1])
        self.visitchildren(node)
        return node

    def visit_ParallelWithBlockNode(self, node):
        if not self.nogil:
            error(node.pos, "The parallel section may only be used without "
                            "the GIL")
            return None

        if node.nogil_check:
            # It does not currently implement this, but test for it anyway to
            # avoid potential future surprises
            node.nogil_check(self.env_stack[-1])

        self.visitchildren(node)
        return node

    def visit_TryFinallyStatNode(self, node):
        """
        Take care of try/finally statements in nogil code sections.
        """
        if not self.nogil or isinstance(node, Nodes.GILStatNode):
            return self.visit_Node(node)

        node.nogil_check = None
        node.is_try_finally_in_nogil = True
        self.visitchildren(node)
        return node

    def visit_Node(self, node):
        if self.env_stack and self.nogil and node.nogil_check:
            node.nogil_check(self.env_stack[-1])
        self.visitchildren(node)
        node.in_nogil_context = self.nogil
        return node


class TransformBuiltinMethods(EnvTransform):
    """
    Replace Cython's own cython.* builtins by the corresponding tree nodes.
    """

    def visit_SingleAssignmentNode(self, node):
        if node.declaration_only:
            return None
        else:
            self.visitchildren(node)
            return node

    def visit_AttributeNode(self, node):
        self.visitchildren(node)
        return self.visit_cython_attribute(node)

    def visit_NameNode(self, node):
        return self.visit_cython_attribute(node)

    def visit_cython_attribute(self, node):
        attribute = node.as_cython_attribute()
        if attribute:
            if attribute == u'compiled':
                node = ExprNodes.BoolNode(node.pos, value=True)
            elif attribute == u'__version__':
                from .. import __version__ as version
                node = ExprNodes.StringNode(node.pos, value=EncodedString(version))
            elif attribute == u'NULL':
                node = ExprNodes.NullNode(node.pos)
            elif attribute in (u'set', u'frozenset', u'staticmethod'):
                node = ExprNodes.NameNode(node.pos, name=EncodedString(attribute),
                                          entry=self.current_env().builtin_scope().lookup_here(attribute))
            elif PyrexTypes.parse_basic_type(attribute):
                pass
            elif self.context.cython_scope.lookup_qualified_name(attribute):
                pass
            else:
                error(node.pos,
                      u"'%s' not a valid cython attribute or is being used incorrectly" % attribute)
        return node

    def visit_ExecStatNode(self, node):
        lenv = self.current_env()
        self.visitchildren(node)
        if len(node.args) == 1:
            node.args.append(ExprNodes.GlobalsExprNode(node.pos))
            if not lenv.is_module_scope:
                node.args.append(
                    ExprNodes.LocalsExprNode(
                        node.pos, self.current_scope_node(), lenv))
        return node

    def _inject_locals(self, node, func_name):
        # locals()/dir()/vars() builtins
        lenv = self.current_env()
        entry = lenv.lookup_here(func_name)
        if entry:
            # not the builtin
            return node
        pos = node.pos
        if func_name in ('locals', 'vars'):
            if func_name == 'locals' and len(node.args) > 0:
                error(node.pos,
                      "Builtin 'locals()' called with wrong number of args, expected 0, got %d"
                      % len(node.args))
                return node
            elif func_name == 'vars':
                if len(node.args) > 1:
                    error(node.pos,
                          "Builtin 'vars()' called with wrong number of args, expected 0-1, got %d"
                          % len(node.args))
                if len(node.args) > 0:
                    return node  # nothing to do
            return ExprNodes.LocalsExprNode(pos, self.current_scope_node(), lenv)
        else:  # dir()
            if len(node.args) > 1:
                error(node.pos,
                      "Builtin 'dir()' called with wrong number of args, expected 0-1, got %d"
                      % len(node.args))
            if len(node.args) > 0:
                # optimised in Builtin.py
                return node
            if lenv.is_py_class_scope or lenv.is_module_scope:
                if lenv.is_py_class_scope:
                    pyclass = self.current_scope_node()
                    locals_dict = ExprNodes.CloneNode(pyclass.dict)
                else:
                    locals_dict = ExprNodes.GlobalsExprNode(pos)
                return ExprNodes.SortedDictKeysNode(locals_dict)
            local_names = sorted(var.name for var in lenv.entries.values() if var.name)
            items = [ExprNodes.IdentifierStringNode(pos, value=var)
                     for var in local_names]
            return ExprNodes.ListNode(pos, args=items)

    def visit_PrimaryCmpNode(self, node):
        # special case: for in/not-in test, we do not need to sort locals()
        self.visitchildren(node)
        if node.operator in 'not_in':  # in/not_in
            if isinstance(node.operand2, ExprNodes.SortedDictKeysNode):
                arg = node.operand2.arg
                if isinstance(arg, ExprNodes.NoneCheckNode):
                    arg = arg.arg
                node.operand2 = arg
        return node

    def visit_CascadedCmpNode(self, node):
        return self.visit_PrimaryCmpNode(node)

    def _inject_eval(self, node, func_name):
        lenv = self.current_env()
        entry = lenv.lookup_here(func_name)
        if entry or len(node.args) != 1:
            return node
        # Inject globals and locals
        node.args.append(ExprNodes.GlobalsExprNode(node.pos))
        if not lenv.is_module_scope:
            node.args.append(
                ExprNodes.LocalsExprNode(
                    node.pos, self.current_scope_node(), lenv))
        return node

    def _inject_super(self, node, func_name):
        lenv = self.current_env()
        entry = lenv.lookup_here(func_name)
        if entry or node.args:
            return node
        # Inject no-args super
        def_node = self.current_scope_node()
        if (not isinstance(def_node, Nodes.DefNode) or not def_node.args or
                len(self.env_stack) < 2):
            return node
        class_node, class_scope = self.env_stack[-2]
        if class_scope.is_py_class_scope:
            def_node.requires_classobj = True
            class_node.class_cell.is_active = True
            node.args = [
                ExprNodes.ClassCellNode(
                    node.pos, is_generator=def_node.is_generator),
                ExprNodes.NameNode(node.pos, name=def_node.args[0].name)
            ]
        elif class_scope.is_c_class_scope:
            node.args = [
                ExprNodes.NameNode(
                    node.pos, name=class_node.scope.name,
                    entry=class_node.entry),
                ExprNodes.NameNode(node.pos, name=def_node.args[0].name)
            ]
        return node

    def visit_SimpleCallNode(self, node):
        # cython.foo
        function = node.function.as_cython_attribute()
        if function:
            if function in InterpretCompilerDirectives.unop_method_nodes:
                if len(node.args) != 1:
                    error(node.function.pos, u"%s() takes exactly one argument" % function)
                else:
                    node = InterpretCompilerDirectives.unop_method_nodes[function](
                        node.function.pos, operand=node.args[0])
            elif function in InterpretCompilerDirectives.binop_method_nodes:
                if len(node.args) != 2:
                    error(node.function.pos, u"%s() takes exactly two arguments" % function)
                else:
                    node = InterpretCompilerDirectives.binop_method_nodes[function](
                        node.function.pos, operand1=node.args[0], operand2=node.args[1])
            elif function == u'cast':
                if len(node.args) != 2:
                    error(node.function.pos, u"cast() takes exactly two arguments")
                else:
                    type = node.args[0].analyse_as_type(self.current_env())
                    if type:
                        node = ExprNodes.TypecastNode(node.function.pos,
                                                      type=type, operand=node.args[1])
                    else:
                        error(node.args[0].pos, "Not a type")
            elif function == u'sizeof':
                if len(node.args) != 1:
                    error(node.function.pos, u"sizeof() takes exactly one argument")
                else:
                    type = node.args[0].analyse_as_type(self.current_env())
                    if type:
                        node = ExprNodes.SizeofTypeNode(node.function.pos, arg_type=type)
                    else:
                        node = ExprNodes.SizeofVarNode(node.function.pos, operand=node.args[0])
            elif function == 'cmod':
                if len(node.args) != 2:
                    error(node.function.pos, u"cmod() takes exactly two arguments")
                else:
                    node = ExprNodes.binop_node(node.function.pos, '%',
                                                node.args[0], node.args[1])
                    node.cdivision = True
            elif function == 'cdiv':
                if len(node.args) != 2:
                    error(node.function.pos, u"cdiv() takes exactly two arguments")
                else:
                    node = ExprNodes.binop_node(node.function.pos, '/',
                                                node.args[0], node.args[1])
                    node.cdivision = True
            elif function == u'set':
                node.function = ExprNodes.NameNode(node.pos, name=EncodedString('set'))
            elif function == u'staticmethod':
                node.function = ExprNodes.NameNode(node.pos, name=EncodedString('staticmethod'))
            elif self.context.cython_scope.lookup_qualified_name(function):
                pass
            else:
                error(node.function.pos,
                      u"'%s' not a valid cython language construct" % function)

        self.visitchildren(node)

        if isinstance(node, ExprNodes.SimpleCallNode) and node.function.is_name:
            func_name = node.function.name
            if func_name in ('dir', 'locals', 'vars'):
                return self._inject_locals(node, func_name)
            if func_name == 'eval':
                return self._inject_eval(node, func_name)
            if func_name == 'super':
                return self._inject_super(node, func_name)
        return node


class ReplaceFusedTypeChecks(VisitorTransform):
    """
    This is not a transform in the pipeline. It is invoked on the specific
    versions of a cdef function with fused argument types. It filters out
    any type branches that don't match. e.g.

        if fused_t is mytype:
            ...
        elif fused_t in other_fused_type:
            ...
    """

    def __init__(self, local_scope):
        super(ReplaceFusedTypeChecks, self).__init__()
        self.local_scope = local_scope
        # defer the import until now to avoid circular import time dependencies
        from .Optimize import ConstantFolding
        self.transform = ConstantFolding(reevaluate=True)

    def visit_IfStatNode(self, node):
        """
        Filters out any if clauses with a false compile-time type check
        expression.
        """
        self.visitchildren(node)
        return self.transform(node)

    def visit_PrimaryCmpNode(self, node):
        type1 = node.operand1.analyse_as_type(self.local_scope)
        type2 = node.operand2.analyse_as_type(self.local_scope)

        if type1 and type2:
            false_node = ExprNodes.BoolNode(node.pos, value=False)
            true_node = ExprNodes.BoolNode(node.pos, value=True)

            type1 = self.specialize_type(type1, node.operand1.pos)
            op = node.operator

            if op in ('is', 'is_not', '==', '!='):
                type2 = self.specialize_type(type2, node.operand2.pos)

                is_same = type1.same_as(type2)
                eq = op in ('is', '==')

                if (is_same and eq) or (not is_same and not eq):
                    return true_node

            elif op in ('in', 'not_in'):
                # We have to do an instance check directly, as operand2
                # needs to be a fused type and not a type with a subtype
                # that is fused. First unpack the typedef
                if isinstance(type2, PyrexTypes.CTypedefType):
                    type2 = type2.typedef_base_type

                if type1.is_fused:
                    error(node.operand1.pos, "Type is fused")
                elif not type2.is_fused:
                    error(node.operand2.pos,
                          "Can only use 'in' or 'not in' on a fused type")
                else:
                    types = PyrexTypes.get_specialized_types(type2)

                    for specialized_type in types:
                        if type1.same_as(specialized_type):
                            if op == 'in':
                                return true_node
                            else:
                                return false_node

                    if op == 'not_in':
                        return true_node

            return false_node

        return node

    def specialize_type(self, type, pos):
        try:
            return type.specialize(self.local_scope.fused_to_specific)
        except KeyError:
            error(pos, "Type is not specific")
            return type

    def visit_Node(self, node):
        self.visitchildren(node)
        return node


class DebugTransform(CythonTransform):
    """
    Write debug information for this Cython module.
    """

    def __init__(self, context, options, result):
        super(DebugTransform, self).__init__(context)
        self.visited = set()
        # our treebuilder and debug output writer
        # (see Cython.Debugger.debug_output.CythonDebugWriter)
        self.tb = self.context.gdb_debug_outputwriter
        #self.c_output_file = options.output_file
        self.c_output_file = result.c_file

        # Closure support, basically treat nested functions as if the AST were
        # never nested
        self.nested_funcdefs = []

        # tells visit_NameNode whether it should register step-into functions
        self.register_stepinto = False

    def visit_ModuleNode(self, node):
        self.tb.module_name = node.full_module_name
        attrs = dict(
            module_name=node.full_module_name,
            filename=node.pos[0].filename,
            c_filename=self.c_output_file)

        self.tb.start('Module', attrs)

        # serialize functions
        self.tb.start('Functions')
        # First, serialize functions normally...
        self.visitchildren(node)

        # ... then, serialize nested functions
        for nested_funcdef in self.nested_funcdefs:
            self.visit_FuncDefNode(nested_funcdef)

        self.register_stepinto = True
        self.serialize_modulenode_as_function(node)
        self.register_stepinto = False
        self.tb.end('Functions')

        # 2.3 compatibility. Serialize global variables
        self.tb.start('Globals')
        entries = {}

        for k, v in node.scope.entries.iteritems():
            if (v.qualified_name not in self.visited and not
                    v.name.startswith('__pyx_') and not
                    v.type.is_cfunction and not
                    v.type.is_extension_type):
                entries[k] = v

        self.serialize_local_variables(entries)
        self.tb.end('Globals')
        # self.tb.end('Module') # end Module after the line number mapping in
        # Cython.Compiler.ModuleNode.ModuleNode._serialize_lineno_map
        return node

    def visit_FuncDefNode(self, node):
        self.visited.add(node.local_scope.qualified_name)

        if getattr(node, 'is_wrapper', False):
            return node

        if self.register_stepinto:
            self.nested_funcdefs.append(node)
            return node

        # node.entry.visibility = 'extern'
        if node.py_func is None:
            pf_cname = ''
        else:
            pf_cname = node.py_func.entry.func_cname

        attrs = dict(
            name=node.entry.name or getattr(node, 'name', '<unknown>'),
            cname=node.entry.func_cname,
            pf_cname=pf_cname,
            qualified_name=node.local_scope.qualified_name,
            lineno=str(node.pos[1]))

        self.tb.start('Function', attrs=attrs)

        self.tb.start('Locals')
        self.serialize_local_variables(node.local_scope.entries)
        self.tb.end('Locals')

        self.tb.start('Arguments')
        for arg in node.local_scope.arg_entries:
            self.tb.start(arg.name)
            self.tb.end(arg.name)
        self.tb.end('Arguments')

        self.tb.start('StepIntoFunctions')
        self.register_stepinto = True
        self.visitchildren(node)
        self.register_stepinto = False
        self.tb.end('StepIntoFunctions')

        self.tb.end('Function')
        return node

    def visit_NameNode(self, node):
        if (self.register_stepinto and
                node.type is not None and
                node.type.is_cfunction and
                getattr(node, 'is_called', False) and
                node.entry.func_cname is not None):
            # don't check node.entry.in_cinclude, as 'cdef extern: ...'
            # declared functions are not 'in_cinclude'.
            # This means we will list called 'cdef' functions as
            # "step into functions", but this is not an issue as they will be
            # recognized as Cython functions anyway.
            attrs = dict(name=node.entry.func_cname)
            self.tb.start('StepIntoFunction', attrs=attrs)
            self.tb.end('StepIntoFunction')

        self.visitchildren(node)
        return node

    def serialize_modulenode_as_function(self, node):
        """
        Serialize the module-level code as a function so the debugger will
        know it's a "relevant frame" and it will know where to set the
        breakpoint for 'break modulename'.
        """
        name = node.full_module_name.rpartition('.')[-1]

        cname_py2 = 'init' + name
        cname_py3 = 'PyInit_' + name

        py2_attrs = dict(
            name=name,
            cname=cname_py2,
            pf_cname='',
            # Ignore the qualified_name, breakpoints should be set using
            # `cy break modulename:lineno` for module-level breakpoints.
            qualified_name='',
            lineno='1',
            is_initmodule_function="True",
        )

        py3_attrs = dict(py2_attrs, cname=cname_py3)

        self._serialize_modulenode_as_function(node, py2_attrs)
        self._serialize_modulenode_as_function(node, py3_attrs)

    def _serialize_modulenode_as_function(self, node, attrs):
        self.tb.start('Function', attrs=attrs)

        self.tb.start('Locals')
        self.serialize_local_variables(node.scope.entries)
        self.tb.end('Locals')

        self.tb.start('Arguments')
        self.tb.end('Arguments')

        self.tb.start('StepIntoFunctions')
        self.register_stepinto = True
        self.visitchildren(node)
        self.register_stepinto = False
        self.tb.end('StepIntoFunctions')

        self.tb.end('Function')

    def serialize_local_variables(self, entries):
        for entry in entries.values():
            if not entry.cname:
                # not a local variable
                continue
            if entry.type.is_pyobject:
                vartype = 'PythonObject'
            else:
                vartype = 'CObject'

            if entry.from_closure:
                # We're dealing with a closure where a variable from an outer
                # scope is accessed, get it from the scope object.
                cname = '%s->%s' % (Naming.cur_scope_cname,
                                    entry.outer_entry.cname)

                qname = '%s.%s.%s' % (entry.scope.outer_scope.qualified_name,
                                      entry.scope.name,
                                      entry.name)
            elif entry.in_closure:
                cname = '%s->%s' % (Naming.cur_scope_cname,
                                    entry.cname)
                qname = entry.qualified_name
            else:
                cname = entry.cname
                qname = entry.qualified_name

            if not entry.pos:
                # this happens for variables that are not in the user's code,
                # e.g. for the global __builtins__, __doc__, etc. We can just
                # set the lineno to 0 for those.
                lineno = '0'
            else:
                lineno = str(entry.pos[1])

            attrs = dict(
                name=entry.name,
                cname=cname,
                qualified_name=qname,
                type=vartype,
                lineno=lineno)

            self.tb.start('LocalVar', attrs)
            self.tb.end('LocalVar')
gpl-2.0
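The ReplaceFusedTypeChecks and DebugTransform classes above both follow the visitor-transform pattern: each visit_XxxNode method receives a node, may rewrite or replace it, and returns whatever should take its place in the tree. The sketch below illustrates the same idea with the standard library's ast.NodeTransformer; it is an analogy only (Cython's VisitorTransform has its own API), and the cdiv helper name merely mirrors the cython.cdiv rewrite in visit_SimpleCallNode.

# A minimal sketch of the visitor-transform idea using the stdlib ast module.
# This is an analogy, not Cython's API: it rewrites calls to a hypothetical
# cdiv(a, b) helper into a plain division BinOp, much like
# visit_SimpleCallNode rewrites cython.cdiv() above.
import ast


class CdivRewriter(ast.NodeTransformer):
    def visit_Call(self, node):
        self.generic_visit(node)  # transform children first
        if isinstance(node.func, ast.Name) and node.func.id == "cdiv":
            if len(node.args) != 2:
                raise SyntaxError("cdiv() takes exactly two arguments")
            return ast.copy_location(
                ast.BinOp(left=node.args[0], op=ast.Div(), right=node.args[1]),
                node)
        return node


tree = ast.parse("x = cdiv(10, 3)")
tree = ast.fix_missing_locations(CdivRewriter().visit(tree))
exec(compile(tree, "<demo>", "exec"))  # binds x to 10 / 3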
daizhengy/RDS
trove/guestagent/strategies/backup/experimental/couchbase_impl.py
4
4107
# Copyright (c) 2014 eBay Software Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # import json from trove.common import exception from trove.common import utils from trove.guestagent.datastore.experimental.couchbase import service from trove.guestagent.datastore.experimental.couchbase import system from trove.guestagent.strategies.backup import base from trove.common.i18n import _ from trove.openstack.common import log as logging LOG = logging.getLogger(__name__) OUTFILE = '/tmp' + system.BUCKETS_JSON class CbBackup(base.BackupRunner): """ Implementation of Backup Strategy for Couchbase. """ __strategy_name__ = 'cbbackup' pre_backup_commands = [ ['rm', '-rf', system.COUCHBASE_DUMP_DIR], ['mkdir', '-p', system.COUCHBASE_DUMP_DIR], ] post_backup_commands = [ ['rm', '-rf', system.COUCHBASE_DUMP_DIR], ] @property def cmd(self): """ Creates backup dump dir, tars it up, and encrypts it. """ cmd = 'tar cpPf - ' + system.COUCHBASE_DUMP_DIR return cmd + self.zip_cmd + self.encrypt_cmd def _save_buckets_config(self, password): url = system.COUCHBASE_REST_API + '/pools/default/buckets' utils.execute_with_timeout('curl -u root:' + password + ' ' + url + ' > ' + OUTFILE, shell=True, timeout=300) def _backup(self, password): utils.execute_with_timeout('/opt/couchbase/bin/cbbackup', system.COUCHBASE_REST_API, system.COUCHBASE_DUMP_DIR, '-u', 'root', '-p', password, timeout=600) def _run_pre_backup(self): try: for cmd in self.pre_backup_commands: utils.execute_with_timeout(*cmd) root = service.CouchbaseRootAccess() pw = root.get_password() self._save_buckets_config(pw) with open(OUTFILE, "r") as f: out = f.read() if out != "[]": d = json.loads(out) all_memcached = True for i in range(len(d)): bucket_type = d[i]["bucketType"] if bucket_type != "memcached": all_memcached = False break if not all_memcached: self._backup(pw) else: LOG.info(_("All buckets are memcached. " "Skipping backup.")) utils.execute_with_timeout('mv', OUTFILE, system.COUCHBASE_DUMP_DIR) if pw != "password": # Not default password, backup generated root password utils.execute_with_timeout('cp', '-p', system.pwd_file, system.COUCHBASE_DUMP_DIR, run_as_root=True, root_helper='sudo') except exception.ProcessExecutionError as p: LOG.error(p) raise p def _run_post_backup(self): try: for cmd in self.post_backup_commands: utils.execute_with_timeout(*cmd) except exception.ProcessExecutionError as p: LOG.error(p) raise p
apache-2.0
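_run_pre_backup above decides whether to invoke cbbackup by scanning the saved bucket configuration and checking whether every bucket is of type memcached. A minimal standalone sketch of that check, with illustrative sample data and a hypothetical helper name:

# The "all buckets are memcached" decision from _run_pre_backup, rewritten
# with json + any() instead of an index loop. Sample data is illustrative.
import json

buckets_json = ('[{"name": "b1", "bucketType": "memcached"},'
                ' {"name": "b2", "bucketType": "membase"}]')


def needs_cbbackup(raw):
    buckets = json.loads(raw) if raw != "[]" else []
    return any(b["bucketType"] != "memcached" for b in buckets)


print(needs_cbbackup(buckets_json))  # True: at least one non-memcached bucket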
damiendallimore/SplunkModularInputsPythonFramework
implementations/twilio_alert/bin/twilio/rest/resources/task_router/workspaces.py
38
3347
from .. import NextGenInstanceResource, NextGenListResource from .statistics import Statistics class Workspace(NextGenInstanceResource): """ A Workspace resource. See the `TaskRouter API reference <https://www.twilio.com/docs/taskrouter/workspaces>_` for more information. .. attribute:: sid The unique ID of the Workspace .. attribute:: account_sid The ID of the account that owns this Workspace .. attribute:: friendly_name Human readable description of this workspace (for example "Sales Call Center" or "Customer Support Team") .. attribute:: default_activity_sid The ID of the default :class:`Activity` that will be used when new Workers are created in this Workspace. .. attribute:: default_activity_name The human readable name of the default activity. Read only. .. attribute:: timeout_activity_sid The ID of the Activity that will be assigned to a Worker when a :class:`Task` reservation times out without a response. .. attribute:: timeout_activity_name The human readable name of the timeout activity. Read only. .. attribute:: event_callback_url An optional URL where the Workspace will publish events. You can use this to gather data for reporting. .. attribute:: date_created The time the Workspace was created, given as UTC in ISO 8601 format. .. attribute:: date_updated The time the Workspace was last updated, given as UTC in ISO 8601 format. """ subresources = [ Statistics, ] def delete(self): """ Delete a workspace. """ return self.parent.delete_instance(self.name) def update(self, **kwargs): """ Update a workspace. """ return self.parent.update_instance(self.name, kwargs) class Workspaces(NextGenListResource): """ A list of Workspace resources """ name = "Workspaces" instance = Workspace def create(self, friendly_name, **kwargs): """ Create a Workspace. :param friendly_name: Human readable description of this workspace (for example "Customer Support" or "2014 Election Campaign"). :param event_callback_url: If provided, the Workspace will publish events to this URL. You can use this to gather data for reporting. See Workspace Events for more information. :param template: One of the available template names. Will pre-configure this Workspace with the Workflow and Activities specified in the template. Currently "FIFO" is the only available template, which will configure Work Distribution Service with a set of default activities and a single queue for first-in, first-out distribution. """ kwargs['friendly_name'] = friendly_name return self.create_instance(kwargs) def delete(self, sid): """ Delete the given workspace """ return self.delete_instance(sid) def update(self, sid, **kwargs): """ Update a :class:`Workspace` with the given parameters. All the parameters are describe above in :meth:`create` """ return self.update_instance(sid, kwargs)
apache-2.0
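Workspace.delete and Workspace.update above do no HTTP work themselves; they forward to the parent list resource, which owns the endpoint. The self-contained sketch below shows that delegation pattern with in-memory stand-ins (the Fake* names are illustrative, not part of the twilio library):

# Instance resources forward delete/update to their parent list resource,
# exactly as Workspace delegates to Workspaces above. In-memory stand-ins
# replace the real HTTP transport for the demonstration.
class FakeListResource(object):
    def __init__(self):
        self.store = {}

    def update_instance(self, sid, body):
        self.store.setdefault(sid, {}).update(body)
        return self.store[sid]

    def delete_instance(self, sid):
        return self.store.pop(sid, None) is not None


class FakeInstanceResource(object):
    def __init__(self, parent, sid):
        self.parent, self.name = parent, sid  # 'name' holds the sid, as above

    def update(self, **kwargs):
        return self.parent.update_instance(self.name, kwargs)

    def delete(self):
        return self.parent.delete_instance(self.name)


ws_list = FakeListResource()
ws = FakeInstanceResource(ws_list, "WS123")
print(ws.update(friendly_name="Sales Call Center"))  # updated via the parent
print(ws.delete())                                   # True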
ScaDS/ORC-Schlange
ORCSchlange/command/db.py
1
3559
"""The db commands.""" import sys from ORCSchlange.command import BaseCommand, really def check_date(d): """Check if a string is a valide date of the form "YYYY-MM-DD". :param d: The date string that is checked. :return: True if it is a not valid date string. """ if len(d) != 10: return True split = d.split("-") return len(split) != 3 or not split[0].isdecimal() or not split[1].isdecimal() or not split[2].isdecimal() class DbCommand(BaseCommand): """The class that contains all db commands.""" def add(self): """Add an new orcid to the db.""" self.open() self.debug("Validate orchid") self.args.orchid = self.args.orchid.replace("-", "") if len(self.args.orchid) != 16: self.error("Invalide orchid") self.close() sys.exit(1) self.debug("Validate start") if check_date(self.args.start): self.error("Invalide start") self.close() sys.exit(1) if self.args.stop: self.debug("Stop found") self.debug("Validate stop") if check_date(self.args.stop): self.error("Invalide stop") sys.exit(1) self.debug("Add orcid") add_code = self.db.add_user(self.args.orchid, self.args.start, self.args.stop) if add_code == 1: self.error("Doubled orchid entry. Nothing have been added.") elif add_code == 2: self.error("No db found. Try to use \"db create\" command first.") else: self.debug("Add orcid") add_code = self.db.add_user(self.args.orchid, self.args.start, None) if add_code == 1: self.error("Doubled orchid entry. Nothing have been added.") elif add_code == 2: self.error("No db found. Try to use \"db create\" command first.") self.close() def prints(self): """Prints all orcids that are in the db.""" self.open() for orc in self.db.get_orcids(): print(orc) self.close() def clean(self): """Clean the db i.e. delet all orcids in the db. It is ask if the db realy should be dropped. If the answer is yes al entries are deleted. """ question = "Do you really want to delete the complete db? (Y/N)\n" if really(question): self.open() self.debug("Drop old DB") self.db.drop_db() self.debug("Create new DB") self.db.create_db() self.close() def create(self): """Create an empty db. It is necessary before any add function.""" self.open() self.debug("Create new DB") if not self.db.create_db(): self.error("DB already exists") self.close() def create_test(self): """Drop old db and create the test DB with three entries.""" question = "Do you really want to delete the complete db and create a db with test entries? (Y/N)\n" if really(question): self.open() self.debug("Create test DB") self.db.create_test_db() self.close() def add_conf(self): """Insert an config information and overwrite old entry.""" self.open() self.debug("Insert config in DB") self.db.add_config(self.args.cliend_id, self.args.clien_secret, self.args.auth, self.args.api) self.close()
apache-2.0
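check_date above only verifies the digits-dash-digits-dash-digits shape, so impossible strings such as "2018-13-40" slip through. A stricter sketch with the same convention (True means the string is NOT a valid date), using only the standard library:

# A stricter alternative to check_date(), validating the calendar date
# itself rather than just the string shape. Same return convention as above.
from datetime import datetime


def check_date_strict(d):
    try:
        datetime.strptime(d, "%Y-%m-%d")
        return False
    except ValueError:
        return True


print(check_date_strict("2018-05-17"))  # False: a real date
print(check_date_strict("2018-13-40"))  # True: rejected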
kg-bot/SupyBot
plugins/Trivia/test.py
1
1915
### # Copyright (c) 2011, Valentin Lorentz # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### from supybot.test import * class TriviaTestCase(ChannelPluginTestCase): plugins = ('Trivia',) def testStartStop(self): self.assertRegexp('start', '...#1 of 10:.*') self.assertResponse('stop', 'Trivia stopping.') self.assertError('stop') # vim:set shiftwidth=4 tabstop=4 expandtab textwidth=79:
gpl-3.0
egabancho/invenio
invenio/legacy/refextract/linker.py
1
3721
# -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2014 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. from invenio.legacy.bibrank.citation_indexer import \ get_recids_matching_query as bibrank_search, \ standardize_report_number from invenio.modules.indexer.tokenizers.BibIndexJournalTokenizer import \ CFG_JOURNAL_PUBINFO_STANDARD_FORM from invenio.legacy.bibrank.tag_based_indexer import load_config from invenio.legacy.search_engine import get_collection_reclist, get_fieldvalues from intbitset import intbitset def config_cache(cache={}): if 'config' not in cache: cache['config'] = load_config('citation') return cache['config'] def get_recids_matching_query(p, f, m='e'): """Return list of recIDs matching query for pattern and field.""" config = config_cache() recids = bibrank_search(p=p, f=f, config=config, m=m) return list(recids) def format_journal(format_string, mappings): """format the publ infostring according to the format""" def replace(char, data): return data.get(char, char) for c in mappings.keys(): format_string = format_string.replace(c, replace(c, mappings)) return format_string def find_journal(citation_element): tags_values = { '773__p': citation_element['title'], '773__v': citation_element['volume'], '773__c': citation_element['page'], '773__y': citation_element['year'], } journal_string = format_journal( CFG_JOURNAL_PUBINFO_STANDARD_FORM, tags_values) return get_recids_matching_query(journal_string, 'journal') def find_reportnumber(citation_element): reportnumber = standardize_report_number(citation_element['report_num']) return get_recids_matching_query(reportnumber, 'reportnumber') def find_doi(citation_element): doi_string = citation_element['doi_string'] return get_recids_matching_query(doi_string, 'doi') def find_referenced_recid(citation_element): el_type = citation_element['type'] if el_type in FINDERS: return FINDERS[el_type](citation_element) return [] def find_book(citation_element): books_recids = get_collection_reclist('Books') search_string = citation_element['title'] recids = intbitset(get_recids_matching_query(search_string, 'title')) recids &= books_recids if len(recids) == 1: return recids if 'year' in citation_element: for recid in recids: year_tags = get_fieldvalues(recid, '269__c') for tag in year_tags: if tag == citation_element['year']: return [recid] return [] def find_isbn(citation_element): books_recids = get_collection_reclist('Books') recids = intbitset(get_recids_matching_query(citation_element['ISBN'], 'isbn')) return list(recids & books_recids) FINDERS = { 'JOURNAL': find_journal, 'REPORTNUMBER': find_reportnumber, 'DOI': find_doi, 'BOOK': find_book, 'ISBN': find_isbn, }
gpl-2.0
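format_journal above substitutes each tag key that appears in the configured publication-info format string. The exact value of CFG_JOURNAL_PUBINFO_STANDARD_FORM comes from the Invenio configuration; the "773__p,773__v,773__y,773__c" form below is an assumption made for illustration only:

# A standalone demonstration of the format_journal() substitution, with the
# standard form assumed (the real value is configuration-dependent).
def format_journal(format_string, mappings):
    for c in mappings.keys():
        format_string = format_string.replace(c, mappings.get(c, c))
    return format_string


tags_values = {
    '773__p': 'Phys.Lett.B',   # journal title
    '773__v': '716',           # volume
    '773__c': '1',             # page
    '773__y': '2012',          # year
}
print(format_journal('773__p,773__v,773__y,773__c', tags_values))
# -> Phys.Lett.B,716,2012,1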
DhiaEddineSaidi/python-social-auth
social/backends/clef.py
50
1768
""" Clef OAuth support. This contribution adds support for Clef OAuth service. The settings SOCIAL_AUTH_CLEF_KEY and SOCIAL_AUTH_CLEF_SECRET must be defined with the values given by Clef application registration process. """ from social.backends.oauth import BaseOAuth2 class ClefOAuth2(BaseOAuth2): """Clef OAuth authentication backend""" name = 'clef' AUTHORIZATION_URL = 'https://clef.io/iframes/qr' ACCESS_TOKEN_URL = 'https://clef.io/api/v1/authorize' ACCESS_TOKEN_METHOD = 'POST' SCOPE_SEPARATOR = ',' def auth_params(self, *args, **kwargs): params = super(ClefOAuth2, self).auth_params(*args, **kwargs) params['app_id'] = params.pop('client_id') params['redirect_url'] = params.pop('redirect_uri') return params def get_user_id(self, response, details): return details.get('info').get('id') def get_user_details(self, response): """Return user details from Github account""" info = response.get('info') fullname, first_name, last_name = self.get_user_names( first_name=info.get('first_name'), last_name=info.get('last_name') ) email = info.get('email', '') if email: username = email.split('@', 1)[0] else: username = info.get('id') return { 'username': username, 'email': email, 'fullname': fullname, 'first_name': first_name, 'last_name': last_name, 'phone_number': info.get('phone_number', '') } def user_data(self, access_token, *args, **kwargs): return self.get_json('https://clef.io/api/v1/info', params={'access_token': access_token})
bsd-3-clause
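Per the module docstring above, the backend is wired up through project settings. A minimal Django-style settings sketch; the key and secret values are placeholders to be replaced with those from the Clef application registration process:

# settings.py sketch for enabling the Clef backend defined above.
SOCIAL_AUTH_CLEF_KEY = 'your-clef-app-id'          # placeholder
SOCIAL_AUTH_CLEF_SECRET = 'your-clef-app-secret'   # placeholder

AUTHENTICATION_BACKENDS = (
    'social.backends.clef.ClefOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)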
aduggan/rpi-linux
tools/perf/scripts/python/sctop.py
1996
2102
# system call top # (c) 2010, Tom Zanussi <[email protected]> # Licensed under the terms of the GNU GPL License version 2 # # Periodically displays system-wide system call totals, broken down by # syscall. If a [comm] arg is specified, only syscalls called by # [comm] are displayed. If an [interval] arg is specified, the display # will be refreshed every [interval] seconds. The default interval is # 3 seconds. import os, sys, thread, time sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * usage = "perf script -s sctop.py [comm] [interval]\n"; for_comm = None default_interval = 3 interval = default_interval if len(sys.argv) > 3: sys.exit(usage) if len(sys.argv) > 2: for_comm = sys.argv[1] interval = int(sys.argv[2]) elif len(sys.argv) > 1: try: interval = int(sys.argv[1]) except ValueError: for_comm = sys.argv[1] interval = default_interval syscalls = autodict() def trace_begin(): thread.start_new_thread(print_syscall_totals, (interval,)) pass def raw_syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, common_callchain, id, args): if for_comm is not None: if common_comm != for_comm: return try: syscalls[id] += 1 except TypeError: syscalls[id] = 1 def syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, id, args): raw_syscalls__sys_enter(**locals()) def print_syscall_totals(interval): while 1: clear_term() if for_comm is not None: print "\nsyscall events for %s:\n\n" % (for_comm), else: print "\nsyscall events:\n\n", print "%-40s %10s\n" % ("event", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "----------"), for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \ reverse = True): try: print "%-40s %10d\n" % (syscall_name(id), val), except TypeError: pass syscalls.clear() time.sleep(interval)
gpl-2.0
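The script above tallies syscalls in an autodict with a try/except fallback. A modern equivalent of the per-syscall tally using collections.Counter (the sample ids are illustrative):

# The counting pattern from raw_syscalls__sys_enter / print_syscall_totals,
# expressed with collections.Counter instead of autodict + TypeError.
from collections import Counter

syscalls = Counter()
for syscall_id in [0, 1, 1, 3, 1]:      # e.g. read=0, write=1, close=3
    syscalls[syscall_id] += 1

for sid, count in syscalls.most_common():
    print("%-10s %10d" % (sid, count))  # same column layout as the script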
skg-net/ansible
test/units/module_utils/network/ftd/test_configuration.py
9
5568
# Copyright (c) 2018 Cisco and/or its affiliates. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from ansible.compat.tests import mock from ansible.compat.tests.mock import call, patch from ansible.module_utils.network.ftd.configuration import iterate_over_pageable_resource, BaseConfigurationResource class TestBaseConfigurationResource(object): @patch.object(BaseConfigurationResource, 'send_request') def test_get_objects_by_filter_with_multiple_filters(self, send_request_mock): objects = [ {'name': 'obj1', 'type': 1, 'foo': {'bar': 'buzz'}}, {'name': 'obj2', 'type': 1, 'foo': {'bar': 'buz'}}, {'name': 'obj3', 'type': 2, 'foo': {'bar': 'buzz'}} ] resource = BaseConfigurationResource(None) send_request_mock.side_effect = [{'items': objects}, {'items': []}] assert objects == resource.get_objects_by_filter('/objects', {}) send_request_mock.side_effect = [{'items': objects}, {'items': []}] assert [objects[0]] == resource.get_objects_by_filter('/objects', {'name': 'obj1'}) send_request_mock.side_effect = [{'items': objects}, {'items': []}] assert [objects[1]] == resource.get_objects_by_filter('/objects', {'type': 1, 'foo': {'bar': 'buz'}}) @patch.object(BaseConfigurationResource, 'send_request') def test_get_objects_by_filter_with_multiple_responses(self, send_request_mock): send_request_mock.side_effect = [ {'items': [ {'name': 'obj1', 'type': 'foo'}, {'name': 'obj2', 'type': 'bar'} ]}, {'items': [ {'name': 'obj3', 'type': 'foo'} ]}, {'items': []} ] resource = BaseConfigurationResource(None) assert [{'name': 'obj1', 'type': 'foo'}, {'name': 'obj3', 'type': 'foo'}] == resource.get_objects_by_filter( '/objects', {'type': 'foo'}) class TestIterateOverPageableResource(object): def test_iterate_over_pageable_resource_with_no_items(self): resource_func = mock.Mock(return_value={'items': []}) items = iterate_over_pageable_resource(resource_func) assert [] == list(items) def test_iterate_over_pageable_resource_with_one_page(self): resource_func = mock.Mock(side_effect=[ {'items': ['foo', 'bar']}, {'items': []}, ]) items = iterate_over_pageable_resource(resource_func) assert ['foo', 'bar'] == list(items) resource_func.assert_has_calls([ call(query_params={'offset': 0, 'limit': 10}), call(query_params={'offset': 10, 'limit': 10}) ]) def test_iterate_over_pageable_resource_with_multiple_pages(self): resource_func = mock.Mock(side_effect=[ {'items': ['foo']}, {'items': ['bar']}, {'items': ['buzz']}, {'items': []}, ]) items = iterate_over_pageable_resource(resource_func) assert ['foo', 'bar', 'buzz'] == list(items) def test_iterate_over_pageable_resource_should_preserve_query_params(self): resource_func = mock.Mock(return_value={'items': []}) items = iterate_over_pageable_resource(resource_func, {'filter': 'name:123'}) assert [] == list(items) resource_func.assert_called_once_with(query_params={'filter': 'name:123', 'offset': 0, 'limit': 10}) def test_iterate_over_pageable_resource_should_preserve_limit(self): 
resource_func = mock.Mock(side_effect=[ {'items': ['foo']}, {'items': []}, ]) items = iterate_over_pageable_resource(resource_func, {'limit': 1}) assert ['foo'] == list(items) resource_func.assert_has_calls([ call(query_params={'offset': 0, 'limit': 1}), call(query_params={'offset': 1, 'limit': 1}) ]) def test_iterate_over_pageable_resource_should_preserve_offset(self): resource_func = mock.Mock(side_effect=[ {'items': ['foo']}, {'items': []}, ]) items = iterate_over_pageable_resource(resource_func, {'offset': 3}) assert ['foo'] == list(items) resource_func.assert_has_calls([ call(query_params={'offset': 3, 'limit': 10}), call(query_params={'offset': 13, 'limit': 10}) ]) def test_iterate_over_pageable_resource_should_pass_with_string_offset_and_limit(self): resource_func = mock.Mock(side_effect=[ {'items': ['foo']}, {'items': []}, ]) items = iterate_over_pageable_resource(resource_func, {'offset': '1', 'limit': '1'}) assert ['foo'] == list(items) resource_func.assert_has_calls([ call(query_params={'offset': '1', 'limit': '1'}), call(query_params={'offset': 2, 'limit': '1'}) ])
gpl-3.0
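The tests above pin down the paging contract of iterate_over_pageable_resource: offset defaults to 0 and limit to 10, caller-supplied values (even strings) are passed through unchanged on the first request, the offset then advances by the limit, and iteration stops at the first empty items list. Below is a generator sketch consistent with those tests; the real implementation lives in ansible.module_utils.network.ftd.configuration, so this is a reconstruction, not that code:

# A reconstruction of the paging contract exercised by the tests above.
def iterate_over_pageable_resource(resource_func, query_params=None):
    params = dict(query_params or {})
    params.setdefault('offset', 0)
    params.setdefault('limit', 10)
    while True:
        response = resource_func(query_params=dict(params))
        items = response['items']
        for item in items:
            yield item
        if not items:
            return  # first empty page ends the iteration
        # String offsets/limits from the caller still advance numerically.
        params['offset'] = int(params['offset']) + int(params['limit'])


fetched = iterate_over_pageable_resource(
    lambda query_params: {'items': []}, {'filter': 'name:123'})
print(list(fetched))  # [] -- one request, filter preserved, offset=0/limit=10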
mxrrow/zaicoin
src/deps/boost/tools/build/v2/test/generator_selection.py
15
4350
#!/usr/bin/python

# Copyright 2008 Jurko Gospodnetic
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)

# Tests that generators get selected correctly.

import BoostBuild

################################################################################
#
# test_generator_added_after_already_building_a_target_of_its_target_type()
# -------------------------------------------------------------------------
#
################################################################################
def test_generator_added_after_already_building_a_target_of_its_target_type():
    """Regression test for a Boost Build bug causing it not to use a generator
    if it got added after already building a target of its target type.
    """
    t = BoostBuild.Tester()

    t.write("dummy.cpp", "void f() {}\n")

    t.write("jamroot.jam", """
# Building this dummy target must not cause a later defined CPP target type
# generator not to be recognized as viable.
obj dummy : dummy.cpp ;
alias the-other-obj : Other//other-obj ;
""")

    t.write("Other/source.extension", "A dummy source file.")

    t.write("Other/mygen.jam", """
import generators ;
import os ;
import type ;
type.register MY_TYPE : extension ;
generators.register-standard mygen.generate-a-cpp-file : MY_TYPE : CPP ;
rule generate-a-cpp-file { ECHO Generating a CPP file... ; }
if [ os.name ] = NT
{
    actions generate-a-cpp-file { echo void g() {} > "$(<)" }
}
else
{
    actions generate-a-cpp-file { echo "void g() {}" > "$(<)" }
}
""")

    t.write("Other/mygen.py", """
import b2.build.generators as generators
import b2.build.type as type

from b2.manager import get_manager

import os

type.register('MY_TYPE', ['extension'])
generators.register_standard('mygen.generate-a-cpp-file', ['MY_TYPE'], ['CPP'])

if os.name == 'nt':
    action = 'echo void g() {} > "$(<)"'
else:
    action = 'echo "void g() {}" > "$(<)"'

def f(*args):
    print "Generating a CPP file..."

get_manager().engine().register_action("mygen.generate-a-cpp-file", action, function=f)
""")

    t.write("Other/jamfile.jam", """
import mygen ;
obj other-obj : source.extension ;
""")

    t.run_build_system()
    t.expect_output_line("Generating a CPP file...")
    t.expect_addition("bin/$toolset/debug/dummy.obj")
    t.expect_addition("Other/bin/$toolset/debug/other-obj.obj")

    t.cleanup()


################################################################################
#
# test_using_a_derived_source_type_created_after_generator_already_used()
# -----------------------------------------------------------------------
#
################################################################################
def test_using_a_derived_source_type_created_after_generator_already_used():
    """Regression test for a Boost Build bug causing it not to use a generator
    with a source type derived from one of the generator's sources but created
    only after already using the generator.
    """
    t = BoostBuild.Tester()

    t.write("dummy.cpp", "void f() {}\n")

    t.write("jamroot.jam", """
# Building this dummy target must not cause a later defined UNGA_CPP target type
# not to be recognized as a viable source type for building OBJ targets.
obj dummy : dummy.cpp ; alias the-test-output : Other//other-obj ; """) t.write("Other/source.unga_cpp", "void g() {}\n") t.write("Other/jamfile.jam", """ import type ; type.register UNGA_CPP : unga_cpp : CPP ; # We are careful not to do anything between defining our new UNGA_CPP target # type and using the CPP --> OBJ generator that could potentially cover the # Boost Build bug by clearing its internal viable source target type state. obj other-obj : source.unga_cpp ; """) t.run_build_system() t.expect_addition("bin/$toolset/debug/dummy.obj") t.expect_addition("Other/bin/$toolset/debug/other-obj.obj") t.expect_nothing_more() t.cleanup() ################################################################################ # # main() # ------ # ################################################################################ test_generator_added_after_already_building_a_target_of_its_target_type() test_using_a_derived_source_type_created_after_generator_already_used()
mit
shukiz/VAR-SOM-AM33-Kernel-3-15
tools/perf/tests/attr.py
3174
9441
#! /usr/bin/python import os import sys import glob import optparse import tempfile import logging import shutil import ConfigParser class Fail(Exception): def __init__(self, test, msg): self.msg = msg self.test = test def getMsg(self): return '\'%s\' - %s' % (self.test.path, self.msg) class Unsup(Exception): def __init__(self, test): self.test = test def getMsg(self): return '\'%s\'' % self.test.path class Event(dict): terms = [ 'cpu', 'flags', 'type', 'size', 'config', 'sample_period', 'sample_type', 'read_format', 'disabled', 'inherit', 'pinned', 'exclusive', 'exclude_user', 'exclude_kernel', 'exclude_hv', 'exclude_idle', 'mmap', 'comm', 'freq', 'inherit_stat', 'enable_on_exec', 'task', 'watermark', 'precise_ip', 'mmap_data', 'sample_id_all', 'exclude_host', 'exclude_guest', 'exclude_callchain_kernel', 'exclude_callchain_user', 'wakeup_events', 'bp_type', 'config1', 'config2', 'branch_sample_type', 'sample_regs_user', 'sample_stack_user', ] def add(self, data): for key, val in data: log.debug(" %s = %s" % (key, val)) self[key] = val def __init__(self, name, data, base): log.debug(" Event %s" % name); self.name = name; self.group = '' self.add(base) self.add(data) def compare_data(self, a, b): # Allow multiple values in assignment separated by '|' a_list = a.split('|') b_list = b.split('|') for a_item in a_list: for b_item in b_list: if (a_item == b_item): return True elif (a_item == '*') or (b_item == '*'): return True return False def equal(self, other): for t in Event.terms: log.debug(" [%s] %s %s" % (t, self[t], other[t])); if not self.has_key(t) or not other.has_key(t): return False if not self.compare_data(self[t], other[t]): return False return True def diff(self, other): for t in Event.terms: if not self.has_key(t) or not other.has_key(t): continue if not self.compare_data(self[t], other[t]): log.warning("expected %s=%s, got %s" % (t, self[t], other[t])) # Test file description needs to have following sections: # [config] # - just single instance in file # - needs to specify: # 'command' - perf command name # 'args' - special command arguments # 'ret' - expected command return value (0 by default) # # [eventX:base] # - one or multiple instances in file # - expected values assignments class Test(object): def __init__(self, path, options): parser = ConfigParser.SafeConfigParser() parser.read(path) log.warning("running '%s'" % path) self.path = path self.test_dir = options.test_dir self.perf = options.perf self.command = parser.get('config', 'command') self.args = parser.get('config', 'args') try: self.ret = parser.get('config', 'ret') except: self.ret = 0 self.expect = {} self.result = {} log.debug(" loading expected events"); self.load_events(path, self.expect) def is_event(self, name): if name.find("event") == -1: return False else: return True def load_events(self, path, events): parser_event = ConfigParser.SafeConfigParser() parser_event.read(path) # The event record section header contains 'event' word, # optionaly followed by ':' allowing to load 'parent # event' first as a base for section in filter(self.is_event, parser_event.sections()): parser_items = parser_event.items(section); base_items = {} # Read parent event if there's any if (':' in section): base = section[section.index(':') + 1:] parser_base = ConfigParser.SafeConfigParser() parser_base.read(self.test_dir + '/' + base) base_items = parser_base.items('event') e = Event(section, parser_items, base_items) events[section] = e def run_cmd(self, tempdir): cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir, 
self.perf, self.command, tempdir, self.args) ret = os.WEXITSTATUS(os.system(cmd)) log.info(" '%s' ret %d " % (cmd, ret)) if ret != int(self.ret): raise Unsup(self) def compare(self, expect, result): match = {} log.debug(" compare"); # For each expected event find all matching # events in result. Fail if there's not any. for exp_name, exp_event in expect.items(): exp_list = [] log.debug(" matching [%s]" % exp_name) for res_name, res_event in result.items(): log.debug(" to [%s]" % res_name) if (exp_event.equal(res_event)): exp_list.append(res_name) log.debug(" ->OK") else: log.debug(" ->FAIL"); log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list))) # we did not any matching event - fail if (not exp_list): exp_event.diff(res_event) raise Fail(self, 'match failure'); match[exp_name] = exp_list # For each defined group in the expected events # check we match the same group in the result. for exp_name, exp_event in expect.items(): group = exp_event.group if (group == ''): continue for res_name in match[exp_name]: res_group = result[res_name].group if res_group not in match[group]: raise Fail(self, 'group failure') log.debug(" group: [%s] matches group leader %s" % (exp_name, str(match[group]))) log.debug(" matched") def resolve_groups(self, events): for name, event in events.items(): group_fd = event['group_fd']; if group_fd == '-1': continue; for iname, ievent in events.items(): if (ievent['fd'] == group_fd): event.group = iname log.debug('[%s] has group leader [%s]' % (name, iname)) break; def run(self): tempdir = tempfile.mkdtemp(); try: # run the test script self.run_cmd(tempdir); # load events expectation for the test log.debug(" loading result events"); for f in glob.glob(tempdir + '/event*'): self.load_events(f, self.result); # resolve group_fd to event names self.resolve_groups(self.expect); self.resolve_groups(self.result); # do the expectation - results matching - both ways self.compare(self.expect, self.result) self.compare(self.result, self.expect) finally: # cleanup shutil.rmtree(tempdir) def run_tests(options): for f in glob.glob(options.test_dir + '/' + options.test): try: Test(f, options).run() except Unsup, obj: log.warning("unsupp %s" % obj.getMsg()) def setup_log(verbose): global log level = logging.CRITICAL if verbose == 1: level = logging.WARNING if verbose == 2: level = logging.INFO if verbose >= 3: level = logging.DEBUG log = logging.getLogger('test') log.setLevel(level) ch = logging.StreamHandler() ch.setLevel(level) formatter = logging.Formatter('%(message)s') ch.setFormatter(formatter) log.addHandler(ch) USAGE = '''%s [OPTIONS] -d dir # tests dir -p path # perf binary -t test # single test -v # verbose level ''' % sys.argv[0] def main(): parser = optparse.OptionParser(usage=USAGE) parser.add_option("-t", "--test", action="store", type="string", dest="test") parser.add_option("-d", "--test-dir", action="store", type="string", dest="test_dir") parser.add_option("-p", "--perf", action="store", type="string", dest="perf") parser.add_option("-v", "--verbose", action="count", dest="verbose") options, args = parser.parse_args() if args: parser.error('FAILED wrong arguments %s' % ' '.join(args)) return -1 setup_log(options.verbose) if not options.test_dir: print 'FAILED no -d option specified' sys.exit(-1) if not options.test: options.test = 'test*' try: run_tests(options) except Fail, obj: print "FAILED %s" % obj.getMsg(); sys.exit(-1) sys.exit(0) if __name__ == '__main__': main()
gpl-2.0
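Event.compare_data above defines the matching rules for expected attribute values in these test files: '|' separates acceptable alternatives and '*' is a wildcard that matches anything. The same logic as a standalone function, with a few illustrative checks:

# The value-matching rules used by Event.compare_data(), extracted for
# demonstration: alternatives via '|', wildcard via '*'.
def compare_data(a, b):
    for a_item in a.split('|'):
        for b_item in b.split('|'):
            if a_item == b_item or a_item == '*' or b_item == '*':
                return True
    return False


print(compare_data('0|1', '1'))   # True: '1' is one of the alternatives
print(compare_data('*', '123'))   # True: wildcard matches anything
print(compare_data('2', '0|1'))   # False: no alternative matches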
glavase/limbo-android
jni/qemu/roms/seabios/tools/checkrom.py
103
1842
#!/usr/bin/env python # Script to check a bios image and report info on it. # # Copyright (C) 2008 Kevin O'Connor <[email protected]> # # This file may be distributed under the terms of the GNU GPLv3 license. import sys import layoutrom def main(): # Get args objinfo, rawfile, outfile = sys.argv[1:] # Read in symbols objinfofile = open(objinfo, 'rb') symbols = layoutrom.parseObjDump(objinfofile, 'in')[1] # Read in raw file f = open(rawfile, 'rb') rawdata = f.read() f.close() datasize = len(rawdata) finalsize = 64*1024 if datasize > 64*1024: finalsize = 128*1024 if datasize > 128*1024: finalsize = 256*1024 # Sanity checks start = symbols['code32flat_start'].offset end = symbols['code32flat_end'].offset expend = layoutrom.BUILD_BIOS_ADDR + layoutrom.BUILD_BIOS_SIZE if end != expend: print "Error! Code does not end at 0x%x (got 0x%x)" % ( expend, end) sys.exit(1) if datasize > finalsize: print "Error! Code is too big (0x%x vs 0x%x)" % ( datasize, finalsize) sys.exit(1) expdatasize = end - start if datasize != expdatasize: print "Error! Unknown extra data (0x%x vs 0x%x)" % ( datasize, expdatasize) sys.exit(1) # Print statistics runtimesize = datasize if '_reloc_abs_start' in symbols: runtimesize = end - symbols['code32init_end'].offset print "Total size: %d Fixed: %d Free: %d (used %.1f%% of %dKiB rom)" % ( datasize, runtimesize, finalsize - datasize , (datasize / float(finalsize)) * 100.0 , finalsize / 1024) # Write final file f = open(outfile, 'wb') f.write(("\0" * (finalsize - datasize)) + rawdata) f.close() if __name__ == '__main__': main()
gpl-2.0
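The size selection above snaps the payload to the next of three fixed flash sizes (64, 128, or 256 KiB) before padding the image. The same decision expressed as a small helper, for illustration:

# The finalsize bucketing from main(), as a standalone helper.
def final_rom_size(datasize):
    for size in (64 * 1024, 128 * 1024, 256 * 1024):
        if datasize <= size:
            return size
    raise ValueError("payload too large: 0x%x" % datasize)


print(final_rom_size(70000))  # 131072 -- a 70000-byte payload pads to 128 KiB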
highlanderkev/kpw.org
kpw-org/languages/uk.py
27
13793
# coding: utf8 { '!langcode!': 'uk', '!langname!': 'Українська', '"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"Оновити" це додатковий вираз, такий, як "field1=\'нове_значення\'". Ви не можете змінювати або вилучати дані об\'єднаних таблиць.', '%d days ago': '%d %%{день} тому', '%d hours ago': '%d %%{годину} тому', '%d minutes ago': '%d %%{хвилину} тому', '%d months ago': '%d %%{місяць} тому', '%d secods ago': '%d %%{секунду} тому', '%d weeks ago': '%d %%{тиждень} тому', '%d years ago': '%d %%{рік} тому', '%s %%{row} deleted': 'Вилучено %s %%{рядок}', '%s %%{row} updated': 'Змінено %s %%{рядок}', '%s selected': 'Вибрано %s %%{запис}', '%Y-%m-%d': '%Y/%m/%d', '%Y-%m-%d %H:%M:%S': '%Y/%m/%d %H:%M:%S', '1 day ago': '1 день тому', '1 hour ago': '1 годину тому', '1 minute ago': '1 хвилину тому', '1 month ago': '1 місяць тому', '1 second ago': '1 секунду тому', '1 week ago': '1 тиждень тому', '1 year ago': '1 рік тому', '@markmin\x01(**%.0d MB**)': '(**``%.0d``:red МБ**)', '@markmin\x01**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** %%{елемент(items)}, **%(bytes)s** %%{байт(bytes)}', '@markmin\x01``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '**нема в наявності** (потребує Пітонівської бібліотеки [[guppy {посилання відкриється у новому вікні} http://pypi.python.org/pypi/guppy/ popup]])', '@markmin\x01Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': "Час життя об'єктів в КЕШІ сягає **%(hours)02d** %%{годину(hours)} **%(min)02d** %%{хвилину(min)} та **%(sec)02d** %%{секунду(sec)}.", '@markmin\x01DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': "Час життя об'єктів в ДИСКОВОМУ КЕШІ сягає **%(hours)02d** %%{годину(hours)} **%(min)02d** %%{хвилину(min)} та **%(sec)02d** %%{секунду(sec)}.", '@markmin\x01Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})': 'Оцінка поцілювання: **%(ratio)s%%** (**%(hits)s** %%{поцілювання(hits)} та **%(misses)s** %%{схибнення(misses)})', '@markmin\x01Number of entries: **%s**': 'Кількість входжень: ``**%s**``:red', '@markmin\x01RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': "Час життя об'єктів в ОЗП-КЕШІ сягає **%(hours)02d** %%{годину(hours)} **%(min)02d** %%{хвилину(min)} та **%(sec)02d** %%{секунду(sec)}.", 'About': 'Про додаток', 'Access Control': 'Контроль доступу', 'Administrative Interface': 'Адміністративний інтерфейс', 'Ajax Recipes': 'Рецепти для Ajax', 'appadmin is disabled because insecure channel': 'використовується незахищенний канал (HTTP). 
Appadmin вимкнено', 'Are you sure you want to delete this object?': "Ви впевнені, що хочете вилучити цей об'єкт?", 'Available Databases and Tables': 'Доступні бази даних та таблиці', 'Buy this book': 'Купити книжку', 'cache': 'кеш', 'Cache': 'Кеш', 'Cache Keys': 'Ключі кешу', 'Cannot be empty': 'Порожнє значення неприпустиме', 'Change password': 'Змінити пароль', 'Check to delete': 'Позначити для вилучення', 'Check to delete:': 'Позначте для вилучення:', 'Clear CACHE?': 'Очистити ВЕСЬ кеш?', 'Clear DISK': 'Очистити ДИСКОВИЙ кеш', 'Clear RAM': "Очистити кеш В ПАМ'ЯТІ", 'Client IP': 'IP клієнта', 'Community': 'Спільнота', 'Components and Plugins': 'Компоненти та втулки', 'Controller': 'Контролер', 'Copyright': 'Правовласник', 'Created By': 'Створив(ла)', 'Created On': 'Створено в', 'Current request': 'Поточний запит (current request)', 'Current response': 'Поточна відповідь (current response)', 'Current session': 'Поточна сесія (current session)', 'customize me!': 'причепуріть мене!', 'data uploaded': 'дані завантажено', 'Database': 'База даних', 'Database %s select': 'Вибірка з бази даних %s', 'db': 'база даних', 'DB Model': 'Модель БД', 'Delete:': 'Вилучити:', 'Demo': 'Демо', 'Deployment Recipes': 'Способи розгортання', 'Description': 'Опис', 'design': 'налаштування', 'DISK': 'ДИСК', 'Disk Cache Keys': 'Ключі дискового кешу', 'Disk Cleared': 'Дисковий кеш очищено', 'Documentation': 'Документація', "Don't know what to do?": 'Не знаєте що робити далі?', 'done!': 'зроблено!', 'Download': 'Завантажити', 'E-mail': 'Ел.пошта', 'edit': 'редагувати', 'Edit current record': 'Редагувати поточний запис', 'Edit Page': 'Редагувати сторінку', 'Email and SMS': 'Ел.пошта та SMS', 'enter a value': 'введіть значення', 'enter an integer between %(min)g and %(max)g': 'введіть ціле число між %(min)g та %(max)g', 'Error!': 'Помилка!', 'Errors': 'Помилки', 'Errors in form, please check it out.': 'У формі є помилка. 
Виправте її, будь-ласка.', 'export as csv file': 'експортувати як файл csv', 'FAQ': 'ЧаПи (FAQ)', 'First name': "Ім'я", 'Forgot username?': "Забули ім'я користувача?", 'Forms and Validators': 'Форми та коректність даних', 'Free Applications': 'Вільні додатки', 'Group %(group_id)s created': 'Групу %(group_id)s створено', 'Group ID': 'Ідентифікатор групи', 'Group uniquely assigned to user %(id)s': "Група унікально зв'язана з користувачем %(id)s", 'Groups': 'Групи', 'Hello World': 'Привіт, світ!', 'Home': 'Початок', 'How did you get here?': 'Як цього було досягнуто?', 'import': 'Імпортувати', 'Import/Export': 'Імпорт/Експорт', 'insert new': 'Створити новий запис', 'insert new %s': 'створити новий запис %s', 'Internal State': 'Внутрішній стан', 'Introduction': 'Введення', 'Invalid email': 'Невірна адреса ел.пошти', 'Invalid login': "Невірне ім'я користувача", 'Invalid password': 'Невірний пароль', 'Invalid Query': 'Помилковий запит', 'invalid request': 'хибний запит', 'Is Active': 'Активна', 'Key': 'Ключ', 'Last name': 'Прізвище', 'Layout': 'Макет (Layout)', 'Layout Plugins': 'Втулки макетів', 'Layouts': 'Макети', 'Live Chat': 'Чат', 'Logged in': 'Вхід здійснено', 'Logged out': 'Вихід здійснено', 'Login': 'Вхід', 'Logout': 'Вихід', 'Lost Password': 'Забули пароль', 'Lost password?': 'Забули пароль?', 'Manage Cache': 'Управління кешем', 'Menu Model': 'Модель меню', 'Modified By': 'Зміни провадив(ла)', 'Modified On': 'Змінено в', 'My Sites': 'Сайт (усі додатки)', 'Name': "Ім'я", 'New password': 'Новий пароль', 'New Record': 'Новий запис', 'new record inserted': 'новий рядок додано', 'next 100 rows': 'наступні 100 рядків', 'No databases in this application': 'Даний додаток не використовує базу даних', 'now': 'зараз', 'Object or table name': "Об'єкт або назва таблиці", 'Old password': 'Старий пароль', 'Online examples': 'Зразковий демо-сайт', 'or import from csv file': 'або імпортувати з csv-файлу', 'Origin': 'Походження', 'Other Plugins': 'Інші втулки', 'Other Recipes': 'Інші рецепти', 'Overview': 'Огляд', 'Page Not Found!': 'Сторінку не знайдено!', 'Page saved': 'Сторінку збережено', 'Password': 'Пароль', 'Password changed': 'Пароль змінено', "Password fields don't match": 'Пароль не співпав', 'please input your password again': 'Будь-ласка введіть пароль ще раз', 'Plugins': 'Втулки (Plugins)', 'Powered by': 'Працює на', 'Preface': 'Передмова', 'previous 100 rows': 'попередні 100 рядків', 'Profile': 'Параметри', 'Profile updated': 'Параметри змінено', 'Python': 'Мова Python', 'Query:': 'Запит:', 'Quick Examples': 'Швидкі приклади', 'RAM': "ОПЕРАТИВНА ПАМ'ЯТЬ (ОЗП)", 'RAM Cache Keys': 'Ключі ОЗП-кешу', 'Ram Cleared': 'ОЗП-кеш очищено', 'Recipes': 'Рецепти', 'Record': 'запис', 'Record %(id)s updated': 'Запис %(id)s змінено', 'record does not exist': 'запису не існує', 'Record ID': 'Ід.запису', 'Record id': 'ід. 
запису', 'Record Updated': 'Запис змінено', 'Register': 'Реєстрація', 'Registration identifier': 'Реєстраційний ідентифікатор', 'Registration key': 'Реєстраційний ключ', 'Registration successful': 'Реєстрація пройшла успішно', 'Remember me (for 30 days)': "Запам'ятати мене (на 30 днів)", 'Request reset password': 'Запит на зміну пароля', 'Reset Password key': 'Ключ скидання пароля', 'Role': 'Роль', 'Rows in Table': 'Рядки в таблиці', 'Rows selected': 'Відмічено рядків', 'Save profile': 'Зберегти параметри', 'Semantic': 'Семантика', 'Services': 'Сервіс', 'Size of cache:': 'Розмір кешу:', 'state': 'стан', 'Statistics': 'Статистика', 'Stylesheet': 'CSS-стилі', 'submit': 'застосувати', 'Submit': 'Застосувати', 'Support': 'Підтримка', 'Table': 'Таблиця', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"Запит" це умова, на зразок "db.table1.field1==\'значення\'". Вираз "db.table1.field1==db.table2.field2" повертає результат об\'єднання (SQL JOIN) таблиць.', 'The Core': 'Ядро', 'The output of the file is a dictionary that was rendered by the view %s': 'Результат функції - словник пар (назва=значення) було відображено з допомогою відображення (view) %s', 'The Views': 'Відображення (Views)', 'This App': 'Цей додаток', 'Time in Cache (h:m:s)': 'Час знаходження в кеші (h:m:s)', 'Timestamp': 'Відмітка часу', 'too short': 'Занадто короткий', 'Twitter': 'Твіттер', 'unable to parse csv file': 'не вдається розібрати csv-файл', 'Update:': 'Оновити:', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Для створення складних запитів використовуйте (...)&(...) замість AND, (...)|(...) замість OR, та ~(...) замість NOT.', 'User %(id)s Logged-in': 'Користувач %(id)s увійшов', 'User %(id)s Logged-out': 'Користувач %(id)s вийшов', 'User %(id)s Password changed': 'Користувач %(id)s змінив свій пароль', 'User %(id)s Password reset': 'Користувач %(id)s скинув пароль', 'User %(id)s Profile updated': 'Параметри користувача %(id)s змінено', 'User %(id)s Registered': 'Користувач %(id)s зареєструвався', 'User ID': 'Ід.користувача', 'value already in database or empty': 'значення вже в базі даних або порожнє', 'Verify Password': 'Повторити пароль', 'Videos': 'Відео', 'View': 'Відображення (View)', 'Welcome': 'Ласкаво просимо', 'Welcome to web2py!': 'Ласкаво просимо до web2py!', 'Which called the function %s located in the file %s': 'Управління передалось функції %s, яка розташована у файлі %s', 'You are successfully running web2py': 'Ви успішно запустили web2py', 'You can modify this application and adapt it to your needs': 'Ви можете модифікувати цей додаток і адаптувати його до своїх потреб', 'You visited the url %s': 'Ви відвідали наступну адресу: %s', }
mit
googleworkspace/python-samples
events/next18/customer_spreadsheet_reader.py
3
2691
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pylint: disable=E1102 # python3 """Reads the customer data from the template spreadsheet.""" import collections class CustomerSpreadsheetReader(object): def __init__(self, sheets_service, spreadsheet_id): self._sheets_service = sheets_service self._spreadsheet_id = spreadsheet_id self._data_filters = collections.OrderedDict() def ReadColumnData(self, column_id): data_filter = { 'developerMetadataLookup': { 'metadataKey': 'column_id', 'metadataValue': column_id, } } self._data_filters[column_id] = data_filter def ExecuteRead(self): filters = list(self._data_filters.values()) get_body = {'dataFilters': filters} read_fields = ','.join([ 'sheets.properties.sheetId', 'sheets.data.rowData.values.formattedValue', 'developerMetadata.metadataValue']) spreadsheet = self._sheets_service.spreadsheets().getByDataFilter( spreadsheetId=self._spreadsheet_id, body=get_body, fields=read_fields).execute() customer_spreadsheet = CustomerSpreadsheet( spreadsheet, self._data_filters) self._data_filters = collections.OrderedDict() return customer_spreadsheet class CustomerSpreadsheet(object): def __init__(self, spreadsheet, data_filters): self._spreadsheet = spreadsheet self._data_filters = data_filters def GetSheetId(self): sheet = self._spreadsheet.get('sheets')[0] return sheet.get('properties').get('sheetId') def GetTemplateId(self): metadata = self._spreadsheet.get('developerMetadata')[0] return metadata.get('metadataValue') def GetColumnData(self, column_id): index = list(self._data_filters.keys()).index(column_id) data = self._spreadsheet.get('sheets')[0].get('data')[index] values = [row.get('values')[0].get('formattedValue') for row in data.get('rowData')] # Remove the first value which is just the label return values[1:]
apache-2.0
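A hedged usage sketch for the reader above: building sheets_service via googleapiclient (credential setup omitted), the spreadsheet id, and the column ids are all assumptions about the deployment. The call sequence mirrors the class's queue-filters-then-batch-read flow:

# Usage sketch; spreadsheet id and column ids are placeholders, and the
# service construction assumes credentials are available to the discovery
# client (omitted here).
from googleapiclient.discovery import build

sheets_service = build('sheets', 'v4')  # credential setup omitted

reader = CustomerSpreadsheetReader(sheets_service, 'your-spreadsheet-id')
reader.ReadColumnData('customer_name')    # queue data filters first...
reader.ReadColumnData('customer_email')
customer_sheet = reader.ExecuteRead()     # ...then issue one batched read

print(customer_sheet.GetTemplateId())
print(customer_sheet.GetColumnData('customer_email'))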
Juzley/golfstats
lib/werkzeug/security.py
255
8971
# -*- coding: utf-8 -*- """ werkzeug.security ~~~~~~~~~~~~~~~~~ Security related helpers such as secure password hashing tools. :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import os import hmac import hashlib import posixpath import codecs from struct import Struct from random import SystemRandom from operator import xor from itertools import starmap from werkzeug._compat import range_type, PY2, text_type, izip, to_bytes, \ string_types, to_native SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' DEFAULT_PBKDF2_ITERATIONS = 1000 _pack_int = Struct('>I').pack _builtin_safe_str_cmp = getattr(hmac, 'compare_digest', None) _sys_rng = SystemRandom() _os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep] if sep not in (None, '/')) def _find_hashlib_algorithms(): algos = getattr(hashlib, 'algorithms', None) if algos is None: algos = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') rv = {} for algo in algos: func = getattr(hashlib, algo, None) if func is not None: rv[algo] = func return rv _hash_funcs = _find_hashlib_algorithms() def pbkdf2_hex(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None): """Like :func:`pbkdf2_bin`, but returns a hex-encoded string. .. versionadded:: 0.9 :param data: the data to derive. :param salt: the salt for the derivation. :param iterations: the number of iterations. :param keylen: the length of the resulting key. If not provided, the digest size will be used. :param hashfunc: the hash function to use. This can either be the string name of a known hash function, or a function from the hashlib module. Defaults to sha1. """ rv = pbkdf2_bin(data, salt, iterations, keylen, hashfunc) return to_native(codecs.encode(rv, 'hex_codec')) _has_native_pbkdf2 = hasattr(hashlib, 'pbkdf2_hmac') def pbkdf2_bin(data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None): """Returns a binary digest for the PBKDF2 hash algorithm of `data` with the given `salt`. It iterates `iterations` times and produces a key of `keylen` bytes. By default, SHA-1 is used as hash function; a different hashlib `hashfunc` can be provided. .. versionadded:: 0.9 :param data: the data to derive. :param salt: the salt for the derivation. :param iterations: the number of iterations. :param keylen: the length of the resulting key. If not provided the digest size will be used. :param hashfunc: the hash function to use. This can either be the string name of a known hash function or a function from the hashlib module. Defaults to sha1. """ if isinstance(hashfunc, string_types): hashfunc = _hash_funcs[hashfunc] elif not hashfunc: hashfunc = hashlib.sha1 data = to_bytes(data) salt = to_bytes(salt) # If we're on Python with pbkdf2_hmac we can try to use it for # compatible digests. 
if _has_native_pbkdf2: _test_hash = hashfunc() if hasattr(_test_hash, 'name') and \ _test_hash.name in _hash_funcs: return hashlib.pbkdf2_hmac(_test_hash.name, data, salt, iterations, keylen) mac = hmac.HMAC(data, None, hashfunc) if not keylen: keylen = mac.digest_size def _pseudorandom(x, mac=mac): h = mac.copy() h.update(x) return bytearray(h.digest()) buf = bytearray() for block in range_type(1, -(-keylen // mac.digest_size) + 1): rv = u = _pseudorandom(salt + _pack_int(block)) for i in range_type(iterations - 1): u = _pseudorandom(bytes(u)) rv = bytearray(starmap(xor, izip(rv, u))) buf.extend(rv) return bytes(buf[:keylen]) def safe_str_cmp(a, b): """This function compares strings in somewhat constant time. This requires that the length of at least one string is known in advance. Returns `True` if the two strings are equal, or `False` if they are not. .. versionadded:: 0.7 """ if isinstance(a, text_type): a = a.encode('utf-8') if isinstance(b, text_type): b = b.encode('utf-8') if _builtin_safe_str_cmp is not None: return _builtin_safe_str_cmp(a, b) if len(a) != len(b): return False rv = 0 if PY2: for x, y in izip(a, b): rv |= ord(x) ^ ord(y) else: for x, y in izip(a, b): rv |= x ^ y return rv == 0 def gen_salt(length): """Generate a random string of SALT_CHARS with specified ``length``.""" if length <= 0: raise ValueError('Salt length must be positive') return ''.join(_sys_rng.choice(SALT_CHARS) for _ in range_type(length)) def _hash_internal(method, salt, password): """Internal password hash helper. Supports plaintext without salt, unsalted and salted passwords. In case salted passwords are used hmac is used. """ if method == 'plain': return password, method if isinstance(password, text_type): password = password.encode('utf-8') if method.startswith('pbkdf2:'): args = method[7:].split(':') if len(args) not in (1, 2): raise ValueError('Invalid number of arguments for PBKDF2') method = args.pop(0) iterations = args and int(args[0] or 0) or DEFAULT_PBKDF2_ITERATIONS is_pbkdf2 = True actual_method = 'pbkdf2:%s:%d' % (method, iterations) else: is_pbkdf2 = False actual_method = method hash_func = _hash_funcs.get(method) if hash_func is None: raise TypeError('invalid method %r' % method) if is_pbkdf2: if not salt: raise ValueError('Salt is required for PBKDF2') rv = pbkdf2_hex(password, salt, iterations, hashfunc=hash_func) elif salt: if isinstance(salt, text_type): salt = salt.encode('utf-8') rv = hmac.HMAC(salt, password, hash_func).hexdigest() else: h = hash_func() h.update(password) rv = h.hexdigest() return rv, actual_method def generate_password_hash(password, method='pbkdf2:sha1', salt_length=8): """Hash a password with the given method and salt with with a string of the given length. The format of the string returned includes the method that was used so that :func:`check_password_hash` can check the hash. The format for the hashed string looks like this:: method$salt$hash This method can **not** generate unsalted passwords but it is possible to set the method to plain to enforce plaintext passwords. If a salt is used, hmac is used internally to salt the password. If PBKDF2 is wanted it can be enabled by setting the method to ``pbkdf2:method:iterations`` where iterations is optional:: pbkdf2:sha1:2000$salt$hash pbkdf2:sha1$salt$hash :param password: the password to hash. :param method: the hash method to use (one that hashlib supports). Can optionally be in the format ``pbkdf2:<method>[:iterations]`` to enable PBKDF2. :param salt_length: the length of the salt in letters. 
""" salt = method != 'plain' and gen_salt(salt_length) or '' h, actual_method = _hash_internal(method, salt, password) return '%s$%s$%s' % (actual_method, salt, h) def check_password_hash(pwhash, password): """check a password against a given salted and hashed password value. In order to support unsalted legacy passwords this method supports plain text passwords, md5 and sha1 hashes (both salted and unsalted). Returns `True` if the password matched, `False` otherwise. :param pwhash: a hashed string like returned by :func:`generate_password_hash`. :param password: the plaintext password to compare against the hash. """ if pwhash.count('$') < 2: return False method, salt, hashval = pwhash.split('$', 2) return safe_str_cmp(_hash_internal(method, salt, password)[0], hashval) def safe_join(directory, filename): """Safely join `directory` and `filename`. If this cannot be done, this function returns ``None``. :param directory: the base directory. :param filename: the untrusted filename relative to that directory. """ filename = posixpath.normpath(filename) for sep in _os_alt_seps: if sep in filename: return None if os.path.isabs(filename) or filename.startswith('../'): return None return os.path.join(directory, filename)
mit
xkollar/spacewalk
spacecmd/src/lib/org.py
5
12499
# # Licensed under the GNU General Public License Version 3 # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright 2013 Aron Parsons <[email protected]> # # NOTE: the 'self' variable is an instance of SpacewalkShell # wildcard import # pylint: disable=W0401,W0614 # unused argument # pylint: disable=W0613 # invalid function name # pylint: disable=C0103 import shlex from getpass import getpass from operator import itemgetter from optparse import Option from spacecmd.utils import * _PREFIXES = ['Dr.', 'Mr.', 'Miss', 'Mrs.', 'Ms.'] def help_org_create(self): print 'org_create: Create an organization' print '''usage: org_create [options] options: -n ORG_NAME -u USERNAME -P PREFIX (%s) -f FIRST_NAME -l LAST_NAME -e EMAIL -p PASSWORD --pam enable PAM authentication''' % ', '.join(_PREFIXES) def do_org_create(self, args): options = [Option('-n', '--org-name', action='store'), Option('-u', '--username', action='store'), Option('-P', '--prefix', action='store'), Option('-f', '--first-name', action='store'), Option('-l', '--last-name', action='store'), Option('-e', '--email', action='store'), Option('-p', '--password', action='store'), Option('', '--pam', action='store_true')] (args, options) = parse_arguments(args, options) if is_interactive(options): options.org_name = prompt_user('Organization Name:', noblank=True) options.username = prompt_user('Username:', noblank=True) options.prefix = prompt_user('Prefix (%s):' % ', '.join(_PREFIXES), noblank=True) options.first_name = prompt_user('First Name:', noblank=True) options.last_name = prompt_user('Last Name:', noblank=True) options.email = prompt_user('Email:', noblank=True) options.pam = self.user_confirm('PAM Authentication [y/N]:', nospacer=True, integer=False, ignore_yes=True) options.password = '' while options.password == '': password1 = getpass('Password: ') password2 = getpass('Repeat Password: ') if password1 == password2: options.password = password1 elif password1 == '': logging.warning('Password must be at least 5 characters') else: logging.warning("Passwords don't match") else: if not options.org_name: logging.error('An organization name is required') return if not options.username: logging.error('A username is required') return if not options.first_name: logging.error('A first name is required') return if not options.last_name: logging.error('A last name is required') return if not options.email: logging.error('An email address is required') return if not options.password: logging.error('A password is required') return if not options.pam: options.pam = False if not options.prefix: options.prefix = 'Dr.' if options.prefix[-1] != '.' and options.prefix != 'Miss': options.prefix = options.prefix + '.' 
    self.client.org.create(self.session,
                           options.org_name,
                           options.username,
                           options.password,
                           options.prefix.capitalize(),
                           options.first_name,
                           options.last_name,
                           options.email,
                           options.pam)

####################


def help_org_delete(self):
    print 'org_delete: Delete an organization'
    print 'usage: org_delete NAME'


def complete_org_delete(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_delete(self, args):
    (args, _options) = parse_arguments(args)

    if len(args) != 1:
        self.help_org_delete()
        return

    name = args[0]
    org_id = self.get_org_id(name)

    if self.user_confirm('Delete this organization [y/N]:'):
        self.client.org.delete(self.session, org_id)

####################


def help_org_rename(self):
    print 'org_rename: Rename an organization'
    print 'usage: org_rename OLDNAME NEWNAME'


def complete_org_rename(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_rename(self, args):
    (args, _options) = parse_arguments(args)

    if len(args) != 2:
        self.help_org_rename()
        return

    org_id = self.get_org_id(args[0])
    new_name = args[1]

    self.client.org.updateName(self.session, org_id, new_name)

####################


def help_org_addtrust(self):
    print 'org_addtrust: Add a trust between two organizations'
    print 'usage: org_addtrust YOUR_ORG ORG_TO_TRUST'


def complete_org_addtrust(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_addtrust(self, args):
    (args, _options) = parse_arguments(args)

    if len(args) != 2:
        self.help_org_addtrust()
        return

    your_org_id = self.get_org_id(args[0])
    org_to_trust_id = self.get_org_id(args[1])

    self.client.org.trusts.addTrust(self.session, your_org_id,
                                    org_to_trust_id)

####################


def help_org_removetrust(self):
    print 'org_removetrust: Remove a trust between two organizations'
    print 'usage: org_removetrust YOUR_ORG TRUSTED_ORG'


def complete_org_removetrust(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_removetrust(self, args):
    (args, _options) = parse_arguments(args)

    if len(args) != 2:
        self.help_org_removetrust()
        return

    your_org_id = self.get_org_id(args[0])
    trusted_org_id = self.get_org_id(args[1])

    systems = self.client.org.trusts.listSystemsAffected(self.session,
                                                         your_org_id,
                                                         trusted_org_id)

    print 'Affected Systems'
    print '----------------'

    if len(systems):
        print '\n'.join(sorted([s.get('systemName') for s in systems]))
    else:
        print 'None'

    if not self.user_confirm('Remove this trust [y/N]:'):
        return

    self.client.org.trusts.removeTrust(self.session, your_org_id,
                                       trusted_org_id)

####################


def help_org_trustdetails(self):
    print 'org_trustdetails: Show the details of an organizational trust'
    print 'usage: org_trustdetails TRUSTED_ORG'


def complete_org_trustdetails(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_trustdetails(self, args):
    (args, _options) = parse_arguments(args)

    if not len(args):
        self.help_org_trustdetails()
        return

    trusted_org = args[0]
    org_id = self.get_org_id(trusted_org)

    details = self.client.org.trusts.getDetails(self.session, org_id)
    consumed = self.client.org.trusts.listChannelsConsumed(self.session,
                                                           org_id)
    provided = self.client.org.trusts.listChannelsProvided(self.session,
                                                           org_id)

    print 'Trusted Organization: %s' % trusted_org
    print 'Trusted Since: %s' % details.get('trusted_since')
    print 'Systems Migrated From: %i' % details.get('systems_migrated_from')
    print 'Systems Migrated To: %i' % details.get('systems_migrated_to')
    print
    print 'Channels Consumed'
    print '-----------------'

    if len(consumed):
        print '\n'.join(sorted([c.get('name') for c in consumed]))

    print
    print 'Channels Provided'
    print '-----------------'

    if len(provided):
        print '\n'.join(sorted([c.get('name') for c in provided]))

####################


def help_org_list(self):
    print 'org_list: List all organizations'
    print 'usage: org_list'


def do_org_list(self, args, doreturn=False):
    orgs = self.client.org.listOrgs(self.session)
    orgs = [o.get('name') for o in orgs]

    if doreturn:
        return orgs
    else:
        if len(orgs):
            print '\n'.join(sorted(orgs))

####################


def help_org_listtrusts(self):
    print "org_listtrusts: List an organization's trusts"
    print 'usage: org_listtrusts NAME'


def complete_org_listtrusts(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_listtrusts(self, args):
    (args, _options) = parse_arguments(args)

    if not len(args):
        self.help_org_listtrusts()
        return

    org_id = self.get_org_id(args[0])

    trusts = self.client.org.trusts.listTrusts(self.session, org_id)

    for trust in sorted(trusts, key=itemgetter('orgName')):
        if trust.get('trustEnabled'):
            print trust.get('orgName')

####################


def help_org_listusers(self):
    print "org_listusers: List an organization's users"
    print 'usage: org_listusers NAME'


def complete_org_listusers(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_listusers(self, args):
    (args, _options) = parse_arguments(args)

    if not len(args):
        self.help_org_listusers()
        return

    org_id = self.get_org_id(args[0])

    users = self.client.org.listUsers(self.session, org_id)

    print '\n'.join(sorted([u.get('login') for u in users]))

####################


def help_org_details(self):
    print 'org_details: Show the details of an organization'
    print 'usage: org_details NAME'


def complete_org_details(self, text, line, beg, end):
    return tab_completer(self.do_org_list('', True), text)


def do_org_details(self, args):
    (args, _options) = parse_arguments(args)

    if not len(args):
        self.help_org_details()
        return

    name = args[0]

    details = self.client.org.getDetails(self.session, name)

    print 'Name: %s' % details.get('name')
    print 'Active Users: %i' % details.get('active_users')
    print 'Systems: %i' % details.get('systems')

    # trusts is optional, which is annoying...
    if details.has_key('trusts'):
        print 'Trusts: %i' % details.get('trusts')
    else:
        print 'Trusts: %i' % 0

    print 'System Groups: %i' % details.get('system_groups')
    print 'Activation Keys: %i' % details.get('activation_keys')
    print 'Kickstart Profiles: %i' % details.get('kickstart_profiles')
    print 'Configuration Channels: %i' % details.get('configuration_channels')

####################


def help_org_setsystementitlements(self):
    print "org_setsystementitlements: Sets an organization's system",
    print "entitlements"
    print 'usage: org_setsystementitlements ORG ENTITLEMENT VALUE'


def complete_org_setsystementitlements(self, text, line, beg, end):
    parts = shlex.split(line)

    if line[-1] == ' ':
        parts.append('')

    if len(parts) == 2:
        return tab_completer(self.do_org_list('', True), text)


def do_org_setsystementitlements(self, args):
    (args, _options) = parse_arguments(args)

    if not len(args):
        self.help_org_setsystementitlements()
        return

    org_id = self.get_org_id(args[0])
    label = args[1]

    try:
        value = int(args[2])
    except ValueError:
        logging.error('Value must be an integer')
        return

    self.client.org.setSystemEntitlements(self.session, org_id, label, value)
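####################


def _org_api_example():
    # Usage sketch, never called by spacecmd itself: the handlers above are
    # thin wrappers over Spacewalk's XML-RPC API, so the same org calls can
    # be scripted directly. The server URL and credentials below are
    # placeholders, not real endpoints.
    import xmlrpclib

    client = xmlrpclib.Server('https://satellite.example.com/rpc/api')
    session = client.auth.login('admin', 'password')

    try:
        # the same call do_org_list() issues through self.client
        for org in client.org.listOrgs(session):
            print org.get('name')
    finally:
        client.auth.logout(session)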
gpl-2.0
ftomassetti/intellij-community
python/lib/Lib/site-packages/django/contrib/gis/db/models/sql/where.py
309
3938
from django.db.models.fields import Field, FieldDoesNotExist from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.sql.expressions import SQLEvaluator from django.db.models.sql.where import Constraint, WhereNode from django.contrib.gis.db.models.fields import GeometryField class GeoConstraint(Constraint): """ This subclass overrides `process` to better handle geographic SQL construction. """ def __init__(self, init_constraint): self.alias = init_constraint.alias self.col = init_constraint.col self.field = init_constraint.field def process(self, lookup_type, value, connection): if isinstance(value, SQLEvaluator): # Make sure the F Expression destination field exists, and # set an `srid` attribute with the same as that of the # destination. geo_fld = GeoWhereNode._check_geo_field(value.opts, value.expression.name) if not geo_fld: raise ValueError('No geographic field found in expression.') value.srid = geo_fld.srid db_type = self.field.db_type(connection=connection) params = self.field.get_db_prep_lookup(lookup_type, value, connection=connection) return (self.alias, self.col, db_type), params class GeoWhereNode(WhereNode): """ Used to represent the SQL where-clause for spatial databases -- these are tied to the GeoQuery class that created it. """ def add(self, data, connector): if isinstance(data, (list, tuple)): obj, lookup_type, value = data if ( isinstance(obj, Constraint) and isinstance(obj.field, GeometryField) ): data = (GeoConstraint(obj), lookup_type, value) super(GeoWhereNode, self).add(data, connector) def make_atom(self, child, qn, connection): lvalue, lookup_type, value_annot, params_or_value = child if isinstance(lvalue, GeoConstraint): data, params = lvalue.process(lookup_type, params_or_value, connection) spatial_sql = connection.ops.spatial_lookup_sql(data, lookup_type, params_or_value, lvalue.field, qn) return spatial_sql, params else: return super(GeoWhereNode, self).make_atom(child, qn, connection) @classmethod def _check_geo_field(cls, opts, lookup): """ Utility for checking the given lookup with the given model options. The lookup is a string either specifying the geographic field, e.g. 'point, 'the_geom', or a related lookup on a geographic field like 'address__point'. If a GeometryField exists according to the given lookup on the model options, it will be returned. Otherwise returns None. """ # This takes into account the situation where the lookup is a # lookup to a related geographic field, e.g., 'address__point'. field_list = lookup.split(LOOKUP_SEP) # Reversing so list operates like a queue of related lookups, # and popping the top lookup. field_list.reverse() fld_name = field_list.pop() try: geo_fld = opts.get_field(fld_name) # If the field list is still around, then it means that the # lookup was for a geometry field across a relationship -- # thus we keep on getting the related model options and the # model field associated with the next field in the list # until there's no more left. while len(field_list): opts = geo_fld.rel.to._meta geo_fld = opts.get_field(field_list.pop()) except (FieldDoesNotExist, AttributeError): return False # Finally, make sure we got a Geographic field and return. if isinstance(geo_fld, GeometryField): return geo_fld else: return False
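# Standalone sketch (not Django API) of the traversal used by
# _check_geo_field above: reversing the lookup parts turns list.pop() into
# a left-to-right queue over the related lookups.
def _lookup_queue_demo(lookup='address__point'):
    parts = lookup.split(LOOKUP_SEP)
    parts.reverse()
    visited = []
    while parts:
        # 'address' is consumed first, then 'point'
        visited.append(parts.pop())
    return visited  # ['address', 'point']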
apache-2.0
stonegithubs/odoo
addons/website/tests/test_crawl.py
251
3415
# -*- coding: utf-8 -*- import logging import urlparse import time import lxml.html import openerp import re _logger = logging.getLogger(__name__) class Crawler(openerp.tests.HttpCase): """ Test suite crawling an openerp CMS instance and checking that all internal links lead to a 200 response. If a username and a password are provided, authenticates the user before starting the crawl """ at_install = False post_install = True def crawl(self, url, seen=None, msg=''): if seen == None: seen = set() url_slug = re.sub(r"[/](([^/=?&]+-)?[0-9]+)([/]|$)", '/<slug>/', url) url_slug = re.sub(r"([^/=?&]+)=[^/=?&]+", '\g<1>=param', url_slug) if url_slug in seen: return seen else: seen.add(url_slug) _logger.info("%s %s", msg, url) r = self.url_open(url) code = r.getcode() self.assertIn( code, xrange(200, 300), "%s Fetching %s returned error response (%d)" % (msg, url, code)) if r.info().gettype() == 'text/html': doc = lxml.html.fromstring(r.read()) for link in doc.xpath('//a[@href]'): href = link.get('href') parts = urlparse.urlsplit(href) # href with any fragment removed href = urlparse.urlunsplit(( parts.scheme, parts.netloc, parts.path, parts.query, '' )) # FIXME: handle relative link (not parts.path.startswith /) if parts.netloc or \ not parts.path.startswith('/') or \ parts.path == '/web' or\ parts.path.startswith('/web/') or \ parts.path.startswith('/en_US/') or \ (parts.scheme and parts.scheme not in ('http', 'https')): continue self.crawl(href, seen, msg) return seen def test_10_crawl_public(self): t0 = time.time() t0_sql = self.registry.test_cr.sql_log_count seen = self.crawl('/', msg='Anonymous Coward') count = len(seen) duration = time.time() - t0 sql = self.registry.test_cr.sql_log_count - t0_sql _logger.log(25, "public crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request, ", count, duration, sql, duration/count, float(sql)/count) def test_20_crawl_demo(self): t0 = time.time() t0_sql = self.registry.test_cr.sql_log_count self.authenticate('demo', 'demo') seen = self.crawl('/', msg='demo') count = len(seen) duration = time.time() - t0 sql = self.registry.test_cr.sql_log_count - t0_sql _logger.log(25, "demo crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count) def test_30_crawl_admin(self): t0 = time.time() t0_sql = self.registry.test_cr.sql_log_count self.authenticate('admin', 'admin') seen = self.crawl('/', msg='admin') count = len(seen) duration = time.time() - t0 sql = self.registry.test_cr.sql_log_count - t0_sql _logger.log(25, "admin crawled %s urls in %.2fs %s queries, %.3fs %.2fq per request", count, duration, sql, duration/count, float(sql)/count)
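def _slug_examples():
    """Illustrative check (made-up URLs) of the slug normalisation used by
    Crawler.crawl: numeric ids and query-string values collapse, so each
    page *shape* is fetched only once."""
    urls = [
        '/shop/product/ipad-mini-16/',
        '/shop/product/ipad-retina-17/',
        '/page/website.contactus?tag=42',
    ]
    slugs = set()
    for url in urls:
        slug = re.sub(r"[/](([^/=?&]+-)?[0-9]+)([/]|$)", '/<slug>/', url)
        slug = re.sub(r"([^/=?&]+)=[^/=?&]+", '\g<1>=param', slug)
        slugs.add(slug)
    # both product URLs map to '/shop/product/<slug>/', so only two
    # distinct slugs remain
    return sorted(slugs)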
agpl-3.0
diaevd/android_kernel_samsung_sm-t325
tools/perf/scripts/python/syscall-counts-by-pid.py
11180
1927
# system call counts, by pid # (c) 2010, Tom Zanussi <[email protected]> # Licensed under the terms of the GNU GPL License version 2 # # Displays system-wide system call totals, broken down by syscall. # If a [comm] arg is specified, only syscalls called by [comm] are displayed. import os, sys sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import syscall_name usage = "perf script -s syscall-counts-by-pid.py [comm]\n"; for_comm = None for_pid = None if len(sys.argv) > 2: sys.exit(usage) if len(sys.argv) > 1: try: for_pid = int(sys.argv[1]) except: for_comm = sys.argv[1] syscalls = autodict() def trace_begin(): print "Press control+C to stop and show the summary" def trace_end(): print_syscall_totals() def raw_syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, id, args): if (for_comm and common_comm != for_comm) or \ (for_pid and common_pid != for_pid ): return try: syscalls[common_comm][common_pid][id] += 1 except TypeError: syscalls[common_comm][common_pid][id] = 1 def print_syscall_totals(): if for_comm is not None: print "\nsyscall events for %s:\n\n" % (for_comm), else: print "\nsyscall events by comm/pid:\n\n", print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "----------"), comm_keys = syscalls.keys() for comm in comm_keys: pid_keys = syscalls[comm].keys() for pid in pid_keys: print "\n%s [%d]\n" % (comm, pid), id_keys = syscalls[comm][pid].keys() for id, val in sorted(syscalls[comm][pid].iteritems(), \ key = lambda(k, v): (v, k), reverse = True): print " %-38s %10d\n" % (syscall_name(id), val),
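# Standalone sketch (made-up events) of the nested comm -> pid -> syscall
# tally kept in the autodict above, using plain defaultdicts instead of
# perf's helper.
def _tally_demo():
    from collections import defaultdict

    counts = defaultdict(lambda: defaultdict(lambda: defaultdict(int)))
    events = [('bash', 1234, 3), ('bash', 1234, 3), ('bash', 1234, 0)]

    for comm, pid, syscall_id in events:
        counts[comm][pid][syscall_id] += 1

    for comm in counts:
        for pid in counts[comm]:
            # highest counts first, mirroring print_syscall_totals()
            for sid, val in sorted(counts[comm][pid].items(),
                                   key=lambda kv: kv[1], reverse=True):
                print "%s [%d] syscall %d: %d" % (comm, pid, sid, val)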
gpl-2.0
prospwro/odoo
addons/sale_mrp/__init__.py
445
1062
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import sale_mrp # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
yencarnacion/jaikuengine
.google_appengine/lib/django-1.4/tests/regressiontests/one_to_one_regress/tests.py
26
4753
from __future__ import absolute_import from django.test import TestCase from .models import Place, Restaurant, Bar, Favorites, Target, UndergroundBar class OneToOneRegressionTests(TestCase): def setUp(self): self.p1 = Place(name='Demon Dogs', address='944 W. Fullerton') self.p1.save() self.r1 = Restaurant(place=self.p1, serves_hot_dogs=True, serves_pizza=False) self.r1.save() self.b1 = Bar(place=self.p1, serves_cocktails=False) self.b1.save() def test_reverse_relationship_cache_cascade(self): """ Regression test for #9023: accessing the reverse relationship shouldn't result in a cascading delete(). """ bar = UndergroundBar.objects.create(place=self.p1, serves_cocktails=False) # The bug in #9023: if you access the one-to-one relation *before* # setting to None and deleting, the cascade happens anyway. self.p1.undergroundbar bar.place.name='foo' bar.place = None bar.save() self.p1.delete() self.assertEqual(Place.objects.all().count(), 0) self.assertEqual(UndergroundBar.objects.all().count(), 1) def test_create_models_m2m(self): """ Regression test for #1064 and #1506 Check that we create models via the m2m relation if the remote model has a OneToOneField. """ f = Favorites(name = 'Fred') f.save() f.restaurants = [self.r1] self.assertQuerysetEqual( f.restaurants.all(), ['<Restaurant: Demon Dogs the restaurant>'] ) def test_reverse_object_cache(self): """ Regression test for #7173 Check that the name of the cache for the reverse object is correct. """ self.assertEqual(self.p1.restaurant, self.r1) self.assertEqual(self.p1.bar, self.b1) def test_related_object_cache(self): """ Regression test for #6886 (the related-object cache) """ # Look up the objects again so that we get "fresh" objects p = Place.objects.get(name="Demon Dogs") r = p.restaurant # Accessing the related object again returns the exactly same object self.assertTrue(p.restaurant is r) # But if we kill the cache, we get a new object del p._restaurant_cache self.assertFalse(p.restaurant is r) # Reassigning the Restaurant object results in an immediate cache update # We can't use a new Restaurant because that'll violate one-to-one, but # with a new *instance* the is test below will fail if #6886 regresses. r2 = Restaurant.objects.get(pk=r.pk) p.restaurant = r2 self.assertTrue(p.restaurant is r2) # Assigning None succeeds if field is null=True. ug_bar = UndergroundBar.objects.create(place=p, serves_cocktails=False) ug_bar.place = None self.assertTrue(ug_bar.place is None) # Assigning None fails: Place.restaurant is null=False self.assertRaises(ValueError, setattr, p, 'restaurant', None) # You also can't assign an object of the wrong type here self.assertRaises(ValueError, setattr, p, 'restaurant', p) # Creation using keyword argument should cache the related object. p = Place.objects.get(name="Demon Dogs") r = Restaurant(place=p) self.assertTrue(r.place is p) # Creation using keyword argument and unsaved related instance (#8070). p = Place() r = Restaurant(place=p) self.assertTrue(r.place is p) # Creation using attname keyword argument and an id will cause the related # object to be fetched. p = Place.objects.get(name="Demon Dogs") r = Restaurant(place_id=p.id) self.assertFalse(r.place is p) self.assertEqual(r.place, p) def test_filter_one_to_one_relations(self): """ Regression test for #9968 filtering reverse one-to-one relations with primary_key=True was misbehaving. We test both (primary_key=True & False) cases here to prevent any reappearance of the problem. 
""" t = Target.objects.create() self.assertQuerysetEqual( Target.objects.filter(pointer=None), ['<Target: Target object>'] ) self.assertQuerysetEqual( Target.objects.exclude(pointer=None), [] ) self.assertQuerysetEqual( Target.objects.filter(pointer2=None), ['<Target: Target object>'] ) self.assertQuerysetEqual( Target.objects.exclude(pointer2=None), [] )
apache-2.0
sharhar/USB-Thing
UpdaterFiles/Lib/python-3.5.1.amd64/Lib/unittest/result.py
8
7441
"""Test result object""" import io import sys import traceback from . import util from functools import wraps __unittest = True def failfast(method): @wraps(method) def inner(self, *args, **kw): if getattr(self, 'failfast', False): self.stop() return method(self, *args, **kw) return inner STDOUT_LINE = '\nStdout:\n%s' STDERR_LINE = '\nStderr:\n%s' class TestResult(object): """Holder for test result information. Test results are automatically managed by the TestCase and TestSuite classes, and do not need to be explicitly manipulated by writers of tests. Each instance holds the total number of tests run, and collections of failures and errors that occurred among those test runs. The collections contain tuples of (testcase, exceptioninfo), where exceptioninfo is the formatted traceback of the error that occurred. """ _previousTestClass = None _testRunEntered = False _moduleSetUpFailed = False def __init__(self, stream=None, descriptions=None, verbosity=None): self.failfast = False self.failures = [] self.errors = [] self.testsRun = 0 self.skipped = [] self.expectedFailures = [] self.unexpectedSuccesses = [] self.shouldStop = False self.buffer = False self.tb_locals = False self._stdout_buffer = None self._stderr_buffer = None self._original_stdout = sys.stdout self._original_stderr = sys.stderr self._mirrorOutput = False def printErrors(self): "Called by TestRunner after test run" def startTest(self, test): "Called when the given test is about to be run" self.testsRun += 1 self._mirrorOutput = False self._setupStdout() def _setupStdout(self): if self.buffer: if self._stderr_buffer is None: self._stderr_buffer = io.StringIO() self._stdout_buffer = io.StringIO() sys.stdout = self._stdout_buffer sys.stderr = self._stderr_buffer def startTestRun(self): """Called once before any tests are executed. See startTest for a method called before each test. """ def stopTest(self, test): """Called when the given test has been run""" self._restoreStdout() self._mirrorOutput = False def _restoreStdout(self): if self.buffer: if self._mirrorOutput: output = sys.stdout.getvalue() error = sys.stderr.getvalue() if output: if not output.endswith('\n'): output += '\n' self._original_stdout.write(STDOUT_LINE % output) if error: if not error.endswith('\n'): error += '\n' self._original_stderr.write(STDERR_LINE % error) sys.stdout = self._original_stdout sys.stderr = self._original_stderr self._stdout_buffer.seek(0) self._stdout_buffer.truncate() self._stderr_buffer.seek(0) self._stderr_buffer.truncate() def stopTestRun(self): """Called once after all tests are executed. See stopTest for a method called after each test. """ @failfast def addError(self, test, err): """Called when an error has occurred. 'err' is a tuple of values as returned by sys.exc_info(). """ self.errors.append((test, self._exc_info_to_string(err, test))) self._mirrorOutput = True @failfast def addFailure(self, test, err): """Called when an error has occurred. 'err' is a tuple of values as returned by sys.exc_info().""" self.failures.append((test, self._exc_info_to_string(err, test))) self._mirrorOutput = True def addSubTest(self, test, subtest, err): """Called at the end of a subtest. 'err' is None if the subtest ended successfully, otherwise it's a tuple of values as returned by sys.exc_info(). """ # By default, we don't do anything with successful subtests, but # more sophisticated test results might want to record them. 
if err is not None: if getattr(self, 'failfast', False): self.stop() if issubclass(err[0], test.failureException): errors = self.failures else: errors = self.errors errors.append((subtest, self._exc_info_to_string(err, test))) self._mirrorOutput = True def addSuccess(self, test): "Called when a test has completed successfully" pass def addSkip(self, test, reason): """Called when a test is skipped.""" self.skipped.append((test, reason)) def addExpectedFailure(self, test, err): """Called when an expected failure/error occured.""" self.expectedFailures.append( (test, self._exc_info_to_string(err, test))) @failfast def addUnexpectedSuccess(self, test): """Called when a test was expected to fail, but succeed.""" self.unexpectedSuccesses.append(test) def wasSuccessful(self): """Tells whether or not this result was a success.""" # The hasattr check is for test_result's OldResult test. That # way this method works on objects that lack the attribute. # (where would such result intances come from? old stored pickles?) return ((len(self.failures) == len(self.errors) == 0) and (not hasattr(self, 'unexpectedSuccesses') or len(self.unexpectedSuccesses) == 0)) def stop(self): """Indicates that the tests should be aborted.""" self.shouldStop = True def _exc_info_to_string(self, err, test): """Converts a sys.exc_info()-style tuple of values into a string.""" exctype, value, tb = err # Skip test runner traceback levels while tb and self._is_relevant_tb_level(tb): tb = tb.tb_next if exctype is test.failureException: # Skip assert*() traceback levels length = self._count_relevant_tb_levels(tb) else: length = None tb_e = traceback.TracebackException( exctype, value, tb, limit=length, capture_locals=self.tb_locals) msgLines = list(tb_e.format()) if self.buffer: output = sys.stdout.getvalue() error = sys.stderr.getvalue() if output: if not output.endswith('\n'): output += '\n' msgLines.append(STDOUT_LINE % output) if error: if not error.endswith('\n'): error += '\n' msgLines.append(STDERR_LINE % error) return ''.join(msgLines) def _is_relevant_tb_level(self, tb): return '__unittest' in tb.tb_frame.f_globals def _count_relevant_tb_levels(self, tb): length = 0 while tb and not self._is_relevant_tb_level(tb): length += 1 tb = tb.tb_next return length def __repr__(self): return ("<%s run=%i errors=%i failures=%i>" % (util.strclass(self.__class__), self.testsRun, len(self.errors), len(self.failures)))
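# Usage sketch: TestResult is normally driven by a runner, but it can be
# exercised directly; the test case here is purely illustrative.
def _demo_direct_use():
    import unittest

    class _Demo(unittest.TestCase):
        def test_ok(self):
            self.assertTrue(True)

        def test_bad(self):
            self.fail('boom')

    result = TestResult()
    unittest.TestLoader().loadTestsFromTestCase(_Demo).run(result)
    # result.testsRun == 2, one entry in result.failures, and
    # result.wasSuccessful() is False
    return result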
apache-2.0
asm666/sympy
sympy/functions/elementary/piecewise.py
69
23292
from __future__ import print_function, division from sympy.core import Basic, S, Function, diff, Tuple from sympy.core.relational import Equality, Relational from sympy.functions.elementary.miscellaneous import Max, Min from sympy.logic.boolalg import (And, Boolean, distribute_and_over_or, Not, Or, true, false) from sympy.core.compatibility import default_sort_key, range class ExprCondPair(Tuple): """Represents an expression, condition pair.""" def __new__(cls, expr, cond): if cond == True: return Tuple.__new__(cls, expr, true) elif cond == False: return Tuple.__new__(cls, expr, false) return Tuple.__new__(cls, expr, cond) @property def expr(self): """ Returns the expression of this pair. """ return self.args[0] @property def cond(self): """ Returns the condition of this pair. """ return self.args[1] @property def free_symbols(self): """ Return the free symbols of this pair. """ # Overload Basic.free_symbols because self.args[1] may contain non-Basic result = self.expr.free_symbols if hasattr(self.cond, 'free_symbols'): result |= self.cond.free_symbols return result @property def is_commutative(self): return self.expr.is_commutative def __iter__(self): yield self.expr yield self.cond class Piecewise(Function): """ Represents a piecewise function. Usage: Piecewise( (expr,cond), (expr,cond), ... ) - Each argument is a 2-tuple defining an expression and condition - The conds are evaluated in turn returning the first that is True. If any of the evaluated conds are not determined explicitly False, e.g. x < 1, the function is returned in symbolic form. - If the function is evaluated at a place where all conditions are False, a ValueError exception will be raised. - Pairs where the cond is explicitly False, will be removed. Examples ======== >>> from sympy import Piecewise, log >>> from sympy.abc import x >>> f = x**2 >>> g = log(x) >>> p = Piecewise( (0, x<-1), (f, x<=1), (g, True)) >>> p.subs(x,1) 1 >>> p.subs(x,5) log(5) See Also ======== piecewise_fold """ nargs = None is_Piecewise = True def __new__(cls, *args, **options): # (Try to) sympify args first newargs = [] for ec in args: # ec could be a ExprCondPair or a tuple pair = ExprCondPair(*getattr(ec, 'args', ec)) cond = pair.cond if cond == false: continue if not isinstance(cond, (bool, Relational, Boolean)): raise TypeError( "Cond %s is of type %s, but must be a Relational," " Boolean, or a built-in bool." % (cond, type(cond))) newargs.append(pair) if cond == True: break if options.pop('evaluate', True): r = cls.eval(*newargs) else: r = None if r is None: return Basic.__new__(cls, *newargs, **options) else: return r @classmethod def eval(cls, *args): # Check for situations where we can evaluate the Piecewise object. # 1) Hit an unevaluable cond (e.g. x<1) -> keep object # 2) Hit a true condition -> return that expr # 3) Remove false conditions, if no conditions left -> raise ValueError all_conds_evaled = True # Do all conds eval to a bool? piecewise_again = False # Should we pass args to Piecewise again? non_false_ecpairs = [] or1 = Or(*[cond for (_, cond) in args if cond != true]) for expr, cond in args: # Check here if expr is a Piecewise and collapse if one of # the conds in expr matches cond. This allows the collapsing # of Piecewise((Piecewise(x,x<0),x<0)) to Piecewise((x,x<0)). # This is important when using piecewise_fold to simplify # multiple Piecewise instances having the same conds. 
# Eventually, this code should be able to collapse Piecewise's # having different intervals, but this will probably require # using the new assumptions. if isinstance(expr, Piecewise): or2 = Or(*[c for (_, c) in expr.args if c != true]) for e, c in expr.args: # Don't collapse if cond is "True" as this leads to # incorrect simplifications with nested Piecewises. if c == cond and (or1 == or2 or cond != true): expr = e piecewise_again = True cond_eval = cls.__eval_cond(cond) if cond_eval is None: all_conds_evaled = False elif cond_eval: if all_conds_evaled: return expr if len(non_false_ecpairs) != 0: if non_false_ecpairs[-1].cond == cond: continue elif non_false_ecpairs[-1].expr == expr: newcond = Or(cond, non_false_ecpairs[-1].cond) if isinstance(newcond, (And, Or)): newcond = distribute_and_over_or(newcond) non_false_ecpairs[-1] = ExprCondPair(expr, newcond) continue non_false_ecpairs.append(ExprCondPair(expr, cond)) if len(non_false_ecpairs) != len(args) or piecewise_again: return cls(*non_false_ecpairs) return None def doit(self, **hints): """ Evaluate this piecewise function. """ newargs = [] for e, c in self.args: if hints.get('deep', True): if isinstance(e, Basic): e = e.doit(**hints) if isinstance(c, Basic): c = c.doit(**hints) newargs.append((e, c)) return self.func(*newargs) def _eval_as_leading_term(self, x): for e, c in self.args: if c == True or c.subs(x, 0) == True: return e.as_leading_term(x) def _eval_adjoint(self): return self.func(*[(e.adjoint(), c) for e, c in self.args]) def _eval_conjugate(self): return self.func(*[(e.conjugate(), c) for e, c in self.args]) def _eval_derivative(self, x): return self.func(*[(diff(e, x), c) for e, c in self.args]) def _eval_evalf(self, prec): return self.func(*[(e.evalf(prec), c) for e, c in self.args]) def _eval_integral(self, x): from sympy.integrals import integrate return self.func(*[(integrate(e, x), c) for e, c in self.args]) def _eval_interval(self, sym, a, b): """Evaluates the function along the sym in a given interval ab""" # FIXME: Currently complex intervals are not supported. 
A possible # replacement algorithm, discussed in issue 5227, can be found in the # following papers; # http://portal.acm.org/citation.cfm?id=281649 # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.70.4127&rep=rep1&type=pdf if a is None or b is None: # In this case, it is just simple substitution return piecewise_fold( super(Piecewise, self)._eval_interval(sym, a, b)) mul = 1 if (a == b) == True: return S.Zero elif (a > b) == True: a, b, mul = b, a, -1 elif (a <= b) != True: newargs = [] for e, c in self.args: intervals = self._sort_expr_cond( sym, S.NegativeInfinity, S.Infinity, c) values = [] for lower, upper, expr in intervals: if (a < lower) == True: mid = lower rep = b val = e._eval_interval(sym, mid, b) val += self._eval_interval(sym, a, mid) elif (a > upper) == True: mid = upper rep = b val = e._eval_interval(sym, mid, b) val += self._eval_interval(sym, a, mid) elif (a >= lower) == True and (a <= upper) == True: rep = b val = e._eval_interval(sym, a, b) elif (b < lower) == True: mid = lower rep = a val = e._eval_interval(sym, a, mid) val += self._eval_interval(sym, mid, b) elif (b > upper) == True: mid = upper rep = a val = e._eval_interval(sym, a, mid) val += self._eval_interval(sym, mid, b) elif ((b >= lower) == True) and ((b <= upper) == True): rep = a val = e._eval_interval(sym, a, b) else: raise NotImplementedError( """The evaluation of a Piecewise interval when both the lower and the upper limit are symbolic is not yet implemented.""") values.append(val) if len(set(values)) == 1: try: c = c.subs(sym, rep) except AttributeError: pass e = values[0] newargs.append((e, c)) else: for i in range(len(values)): newargs.append((values[i], (c == True and i == len(values) - 1) or And(rep >= intervals[i][0], rep <= intervals[i][1]))) return self.func(*newargs) # Determine what intervals the expr,cond pairs affect. int_expr = self._sort_expr_cond(sym, a, b) # Finally run through the intervals and sum the evaluation. ret_fun = 0 for int_a, int_b, expr in int_expr: if isinstance(expr, Piecewise): # If we still have a Piecewise by now, _sort_expr_cond would # already have determined that its conditions are independent # of the integration variable, thus we just use substitution. ret_fun += piecewise_fold( super(Piecewise, expr)._eval_interval(sym, Max(a, int_a), Min(b, int_b))) else: ret_fun += expr._eval_interval(sym, Max(a, int_a), Min(b, int_b)) return mul * ret_fun def _sort_expr_cond(self, sym, a, b, targetcond=None): """Determine what intervals the expr, cond pairs affect. 1) If cond is True, then log it as default 1.1) Currently if cond can't be evaluated, throw NotImplementedError. 2) For each inequality, if previous cond defines part of the interval update the new conds interval. - eg x < 1, x < 3 -> [oo,1],[1,3] instead of [oo,1],[oo,3] 3) Sort the intervals to make it easier to find correct exprs Under normal use, we return the expr,cond pairs in increasing order along the real axis corresponding to the symbol sym. 
If targetcond is given, we return a list of (lowerbound, upperbound) pairs for this condition.""" from sympy.solvers.inequalities import _solve_inequality default = None int_expr = [] expr_cond = [] or_cond = False or_intervals = [] independent_expr_cond = [] for expr, cond in self.args: if isinstance(cond, Or): for cond2 in sorted(cond.args, key=default_sort_key): expr_cond.append((expr, cond2)) else: expr_cond.append((expr, cond)) if cond == True: break for expr, cond in expr_cond: if cond == True: independent_expr_cond.append((expr, cond)) default = self.func(*independent_expr_cond) break orig_cond = cond if sym not in cond.free_symbols: independent_expr_cond.append((expr, cond)) continue elif isinstance(cond, Equality): continue elif isinstance(cond, And): lower = S.NegativeInfinity upper = S.Infinity for cond2 in cond.args: if sym not in [cond2.lts, cond2.gts]: cond2 = _solve_inequality(cond2, sym) if cond2.lts == sym: upper = Min(cond2.gts, upper) elif cond2.gts == sym: lower = Max(cond2.lts, lower) else: raise NotImplementedError( "Unable to handle interval evaluation of expression.") else: if sym not in [cond.lts, cond.gts]: cond = _solve_inequality(cond, sym) lower, upper = cond.lts, cond.gts # part 1: initialize with givens if cond.lts == sym: # part 1a: expand the side ... lower = S.NegativeInfinity # e.g. x <= 0 ---> -oo <= 0 elif cond.gts == sym: # part 1a: ... that can be expanded upper = S.Infinity # e.g. x >= 0 ---> oo >= 0 else: raise NotImplementedError( "Unable to handle interval evaluation of expression.") # part 1b: Reduce (-)infinity to what was passed in. lower, upper = Max(a, lower), Min(b, upper) for n in range(len(int_expr)): # Part 2: remove any interval overlap. For any conflicts, the # iterval already there wins, and the incoming interval updates # its bounds accordingly. if self.__eval_cond(lower < int_expr[n][1]) and \ self.__eval_cond(lower >= int_expr[n][0]): lower = int_expr[n][1] elif len(int_expr[n][1].free_symbols) and \ self.__eval_cond(lower >= int_expr[n][0]): if self.__eval_cond(lower == int_expr[n][0]): lower = int_expr[n][1] else: int_expr[n][1] = Min(lower, int_expr[n][1]) elif len(int_expr[n][0].free_symbols) and \ self.__eval_cond(upper == int_expr[n][1]): upper = Min(upper, int_expr[n][0]) elif len(int_expr[n][1].free_symbols) and \ (lower >= int_expr[n][0]) != True and \ (int_expr[n][1] == Min(lower, upper)) != True: upper = Min(upper, int_expr[n][0]) elif self.__eval_cond(upper > int_expr[n][0]) and \ self.__eval_cond(upper <= int_expr[n][1]): upper = int_expr[n][0] elif len(int_expr[n][0].free_symbols) and \ self.__eval_cond(upper < int_expr[n][1]): int_expr[n][0] = Max(upper, int_expr[n][0]) if self.__eval_cond(lower >= upper) != True: # Is it still an interval? 
int_expr.append([lower, upper, expr]) if orig_cond == targetcond: return [(lower, upper, None)] elif isinstance(targetcond, Or) and cond in targetcond.args: or_cond = Or(or_cond, cond) or_intervals.append((lower, upper, None)) if or_cond == targetcond: or_intervals.sort(key=lambda x: x[0]) return or_intervals int_expr.sort(key=lambda x: x[1].sort_key( ) if x[1].is_number else S.NegativeInfinity.sort_key()) int_expr.sort(key=lambda x: x[0].sort_key( ) if x[0].is_number else S.Infinity.sort_key()) for n in range(len(int_expr)): if len(int_expr[n][0].free_symbols) or len(int_expr[n][1].free_symbols): if isinstance(int_expr[n][1], Min) or int_expr[n][1] == b: newval = Min(*int_expr[n][:-1]) if n > 0 and int_expr[n][0] == int_expr[n - 1][1]: int_expr[n - 1][1] = newval int_expr[n][0] = newval else: newval = Max(*int_expr[n][:-1]) if n < len(int_expr) - 1 and int_expr[n][1] == int_expr[n + 1][0]: int_expr[n + 1][0] = newval int_expr[n][1] = newval # Add holes to list of intervals if there is a default value, # otherwise raise a ValueError. holes = [] curr_low = a for int_a, int_b, expr in int_expr: if (curr_low < int_a) == True: holes.append([curr_low, Min(b, int_a), default]) elif (curr_low >= int_a) != True: holes.append([curr_low, Min(b, int_a), default]) curr_low = Min(b, int_b) if (curr_low < b) == True: holes.append([Min(b, curr_low), b, default]) elif (curr_low >= b) != True: holes.append([Min(b, curr_low), b, default]) if holes and default is not None: int_expr.extend(holes) if targetcond == True: return [(h[0], h[1], None) for h in holes] elif holes and default is None: raise ValueError("Called interval evaluation over piecewise " "function on undefined intervals %s" % ", ".join([str((h[0], h[1])) for h in holes])) return int_expr def _eval_nseries(self, x, n, logx): args = [(ec.expr._eval_nseries(x, n, logx), ec.cond) for ec in self.args] return self.func(*args) def _eval_power(self, s): return self.func(*[(e**s, c) for e, c in self.args]) def _eval_subs(self, old, new): """ Piecewise conditions may contain bool which are not of Basic type. 
""" args = list(self.args) for i, (e, c) in enumerate(args): if isinstance(c, bool): pass elif isinstance(c, Basic): c = c._subs(old, new) if c != False: e = e._subs(old, new) args[i] = e, c if c == True: return self.func(*args) return self.func(*args) def _eval_transpose(self): return self.func(*[(e.transpose(), c) for e, c in self.args]) def _eval_template_is_attr(self, is_attr, when_multiple=None): b = None for expr, _ in self.args: a = getattr(expr, is_attr) if a is None: return None if b is None: b = a elif b is not a: return when_multiple return b _eval_is_finite = lambda self: self._eval_template_is_attr( 'is_finite', when_multiple=False) _eval_is_complex = lambda self: self._eval_template_is_attr('is_complex') _eval_is_even = lambda self: self._eval_template_is_attr('is_even') _eval_is_imaginary = lambda self: self._eval_template_is_attr( 'is_imaginary') _eval_is_integer = lambda self: self._eval_template_is_attr('is_integer') _eval_is_irrational = lambda self: self._eval_template_is_attr( 'is_irrational') _eval_is_negative = lambda self: self._eval_template_is_attr('is_negative') _eval_is_nonnegative = lambda self: self._eval_template_is_attr( 'is_nonnegative') _eval_is_nonpositive = lambda self: self._eval_template_is_attr( 'is_nonpositive') _eval_is_nonzero = lambda self: self._eval_template_is_attr( 'is_nonzero', when_multiple=True) _eval_is_odd = lambda self: self._eval_template_is_attr('is_odd') _eval_is_polar = lambda self: self._eval_template_is_attr('is_polar') _eval_is_positive = lambda self: self._eval_template_is_attr('is_positive') _eval_is_real = lambda self: self._eval_template_is_attr('is_real') _eval_is_zero = lambda self: self._eval_template_is_attr( 'is_zero', when_multiple=False) @classmethod def __eval_cond(cls, cond): """Return the truth value of the condition.""" from sympy.solvers.solvers import checksol if cond == True: return True if isinstance(cond, Equality): if checksol(cond, {}, minimal=True): # the equality is trivially solved return True diff = cond.lhs - cond.rhs if diff.is_commutative: return diff.is_zero return None def as_expr_set_pairs(self): exp_sets = [] U = S.Reals for expr, cond in self.args: cond_int = U.intersect(cond.as_set()) U = U - cond_int exp_sets.append((expr, cond_int)) return exp_sets def piecewise_fold(expr): """ Takes an expression containing a piecewise function and returns the expression in piecewise form. Examples ======== >>> from sympy import Piecewise, piecewise_fold, sympify as S >>> from sympy.abc import x >>> p = Piecewise((x, x < 1), (1, S(1) <= x)) >>> piecewise_fold(x*p) Piecewise((x**2, x < 1), (x, 1 <= x)) See Also ======== Piecewise """ if not isinstance(expr, Basic) or not expr.has(Piecewise): return expr new_args = list(map(piecewise_fold, expr.args)) if expr.func is ExprCondPair: return ExprCondPair(*new_args) piecewise_args = [] for n, arg in enumerate(new_args): if isinstance(arg, Piecewise): piecewise_args.append(n) if len(piecewise_args) > 0: n = piecewise_args[0] new_args = [(expr.func(*(new_args[:n] + [e] + new_args[n + 1:])), c) for e, c in new_args[n].args] if isinstance(expr, Boolean): # If expr is Boolean, we must return some kind of PiecewiseBoolean. # This is constructed by means of Or, And and Not. 
# piecewise_fold(0 < Piecewise( (sin(x), x<0), (cos(x), True))) # can't return Piecewise((0 < sin(x), x < 0), (0 < cos(x), True)) # but instead Or(And(x < 0, 0 < sin(x)), And(0 < cos(x), Not(x<0))) other = True rtn = False for e, c in new_args: rtn = Or(rtn, And(other, c, e)) other = And(other, Not(c)) if len(piecewise_args) > 1: return piecewise_fold(rtn) return rtn if len(piecewise_args) > 1: return piecewise_fold(Piecewise(*new_args)) return Piecewise(*new_args) else: return expr.func(*new_args)
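def _piecewise_demo():
    """Usage sketch mirroring the docstrings above (illustrative only)."""
    from sympy import log
    from sympy.abc import x

    p = Piecewise((0, x < -1), (x**2, x <= 1), (log(x), True))
    assert p.subs(x, 1) == 1
    assert p.subs(x, 5) == log(5)

    # piecewise_fold() pushes the outer multiplication into each branch:
    # x*Piecewise((x, x < 1), (1, True)) -> Piecewise((x**2, x < 1), (x, True))
    return piecewise_fold(x*Piecewise((x, x < 1), (1, True)))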
bsd-3-clause
JamesMura/sentry
src/sentry/models/dsymfile.py
1
18884
""" sentry.models.dsymfile ~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2016 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import os import shutil import hashlib import six import tempfile from requests.exceptions import RequestException from jsonfield import JSONField from itertools import chain from django.db import models, router, transaction, connection, IntegrityError from django.utils import timezone from django.utils.translation import ugettext_lazy as _ from symsynd.macho.arch import get_macho_uuids from sentry.db.models import FlexibleForeignKey, Model, BoundedBigIntegerField, \ sane_repr, BaseManager, BoundedPositiveIntegerField from sentry.models.file import File from sentry.utils.zip import safe_extract_zip from sentry.utils.db import is_sqlite from sentry.utils.native import parse_addr from sentry.constants import KNOWN_DSYM_TYPES from sentry.reprocessing import resolve_processing_issue class VersionDSymFile(Model): __core__ = False objects = BaseManager() dsym_file = FlexibleForeignKey('sentry.ProjectDSymFile', null=True) dsym_app = FlexibleForeignKey('sentry.DSymApp') version = models.CharField(max_length=32) build = models.CharField(max_length=32, null=True) date_added = models.DateTimeField(default=timezone.now) class Meta: app_label = 'sentry' db_table = 'sentry_versiondsymfile' unique_together = (('dsym_file', 'version', 'build'),) # TODO(dcramer): pull in enum library class DSymPlatform(object): GENERIC = 0 APPLE = 1 ANDROID = 2 DSYM_PLATFORMS = { 'generic': DSymPlatform.GENERIC, 'apple': DSymPlatform.APPLE, 'android': DSymPlatform.ANDROID, } def _auto_enrich_data(data, app_id, platform): # If we don't have an icon URL we can try to fetch one from iTunes if 'icon_url' not in data and platform == DSymPlatform.APPLE: from sentry.http import safe_urlopen try: rv = safe_urlopen('http://itunes.apple.com/lookup', params={ 'bundleId': app_id, }) except RequestException: pass else: if rv.ok: rv = rv.json() if rv.get('results'): data['icon_url'] = rv['results'][0]['artworkUrl512'] class DSymAppManager(BaseManager): def create_or_update_app(self, sync_id, app_id, project, data=None, platform=DSymPlatform.GENERIC): if data is None: data = {} _auto_enrich_data(data, app_id, platform) existing_app = DSymApp.objects.filter( app_id=app_id, project=project).first() if existing_app is not None: now = timezone.now() existing_app.update( sync_id=sync_id, data=data, last_synced=now, ) return existing_app return BaseManager.create(self, sync_id=sync_id, app_id=app_id, data=data, project=project, platform=platform ) class DSymApp(Model): __core__ = False objects = DSymAppManager() project = FlexibleForeignKey('sentry.Project') app_id = models.CharField(max_length=64) sync_id = models.CharField(max_length=64, null=True) data = JSONField() platform = BoundedPositiveIntegerField(default=0, choices=( (DSymPlatform.GENERIC, _('Generic')), (DSymPlatform.APPLE, _('Apple')), (DSymPlatform.ANDROID, _('Android')), )) last_synced = models.DateTimeField(default=timezone.now) date_added = models.DateTimeField(default=timezone.now) class Meta: app_label = 'sentry' db_table = 'sentry_dsymapp' unique_together = (('project', 'platform', 'app_id'),) class DSymSDKManager(BaseManager): def enumerate_sdks(self, sdk=None, version=None): """Return a grouped list of SDKs.""" filter = '' args = [] if version is not None: for col, val in zip(['major', 'minor', 'patchlevel'], version.split('.')): if not val.isdigit(): return [] 
filter += ' and k.version_%s = %d' % ( col, int(val) ) if sdk is not None: filter += ' and k.sdk_name = %s' args.append(sdk) cur = connection.cursor() cur.execute(''' select distinct k.*, count(*) as bundle_count, o.cpu_name from sentry_dsymsdk k, sentry_dsymbundle b, sentry_dsymobject o where b.sdk_id = k.id and b.object_id = o.id %s group by k.id, k.sdk_name, o.cpu_name ''' % filter, args) rv = [] for row in cur.fetchall(): row = dict(zip([x[0] for x in cur.description], row)) ver = '%s.%s.%s' % ( row['version_major'], row['version_minor'], row['version_patchlevel'] ) rv.append({ 'sdk_name': row['sdk_name'], 'version': ver, 'build': row['version_build'], 'bundle_count': row['bundle_count'], 'cpu_name': row['cpu_name'], }) return sorted(rv, key=lambda x: (x['sdk_name'], x['version'], x['build'], x['cpu_name'])) class DSymSDK(Model): __core__ = False dsym_type = models.CharField(max_length=20, db_index=True) sdk_name = models.CharField(max_length=20) version_major = models.IntegerField() version_minor = models.IntegerField() version_patchlevel = models.IntegerField() version_build = models.CharField(max_length=40) objects = DSymSDKManager() class Meta: app_label = 'sentry' db_table = 'sentry_dsymsdk' index_together = [ ('version_major', 'version_minor', 'version_patchlevel', 'version_build'), ] class DSymObject(Model): __core__ = False cpu_name = models.CharField(max_length=40) object_path = models.TextField(db_index=True) uuid = models.CharField(max_length=36, db_index=True) vmaddr = BoundedBigIntegerField(null=True) vmsize = BoundedBigIntegerField(null=True) class Meta: app_label = 'sentry' db_table = 'sentry_dsymobject' class DSymBundle(Model): __core__ = False sdk = FlexibleForeignKey('sentry.DSymSDK') object = FlexibleForeignKey('sentry.DSymObject') class Meta: app_label = 'sentry' db_table = 'sentry_dsymbundle' class DSymSymbolManager(BaseManager): def bulk_insert(self, items): db = router.db_for_write(DSymSymbol) items = list(items) if not items: return # On SQLite we don't do this. Two reasons: one, it does not # seem significantly faster and you're an idiot if you import # huge amounts of system symbols into sqlite anyways. secondly # because of the low parameter limit if not is_sqlite(): try: with transaction.atomic(using=db): cur = connection.cursor() cur.execute(''' insert into sentry_dsymsymbol (object_id, address, symbol) values %s ''' % ', '.join(['(%s, %s, %s)'] * len(items)), list(chain(*items))) cur.close() return except IntegrityError: pass cur = connection.cursor() for item in items: cur.execute(''' insert into sentry_dsymsymbol (object_id, address, symbol) select %(object_id)s, %(address)s, %(symbol)s where not exists ( select 1 from sentry_dsymsymbol where object_id = %(object_id)s and address = %(address)s); ''', { 'object_id': item[0], 'address': item[1], 'symbol': item[2], }) cur.close() def lookup_symbol(self, instruction_addr, image_addr, uuid, cpu_name=None, object_path=None, sdk_info=None, image_vmaddr=None): """Finds a system symbol.""" # If we use the "none" dsym type we never return a symbol here. 
if sdk_info is not None and sdk_info['dsym_type'] == 'none': return instruction_addr = parse_addr(instruction_addr) image_addr = parse_addr(image_addr) addr_abs = None if image_vmaddr is not None: image_vmaddr = parse_addr(image_vmaddr) addr_abs = image_vmaddr + instruction_addr - image_addr addr_rel = instruction_addr - image_addr uuid = six.text_type(uuid).lower() cur = connection.cursor() try: # First try: exact match on uuid (addr_rel) cur.execute(''' select s.symbol from sentry_dsymsymbol s, sentry_dsymobject o where o.uuid = %s and s.object_id = o.id and s.address <= o.vmaddr + %s and s.address >= o.vmaddr order by address desc limit 1; ''', [uuid, addr_rel]) rv = cur.fetchone() if rv: return rv[0] # Second try: exact match on uuid (addr_abs) if addr_abs is not None: cur.execute(''' select s.symbol from sentry_dsymsymbol s, sentry_dsymobject o where o.uuid = %s and s.object_id = o.id and s.address <= %s and s.address >= %s order by address desc limit 1; ''', [uuid, addr_abs, image_vmaddr]) rv = cur.fetchone() if rv: return rv[0] # Third try: exact match on path and arch (addr_rel) if sdk_info is None or \ cpu_name is None or \ object_path is None: return cur.execute(''' select s.symbol from sentry_dsymsymbol s, sentry_dsymobject o, sentry_dsymsdk k, sentry_dsymbundle b where b.sdk_id = k.id and b.object_id = o.id and s.object_id = o.id and k.sdk_name = %s and k.dsym_type = %s and k.version_major = %s and k.version_minor = %s and k.version_patchlevel = %s and o.cpu_name = %s and o.object_path = %s and s.address <= o.vmaddr + %s and s.address >= o.vmaddr order by address desc limit 1; ''', [sdk_info['sdk_name'], sdk_info['dsym_type'], sdk_info['version_major'], sdk_info['version_minor'], sdk_info['version_patchlevel'], cpu_name, object_path, addr_rel]) rv = cur.fetchone() if rv: return rv[0] # Fourth try: exact match on path and arch (addr_abs) if addr_abs is not None: cur.execute(''' select s.symbol from sentry_dsymsymbol s, sentry_dsymobject o, sentry_dsymsdk k, sentry_dsymbundle b where b.sdk_id = k.id and b.object_id = o.id and s.object_id = o.id and k.sdk_name = %s and k.dsym_type = %s and k.version_major = %s and k.version_minor = %s and k.version_patchlevel = %s and o.cpu_name = %s and o.object_path = %s and s.address <= %s and s.address >= %s order by address desc limit 1; ''', [sdk_info['sdk_name'], sdk_info['dsym_type'], sdk_info['version_major'], sdk_info['version_minor'], sdk_info['version_patchlevel'], cpu_name, object_path, addr_abs, image_vmaddr]) rv = cur.fetchone() if rv: return rv[0] finally: cur.close() class DSymSymbol(Model): __core__ = False object = FlexibleForeignKey('sentry.DSymObject') address = BoundedBigIntegerField(db_index=True) symbol = models.TextField() objects = DSymSymbolManager() class Meta: app_label = 'sentry' db_table = 'sentry_dsymsymbol' unique_together = [ ('object', 'address'), ] class CommonDSymFile(Model): """ A single dsym file that is associated with a project. 
""" __core__ = False file = FlexibleForeignKey('sentry.File') object_name = models.TextField() cpu_name = models.CharField(max_length=40) __repr__ = sane_repr('object_name', 'cpu_name', 'uuid') class Meta: abstract = True app_label = 'sentry' @property def dsym_type(self): ct = self.file.headers.get('Content-Type').lower() return KNOWN_DSYM_TYPES.get(ct, 'unknown') class ProjectDSymFileManager(BaseManager): def find_missing(self, checksums, project): if not checksums: return[] checksums = [x.lower() for x in checksums] missing = set(checksums) found = ProjectDSymFile.objects.filter( file__checksum__in=checksums, project=project ).values('file__checksum') for values in found: missing.discard(values.values()[0]) return sorted(missing) def find_by_checksums(self, checksums, project): if not checksums: return [] checksums = [x.lower() for x in checksums] return ProjectDSymFile.objects.filter( file__checksum__in=checksums, project=project ) class ProjectDSymFile(CommonDSymFile): project = FlexibleForeignKey('sentry.Project', null=True) uuid = models.CharField(max_length=36) is_global = False objects = ProjectDSymFileManager() class Meta(CommonDSymFile.Meta): unique_together = (('project', 'uuid'),) db_table = 'sentry_projectdsymfile' class GlobalDSymFile(CommonDSymFile): uuid = models.CharField(max_length=36, unique=True) is_global = True class Meta(CommonDSymFile.Meta): db_table = 'sentry_globaldsymfile' def _create_macho_dsym_from_uuid(project, cpu_name, uuid, fileobj, object_name): """This creates a mach dsym file from the given uuid and open file object to a dsym file. This will not verify the uuid. Use `create_files_from_macho_zip` for doing everything. """ extra = {} if project is None: cls = GlobalDSymFile file_type = 'global.dsym' else: cls = ProjectDSymFile extra['project'] = project file_type = 'project.dsym' h = hashlib.sha1() while 1: chunk = fileobj.read(16384) if not chunk: break h.update(chunk) checksum = h.hexdigest() fileobj.seek(0, 0) try: rv = cls.objects.get(uuid=uuid, **extra) if rv.file.checksum == checksum: return rv except cls.DoesNotExist: pass else: # The checksum mismatches. In this case we delete the old object # and perform a re-upload. rv.delete() file = File.objects.create( name=uuid, type=file_type, headers={ 'Content-Type': 'application/x-mach-binary' }, ) file.putfile(fileobj) try: with transaction.atomic(): rv = cls.objects.create( file=file, uuid=uuid, cpu_name=cpu_name, object_name=object_name, **extra ) except IntegrityError: file.delete() rv = cls.objects.get(uuid=uuid, **extra) resolve_processing_issue( project=project, scope='native', object='dsym:%s' % uuid, ) return rv def create_files_from_macho_zip(fileobj, project=None): """Creates all missing dsym files from the given zip file. This returns a list of all files created. """ scratchpad = tempfile.mkdtemp() try: safe_extract_zip(fileobj, scratchpad) to_create = [] for dirpath, dirnames, filenames in os.walk(scratchpad): for fn in filenames: fn = os.path.join(dirpath, fn) try: uuids = get_macho_uuids(fn) except (IOError, ValueError): # Whatever was contained there, was probably not a # macho file. continue for cpu, uuid in uuids: to_create.append((cpu, uuid, fn)) rv = [] for cpu, uuid, filename in to_create: with open(filename, 'rb') as f: rv.append((_create_macho_dsym_from_uuid( project, cpu, uuid, f, os.path.basename(filename)))) return rv finally: shutil.rmtree(scratchpad) def find_dsym_file(project, image_uuid): """Finds a dsym file for the given uuid. 
    Looks both within the project as well as the global store.
    """
    image_uuid = image_uuid.lower()
    try:
        return ProjectDSymFile.objects.filter(
            uuid=image_uuid,
            project=project
        ).select_related('file').get()
    except ProjectDSymFile.DoesNotExist:
        pass
    try:
        return GlobalDSymFile.objects.filter(
            uuid=image_uuid
        ).select_related('file').get()
    except GlobalDSymFile.DoesNotExist:
        return None
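# Editor's sketch (not part of the upstream module): the address arithmetic
# that lookup_symbol() performs before querying. The values in the usage
# comment are made-up examples.
def relative_and_absolute_addr(instruction_addr, image_addr, image_vmaddr=None):
    # Offset of the instruction inside the loaded image.
    addr_rel = instruction_addr - image_addr
    # Re-based onto the image's preferred load address, when known.
    addr_abs = image_vmaddr + addr_rel if image_vmaddr is not None else None
    return addr_rel, addr_abs

# relative_and_absolute_addr(0x10002040, 0x10000000, 0x4000) -> (0x2040, 0x6040)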
bsd-3-clause
lxsmnv/spark
examples/src/main/python/ml/decision_tree_classification_example.py
123
3003
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
Decision Tree Classification Example.
"""
from __future__ import print_function

# $example on$
from pyspark.ml import Pipeline
from pyspark.ml.classification import DecisionTreeClassifier
from pyspark.ml.feature import StringIndexer, VectorIndexer
from pyspark.ml.evaluation import MulticlassClassificationEvaluator
# $example off$
from pyspark.sql import SparkSession

if __name__ == "__main__":
    spark = SparkSession\
        .builder\
        .appName("DecisionTreeClassificationExample")\
        .getOrCreate()

    # $example on$
    # Load the data stored in LIBSVM format as a DataFrame.
    data = spark.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")

    # Index labels, adding metadata to the label column.
    # Fit on whole dataset to include all labels in index.
    labelIndexer = StringIndexer(inputCol="label", outputCol="indexedLabel").fit(data)
    # Automatically identify categorical features, and index them.
    # We specify maxCategories so features with > 4 distinct values are treated as continuous.
    featureIndexer =\
        VectorIndexer(inputCol="features", outputCol="indexedFeatures", maxCategories=4).fit(data)

    # Split the data into training and test sets (30% held out for testing)
    (trainingData, testData) = data.randomSplit([0.7, 0.3])

    # Train a DecisionTree model.
    dt = DecisionTreeClassifier(labelCol="indexedLabel", featuresCol="indexedFeatures")

    # Chain indexers and tree in a Pipeline
    pipeline = Pipeline(stages=[labelIndexer, featureIndexer, dt])

    # Train model.  This also runs the indexers.
    model = pipeline.fit(trainingData)

    # Make predictions.
    predictions = model.transform(testData)

    # Select example rows to display.
    predictions.select("prediction", "indexedLabel", "features").show(5)

    # Select (prediction, true label) and compute test error
    evaluator = MulticlassClassificationEvaluator(
        labelCol="indexedLabel", predictionCol="prediction", metricName="accuracy")
    accuracy = evaluator.evaluate(predictions)
    print("Test Error = %g " % (1.0 - accuracy))

    treeModel = model.stages[2]
    # summary only
    print(treeModel)
    # $example off$

    spark.stop()
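# Editor's sketch (assumes it runs before spark.stop()): IndexToString maps the
# indexed predictions back to the original label values fitted by the
# StringIndexer above.
from pyspark.ml.feature import IndexToString

def add_predicted_labels(predictions, label_indexer_model):
    converter = IndexToString(inputCol="prediction", outputCol="predictedLabel",
                              labels=label_indexer_model.labels)
    return converter.transform(predictions)

# add_predicted_labels(predictions, labelIndexer).select("predictedLabel").show(5)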
apache-2.0
rosswhitfield/mantid
Testing/SystemTests/tests/framework/ISIS/SANS/WORKFLOWS/SANSReductionCoreTest.py
3
13544
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2020 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + # pylint: disable=too-many-public-methods, invalid-name, too-many-arguments import unittest import os import systemtesting import mantid from ISIS.SANS.isis_sans_system_test import ISISSansSystemTest from mantid.api import AlgorithmManager from sans.state.Serializer import Serializer from sans.state.StateObjects.StateData import get_data_builder from sans.common.enums import (DetectorType, DataType, SANSFacility, SANSInstrument) from sans.common.constants import EMPTY_NAME from sans.common.general_functions import create_unmanaged_algorithm from sans.common.file_information import SANSFileInformationFactory # ----------------------------------------------- # Tests for the SANSReductionCore algorithm # ----------------------------------------------- from sans.user_file.txt_parsers.UserFileReaderAdapter import UserFileReaderAdapter @ISISSansSystemTest(SANSInstrument.SANS2D) class SANSReductionCoreTest(unittest.TestCase): def _load_workspace(self, state): load_alg = AlgorithmManager.createUnmanaged("SANSLoad") load_alg.setChild(True) load_alg.initialize() state_dict = Serializer.to_json(state) load_alg.setProperty("SANSState", state_dict) load_alg.setProperty("PublishToCache", False) load_alg.setProperty("UseCached", False) load_alg.setProperty("SampleScatterWorkspace", EMPTY_NAME) load_alg.setProperty("SampleScatterMonitorWorkspace", EMPTY_NAME) if state.data.sample_transmission: load_alg.setProperty("SampleTransmissionWorkspace", EMPTY_NAME) if state.data.sample_direct: load_alg.setProperty("SampleDirectWorkspace", EMPTY_NAME) # Act load_alg.execute() self.assertTrue(load_alg.isExecuted()) sample_scatter = load_alg.getProperty("SampleScatterWorkspace").value sample_scatter_monitor_workspace = load_alg.getProperty("SampleScatterMonitorWorkspace").value if state.data.sample_transmission: transmission_workspace = load_alg.getProperty("SampleTransmissionWorkspace").value else: transmission_workspace = None if state.data.sample_direct: direct_workspace = load_alg.getProperty("SampleDirectWorkspace").value else: direct_workspace = None return sample_scatter, sample_scatter_monitor_workspace, transmission_workspace, direct_workspace def _run_reduction_core(self, state, workspace, monitor, transmission=None, direct=None, detector_type=DetectorType.LAB, component=DataType.SAMPLE): reduction_core_alg = AlgorithmManager.createUnmanaged("SANSReductionCore") reduction_core_alg.setChild(True) reduction_core_alg.initialize() state_dict = Serializer.to_json(state) reduction_core_alg.setProperty("SANSState", state_dict) reduction_core_alg.setProperty("ScatterWorkspace", workspace) reduction_core_alg.setProperty("ScatterMonitorWorkspace", monitor) if transmission: reduction_core_alg.setProperty("TransmissionWorkspace", transmission) if direct: reduction_core_alg.setProperty("DirectWorkspace", direct) reduction_core_alg.setProperty("Component", detector_type.value) reduction_core_alg.setProperty("DataType", component.value) reduction_core_alg.setProperty("OutputWorkspaces", EMPTY_NAME) reduction_core_alg.setProperty("CalculatedTransmissionWorkspaces", EMPTY_NAME) reduction_core_alg.setProperty("UnfittedTransmissionWorkspaces", EMPTY_NAME) # Act reduction_core_alg.execute() 
self.assertTrue(reduction_core_alg.isExecuted()) return reduction_core_alg def _compare_workspace(self, workspace, reference_file_name): # Load the reference file load_name = "LoadNexusProcessed" load_options = {"Filename": reference_file_name, "OutputWorkspace": EMPTY_NAME} load_alg = create_unmanaged_algorithm(load_name, **load_options) load_alg.execute() reference_workspace = load_alg.getProperty("OutputWorkspace").value # Save the workspace out and reload it again. This makes equalizes it with the reference workspace f_name = os.path.join(mantid.config.getString('defaultsave.directory'), 'SANS_temp_single_core_reduction_testout.nxs') save_name = "SaveNexus" save_options = {"Filename": f_name, "InputWorkspace": workspace} save_alg = create_unmanaged_algorithm(save_name, **save_options) save_alg.execute() load_alg.setProperty("Filename", f_name) load_alg.setProperty("OutputWorkspace", EMPTY_NAME) load_alg.execute() ws = load_alg.getProperty("OutputWorkspace").value # Compare reference file with the output_workspace # We need to disable the instrument comparison, it takes way too long # We need to disable the sample -- since the sample has been modified (more logs are being written) # operation how many entries can be found in the sample logs compare_name = "CompareWorkspaces" compare_options = {"Workspace1": ws, "Workspace2": reference_workspace, "Tolerance": 1e-6, "CheckInstrument": False, "CheckSample": False, "ToleranceRelErr": True, "CheckAllData": True, "CheckMasking": True, "CheckType": True, "CheckAxes": True, "CheckSpectraMap": True} compare_alg = create_unmanaged_algorithm(compare_name, **compare_options) compare_alg.setChild(False) compare_alg.execute() result = compare_alg.getProperty("Result").value self.assertTrue(result) # Remove file if os.path.exists(f_name): os.remove(f_name) def test_that_reduction_core_evaluates_LAB(self): # Arrange # Build the data information file_information_factory = SANSFileInformationFactory() file_information = file_information_factory.create_sans_file_information("SANS2D00034484") data_builder = get_data_builder(SANSFacility.ISIS, file_information) data_builder.set_sample_scatter("SANS2D00034484") data_builder.set_sample_transmission("SANS2D00034505") data_builder.set_sample_direct("SANS2D00034461") data_state = data_builder.build() # Get the rest of the state from the user file user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt" user_file_director = UserFileReaderAdapter(file_information=file_information, user_file_name=user_file) state = user_file_director.get_all_states(file_information=file_information) state.adjustment.calibration = "TUBE_SANS2D_BOTH_31681_25Sept15.nxs" state.compatibility.use_compatibility_mode = True state.data = data_state # Load the sample workspaces workspace, workspace_monitor, transmission_workspace, direct_workspace = self._load_workspace(state) # Act reduction_core_alg = self._run_reduction_core(state, workspace, workspace_monitor, transmission_workspace, direct_workspace) output_workspace = reduction_core_alg.getProperty("OutputWorkspaces").value calculated_transmission = reduction_core_alg.getProperty("CalculatedTransmissionWorkspaces").value unfitted_transmission = reduction_core_alg.getProperty("UnfittedTransmissionWorkspaces").value # Evaluate it up to a defined point reference_file_name = "SANS2D_ws_D20_reference.nxs" self._compare_workspace(output_workspace, reference_file_name) calculated_transmission_reference_file = "SANS2D_ws_D20_calculated_transmission_reference.nxs" 
unfitted_transmission_reference_file = "SANS2D_ws_D20_unfitted_transmission_reference.nxs" self._compare_workspace(calculated_transmission, calculated_transmission_reference_file) self._compare_workspace(unfitted_transmission, unfitted_transmission_reference_file) def test_similarity_between_results_in_compatibility_mode_and_non_compatibility_mode(self): # Arrange # Build the data information file_information_factory = SANSFileInformationFactory() file_information = file_information_factory.create_sans_file_information("SANS2D00034484") data_builder = get_data_builder(SANSFacility.ISIS, file_information) data_builder.set_sample_scatter("SANS2D00034484") data_builder.set_sample_transmission("SANS2D00034505") data_builder.set_sample_direct("SANS2D00034461") data_state = data_builder.build() ################################################################################################################ # Compatibility mode ################################################################################################################ # Get the rest of the state from the user file user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt" user_file_director = UserFileReaderAdapter(file_information=file_information, user_file_name=user_file) state = user_file_director.get_all_states(file_information=file_information) state.adjustment.calibration = "TUBE_SANS2D_BOTH_31681_25Sept15.nxs" state.compatibility.use_compatibility_mode = True state.data = data_state # Load the sample workspaces workspace, workspace_monitor, transmission_workspace, direct_workspace = self._load_workspace(state) # Act reduction_core_alg = self._run_reduction_core(state, workspace, workspace_monitor, transmission_workspace, direct_workspace) compatibility_output_workspace = reduction_core_alg.getProperty("OutputWorkspaces").value ################################################################################################################ # Non-compatibility mode ################################################################################################################ user_file = "USER_SANS2D_154E_2p4_4m_M3_Xpress_8mm_SampleChanger.txt" user_file_director = UserFileReaderAdapter(file_information=file_information, user_file_name=user_file) state = user_file_director.get_all_states(file_information=file_information) state.compatibility.use_compatibility_mode = False state.data = data_state # Load the sample workspaces workspace, workspace_monitor, transmission_workspace, direct_workspace = self._load_workspace(state) # Act reduction_core_alg = self._run_reduction_core(state, workspace, workspace_monitor, transmission_workspace, direct_workspace) non_compatibility_output_workspace = reduction_core_alg.getProperty("OutputWorkspaces").value ################################################################################################################ # Compare workspaces ################################################################################################################ compare_name = "CompareWorkspaces" compare_options = {"Workspace1": non_compatibility_output_workspace.getItem(0), "Workspace2": compatibility_output_workspace.getItem(0), "Tolerance": 1, "CheckInstrument": False, "CheckSample": False, "ToleranceRelErr": True, "CheckAllData": True, "CheckMasking": True, "CheckType": True, "CheckAxes": True, "CheckSpectraMap": True} compare_alg = create_unmanaged_algorithm(compare_name, **compare_options) compare_alg.setChild(False) compare_alg.execute() result = 
compare_alg.getProperty("Result").value self.assertTrue(result) class SANSReductionCoreRunnerTest(systemtesting.MantidSystemTest): def __init__(self): systemtesting.MantidSystemTest.__init__(self) self._success = False def runTest(self): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(SANSReductionCoreTest, 'test')) runner = unittest.TextTestRunner() res = runner.run(suite) if res.wasSuccessful(): self._success = True def requiredMemoryMB(self): return 2000 def validate(self): return self._success if __name__ == '__main__': unittest.main()
gpl-3.0
quattor/aquilon
lib/aquilon/worker/commands/cat_service.py
2
1792
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2015,2016  Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq cat --service`."""

from aquilon.aqdb.model import Service
from aquilon.worker.broker import BrokerCommand
from aquilon.worker.templates.service import (PlenaryServiceToplevel,
                                              PlenaryServiceClientDefault,
                                              PlenaryServiceServerDefault)


class CommandCatService(BrokerCommand):

    required_parameters = ["service"]

    # We do not lock the plenary while reading it
    _is_lock_free = True

    def render(self, session, logger, service, server, default, generate, **_):
        dbservice = Service.get_unique(session, service, compel=True)
        if default:
            if server:
                cls = PlenaryServiceServerDefault
            else:
                cls = PlenaryServiceClientDefault
        else:
            cls = PlenaryServiceToplevel

        plenary_info = cls.get_plenary(dbservice, logger=logger)

        if generate:
            return plenary_info._generate_content()
        else:
            return plenary_info.read()
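# Editor's sketch (not aquilon API): the two-flag dispatch in render() above,
# written as a compact helper for clarity.
def pick_plenary_class(default, server):
    if not default:
        return PlenaryServiceToplevel
    return PlenaryServiceServerDefault if server else PlenaryServiceClientDefault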
apache-2.0
loopCM/chromium
chrome/test/telemetry/chromeos/login_unittest.py
30
4150
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import json import os import unittest from telemetry.core import browser_finder from telemetry.core import exceptions from telemetry.core import extension_to_load from telemetry.core import util from telemetry.core.chrome import cros_interface from telemetry.core.chrome import cros_util from telemetry.test import options_for_unittests class CrOSAutoTest(unittest.TestCase): def setUp(self): options = options_for_unittests.GetCopy() self._cri = cros_interface.CrOSInterface(options.cros_remote, options.cros_ssh_identity) self._is_guest = options.browser_type == 'cros-chrome-guest' self._email = '' if self._is_guest else '[email protected]' def _IsCryptohomeMounted(self): """Returns True if cryptohome is mounted""" cryptohomeJSON, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome', '--action=status']) cryptohomeStatus = json.loads(cryptohomeJSON) return (cryptohomeStatus['mounts'] and cryptohomeStatus['mounts'][0]['mounted']) def _CreateBrowser(self, with_autotest_ext): """Finds and creates a browser for tests. if with_autotest_ext is True, also loads the autotest extension""" options = options_for_unittests.GetCopy() if with_autotest_ext: extension_path = os.path.join(os.path.dirname(__file__), 'autotest_ext') self._load_extension = extension_to_load.ExtensionToLoad(extension_path, True) options.extensions_to_load = [self._load_extension] browser_to_create = browser_finder.FindBrowser(options) self.assertTrue(browser_to_create) return browser_to_create.Create() def _GetAutotestExtension(self, browser): """Returns the autotest extension instance""" extension = browser.extensions[self._load_extension] self.assertTrue(extension) return extension def testCryptohomeMounted(self): """Verifies cryptohome mount status for regular and guest user and when logged out""" with self._CreateBrowser(False) as b: self.assertEquals(1, len(b.tabs)) self.assertTrue(b.tabs[0].url) self.assertTrue(self._IsCryptohomeMounted()) chronos_fs = self._cri.FilesystemMountedAt('/home/chronos/user') self.assertTrue(chronos_fs) if self._is_guest: self.assertEquals(chronos_fs, 'guestfs') else: home, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome-path', 'user', self._email]) self.assertEquals(self._cri.FilesystemMountedAt(home.rstrip()), chronos_fs) self.assertFalse(self._IsCryptohomeMounted()) self.assertEquals(self._cri.FilesystemMountedAt('/home/chronos/user'), '/dev/mapper/encstateful') def testLoginStatus(self): """Tests autotestPrivate.loginStatus""" with self._CreateBrowser(True) as b: extension = self._GetAutotestExtension(b) extension.ExecuteJavaScript(''' chrome.autotestPrivate.loginStatus(function(s) { window.__autotest_result = s; }); ''') login_status = extension.EvaluateJavaScript('window.__autotest_result') self.assertEquals(type(login_status), dict) self.assertEquals(not self._is_guest, login_status['isRegularUser']) self.assertEquals(self._is_guest, login_status['isGuest']) self.assertEquals(login_status['email'], self._email) self.assertFalse(login_status['isScreenLocked']) def testLogout(self): """Tests autotestPrivate.logout""" with self._CreateBrowser(True) as b: extension = self._GetAutotestExtension(b) try: extension.ExecuteJavaScript('chrome.autotestPrivate.logout();') except (exceptions.BrowserConnectionGoneException, exceptions.BrowserGoneException): pass util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)
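# Editor's sketch: util.WaitFor above polls a predicate until a timeout. A
# minimal self-contained version of that pattern (assumed semantics, not
# Telemetry's actual implementation):
import time

def wait_for(condition, timeout_secs, poll_interval=0.5):
    deadline = time.time() + timeout_secs
    while time.time() < deadline:
        if condition():
            return
        time.sleep(poll_interval)
    raise RuntimeError('Timed out after %ss waiting for condition' % timeout_secs)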
bsd-3-clause
sharad/calibre
src/calibre/ebooks/conversion/plugins/rb_output.py
24
1267
# -*- coding: utf-8 -*-

__license__ = 'GPL 3'
__copyright__ = '2009, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'

import os

from calibre.customize.conversion import OutputFormatPlugin, OptionRecommendation


class RBOutput(OutputFormatPlugin):

    name = 'RB Output'
    author = 'John Schember'
    file_type = 'rb'

    options = set([
        OptionRecommendation(name='inline_toc',
            recommended_value=False, level=OptionRecommendation.LOW,
            help=_('Add Table of Contents to beginning of the book.')),
    ])

    def convert(self, oeb_book, output_path, input_plugin, opts, log):
        from calibre.ebooks.rb.writer import RBWriter

        close = False
        if not hasattr(output_path, 'write'):
            close = True
            if not os.path.exists(os.path.dirname(output_path)) and os.path.dirname(output_path) != '':
                os.makedirs(os.path.dirname(output_path))
            out_stream = open(output_path, 'wb')
        else:
            out_stream = output_path

        writer = RBWriter(opts, log)

        out_stream.seek(0)
        out_stream.truncate()

        writer.write_content(oeb_book, out_stream, oeb_book.metadata)

        if close:
            out_stream.close()
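# Editor's sketch (helper name is illustrative, not calibre API): convert()
# above accepts either a filesystem path or an open file-like object; the
# dual-input pattern in isolation looks like this.
import os

def open_output(output_path):
    """Return (stream, needs_close) for a path or an existing stream."""
    if hasattr(output_path, 'write'):
        return output_path, False
    parent = os.path.dirname(output_path)
    if parent and not os.path.exists(parent):
        os.makedirs(parent)
    return open(output_path, 'wb'), True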
gpl-3.0
simonwydooghe/ansible
lib/ansible/modules/cloud/azure/azure_rm_storageaccount_info.py
10
22917
#!/usr/bin/python # # Copyright (c) 2016 Matt Davis, <[email protected]> # Chris Houseknecht, <[email protected]> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: azure_rm_storageaccount_info version_added: "2.9" short_description: Get storage account facts description: - Get facts for one storage account or all storage accounts within a resource group. options: name: description: - Only show results for a specific account. resource_group: description: - Limit results to a resource group. Required when filtering by name. aliases: - resource_group_name tags: description: - Limit results by providing a list of tags. Format tags as 'key' or 'key:value'. show_connection_string: description: - Show the connection string for each of the storageaccount's endpoints. - For convenient usage, C(show_connection_string) will also show the access keys for each of the storageaccount's endpoints. - Note that it will cost a lot of time when list all storageaccount rather than query a single one. type: bool version_added: "2.8" show_blob_cors: description: - Show the blob CORS settings for each blob related to the storage account. - Querying all storage accounts will take a long time. type: bool version_added: "2.8" extends_documentation_fragment: - azure author: - Chris Houseknecht (@chouseknecht) - Matt Davis (@nitzmahone) ''' EXAMPLES = ''' - name: Get facts for one account azure_rm_storageaccount_info: resource_group: myResourceGroup name: clh0002 - name: Get facts for all accounts in a resource group azure_rm_storageaccount_info: resource_group: myResourceGroup - name: Get facts for all accounts by tags azure_rm_storageaccount_info: tags: - testing - foo:bar ''' RETURN = ''' azure_storageaccounts: description: - List of storage account dicts. returned: always type: list example: [{ "id": "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/myResourceGroups/testing/providers/Microsoft.Storage/storageAccounts/testaccount001", "location": "eastus2", "name": "testaccount001", "properties": { "accountType": "Standard_LRS", "creationTime": "2016-03-28T02:46:58.290113Z", "primaryEndpoints": { "blob": "https://testaccount001.blob.core.windows.net/", "file": "https://testaccount001.file.core.windows.net/", "queue": "https://testaccount001.queue.core.windows.net/", "table": "https://testaccount001.table.core.windows.net/" }, "primaryLocation": "eastus2", "provisioningState": "Succeeded", "statusOfPrimary": "Available" }, "tags": {}, "type": "Microsoft.Storage/storageAccounts" }] storageaccounts: description: - List of storage account dicts in resource module's parameter format. returned: always type: complex contains: id: description: - Resource ID. returned: always type: str sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Storage/storageAccounts/t estaccount001" name: description: - Name of the storage account to update or create. returned: always type: str sample: testaccount001 location: description: - Valid Azure location. Defaults to location of the resource group. returned: always type: str sample: eastus account_type: description: - Type of storage account. - C(Standard_ZRS) and C(Premium_LRS) accounts cannot be changed to other account types. 
- Other account types cannot be changed to C(Standard_ZRS) or C(Premium_LRS). returned: always type: str sample: Standard_ZRS custom_domain: description: - User domain assigned to the storage account. - Must be a dictionary with I(name) and I(use_sub_domain) keys where I(name) is the CNAME source. returned: always type: complex contains: name: description: - CNAME source. returned: always type: str sample: testaccount use_sub_domain: description: - Whether to use sub domain. returned: always type: bool sample: true kind: description: - The kind of storage. returned: always type: str sample: Storage access_tier: description: - The access tier for this storage account. returned: always type: str sample: Hot https_only: description: - Allows https traffic only to storage service when set to C(true). returned: always type: bool sample: false provisioning_state: description: - The status of the storage account at the time the operation was called. - Possible values include C(Creating), C(ResolvingDNS), C(Succeeded). returned: always type: str sample: Succeeded secondary_location: description: - The location of the geo-replicated secondary for the storage account. - Only available if the I(account_type=Standard_GRS) or I(account_type=Standard_RAGRS). returned: always type: str sample: westus status_of_primary: description: - Status of the primary location of the storage account; either C(available) or C(unavailable). returned: always type: str sample: available status_of_secondary: description: - Status of the secondary location of the storage account; either C(available) or C(unavailable). returned: always type: str sample: available primary_location: description: - The location of the primary data center for the storage account. returned: always type: str sample: eastus primary_endpoints: description: - URLs to retrieve a public I(blob), I(queue), or I(table) object. - Note that C(Standard_ZRS) and C(Premium_LRS) accounts only return the blob endpoint. returned: always type: complex contains: blob: description: - The primary blob endpoint and connection string. returned: always type: complex contains: endpoint: description: - The primary blob endpoint. returned: always type: str sample: "https://testaccount001.blob.core.windows.net/" connectionstring: description: - Connectionstring of the blob endpoint. returned: always type: str sample: "DefaultEndpointsProtocol=https;EndpointSuffix=core.windows.net;AccountName=X;AccountKey=X;BlobEndpoint=X" queue: description: - The primary queue endpoint and connection string. returned: always type: complex contains: endpoint: description: - The primary queue endpoint. returned: always type: str sample: "https://testaccount001.queue.core.windows.net/" connectionstring: description: - Connectionstring of the queue endpoint. returned: always type: str sample: "DefaultEndpointsProtocol=https;EndpointSuffix=core.windows.net;AccountName=X;AccountKey=X;QueueEndpoint=X" table: description: - The primary table endpoint and connection string. returned: always type: complex contains: endpoint: description: - The primary table endpoint. returned: always type: str sample: "https://testaccount001.table.core.windows.net/" connectionstring: description: - Connectionstring of the table endpoint. 
returned: always type: str sample: "DefaultEndpointsProtocol=https;EndpointSuffix=core.windows.net;AccountName=X;AccountKey=X;TableEndpoint=X" key: description: - The account key for the primary_endpoints returned: always type: str sample: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx secondary_endpoints: description: - The URLs to retrieve a public I(blob), I(queue), or I(table) object from the secondary location. - Only available if the SKU I(name=Standard_RAGRS). returned: always type: complex contains: blob: description: - The secondary blob endpoint and connection string. returned: always type: complex contains: endpoint: description: - The secondary blob endpoint. returned: always type: str sample: "https://testaccount001.blob.core.windows.net/" connectionstring: description: - Connectionstring of the blob endpoint. returned: always type: str sample: "DefaultEndpointsProtocol=https;EndpointSuffix=core.windows.net;AccountName=X;AccountKey=X;BlobEndpoint=X" queue: description: - The secondary queue endpoint and connection string. returned: always type: complex contains: endpoint: description: - The secondary queue endpoint. returned: always type: str sample: "https://testaccount001.queue.core.windows.net/" connectionstring: description: - Connectionstring of the queue endpoint. returned: always type: str sample: "DefaultEndpointsProtocol=https;EndpointSuffix=core.windows.net;AccountName=X;AccountKey=X;QueueEndpoint=X" table: description: - The secondary table endpoint and connection string. returned: always type: complex contains: endpoint: description: - The secondary table endpoint. returned: always type: str sample: "https://testaccount001.table.core.windows.net/" connectionstring: description: - Connectionstring of the table endpoint. returned: always type: str sample: "DefaultEndpointsProtocol=https;EndpointSuffix=core.windows.net;AccountName=X;AccountKey=X;TableEndpoint=X" key: description: - The account key for the secondary_endpoints sample: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx tags: description: - Resource tags. 
returned: always type: dict sample: { "tag1": "abc" } ''' try: from msrestazure.azure_exceptions import CloudError except Exception: # This is handled in azure_rm_common pass from ansible.module_utils.azure_rm_common import AzureRMModuleBase from ansible.module_utils._text import to_native AZURE_OBJECT_CLASS = 'StorageAccount' class AzureRMStorageAccountInfo(AzureRMModuleBase): def __init__(self): self.module_arg_spec = dict( name=dict(type='str'), resource_group=dict(type='str', aliases=['resource_group_name']), tags=dict(type='list'), show_connection_string=dict(type='bool'), show_blob_cors=dict(type='bool') ) self.results = dict( changed=False, storageaccounts=[] ) self.name = None self.resource_group = None self.tags = None self.show_connection_string = None self.show_blob_cors = None super(AzureRMStorageAccountInfo, self).__init__(self.module_arg_spec, supports_tags=False, facts_module=True) def exec_module(self, **kwargs): is_old_facts = self.module._name == 'azure_rm_storageaccount_facts' if is_old_facts: self.module.deprecate("The 'azure_rm_storageaccount_facts' module has been renamed to 'azure_rm_storageaccount_info'", version='2.13') for key in self.module_arg_spec: setattr(self, key, kwargs[key]) if self.name and not self.resource_group: self.fail("Parameter error: resource group required when filtering by name.") results = [] if self.name: results = self.get_account() elif self.resource_group: results = self.list_resource_group() else: results = self.list_all() filtered = self.filter_tag(results) if is_old_facts: self.results['ansible_facts'] = { 'azure_storageaccounts': self.serialize(filtered), 'storageaccounts': self.format_to_dict(filtered), } self.results['storageaccounts'] = self.format_to_dict(filtered) return self.results def get_account(self): self.log('Get properties for account {0}'.format(self.name)) account = None try: account = self.storage_client.storage_accounts.get_properties(self.resource_group, self.name) return [account] except CloudError: pass return [] def list_resource_group(self): self.log('List items') try: response = self.storage_client.storage_accounts.list_by_resource_group(self.resource_group) except Exception as exc: self.fail("Error listing for resource group {0} - {1}".format(self.resource_group, str(exc))) return response def list_all(self): self.log('List all items') try: response = self.storage_client.storage_accounts.list() except Exception as exc: self.fail("Error listing all items - {0}".format(str(exc))) return response def filter_tag(self, raw): return [item for item in raw if self.has_tags(item.tags, self.tags)] def serialize(self, raw): return [self.serialize_obj(item, AZURE_OBJECT_CLASS) for item in raw] def format_to_dict(self, raw): return [self.account_obj_to_dict(item) for item in raw] def account_obj_to_dict(self, account_obj, blob_service_props=None): account_dict = dict( id=account_obj.id, name=account_obj.name, location=account_obj.location, access_tier=(account_obj.access_tier.value if account_obj.access_tier is not None else None), account_type=account_obj.sku.name.value, kind=account_obj.kind.value if account_obj.kind else None, provisioning_state=account_obj.provisioning_state.value, secondary_location=account_obj.secondary_location, status_of_primary=(account_obj.status_of_primary.value if account_obj.status_of_primary is not None else None), status_of_secondary=(account_obj.status_of_secondary.value if account_obj.status_of_secondary is not None else None), primary_location=account_obj.primary_location, 
https_only=account_obj.enable_https_traffic_only ) id_dict = self.parse_resource_to_dict(account_obj.id) account_dict['resource_group'] = id_dict.get('resource_group') account_key = self.get_connectionstring(account_dict['resource_group'], account_dict['name']) account_dict['custom_domain'] = None if account_obj.custom_domain: account_dict['custom_domain'] = dict( name=account_obj.custom_domain.name, use_sub_domain=account_obj.custom_domain.use_sub_domain ) account_dict['primary_endpoints'] = None if account_obj.primary_endpoints: account_dict['primary_endpoints'] = dict( blob=self.format_endpoint_dict(account_dict['name'], account_key[0], account_obj.primary_endpoints.blob, 'blob'), queue=self.format_endpoint_dict(account_dict['name'], account_key[0], account_obj.primary_endpoints.queue, 'queue'), table=self.format_endpoint_dict(account_dict['name'], account_key[0], account_obj.primary_endpoints.table, 'table') ) if account_key[0]: account_dict['primary_endpoints']['key'] = '{0}'.format(account_key[0]) account_dict['secondary_endpoints'] = None if account_obj.secondary_endpoints: account_dict['secondary_endpoints'] = dict( blob=self.format_endpoint_dict(account_dict['name'], account_key[1], account_obj.primary_endpoints.blob, 'blob'), queue=self.format_endpoint_dict(account_dict['name'], account_key[1], account_obj.primary_endpoints.queue, 'queue'), table=self.format_endpoint_dict(account_dict['name'], account_key[1], account_obj.primary_endpoints.table, 'table'), ) if account_key[1]: account_dict['secondary_endpoints']['key'] = '{0}'.format(account_key[1]) account_dict['tags'] = None if account_obj.tags: account_dict['tags'] = account_obj.tags blob_service_props = self.get_blob_service_props(account_dict['resource_group'], account_dict['name']) if blob_service_props and blob_service_props.cors and blob_service_props.cors.cors_rules: account_dict['blob_cors'] = [dict( allowed_origins=to_native(x.allowed_origins), allowed_methods=to_native(x.allowed_methods), max_age_in_seconds=x.max_age_in_seconds, exposed_headers=to_native(x.exposed_headers), allowed_headers=to_native(x.allowed_headers) ) for x in blob_service_props.cors.cors_rules] return account_dict def format_endpoint_dict(self, name, key, endpoint, storagetype, protocol='https'): result = dict(endpoint=endpoint) if key: result['connectionstring'] = 'DefaultEndpointsProtocol={0};EndpointSuffix={1};AccountName={2};AccountKey={3};{4}Endpoint={5}'.format( protocol, self._cloud_environment.suffixes.storage_endpoint, name, key, str.title(storagetype), endpoint) return result def get_blob_service_props(self, resource_group, name): if not self.show_blob_cors: return None try: blob_service_props = self.storage_client.blob_services.get_service_properties(resource_group, name) return blob_service_props except Exception: pass return None def get_connectionstring(self, resource_group, name): keys = ['', ''] if not self.show_connection_string: return keys try: cred = self.storage_client.storage_accounts.list_keys(resource_group, name) # get the following try catch from CLI try: keys = [cred.keys[0].value, cred.keys[1].value] except AttributeError: keys = [cred.key1, cred.key2] except Exception: pass return keys def main(): AzureRMStorageAccountInfo() if __name__ == '__main__': main()
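# Editor's sketch of the connection-string layout assembled by
# format_endpoint_dict() above (the default suffix/protocol are assumptions):
def build_connection_string(account, key, endpoint, storagetype,
                            suffix='core.windows.net', protocol='https'):
    return ('DefaultEndpointsProtocol={0};EndpointSuffix={1};AccountName={2};'
            'AccountKey={3};{4}Endpoint={5}').format(
                protocol, suffix, account, key, storagetype.title(), endpoint)

# build_connection_string('testaccount001', 'X',
#                         'https://testaccount001.blob.core.windows.net/', 'blob')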
gpl-3.0
fevxie/odoo
addons/base_report_designer/plugin/openerp_report_designer/bin/script/ConvertBracesToField.py
384
12556
######################################################################### # # Copyright (c) 2003-2004 Danny Brewer [email protected] # Copyright (C) 2004-2010 OpenERP SA (<http://openerp.com>). # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # # See: http://www.gnu.org/licenses/lgpl.html # ############################################################################# import uno import unohelper import string import re import base64 from com.sun.star.task import XJobExecutor if __name__<>"package": from lib.gui import * from LoginTest import * from lib.logreport import * from lib.rpc import * database="test" uid = 1 class ConvertBracesToField( unohelper.Base, XJobExecutor ): def __init__(self, ctx): self.ctx = ctx self.module = "openerp_report" self.version = "0.1" LoginTest() self.logobj=Logger() if not loginstatus and __name__=="package": exit(1) global passwd self.password = passwd global url self.sock=RPCSession(url) self.aReportSyntex=[] self.getBraces(self.aReportSyntex) self.setValue() def setValue(self): desktop=getDesktop() doc = desktop.getCurrentComponent() docinfo= doc.getDocumentInfo() count = 0 regexes = [ ['[a-zA-Z0-9_]+\.[a-zA-Z0-9_.]+',"Field"], ['\\[\\[ *repeatIn\\( *([a-zA-Z0-9_\.]+), *\'([a-zA-Z0-9_]+)\' *\\) *\\]\\]', "RepeatIn"], ['\\[\\[ *([a-zA-Z0-9_\.]+) *\\]\\]', "Field"] # ['\\[\\[ ([a-zA-Z0-9_]+\.[a-zA-Z1-9]) \\]\\]',"Field"], # ['\\[\\[ [a-zA-Z0-9_\.]+ and ([a-zA-Z0-9_\.]+) or .+? \\]\\]',"Field"], # ['\\[\\[ ([a-zA-Z0-9_\.]+) or .+? \\]\\]',"Field"], # ['\\[\\[ ([a-zA-Z0-9_\.]+) and .+? \\]\\]',"Field"], # ['\\[\\[ .+? or ([a-zA-Z0-9_\.]+) \\]\\]',"Field"], # ['\\[\\[ (.+?) and ([a-zA-Z0-9_\.]+) \\]\\]',"Field"], # ['\\[\\[ .+? % ([a-zA-Z0-9_\.]+) \\]\\]',"Field"] ] oFieldObject = [] oRepeatInObjects = [] saRepeatInList = [] sHost = docinfo.getUserFieldValue(0) nCount = 0 oParEnum = doc.getTextFields().createEnumeration() while oParEnum.hasMoreElements(): oPar = oParEnum.nextElement() nCount += 1 getList(oRepeatInObjects,sHost,nCount) for ro in oRepeatInObjects: if ro.find("(")<>-1: saRepeatInList.append( [ ro[:ro.find("(")], ro[ro.find("(")+1:ro.find(")")] ]) try: oParEnum = doc.getTextFields().createEnumeration() while oParEnum.hasMoreElements(): oPar = oParEnum.nextElement() if oPar.supportsService("com.sun.star.text.TextField.DropDown"): for reg in regexes: res=re.findall(reg[0],oPar.Items[1]) if len(res) <> 0: if res[0][0] == "objects": sTemp = docinfo.getUserFieldValue(3) sTemp = "|-." + sTemp[sTemp.rfind(".")+1:] + ".-|" oPar.Items=(sTemp.encode("utf-8"),oPar.Items[1].replace(' ',"")) oPar.update() elif type(res[0]) <> type(u''): sObject = self.getRes(self.sock, docinfo.getUserFieldValue(3), res[0][0][res[0][0].find(".")+1:].replace(".","/")) r = self.sock.execute(database, uid, self.password, docinfo.getUserFieldValue(3) , 'fields_get') sExpr="|-." 
+ r[res[0][0][res[0][0].rfind(".")+1:]]["string"] + ".-|" oPar.Items=(sExpr.encode("utf-8"),oPar.Items[1].replace(' ',"")) oPar.update() else: obj = None for rl in saRepeatInList: if rl[0] == res[0][:res[0].find(".")]: obj=rl[1] try: sObject = self.getRes(self.sock, obj, res[0][res[0].find(".")+1:].replace(".","/")) r = self.sock.execute(database, uid, self.password, sObject , 'read',[1]) except Exception,e: r = "TTT" self.logobj.log_write('ConvertBracesToField', LOG_ERROR, str(e)) if len(r) <> 0: if r <> "TTT": if len(res)>1: sExpr="" print res if reg[1] == 'Field': for ires in res: try: sExpr=r[0][ires[ires.rfind(".")+1:]] break except Exception,e: import traceback,sys info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)) self.logobj.log_write('ConvertBracesToField', LOG_ERROR,info) try: oPar.Items=(sExpr.encode("utf-8") ,oPar.Items[1]) oPar.update() except: oPar.Items=(str(sExpr) ,oPar.Items[1]) oPar.update() import traceback,sys info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)) self.logobj.log_write('ConvertBracesToField', LOG_ERROR, info) else: sExpr=r[0][res[0][res[0].rfind(".")+1:]] try: if sExpr: oPar.Items=(sExpr.encode("utf-8") ,oPar.Items[1]) oPar.update() else: oPar.Items=(u"/",oPar.Items[1]) oPar.update() except: oPar.Items=(str(sExpr) ,oPar.Items[1]) oPar.update() import traceback,sys info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)) self.logobj.log_write('ConvertBracesToField', LOG_ERROR,info) else: oPar.Items=(u""+r,oPar.Items[1]) oPar.update() else: oPar.Items=(u"TTT",oPar.Items[1]) oPar.update() except: import traceback,sys info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)) self.logobj.log_write('ConvertBraceToField', LOG_ERROR, info) def getRes(self, sock, sObject, sVar): desktop=getDesktop() doc =desktop.getCurrentComponent() docinfo=doc.getDocumentInfo() res = sock.execute(database, uid, self.password, sObject , 'fields_get') key = res.keys() key.sort() myval=None if not sVar.find("/")==-1: myval=sVar[:sVar.find("/")] else: myval=sVar for k in key: if (res[k]['type'] in ['many2one']) and k==myval: sObject = self.getRes(sock,res[myval]['relation'], sVar[sVar.find("/")+1:]) return sObject def getBraces(self, aReportSyntex=None): if aReportSyntex is None: aReportSyntex = [] desktop=getDesktop() doc = desktop.getCurrentComponent() aSearchString=[] aReplaceString=[] aRes=[] try: regexes = [ ['\\[\\[ *repeatIn\\( *([a-zA-Z0-9_\.]+), *\'([a-zA-Z0-9_]+)\' *\\) *\\]\\]', "RepeatIn"], ['\\[\\[ *([a-zA-Z0-9_\.]+) *\\]\\]', "Field"], ['\\[\\[ *.+? 
*\\]\\]', "Expression"] ] search = doc.createSearchDescriptor() search.SearchRegularExpression = True for reg in regexes: search.SearchString = reg[0] found = doc.findFirst( search ) while found: res=re.findall(reg[0],found.String) print len(res) if found.String not in [r[0] for r in aReportSyntex] and len(res) == 1 : text=found.getText() oInputList = doc.createInstance("com.sun.star.text.TextField.DropDown") if reg[1]<>"Expression": oInputList.Items=(u""+found.String,u""+found.String) else: oInputList.Items=(u"?",u""+found.String) aReportSyntex.append([oInputList,reg[1]]) text.insertTextContent(found,oInputList,False) found.String ="" else: aRes.append([res,reg[1]]) found = doc.findNext(found.End, search) search = doc.createSearchDescriptor() search.SearchRegularExpression = False for res in aRes: for r in res[0]: search.SearchString=r found=doc.findFirst(search) while found: text=found.getText() oInputList = doc.createInstance("com.sun.star.text.TextField.DropDown") if res[1]<>"Expression": oInputList.Items=(u""+found.String,u""+found.String) else: oInputList.Items=(u"?",u""+found.String) aReportSyntex.append([oInputList,res[1]]) text.insertTextContent(found,oInputList,False) found.String ="" found = doc.findNext(found.End, search) except: import traceback,sys info = reduce(lambda x, y: x+y, traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback)) self.logobj.log_write('ConvertBraceToField', LOG_ERROR, info) if __name__<>"package": ConvertBracesToField(None) else: g_ImplementationHelper.addImplementation( ConvertBracesToField, "org.openoffice.openerp.report.convertBF", ("com.sun.star.task.Job",),) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
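# Editor's sketch: the placeholder regexes used by getBraces()/setValue() above,
# exercised standalone.
import re

REPEAT_IN_RE = re.compile(r"\[\[ *repeatIn\( *([a-zA-Z0-9_\.]+), *'([a-zA-Z0-9_]+)' *\) *\]\]")
FIELD_RE = re.compile(r'\[\[ *([a-zA-Z0-9_\.]+) *\]\]')

# REPEAT_IN_RE.findall("[[ repeatIn(objects, 'o') ]]") -> [('objects', 'o')]
# FIELD_RE.findall("Total: [[ o.amount_total ]]")      -> ['o.amount_total']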
agpl-3.0
gauribhoite/personfinder
env/google_appengine/lib/django-1.4/django/contrib/sites/managers.py
491
1985
from django.conf import settings
from django.db import models
from django.db.models.fields import FieldDoesNotExist


class CurrentSiteManager(models.Manager):
    "Use this to limit objects to those associated with the current site."

    def __init__(self, field_name=None):
        super(CurrentSiteManager, self).__init__()
        self.__field_name = field_name
        self.__is_validated = False

    def _validate_field_name(self):
        field_names = self.model._meta.get_all_field_names()

        # If a custom name is provided, make sure the field exists on the model
        if self.__field_name is not None and self.__field_name not in field_names:
            raise ValueError("%s couldn't find a field named %s in %s." %
                             (self.__class__.__name__, self.__field_name,
                              self.model._meta.object_name))

        # Otherwise, see if there is a field called either 'site' or 'sites'
        else:
            for potential_name in ['site', 'sites']:
                if potential_name in field_names:
                    self.__field_name = potential_name
                    self.__is_validated = True
                    break

        # Now do a type check on the field (FK or M2M only)
        try:
            field = self.model._meta.get_field(self.__field_name)
            if not isinstance(field, (models.ForeignKey, models.ManyToManyField)):
                raise TypeError("%s must be a ForeignKey or ManyToManyField." % self.__field_name)
        except FieldDoesNotExist:
            raise ValueError("%s couldn't find a field named %s in %s." %
                             (self.__class__.__name__, self.__field_name,
                              self.model._meta.object_name))

        self.__is_validated = True

    def get_query_set(self):
        if not self.__is_validated:
            self._validate_field_name()
        return super(CurrentSiteManager, self).get_query_set().filter(
            **{self.__field_name + '__id__exact': settings.SITE_ID})
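# Editor's sketch: the documented usage pattern for CurrentSiteManager (the
# model below is illustrative and left commented because it needs configured
# Django settings to run):
#
#   from django.contrib.sites.models import Site
#
#   class Article(models.Model):
#       site = models.ForeignKey(Site)
#       objects = models.Manager()        # default, unfiltered manager
#       on_site = CurrentSiteManager()    # filtered to settings.SITE_ID
#
# Article.on_site.all() is then equivalent to
# Article.objects.filter(site__id__exact=settings.SITE_ID).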
apache-2.0
frankvdp/django
django/db/models/sql/datastructures.py
32
6701
""" Useful auxiliary data structures for query construction. Not useful outside the SQL domain. """ # for backwards-compatibility in Django 1.11 from django.core.exceptions import EmptyResultSet # NOQA: F401 from django.db.models.sql.constants import INNER, LOUTER class MultiJoin(Exception): """ Used by join construction code to indicate the point at which a multi-valued join was attempted (if the caller wants to treat that exceptionally). """ def __init__(self, names_pos, path_with_names): self.level = names_pos # The path travelled, this includes the path to the multijoin. self.names_with_path = path_with_names class Empty: pass class Join: """ Used by sql.Query and sql.SQLCompiler to generate JOIN clauses into the FROM entry. For example, the SQL generated could be LEFT OUTER JOIN "sometable" T1 ON ("othertable"."sometable_id" = "sometable"."id") This class is primarily used in Query.alias_map. All entries in alias_map must be Join compatible by providing the following attributes and methods: - table_name (string) - table_alias (possible alias for the table, can be None) - join_type (can be None for those entries that aren't joined from anything) - parent_alias (which table is this join's parent, can be None similarly to join_type) - as_sql() - relabeled_clone() """ def __init__(self, table_name, parent_alias, table_alias, join_type, join_field, nullable, filtered_relation=None): # Join table self.table_name = table_name self.parent_alias = parent_alias # Note: table_alias is not necessarily known at instantiation time. self.table_alias = table_alias # LOUTER or INNER self.join_type = join_type # A list of 2-tuples to use in the ON clause of the JOIN. # Each 2-tuple will create one join condition in the ON clause. self.join_cols = join_field.get_joining_columns() # Along which field (or ForeignObjectRel in the reverse join case) self.join_field = join_field # Is this join nullabled? self.nullable = nullable self.filtered_relation = filtered_relation def as_sql(self, compiler, connection): """ Generate the full LEFT OUTER JOIN sometable ON sometable.somecol = othertable.othercol, params clause for this join. """ join_conditions = [] params = [] qn = compiler.quote_name_unless_alias qn2 = connection.ops.quote_name # Add a join condition for each pair of joining columns. for lhs_col, rhs_col in self.join_cols: join_conditions.append('%s.%s = %s.%s' % ( qn(self.parent_alias), qn2(lhs_col), qn(self.table_alias), qn2(rhs_col), )) # Add a single condition inside parentheses for whatever # get_extra_restriction() returns. extra_cond = self.join_field.get_extra_restriction( compiler.query.where_class, self.table_alias, self.parent_alias) if extra_cond: extra_sql, extra_params = compiler.compile(extra_cond) join_conditions.append('(%s)' % extra_sql) params.extend(extra_params) if self.filtered_relation: extra_sql, extra_params = compiler.compile(self.filtered_relation) if extra_sql: join_conditions.append('(%s)' % extra_sql) params.extend(extra_params) if not join_conditions: # This might be a rel on the other end of an actual declared field. declared_field = getattr(self.join_field, 'field', self.join_field) raise ValueError( "Join generated an empty ON clause. %s did not yield either " "joining columns or extra restrictions." 
% declared_field.__class__ ) on_clause_sql = ' AND '.join(join_conditions) alias_str = '' if self.table_alias == self.table_name else (' %s' % self.table_alias) sql = '%s %s%s ON (%s)' % (self.join_type, qn(self.table_name), alias_str, on_clause_sql) return sql, params def relabeled_clone(self, change_map): new_parent_alias = change_map.get(self.parent_alias, self.parent_alias) new_table_alias = change_map.get(self.table_alias, self.table_alias) if self.filtered_relation is not None: filtered_relation = self.filtered_relation.clone() filtered_relation.path = [change_map.get(p, p) for p in self.filtered_relation.path] else: filtered_relation = None return self.__class__( self.table_name, new_parent_alias, new_table_alias, self.join_type, self.join_field, self.nullable, filtered_relation=filtered_relation, ) def equals(self, other, with_filtered_relation): return ( isinstance(other, self.__class__) and self.table_name == other.table_name and self.parent_alias == other.parent_alias and self.join_field == other.join_field and (not with_filtered_relation or self.filtered_relation == other.filtered_relation) ) def __eq__(self, other): return self.equals(other, with_filtered_relation=True) def demote(self): new = self.relabeled_clone({}) new.join_type = INNER return new def promote(self): new = self.relabeled_clone({}) new.join_type = LOUTER return new class BaseTable: """ The BaseTable class is used for base table references in FROM clause. For example, the SQL "foo" in SELECT * FROM "foo" WHERE somecond could be generated by this class. """ join_type = None parent_alias = None filtered_relation = None def __init__(self, table_name, alias): self.table_name = table_name self.table_alias = alias def as_sql(self, compiler, connection): alias_str = '' if self.table_alias == self.table_name else (' %s' % self.table_alias) base_sql = compiler.quote_name_unless_alias(self.table_name) return base_sql + alias_str, [] def relabeled_clone(self, change_map): return self.__class__(self.table_name, change_map.get(self.table_alias, self.table_alias)) def equals(self, other, with_filtered_relation): return ( isinstance(self, other.__class__) and self.table_name == other.table_name and self.table_alias == other.table_alias )
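# Editor's sketch: how as_sql() above builds the ON clause from join_cols
# (quoting callbacks simplified away):
def on_clause(parent_alias, table_alias, join_cols):
    conditions = ['%s.%s = %s.%s' % (parent_alias, lhs, table_alias, rhs)
                  for lhs, rhs in join_cols]
    return ' AND '.join(conditions)

# on_clause('othertable', 'sometable', [('sometable_id', 'id')])
# -> 'othertable.sometable_id = sometable.id'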
bsd-3-clause
Bitl/RBXLegacy-src
Cut/RBXLegacyDiscordBot/lib/pip/_vendor/requests/packages/chardet/gb2312freq.py
3132
36011
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # GB2312 most frequently used character table # # Char to FreqOrder table , from hz6763 # 512 --> 0.79 -- 0.79 # 1024 --> 0.92 -- 0.13 # 2048 --> 0.98 -- 0.06 # 6768 --> 1.00 -- 0.02 # # Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79 # Random Distribution Ration = 512 / (3755 - 512) = 0.157 # # Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9 GB2312_TABLE_SIZE = 3760 GB2312CharToFreqOrder = ( 1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205, 2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842, 2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409, 249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670, 1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820, 1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585, 152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566, 1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575, 2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853, 3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061, 544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155, 1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406, 927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816, 2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606, 360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023, 2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414, 1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513, 3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052, 198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570, 1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575, 253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250, 2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506, 1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26, 3286,2266,3769,3344,2707,3677, 611,1402, 
531,1028,2871,4548,1375, 261,2948, 835, 1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686, 2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054, 1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894, 585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105, 3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403, 3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694, 252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873, 3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940, 836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121, 1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648, 3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992, 2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233, 1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157, 755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807, 1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094, 4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258, 887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478, 3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152, 3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909, 509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272, 1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221, 2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252, 1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301, 1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254, 389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070, 3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461, 3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360, 4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124, 296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535, 3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243, 1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713, 1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071, 4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442, 215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946, 814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257, 3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180, 1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427, 602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781, 1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724, 2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937, 930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943, 432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789, 396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552, 3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 
640,1045,1370,1246, 4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451, 3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310, 750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860, 2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297, 2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780, 2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745, 776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936, 2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032, 968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657, 163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414, 220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976, 3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436, 2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254, 2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536, 1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238, 18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059, 2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741, 90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447, 286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601, 1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269, 1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894, 915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173, 681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994, 1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956, 2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437, 3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154, 2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240, 2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143, 2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634, 3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472, 1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541, 1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143, 2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312, 1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414, 3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754, 1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424, 1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302, 3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739, 795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004, 2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484, 1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739, 4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535, 1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641, 1306,1810,2737,3078,2912, 
564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307, 3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573, 1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533, 47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965, 504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99, 1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280, 160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505, 1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012, 1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039, 744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982, 3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530, 4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392, 3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656, 2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220, 2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766, 1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535, 3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728, 2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338, 1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627, 1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885, 125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411, 2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671, 2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162, 3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774, 4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524, 3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346, 180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040, 3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188, 2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280, 1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131, 259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947, 774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970, 3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814, 4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557, 2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997, 1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972, 1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369, 766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376, 1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480, 3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610, 955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128, 642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769, 1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207, 57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392, 1783, 362, 8,3433,3422, 
610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623, 193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782, 2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650, 158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478, 2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773, 2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007, 1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323, 1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598, 2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961, 819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302, 1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409, 1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683, 2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191, 2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616, 3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302, 1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774, 4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147, 571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731, 845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464, 3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377, 1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315, 470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557, 3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903, 1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060, 4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261, 1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092, 2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810, 1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708, 498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658, 1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871, 3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503, 448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229, 2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112, 136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504, 1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389, 1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27, 1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542, 3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861, 2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845, 3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700, 3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469, 3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582, 996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999, 2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274, 786, 620,1845,2001,3311, 484, 
308,3367,1204,1815,3691,2332,1532,2557,1842,2020, 2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601, 12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628, 1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31, 475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668, 233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778, 1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169, 3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667, 3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881, 1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276, 1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320, 3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751, 2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432, 2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772, 1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843, 3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116, 451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904, 4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652, 1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664, 2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770, 3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283, 3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626, 1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713, 768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333, 391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062, 2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555, 931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014, 1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510, 386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015, 1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459, 1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390, 1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238, 1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232, 1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624, 381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189, 852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512 #Everything below is of no interest for detection purpose 5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636, 5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874, 5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278, 3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806, 4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827, 5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512, 5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578, 
4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828, 4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105, 4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189, 4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561, 3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226, 6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778, 4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039, 6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404, 4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213, 4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739, 4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328, 5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592, 3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424, 4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270, 3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232, 4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456, 4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121, 6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971, 6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409, 5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519, 4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367, 6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834, 4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460, 5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464, 5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709, 5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906, 6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530, 3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262, 6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920, 4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190, 5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318, 6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538, 6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697, 4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544, 5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016, 4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638, 5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006, 5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071, 4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552, 4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556, 5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432, 4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632, 4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885, 
5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336, 4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729, 4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854, 4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332, 5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004, 5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419, 4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293, 3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580, 4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339, 6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341, 5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493, 5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046, 4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904, 6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728, 5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350, 6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233, 4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944, 5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413, 5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700, 3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999, 5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694, 6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571, 4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359, 6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178, 4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421, 4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330, 6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855, 3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587, 6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803, 4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791, 3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304, 3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445, 3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506, 4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856, 2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057, 5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777, 4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369, 5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028, 5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914, 5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175, 4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681, 5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534, 4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912, 
5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054, 1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336, 3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666, 4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375, 4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113, 6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614, 4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173, 5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197, 3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271, 5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423, 5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529, 5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921, 3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837, 5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922, 5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187, 3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382, 5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628, 5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683, 5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053, 6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928, 4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662, 6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663, 4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554, 3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191, 4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013, 5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932, 5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055, 5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829, 3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096, 3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660, 6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199, 6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748, 5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402, 6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957, 6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668, 6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763, 6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407, 6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051, 5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429, 6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791, 6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028, 3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305, 3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159, 
4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683, 4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372, 3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514, 5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544, 5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472, 5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716, 5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905, 5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327, 4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030, 5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281, 6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224, 5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327, 4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062, 4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354, 6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065, 3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953, 4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681, 4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708, 5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442, 6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387, 6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237, 4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713, 6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547, 5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957, 5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337, 5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074, 5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685, 5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455, 4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722, 5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615, 5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093, 5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989, 5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094, 6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212, 4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967, 5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733, 4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260, 4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864, 6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353, 4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095, 6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287, 3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504, 5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539, 
6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750, 6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864, 6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213, 5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573, 6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252, 6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970, 3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703, 5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978, 4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767) # flake8: noqa
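# --- Editor's illustrative sketch (not part of the original file) ----------
# A simplified, stand-alone rendition of how chardet uses a char-to-frequency-
# order table like the one above: characters mapping into the first 512 orders
# count as "frequent", and the frequent/rare ratio is compared against
# GB2312_TYPICAL_DISTRIBUTION_RATIO. The real analyser lives in
# chardistribution.py; this is a hedged approximation of its confidence math.
def distribution_confidence(freq_orders, typical_ratio=0.9, freq_cat=512):
    frequent = sum(1 for order in freq_orders if 0 <= order < freq_cat)
    rare = len(freq_orders) - frequent
    if rare == 0:
        return 0.99
    return min(float(frequent) / rare / typical_ratio, 0.99)

print(distribution_confidence([3, 510, 700, 12, 45]))  # mostly frequent -> high confidence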
gpl-3.0
openshift/openshift-tools
ansible/roles/lib_oa_openshift/src/ansible/oc_adm_ca_server_cert.py
63
1365
# pylint: skip-file
# flake8: noqa

# pylint: disable=wrong-import-position
from ansible.module_utils.six import string_types


def main():
    '''
    ansible oc adm module for ca create-server-cert
    '''

    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default='present', type='str', choices=['present']),
            debug=dict(default=False, type='bool'),
            kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
            backup=dict(default=True, type='bool'),
            force=dict(default=False, type='bool'),
            # oc adm ca create-server-cert [options]
            cert=dict(default=None, type='str'),
            key=dict(default=None, type='str'),
            signer_cert=dict(default='/etc/origin/master/ca.crt', type='str'),
            signer_key=dict(default='/etc/origin/master/ca.key', type='str'),
            signer_serial=dict(default='/etc/origin/master/ca.serial.txt', type='str'),
            hostnames=dict(default=[], type='list'),
            expire_days=dict(default=None, type='int'),
        ),
        supports_check_mode=True,
    )

    results = CAServerCert.run_ansible(module.params, module.check_mode)
    if 'failed' in results:
        return module.fail_json(**results)

    return module.exit_json(**results)


if __name__ == '__main__':
    main()
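# --- Editor's illustrative sketch (not part of the original file) ----------
# AnsibleModule and CAServerCert are not defined here; this file appears to be
# a fragment concatenated with other src/ pieces at build time (hence the
# skip-file/noqa headers). This dependency-free sketch only shows the result
# contract used above: run_ansible() returns a dict, and the presence of a
# 'failed' key selects fail_json over exit_json.
def dispatch(results):
    if 'failed' in results:
        return ('fail_json', results)
    return ('exit_json', results)

print(dispatch({'changed': True, 'results': {}}))           # exit path
print(dispatch({'failed': True, 'msg': 'cert not found'}))  # fail path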
apache-2.0
dan1/horizon-x509
horizon/forms/base.py
79
2451
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from django import forms
from django.forms.forms import NON_FIELD_ERRORS  # noqa


class SelfHandlingMixin(object):
    def __init__(self, request, *args, **kwargs):
        self.request = request
        if not hasattr(self, "handle"):
            raise NotImplementedError("%s does not define a handle method."
                                      % self.__class__.__name__)
        super(SelfHandlingMixin, self).__init__(*args, **kwargs)


class SelfHandlingForm(SelfHandlingMixin, forms.Form):
    """A base :class:`Form <django:django.forms.Form>` class which includes
    processing logic in its subclasses.
    """
    required_css_class = 'required'

    def api_error(self, message):
        """Adds an error to the form's error dictionary after validation
        based on problems reported via the API. This is useful when you
        wish for API errors to appear as errors on the form rather than
        using the messages framework.
        """
        self._errors[NON_FIELD_ERRORS] = self.error_class([message])

    def set_warning(self, message):
        """Sets a warning on the form.

        Unlike NON_FIELD_ERRORS, this doesn't fail form validation.
        """
        self.warnings = self.error_class([message])


class DateForm(forms.Form):
    """A simple form for selecting a range of time."""
    start = forms.DateField(input_formats=("%Y-%m-%d",))
    end = forms.DateField(input_formats=("%Y-%m-%d",))

    def __init__(self, *args, **kwargs):
        super(DateForm, self).__init__(*args, **kwargs)
        self.fields['start'].widget.attrs['data-date-format'] = "yyyy-mm-dd"
        self.fields['end'].widget.attrs['data-date-format'] = "yyyy-mm-dd"
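# --- Editor's illustrative usage sketch (hypothetical, not part of the file).
# A subclass only has to define handle(); SelfHandlingMixin stores the request
# and raises NotImplementedError otherwise. Minimal settings are configured so
# the form can be exercised stand-alone on a modern Django install.
import django
from django.conf import settings

if not settings.configured:
    settings.configure()
    django.setup()

class NameForm(SelfHandlingForm):
    name = forms.CharField(max_length=30)

    def handle(self, request, data):
        # A real Horizon form would call an API here; we just echo the input.
        return data['name']

form = NameForm(request=None, data={'name': 'demo'})
print(form.is_valid(), form.handle(None, form.cleaned_data))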
apache-2.0
cooldudezach/android_kernel_zte_warplte
tools/perf/util/setup.py
4998
1330
#!/usr/bin/python2

from distutils.core import setup, Extension
from os import getenv

from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib


class build_ext(_build_ext):
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib = build_lib
        self.build_temp = build_tmp


class install_lib(_install_lib):
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib


cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')

ext_sources = [f.strip() for f in file('util/python-ext-sources')
               if len(f.strip()) > 0 and f[0] != '#']

perf = Extension('perf',
                 sources = ext_sources,
                 include_dirs = ['util/include'],
                 extra_compile_args = cflags,
                )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='[email protected]',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
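# --- Editor's illustrative sketch (not part of the original file) ----------
# The source-list comprehension above relies on the Python-2-only file()
# builtin. An equivalent, version-neutral reading of the list (keep non-empty
# lines whose first character is not '#') looks like this:
def read_sources(path):
    with open(path) as fh:
        return [line.strip() for line in fh
                if line.strip() and not line.startswith('#')]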
gpl-2.0
shyamalschandra/scikit-learn
examples/decomposition/plot_image_denoising.py
181
5819
""" ========================================= Image denoising using dictionary learning ========================================= An example comparing the effect of reconstructing noisy fragments of the Lena image using firstly online :ref:`DictionaryLearning` and various transform methods. The dictionary is fitted on the distorted left half of the image, and subsequently used to reconstruct the right half. Note that even better performance could be achieved by fitting to an undistorted (i.e. noiseless) image, but here we start from the assumption that it is not available. A common practice for evaluating the results of image denoising is by looking at the difference between the reconstruction and the original image. If the reconstruction is perfect this will look like Gaussian noise. It can be seen from the plots that the results of :ref:`omp` with two non-zero coefficients is a bit less biased than when keeping only one (the edges look less prominent). It is in addition closer from the ground truth in Frobenius norm. The result of :ref:`least_angle_regression` is much more strongly biased: the difference is reminiscent of the local intensity value of the original image. Thresholding is clearly not useful for denoising, but it is here to show that it can produce a suggestive output with very high speed, and thus be useful for other tasks such as object classification, where performance is not necessarily related to visualisation. """ print(__doc__) from time import time import matplotlib.pyplot as plt import numpy as np from scipy.misc import lena from sklearn.decomposition import MiniBatchDictionaryLearning from sklearn.feature_extraction.image import extract_patches_2d from sklearn.feature_extraction.image import reconstruct_from_patches_2d ############################################################################### # Load Lena image and extract patches lena = lena() / 256.0 # downsample for higher speed lena = lena[::2, ::2] + lena[1::2, ::2] + lena[::2, 1::2] + lena[1::2, 1::2] lena /= 4.0 height, width = lena.shape # Distort the right half of the image print('Distorting image...') distorted = lena.copy() distorted[:, height // 2:] += 0.075 * np.random.randn(width, height // 2) # Extract all reference patches from the left half of the image print('Extracting reference patches...') t0 = time() patch_size = (7, 7) data = extract_patches_2d(distorted[:, :height // 2], patch_size) data = data.reshape(data.shape[0], -1) data -= np.mean(data, axis=0) data /= np.std(data, axis=0) print('done in %.2fs.' % (time() - t0)) ############################################################################### # Learn the dictionary from reference patches print('Learning the dictionary...') t0 = time() dico = MiniBatchDictionaryLearning(n_components=100, alpha=1, n_iter=500) V = dico.fit(data).components_ dt = time() - t0 print('done in %.2fs.' 
% dt) plt.figure(figsize=(4.2, 4)) for i, comp in enumerate(V[:100]): plt.subplot(10, 10, i + 1) plt.imshow(comp.reshape(patch_size), cmap=plt.cm.gray_r, interpolation='nearest') plt.xticks(()) plt.yticks(()) plt.suptitle('Dictionary learned from Lena patches\n' + 'Train time %.1fs on %d patches' % (dt, len(data)), fontsize=16) plt.subplots_adjust(0.08, 0.02, 0.92, 0.85, 0.08, 0.23) ############################################################################### # Display the distorted image def show_with_diff(image, reference, title): """Helper function to display denoising""" plt.figure(figsize=(5, 3.3)) plt.subplot(1, 2, 1) plt.title('Image') plt.imshow(image, vmin=0, vmax=1, cmap=plt.cm.gray, interpolation='nearest') plt.xticks(()) plt.yticks(()) plt.subplot(1, 2, 2) difference = image - reference plt.title('Difference (norm: %.2f)' % np.sqrt(np.sum(difference ** 2))) plt.imshow(difference, vmin=-0.5, vmax=0.5, cmap=plt.cm.PuOr, interpolation='nearest') plt.xticks(()) plt.yticks(()) plt.suptitle(title, size=16) plt.subplots_adjust(0.02, 0.02, 0.98, 0.79, 0.02, 0.2) show_with_diff(distorted, lena, 'Distorted image') ############################################################################### # Extract noisy patches and reconstruct them using the dictionary print('Extracting noisy patches... ') t0 = time() data = extract_patches_2d(distorted[:, height // 2:], patch_size) data = data.reshape(data.shape[0], -1) intercept = np.mean(data, axis=0) data -= intercept print('done in %.2fs.' % (time() - t0)) transform_algorithms = [ ('Orthogonal Matching Pursuit\n1 atom', 'omp', {'transform_n_nonzero_coefs': 1}), ('Orthogonal Matching Pursuit\n2 atoms', 'omp', {'transform_n_nonzero_coefs': 2}), ('Least-angle regression\n5 atoms', 'lars', {'transform_n_nonzero_coefs': 5}), ('Thresholding\n alpha=0.1', 'threshold', {'transform_alpha': .1})] reconstructions = {} for title, transform_algorithm, kwargs in transform_algorithms: print(title + '...') reconstructions[title] = lena.copy() t0 = time() dico.set_params(transform_algorithm=transform_algorithm, **kwargs) code = dico.transform(data) patches = np.dot(code, V) if transform_algorithm == 'threshold': patches -= patches.min() patches /= patches.max() patches += intercept patches = patches.reshape(len(data), *patch_size) if transform_algorithm == 'threshold': patches -= patches.min() patches /= patches.max() reconstructions[title][:, height // 2:] = reconstruct_from_patches_2d( patches, (width, height // 2)) dt = time() - t0 print('done in %.2fs.' % dt) show_with_diff(reconstructions[title], lena, title + ' (time: %.1fs)' % dt) plt.show()
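# --- Editor's illustrative sketch (not part of the original example) --------
# The same fit/transform round trip on synthetic patches; scipy.misc.lena()
# was removed from newer SciPy releases, so random data stands in here, and
# n_iter matches the sklearn API of this example's era.
import numpy as np
from sklearn.decomposition import MiniBatchDictionaryLearning

rng = np.random.RandomState(0)
X = rng.randn(200, 49)  # 200 flattened 7x7 "patches"
dl = MiniBatchDictionaryLearning(n_components=20, alpha=1, n_iter=50)
atoms = dl.fit(X).components_      # the learned dictionary
codes = dl.transform(X)            # sparse codes for each patch
print(np.dot(codes, atoms).shape)  # (200, 49): the reconstructed patches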
bsd-3-clause
micropython/micropython
tests/misc/sys_settrace_loop.py
8
1144
# test sys.settrace with while and for loops
import sys

try:
    sys.settrace
except AttributeError:
    print("SKIP")
    raise SystemExit


def print_stacktrace(frame, level=0):
    print(
        "%2d: %s@%s:%s => %s:%d"
        % (
            level,
            " ",
            frame.f_globals["__name__"],
            frame.f_code.co_name,
            # Keep just the filename.
            "sys_settrace_" + frame.f_code.co_filename.split("sys_settrace_")[-1],
            frame.f_lineno,
        )
    )
    if frame.f_back:
        print_stacktrace(frame.f_back, level + 1)


trace_count = 0


def trace_tick_handler(frame, event, arg):
    global trace_count
    print("### trace_handler::main event:", event)
    trace_count += 1
    print_stacktrace(frame)
    return trace_tick_handler


def test_loop():
    # for loop
    r = 0
    for i in range(3):
        r += i
    print("test_for_loop", r)

    # while loop
    r = 0
    i = 0
    while i < 3:
        r += i
        i += 1
    print("test_while_loop", i)


sys.settrace(trace_tick_handler)
test_loop()
sys.settrace(None)
print("Total traces executed: ", trace_count)
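# --- Editor's illustrative sketch (not part of the original test) -----------
# The smallest useful tracer: count only 'line' events for a single call.
# Runs on CPython and on MicroPython builds with MICROPY_PY_SYS_SETTRACE.
import sys

line_events = 0

def line_counter(frame, event, arg):
    global line_events
    if event == "line":
        line_events += 1
    return line_counter

def add3():
    total = 0
    for i in range(3):
        total += i
    return total

sys.settrace(line_counter)
add3()
sys.settrace(None)
print("line events:", line_events)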
mit
sbalde/edxplatform
common/djangoapps/student/tests/test_recent_enrollments.py
63
7937
""" Tests for the recently enrolled messaging within the Dashboard. """ import datetime from django.conf import settings from django.core.urlresolvers import reverse from opaque_keys.edx import locator from pytz import UTC import unittest import ddt from shoppingcart.models import DonationConfiguration from student.tests.factories import UserFactory from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase from xmodule.modulestore.tests.factories import CourseFactory from course_modes.tests.factories import CourseModeFactory from student.models import CourseEnrollment, DashboardConfiguration from student.views import get_course_enrollments, _get_recently_enrolled_courses # pylint: disable=protected-access @unittest.skipUnless(settings.ROOT_URLCONF == 'lms.urls', 'Test only valid in lms') @ddt.ddt class TestRecentEnrollments(ModuleStoreTestCase): """ Unit tests for getting the list of courses for a logged in user """ PASSWORD = 'test' def setUp(self): """ Add a student """ super(TestRecentEnrollments, self).setUp() self.student = UserFactory() self.student.set_password(self.PASSWORD) self.student.save() # Old Course old_course_location = locator.CourseLocator('Org0', 'Course0', 'Run0') course, enrollment = self._create_course_and_enrollment(old_course_location) enrollment.created = datetime.datetime(1900, 12, 31, 0, 0, 0, 0) enrollment.save() # New Course course_location = locator.CourseLocator('Org1', 'Course1', 'Run1') self.course, self.enrollment = self._create_course_and_enrollment(course_location) def _create_course_and_enrollment(self, course_location): """ Creates a course and associated enrollment. """ course = CourseFactory.create( org=course_location.org, number=course_location.course, run=course_location.run ) enrollment = CourseEnrollment.enroll(self.student, course.id) return course, enrollment def _configure_message_timeout(self, timeout): """Configure the amount of time the enrollment message will be displayed. """ config = DashboardConfiguration(recent_enrollment_time_delta=timeout) config.save() def test_recently_enrolled_courses(self): """ Test if the function for filtering recent enrollments works appropriately. """ self._configure_message_timeout(60) # get courses through iterating all courses courses_list = list(get_course_enrollments(self.student, None, [])) self.assertEqual(len(courses_list), 2) recent_course_list = _get_recently_enrolled_courses(courses_list) self.assertEqual(len(recent_course_list), 1) def test_zero_second_delta(self): """ Tests that the recent enrollment list is empty if configured to zero seconds. """ self._configure_message_timeout(0) courses_list = list(get_course_enrollments(self.student, None, [])) self.assertEqual(len(courses_list), 2) recent_course_list = _get_recently_enrolled_courses(courses_list) self.assertEqual(len(recent_course_list), 0) def test_enrollments_sorted_most_recent(self): """ Test that the list of newly created courses are properly sorted to show the most recent enrollments first. 
""" self._configure_message_timeout(600) # Create a number of new enrollments and courses, and force their creation behind # the first enrollment courses = [] for idx, seconds_past in zip(range(2, 6), [5, 10, 15, 20]): course_location = locator.CourseLocator( 'Org{num}'.format(num=idx), 'Course{num}'.format(num=idx), 'Run{num}'.format(num=idx) ) course, enrollment = self._create_course_and_enrollment(course_location) enrollment.created = datetime.datetime.now(UTC) - datetime.timedelta(seconds=seconds_past) enrollment.save() courses.append(course) courses_list = list(get_course_enrollments(self.student, None, [])) self.assertEqual(len(courses_list), 6) recent_course_list = _get_recently_enrolled_courses(courses_list) self.assertEqual(len(recent_course_list), 5) self.assertEqual(recent_course_list[1].course.id, courses[0].id) self.assertEqual(recent_course_list[2].course.id, courses[1].id) self.assertEqual(recent_course_list[3].course.id, courses[2].id) self.assertEqual(recent_course_list[4].course.id, courses[3].id) def test_dashboard_rendering(self): """ Tests that the dashboard renders the recent enrollment messages appropriately. """ self._configure_message_timeout(600) self.client.login(username=self.student.username, password=self.PASSWORD) response = self.client.get(reverse("dashboard")) self.assertContains(response, "Thank you for enrolling in") @ddt.data( #Register as an honor in any course modes with no payment option ([('audit', 0), ('honor', 0)], 'honor', True), ([('honor', 0)], 'honor', True), ([], 'honor', True), #Register as an honor in any course modes which has payment option ([('honor', 10)], 'honor', False), # This is a paid course ([('audit', 0), ('honor', 0), ('professional', 20)], 'honor', True), ([('audit', 0), ('honor', 0), ('verified', 20)], 'honor', True), ([('audit', 0), ('honor', 0), ('verified', 20), ('professional', 20)], 'honor', True), ([], 'honor', True), #Register as an audit in any course modes with no payment option ([('audit', 0), ('honor', 0)], 'audit', True), ([('audit', 0)], 'audit', True), #Register as an audit in any course modes which has no payment option ([('audit', 0), ('honor', 0), ('verified', 10)], 'audit', True), #Register as a verified in any course modes which has payment option ([('professional', 20)], 'professional', False), ([('verified', 20)], 'verified', False), ([('professional', 20), ('verified', 20)], 'verified', False), ([('audit', 0), ('honor', 0), ('verified', 20)], 'verified', False) ) @ddt.unpack def test_donate_button(self, course_modes, enrollment_mode, show_donate): # Enable the enrollment success message self._configure_message_timeout(10000) # Enable donations DonationConfiguration(enabled=True).save() # Create the course mode(s) for mode, min_price in course_modes: CourseModeFactory(mode_slug=mode, course_id=self.course.id, min_price=min_price) self.enrollment.mode = enrollment_mode self.enrollment.save() # Check that the donate button is or is not displayed self.client.login(username=self.student.username, password=self.PASSWORD) response = self.client.get(reverse("dashboard")) if show_donate: self.assertContains(response, "donate-container") else: self.assertNotContains(response, "donate-container") def test_donate_button_honor_with_price(self): # Enable the enrollment success message and donations self._configure_message_timeout(10000) DonationConfiguration(enabled=True).save() # Create a white-label course mode # (honor mode with a price set) CourseModeFactory(mode_slug="honor", course_id=self.course.id, 
min_price=100) # Check that the donate button is NOT displayed self.client.login(username=self.student.username, password=self.PASSWORD) response = self.client.get(reverse("dashboard")) self.assertNotContains(response, "donate-container")
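# --- Editor's illustrative sketch (an assumption, not the edx-platform code).
# The behaviour under test, reduced to its core: keep enrollments created
# within the configured time delta, newest first.
import datetime

def recently_enrolled(enrollments, seconds, now=None):
    now = now or datetime.datetime.utcnow()
    cutoff = now - datetime.timedelta(seconds=seconds)
    recent = [e for e in enrollments if e['created'] > cutoff]
    return sorted(recent, key=lambda e: e['created'], reverse=True)

now = datetime.datetime.utcnow()
rows = [{'course': 'old', 'created': now - datetime.timedelta(days=400)},
        {'course': 'new', 'created': now - datetime.timedelta(seconds=5)}]
print([e['course'] for e in recently_enrolled(rows, 60, now)])  # ['new']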
agpl-3.0
Audacity-Team/Audacity
lib-src/lv2/lv2/plugins/eg01-amp.lv2/waflib/Tools/ruby.py
316
3925
#! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file

import os
from waflib import Task,Options,Utils
from waflib.TaskGen import before_method,feature,after_method,Task,extension
from waflib.Configure import conf
@feature('rubyext')
@before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link')
def init_rubyext(self):
	self.install_path='${ARCHDIR_RUBY}'
	self.uselib=self.to_list(getattr(self,'uselib',''))
	if not'RUBY'in self.uselib:
		self.uselib.append('RUBY')
	if not'RUBYEXT'in self.uselib:
		self.uselib.append('RUBYEXT')
@feature('rubyext')
@before_method('apply_link','propagate_uselib')
def apply_ruby_so_name(self):
	self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN']
@conf
def check_ruby_version(self,minver=()):
	if Options.options.rubybinary:
		self.env.RUBY=Options.options.rubybinary
	else:
		self.find_program('ruby',var='RUBY')
	ruby=self.env.RUBY
	try:
		version=self.cmd_and_log([ruby,'-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
	except Exception:
		self.fatal('could not determine ruby version')
	self.env.RUBY_VERSION=version
	try:
		ver=tuple(map(int,version.split(".")))
	except Exception:
		self.fatal('unsupported ruby version %r'%version)
	cver=''
	if minver:
		if ver<minver:
			self.fatal('ruby is too old %r'%ver)
		cver='.'.join([str(x)for x in minver])
	else:
		cver=ver
	self.msg('Checking for ruby version %s'%str(minver or''),cver)
@conf
def check_ruby_ext_devel(self):
	if not self.env.RUBY:
		self.fatal('ruby detection is required first')
	if not self.env.CC_NAME and not self.env.CXX_NAME:
		self.fatal('load a c/c++ compiler first')
	version=tuple(map(int,self.env.RUBY_VERSION.split(".")))
	def read_out(cmd):
		return Utils.to_list(self.cmd_and_log([self.env.RUBY,'-rrbconfig','-e',cmd]))
	def read_config(key):
		return read_out('puts Config::CONFIG[%r]'%key)
	ruby=self.env['RUBY']
	archdir=read_config('archdir')
	cpppath=archdir
	if version>=(1,9,0):
		ruby_hdrdir=read_config('rubyhdrdir')
		cpppath+=ruby_hdrdir
		cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])]
	self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file')
	self.env.LIBPATH_RUBYEXT=read_config('libdir')
	self.env.LIBPATH_RUBYEXT+=archdir
	self.env.INCLUDES_RUBYEXT=cpppath
	self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS')
	self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0]
	flags=read_config('LDSHARED')
	while flags and flags[0][0]!='-':
		flags=flags[1:]
	if len(flags)>1 and flags[1]=="ppc":
		flags=flags[2:]
	self.env.LINKFLAGS_RUBYEXT=flags
	self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS')
	self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED')
	if Options.options.rubyarchdir:
		self.env.ARCHDIR_RUBY=Options.options.rubyarchdir
	else:
		self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0]
	if Options.options.rubylibdir:
		self.env.LIBDIR_RUBY=Options.options.rubylibdir
	else:
		self.env.LIBDIR_RUBY=read_config('sitelibdir')[0]
@conf
def check_ruby_module(self,module_name):
	self.start_msg('Ruby module %s'%module_name)
	try:
		self.cmd_and_log([self.env['RUBY'],'-e','require \'%s\';puts 1'%module_name])
	except Exception:
		self.end_msg(False)
		self.fatal('Could not find the ruby module %r'%module_name)
	self.end_msg(True)
@extension('.rb')
def process(self,node):
	tsk=self.create_task('run_ruby',node)
class run_ruby(Task.Task):
	run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
def options(opt):
	opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files')
	opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path')
	opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary')
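# --- Editor's illustrative sketch (hypothetical wscript, not from this repo).
# How a project would normally drive the configure-time checks defined above;
# the minver tuple and module name are example values.
def configure(conf):
	conf.load('ruby')
	conf.check_ruby_version((1, 8, 7))
	conf.check_ruby_ext_devel()
	conf.check_ruby_module('libxml')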
mit
openiitbombayx/edx-platform
lms/djangoapps/courseware/features/openended.py
119
2996
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name

from lettuce import world, step
from lettuce.django import django_url
from nose.tools import assert_equals, assert_in  # pylint: disable=no-name-in-module

from logging import getLogger
logger = getLogger(__name__)


@step('I navigate to an openended question$')
def navigate_to_an_openended_question(step):
    world.register_by_course_key('MITx/3.091x/2012_Fall')
    world.log_in(email='[email protected]', password='test')
    problem = '/courses/MITx/3.091x/2012_Fall/courseware/Week_10/Polymer_Synthesis/'
    world.browser.visit(django_url(problem))
    tab_css = 'ol#sequence-list > li > a[data-element="5"]'
    world.css_click(tab_css)


@step('I navigate to an openended question as staff$')
def navigate_to_an_openended_question_as_staff(step):
    world.register_by_course_key('MITx/3.091x/2012_Fall', True)
    world.log_in(email='[email protected]', password='test')
    problem = '/courses/MITx/3.091x/2012_Fall/courseware/Week_10/Polymer_Synthesis/'
    world.browser.visit(django_url(problem))
    tab_css = 'ol#sequence-list > li > a[data-element="5"]'
    world.css_click(tab_css)


@step(u'I enter the answer "([^"]*)"$')
def enter_the_answer_text(step, text):
    world.css_fill('textarea', text)


@step(u'I submit the answer "([^"]*)"$')
def i_submit_the_answer_text(step, text):
    world.css_fill('textarea', text)
    world.css_click('input.check')


@step('I click the link for full output$')
def click_full_output_link(step):
    world.css_click('a.full')


@step(u'I visit the staff grading page$')
def i_visit_the_staff_grading_page(step):
    world.click_link('Instructor')
    world.click_link('Staff grading')


@step(u'I see the grader message "([^"]*)"$')
def see_grader_message(step, msg):
    message_css = 'div.external-grader-message'
    assert_in(msg, world.css_text(message_css))


@step(u'I see the grader status "([^"]*)"$')
def see_the_grader_status(step, status):
    status_css = 'div.grader-status'
    assert_equals(status, world.css_text(status_css))


@step('I see the red X$')
def see_the_red_x(step):
    assert world.is_css_present('div.grader-status > span.incorrect')


@step(u'I see the grader score "([^"]*)"$')
def see_the_grader_score(step, score):
    score_css = 'div.result-output > p'
    score_text = world.css_text(score_css)
    assert_equals(score_text, 'Score: %s' % score)


@step('I see the link for full output$')
def see_full_output_link(step):
    assert world.is_css_present('a.full')


@step('I see the spelling grading message "([^"]*)"$')
def see_spelling_msg(step, msg):
    spelling_msg = world.css_text('div.spelling')
    assert_equals('Spelling: %s' % msg, spelling_msg)


@step(u'my answer is queued for instructor grading$')
def answer_is_queued_for_instructor_grading(step):
    list_css = 'ul.problem-list > li > a'
    actual_msg = world.css_text(list_css)
    expected_msg = "(0 graded, 1 pending)"
    assert_in(expected_msg, actual_msg)
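# --- Editor's illustrative sketch (not part of the original steps file) -----
# How lettuce-style step sentences bind to handler arguments: the decorator
# pattern is an ordinary regex whose capture groups become positional args.
import re

pattern = re.compile(u'I enter the answer "([^"]*)"$')
match = pattern.search(u'I enter the answer "polymer synthesis"')
print(match.group(1))  # polymer synthesis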
agpl-3.0
waytai/odoo
addons/web/controllers/main.py
46
66013
# -*- coding: utf-8 -*-

import ast
import base64
import csv
import functools
import glob
import itertools
import jinja2
import logging
import operator
import datetime
import hashlib
import os
import re
import simplejson
import sys
import time
import urllib2
import zlib
from xml.etree import ElementTree
from cStringIO import StringIO

import babel.messages.pofile
import werkzeug.utils
import werkzeug.wrappers
try:
    import xlwt
except ImportError:
    xlwt = None

import openerp
import openerp.modules.registry
from openerp.addons.base.ir.ir_qweb import AssetsBundle, QWebTemplateNotFound
from openerp.modules import get_module_resource
from openerp.service import model as service_model
from openerp.tools import topological_sort
from openerp.tools.translate import _
from openerp.tools import ustr
from openerp import http

from openerp.http import request, serialize_exception as _serialize_exception

_logger = logging.getLogger(__name__)

if hasattr(sys, 'frozen'):
    # When running on compiled windows binary, we don't have access to package loader.
    path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'views'))
    loader = jinja2.FileSystemLoader(path)
else:
    loader = jinja2.PackageLoader('openerp.addons.web', "views")

env = jinja2.Environment(loader=loader, autoescape=True)
env.filters["json"] = simplejson.dumps

# 1 week cache for asset bundles as advised by Google Page Speed
BUNDLE_MAXAGE = 60 * 60 * 24 * 7

#----------------------------------------------------------
# OpenERP Web helpers
#----------------------------------------------------------

db_list = http.db_list

db_monodb = http.db_monodb

def serialize_exception(f):
    @functools.wraps(f)
    def wrap(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception, e:
            _logger.exception("An exception occured during an http request")
            se = _serialize_exception(e)
            error = {
                'code': 200,
                'message': "Odoo Server Error",
                'data': se
            }
            return werkzeug.exceptions.InternalServerError(simplejson.dumps(error))
    return wrap

def redirect_with_hash(*args, **kw):
    """
        .. deprecated:: 8.0

        Use the ``http.redirect_with_hash()`` function instead.
    """
    return http.redirect_with_hash(*args, **kw)

def abort_and_redirect(url):
    r = request.httprequest
    response = werkzeug.utils.redirect(url, 302)
    response = r.app.get_response(r, response, explicit_session=False)
    werkzeug.exceptions.abort(response)

def ensure_db(redirect='/web/database/selector'):
    # This helper should be used in web client auth="none" routes
    # if those routes needs a db to work with.
    # If the heuristics does not find any database, then the users will be
    # redirected to db selector or any url specified by `redirect` argument.
    # If the db is taken out of a query parameter, it will be checked against
    # `http.db_filter()` in order to ensure it's legit and thus avoid db
    # forgering that could lead to xss attacks.
    db = request.params.get('db')

    # Ensure db is legit
    if db and db not in http.db_filter([db]):
        db = None

    if db and not request.session.db:
        # User asked a specific database on a new session.
        # That mean the nodb router has been used to find the route
        # Depending on installed module in the database, the rendering of the page
        # may depend on data injected by the database route dispatcher.
        # Thus, we redirect the user to the same page but with the session cookie set.
        # This will force using the database route dispatcher...
        r = request.httprequest
        url_redirect = r.base_url
        if r.query_string:
            # Can't use werkzeug.wrappers.BaseRequest.url with encoded hashes:
            # https://github.com/amigrave/werkzeug/commit/b4a62433f2f7678c234cdcac6247a869f90a7eb7
            url_redirect += '?' + r.query_string
        response = werkzeug.utils.redirect(url_redirect, 302)
        request.session.db = db
        abort_and_redirect(url_redirect)

    # if db not provided, use the session one
    if not db and request.session.db and http.db_filter([request.session.db]):
        db = request.session.db

    # if no database provided and no database in session, use monodb
    if not db:
        db = db_monodb(request.httprequest)

    # if no db can be found til here, send to the database selector
    # the database selector will redirect to database manager if needed
    if not db:
        werkzeug.exceptions.abort(werkzeug.utils.redirect(redirect, 303))

    # always switch the session to the computed db
    if db != request.session.db:
        request.session.logout()
        abort_and_redirect(request.httprequest.url)

    request.session.db = db

def module_installed():
    # Candidates module the current heuristic is the /static dir
    loadable = http.addons_manifest.keys()
    modules = {}

    # Retrieve database installed modules
    # TODO The following code should move to ir.module.module.list_installed_modules()
    Modules = request.session.model('ir.module.module')
    domain = [('state','=','installed'), ('name','in', loadable)]
    for module in Modules.search_read(domain, ['name', 'dependencies_id']):
        modules[module['name']] = []
        deps = module.get('dependencies_id')
        if deps:
            deps_read = request.session.model('ir.module.module.dependency').read(deps, ['name'])
            dependencies = [i['name'] for i in deps_read]
            modules[module['name']] = dependencies

    sorted_modules = topological_sort(modules)
    return sorted_modules

def module_installed_bypass_session(dbname):
    loadable = http.addons_manifest.keys()
    modules = {}
    try:
        registry = openerp.modules.registry.RegistryManager.get(dbname)
        with registry.cursor() as cr:
            m = registry.get('ir.module.module')
            # TODO The following code should move to ir.module.module.list_installed_modules()
            domain = [('state','=','installed'), ('name','in', loadable)]
            ids = m.search(cr, 1, [('state','=','installed'), ('name','in', loadable)])
            for module in m.read(cr, 1, ids, ['name', 'dependencies_id']):
                modules[module['name']] = []
                deps = module.get('dependencies_id')
                if deps:
                    deps_read = registry.get('ir.module.module.dependency').read(cr, 1, deps, ['name'])
                    dependencies = [i['name'] for i in deps_read]
                    modules[module['name']] = dependencies
    except Exception,e:
        pass
    sorted_modules = topological_sort(modules)
    return sorted_modules

def module_boot(db=None):
    server_wide_modules = openerp.conf.server_wide_modules or ['web']
    serverside = []
    dbside = []
    for i in server_wide_modules:
        if i in http.addons_manifest:
            serverside.append(i)
    monodb = db or db_monodb()
    if monodb:
        dbside = module_installed_bypass_session(monodb)
        dbside = [i for i in dbside if i not in serverside]
    addons = serverside + dbside
    return addons

def concat_xml(file_list):
    """Concatenate xml files

    :param list(str) file_list: list of files to check
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        return '', checksum.hexdigest()

    root = None
    for fname in file_list:
        with open(fname, 'rb') as fp:
            contents = fp.read()
            checksum.update(contents)
            fp.seek(0)
            xml = ElementTree.parse(fp).getroot()

        if root is None:
            root = ElementTree.Element(xml.tag)
        #elif root.tag != xml.tag:
        #    raise ValueError("Root tags missmatch: %r != %r" % (root.tag, xml.tag))

        for child in xml.getchildren():
            root.append(child)
    return ElementTree.tostring(root, 'utf-8'), checksum.hexdigest()

def fs2web(path):
    """convert FS path into web path"""
    return '/'.join(path.split(os.path.sep))

def manifest_glob(extension, addons=None, db=None, include_remotes=False):
    if addons is None:
        addons = module_boot(db=db)
    else:
        addons = addons.split(',')
    r = []
    for addon in addons:
        manifest = http.addons_manifest.get(addon, None)
        if not manifest:
            continue
        # ensure does not ends with /
        addons_path = os.path.join(manifest['addons_path'], '')[:-1]
        globlist = manifest.get(extension, [])
        for pattern in globlist:
            if pattern.startswith(('http://', 'https://', '//')):
                if include_remotes:
                    r.append((None, pattern))
            else:
                for path in glob.glob(os.path.normpath(os.path.join(addons_path, addon, pattern))):
                    r.append((path, fs2web(path[len(addons_path):])))
    return r

def manifest_list(extension, mods=None, db=None, debug=None):
    """ list ressources to load specifying either:
    mods: a comma separated string listing modules
    db: a database name (return all installed modules in that database)
    """
    if debug is not None:
        _logger.warning("openerp.addons.web.main.manifest_list(): debug parameter is deprecated")
    files = manifest_glob(extension, addons=mods, db=db, include_remotes=True)
    return [wp for _fp, wp in files]

def get_last_modified(files):
    """ Returns the modification time of the most recently modified
    file provided

    :param list(str) files: names of files to check
    :return: most recent modification time amongst the fileset
    :rtype: datetime.datetime
    """
    files = list(files)
    if files:
        return max(datetime.datetime.fromtimestamp(os.path.getmtime(f))
                   for f in files)
    return datetime.datetime(1970, 1, 1)

def make_conditional(response, last_modified=None, etag=None, max_age=0):
    """ Makes the provided response conditional based upon the request,
    and mandates revalidation from clients

    Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
    setting ``last_modified`` and ``etag`` correctly on the response object

    :param response: Werkzeug response
    :type response: werkzeug.wrappers.Response
    :param datetime.datetime last_modified: last modification date of the response content
    :param str etag: some sort of checksum of the content (deep etag)
    :return: the response object provided
    :rtype: werkzeug.wrappers.Response
    """
    response.cache_control.must_revalidate = True
    response.cache_control.max_age = max_age
    if last_modified:
        response.last_modified = last_modified
    if etag:
        response.set_etag(etag)
    return response.make_conditional(request.httprequest)

def login_and_redirect(db, login, key, redirect_url='/web'):
    request.session.authenticate(db, login, key)
    return set_cookie_and_redirect(redirect_url)

def set_cookie_and_redirect(redirect_url):
    redirect = werkzeug.utils.redirect(redirect_url, 303)
    redirect.autocorrect_location_header = False
    return redirect

def login_redirect():
    url = '/web/login?'
# built the redirect url, keeping all the query parameters of the url redirect_url = '%s?%s' % (request.httprequest.base_url, werkzeug.urls.url_encode(request.params)) return """<html><head><script> window.location = '%sredirect=' + encodeURIComponent("%s" + location.hash); </script></head></html> """ % (url, redirect_url) def load_actions_from_ir_values(key, key2, models, meta): Values = request.session.model('ir.values') actions = Values.get(key, key2, models, meta, request.context) return [(id, name, clean_action(action)) for id, name, action in actions] def clean_action(action): action.setdefault('flags', {}) action_type = action.setdefault('type', 'ir.actions.act_window_close') if action_type == 'ir.actions.act_window': return fix_view_modes(action) return action # I think generate_views,fix_view_modes should go into js ActionManager def generate_views(action): """ While the server generates a sequence called "views" computing dependencies between a bunch of stuff for views coming directly from the database (the ``ir.actions.act_window model``), it's also possible for e.g. buttons to return custom view dictionaries generated on the fly. In that case, there is no ``views`` key available on the action. Since the web client relies on ``action['views']``, generate it here from ``view_mode`` and ``view_id``. Currently handles two different cases: * no view_id, multiple view_mode * single view_id, single view_mode :param dict action: action descriptor dictionary to generate a views key for """ view_id = action.get('view_id') or False if isinstance(view_id, (list, tuple)): view_id = view_id[0] # providing at least one view mode is a requirement, not an option view_modes = action['view_mode'].split(',') if len(view_modes) > 1: if view_id: raise ValueError('Non-db action dictionaries should provide ' 'either multiple view modes or a single view ' 'mode and an optional view id.\n\n Got view ' 'modes %r and view id %r for action %r' % ( view_modes, view_id, action)) action['views'] = [(False, mode) for mode in view_modes] return action['views'] = [(view_id, view_modes[0])] def fix_view_modes(action): """ For historical reasons, OpenERP has weird dealings in relation to view_mode and the view_type attribute (on window actions): * one of the view modes is ``tree``, which stands for both list views and tree views * the choice is made by checking ``view_type``, which is either ``form`` for a list view or ``tree`` for an actual tree view This methods simply folds the view_type into view_mode by adding a new view mode ``list`` which is the result of the ``tree`` view_mode in conjunction with the ``form`` view_type. 
TODO: this should go into the doc, some kind of "peculiarities" section :param dict action: an action descriptor :returns: nothing, the action is modified in place """ if not action.get('views'): generate_views(action) if action.pop('view_type', 'form') != 'form': return action if 'view_mode' in action: action['view_mode'] = ','.join( mode if mode != 'tree' else 'list' for mode in action['view_mode'].split(',')) action['views'] = [ [id, mode if mode != 'tree' else 'list'] for id, mode in action['views'] ] return action def _local_web_translations(trans_file): messages = [] try: with open(trans_file) as t_file: po = babel.messages.pofile.read_po(t_file) except Exception: return for x in po: if x.id and x.string and "openerp-web" in x.auto_comments: messages.append({'id': x.id, 'string': x.string}) return messages def xml2json_from_elementtree(el, preserve_whitespaces=False): """ xml2json-direct Simple and straightforward XML-to-JSON converter in Python New BSD Licensed http://code.google.com/p/xml2json-direct/ """ res = {} if el.tag[0] == "{": ns, name = el.tag.rsplit("}", 1) res["tag"] = name res["namespace"] = ns[1:] else: res["tag"] = el.tag res["attrs"] = {} for k, v in el.items(): res["attrs"][k] = v kids = [] if el.text and (preserve_whitespaces or el.text.strip() != ''): kids.append(el.text) for kid in el: kids.append(xml2json_from_elementtree(kid, preserve_whitespaces)) if kid.tail and (preserve_whitespaces or kid.tail.strip() != ''): kids.append(kid.tail) res["children"] = kids return res def content_disposition(filename): filename = ustr(filename) escaped = urllib2.quote(filename.encode('utf8')) browser = request.httprequest.user_agent.browser version = int((request.httprequest.user_agent.version or '0').split('.')[0]) if browser == 'msie' and version < 9: return "attachment; filename=%s" % escaped elif browser == 'safari': return u"attachment; filename=%s" % filename else: return "attachment; filename*=UTF-8''%s" % escaped #---------------------------------------------------------- # OpenERP Web web Controllers #---------------------------------------------------------- class Home(http.Controller): @http.route('/', type='http', auth="none") def index(self, s_action=None, db=None, **kw): return http.local_redirect('/web', query=request.params, keep_hash=True) @http.route('/web', type='http', auth="none") def web_client(self, s_action=None, **kw): ensure_db() if request.session.uid: if kw.get('redirect'): return werkzeug.utils.redirect(kw.get('redirect'), 303) if not request.uid: request.uid = request.session.uid menu_data = request.registry['ir.ui.menu'].load_menus(request.cr, request.uid, context=request.context) return request.render('web.webclient_bootstrap', qcontext={'menu_data': menu_data}) else: return login_redirect() @http.route('/web/dbredirect', type='http', auth="none") def web_db_redirect(self, redirect='/', **kw): ensure_db() return werkzeug.utils.redirect(redirect, 303) @http.route('/web/login', type='http', auth="none") def web_login(self, redirect=None, **kw): ensure_db() if request.httprequest.method == 'GET' and redirect and request.session.uid: return http.redirect_with_hash(redirect) if not request.uid: request.uid = openerp.SUPERUSER_ID values = request.params.copy() if not redirect: redirect = '/web?' 
+ request.httprequest.query_string values['redirect'] = redirect try: values['databases'] = http.db_list() except openerp.exceptions.AccessDenied: values['databases'] = None if request.httprequest.method == 'POST': old_uid = request.uid uid = request.session.authenticate(request.session.db, request.params['login'], request.params['password']) if uid is not False: return http.redirect_with_hash(redirect) request.uid = old_uid values['error'] = "Wrong login/password" if request.env.ref('web.login', False): return request.render('web.login', values) else: # probably not an odoo compatible database error = 'Unable to login on database %s' % request.session.db return werkzeug.utils.redirect('/web/database/selector?error=%s' % error, 303) @http.route('/login', type='http', auth="none") def login(self, db, login, key, redirect="/web", **kw): if not http.db_filter([db]): return werkzeug.utils.redirect('/', 303) return login_and_redirect(db, login, key, redirect_url=redirect) @http.route([ '/web/js/<xmlid>', '/web/js/<xmlid>/<version>', ], type='http', auth='public') def js_bundle(self, xmlid, version=None, **kw): try: bundle = AssetsBundle(xmlid) except QWebTemplateNotFound: return request.not_found() response = request.make_response(bundle.js(), [('Content-Type', 'application/javascript')]) return make_conditional(response, bundle.last_modified, max_age=BUNDLE_MAXAGE) @http.route([ '/web/css/<xmlid>', '/web/css/<xmlid>/<version>', '/web/css.<int:page>/<xmlid>/<version>', ], type='http', auth='public') def css_bundle(self, xmlid, version=None, page=None, **kw): try: bundle = AssetsBundle(xmlid) except QWebTemplateNotFound: return request.not_found() response = request.make_response(bundle.css(page), [('Content-Type', 'text/css')]) return make_conditional(response, bundle.last_modified, max_age=BUNDLE_MAXAGE) class WebClient(http.Controller): @http.route('/web/webclient/csslist', type='json', auth="none") def csslist(self, mods=None): return manifest_list('css', mods=mods) @http.route('/web/webclient/jslist', type='json', auth="none") def jslist(self, mods=None): return manifest_list('js', mods=mods) @http.route('/web/webclient/qweb', type='http', auth="none") def qweb(self, mods=None, db=None): files = [f[0] for f in manifest_glob('qweb', addons=mods, db=db)] last_modified = get_last_modified(files) if request.httprequest.if_modified_since and request.httprequest.if_modified_since >= last_modified: return werkzeug.wrappers.Response(status=304) content, checksum = concat_xml(files) return make_conditional( request.make_response(content, [('Content-Type', 'text/xml')]), last_modified, checksum) @http.route('/web/webclient/bootstrap_translations', type='json', auth="none") def bootstrap_translations(self, mods): """ Load local translations from *.po files, as a temporary solution until we have established a valid session. This is meant only for translating the login page and db management chrome, using the browser's language. """ # For performance reasons we only load a single translation, so for # sub-languages (that should only be partially translated) we load the # main language PO instead - that should be enough for the login screen. 
lang = request.lang.split('_')[0] translations_per_module = {} for addon_name in mods: if http.addons_manifest[addon_name].get('bootstrap'): addons_path = http.addons_manifest[addon_name]['addons_path'] f_name = os.path.join(addons_path, addon_name, "i18n", lang + ".po") if not os.path.exists(f_name): continue translations_per_module[addon_name] = {'messages': _local_web_translations(f_name)} return {"modules": translations_per_module, "lang_parameters": None} @http.route('/web/webclient/translations', type='json', auth="none") def translations(self, mods=None, lang=None): request.disable_db = False uid = openerp.SUPERUSER_ID if mods is None: m = request.registry.get('ir.module.module') mods = [x['name'] for x in m.search_read(request.cr, uid, [('state','=','installed')], ['name'])] if lang is None: lang = request.context["lang"] res_lang = request.registry.get('res.lang') ids = res_lang.search(request.cr, uid, [("code", "=", lang)]) lang_params = None if ids: lang_params = res_lang.read(request.cr, uid, ids[0], ["direction", "date_format", "time_format", "grouping", "decimal_point", "thousands_sep"]) # Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is # done server-side when the language is loaded, so we only need to load the user's lang. ir_translation = request.registry.get('ir.translation') translations_per_module = {} messages = ir_translation.search_read(request.cr, uid, [('module','in',mods),('lang','=',lang), ('comments','like','openerp-web'),('value','!=',False), ('value','!=','')], ['module','src','value','lang'], order='module') for mod, msg_group in itertools.groupby(messages, key=operator.itemgetter('module')): translations_per_module.setdefault(mod,{'messages':[]}) translations_per_module[mod]['messages'].extend({'id': m['src'], 'string': m['value']} \ for m in msg_group) return {"modules": translations_per_module, "lang_parameters": lang_params} @http.route('/web/webclient/version_info', type='json', auth="none") def version_info(self): return openerp.service.common.exp_version() @http.route('/web/tests', type='http', auth="none") def index(self, mod=None, **kwargs): return request.render('web.qunit_suite') class Proxy(http.Controller): @http.route('/web/proxy/load', type='json', auth="none") def load(self, path): """ Proxies an HTTP request through a JSON request. It is strongly recommended to not request binary files through this, as the result will be a binary data blob as well. 
:param path: actual request path :return: file content """ from werkzeug.test import Client from werkzeug.wrappers import BaseResponse base_url = request.httprequest.base_url return Client(request.httprequest.app, BaseResponse).get(path, base_url=base_url).data class Database(http.Controller): @http.route('/web/database/selector', type='http', auth="none") def selector(self, **kw): try: dbs = http.db_list() if not dbs: return http.local_redirect('/web/database/manager') except openerp.exceptions.AccessDenied: dbs = False return env.get_template("database_selector.html").render({ 'databases': dbs, 'debug': request.debug, 'error': kw.get('error') }) @http.route('/web/database/manager', type='http', auth="none") def manager(self, **kw): # TODO: migrate the webclient's database manager to server side views request.session.logout() return env.get_template("database_manager.html").render({ 'modules': simplejson.dumps(module_boot()), }) @http.route('/web/database/get_list', type='json', auth="none") def get_list(self): # TODO change js to avoid calling this method if in monodb mode try: return http.db_list() except openerp.exceptions.AccessDenied: monodb = db_monodb() if monodb: return [monodb] raise @http.route('/web/database/create', type='json', auth="none") def create(self, fields): params = dict(map(operator.itemgetter('name', 'value'), fields)) db_created = request.session.proxy("db").create_database( params['super_admin_pwd'], params['db_name'], bool(params.get('demo_data')), params['db_lang'], params['create_admin_pwd']) if db_created: request.session.authenticate(params['db_name'], 'admin', params['create_admin_pwd']) return db_created @http.route('/web/database/duplicate', type='json', auth="none") def duplicate(self, fields): params = dict(map(operator.itemgetter('name', 'value'), fields)) duplicate_attrs = ( params['super_admin_pwd'], params['db_original_name'], params['db_name'], ) return request.session.proxy("db").duplicate_database(*duplicate_attrs) @http.route('/web/database/drop', type='json', auth="none") def drop(self, fields): password, db = operator.itemgetter( 'drop_pwd', 'drop_db')( dict(map(operator.itemgetter('name', 'value'), fields))) try: if request.session.proxy("db").drop(password, db): return True else: return False except openerp.exceptions.AccessDenied: return {'error': 'AccessDenied', 'title': 'Drop Database'} except Exception: return {'error': _('Could not drop database !'), 'title': _('Drop Database')} @http.route('/web/database/backup', type='http', auth="none") def backup(self, backup_db, backup_pwd, token, backup_format='zip'): try: openerp.service.security.check_super(backup_pwd) ts = datetime.datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S") filename = "%s_%s.%s" % (backup_db, ts, backup_format) headers = [ ('Content-Type', 'application/octet-stream; charset=binary'), ('Content-Disposition', content_disposition(filename)), ] dump_stream = openerp.service.db.dump_db(backup_db, None, backup_format) response = werkzeug.wrappers.Response(dump_stream, headers=headers, direct_passthrough=True) response.set_cookie('fileToken', token) return response except Exception, e: _logger.exception('Database.backup') return simplejson.dumps([[],[{'error': openerp.tools.ustr(e), 'title': _('Backup Database')}]]) @http.route('/web/database/restore', type='http', auth="none") def restore(self, db_file, restore_pwd, new_db, mode): try: copy = mode == 'copy' data = base64.b64encode(db_file.read()) request.session.proxy("db").restore(restore_pwd, new_db, data, copy) return '' 
except openerp.exceptions.AccessDenied, e: raise Exception("AccessDenied") @http.route('/web/database/change_password', type='json', auth="none") def change_password(self, fields): old_password, new_password = operator.itemgetter( 'old_pwd', 'new_pwd')( dict(map(operator.itemgetter('name', 'value'), fields))) try: return request.session.proxy("db").change_admin_password(old_password, new_password) except openerp.exceptions.AccessDenied: return {'error': 'AccessDenied', 'title': _('Change Password')} except Exception: return {'error': _('Error, password not changed !'), 'title': _('Change Password')} class Session(http.Controller): def session_info(self): request.session.ensure_valid() return { "session_id": request.session_id, "uid": request.session.uid, "user_context": request.session.get_context() if request.session.uid else {}, "db": request.session.db, "username": request.session.login, "company_id": request.env.user.company_id.id if request.session.uid else None, } @http.route('/web/session/get_session_info', type='json', auth="none") def get_session_info(self): request.uid = request.session.uid request.disable_db = False return self.session_info() @http.route('/web/session/authenticate', type='json', auth="none") def authenticate(self, db, login, password, base_location=None): request.session.authenticate(db, login, password) return self.session_info() @http.route('/web/session/change_password', type='json', auth="user") def change_password(self, fields): old_password, new_password,confirm_password = operator.itemgetter('old_pwd', 'new_password','confirm_pwd')( dict(map(operator.itemgetter('name', 'value'), fields))) if not (old_password.strip() and new_password.strip() and confirm_password.strip()): return {'error':_('You cannot leave any password empty.'),'title': _('Change Password')} if new_password != confirm_password: return {'error': _('The new password and its confirmation must be identical.'),'title': _('Change Password')} try: if request.session.model('res.users').change_password( old_password, new_password): return {'new_password':new_password} except Exception: return {'error': _('The old password you provided is incorrect, your password was not changed.'), 'title': _('Change Password')} return {'error': _('Error, password not changed !'), 'title': _('Change Password')} @http.route('/web/session/get_lang_list', type='json', auth="none") def get_lang_list(self): try: return request.session.proxy("db").list_lang() or [] except Exception, e: return {"error": e, "title": _("Languages")} @http.route('/web/session/modules', type='json', auth="user") def modules(self): # return all installed modules. Web client is smart enough to not load a module twice return module_installed() @http.route('/web/session/save_session_action', type='json', auth="user") def save_session_action(self, the_action): """ This method store an action object in the session object and returns an integer identifying that action. The method get_session_action() can be used to get back the action. :param the_action: The action to save in the session. :type the_action: anything :return: A key identifying the saved action. :rtype: integer """ return request.httpsession.save_action(the_action) @http.route('/web/session/get_session_action', type='json', auth="user") def get_session_action(self, key): """ Gets back a previously saved action. This method can return None if the action was saved since too much time (this case should be handled in a smart way). 
:param key: The key given by save_session_action() :type key: integer :return: The saved action or None. :rtype: anything """ return request.httpsession.get_action(key) @http.route('/web/session/check', type='json', auth="user") def check(self): request.session.assert_valid() return None @http.route('/web/session/destroy', type='json', auth="user") def destroy(self): request.session.logout() @http.route('/web/session/logout', type='http', auth="none") def logout(self, redirect='/web'): request.session.logout(keep_db=True) return werkzeug.utils.redirect(redirect, 303) class Menu(http.Controller): @http.route('/web/menu/load_needaction', type='json', auth="user") def load_needaction(self, menu_ids): """ Loads needaction counters for specific menu ids. :return: needaction data :rtype: dict(menu_id: {'needaction_enabled': boolean, 'needaction_counter': int}) """ return request.session.model('ir.ui.menu').get_needaction_data(menu_ids, request.context) class DataSet(http.Controller): @http.route('/web/dataset/search_read', type='json', auth="user") def search_read(self, model, fields=False, offset=0, limit=False, domain=None, sort=None): return self.do_search_read(model, fields, offset, limit, domain, sort) def do_search_read(self, model, fields=False, offset=0, limit=False, domain=None , sort=None): """ Performs a search() followed by a read() (if needed) using the provided search criteria :param str model: the name of the model to search on :param fields: a list of the fields to return in the result records :type fields: [str] :param int offset: from which index should the results start being returned :param int limit: the maximum number of records to return :param list domain: the search domain for the query :param list sort: sorting directives :returns: A structure (dict) with two keys: ids (all the ids matching the (domain, context) pair) and records (paginated records matching fields selection set) :rtype: list """ Model = request.session.model(model) records = Model.search_read(domain, fields, offset or 0, limit or False, sort or False, request.context) if not records: return { 'length': 0, 'records': [] } if limit and len(records) == limit: length = Model.search_count(domain, request.context) else: length = len(records) + (offset or 0) return { 'length': length, 'records': records } @http.route('/web/dataset/load', type='json', auth="user") def load(self, model, id, fields): m = request.session.model(model) value = {} r = m.read([id], False, request.context) if r: value = r[0] return {'value': value} def call_common(self, model, method, args, domain_id=None, context_id=None): return self._call_kw(model, method, args, {}) def _call_kw(self, model, method, args, kwargs): if method.startswith('_'): raise Exception("Access Denied: Underscore prefixed methods cannot be remotely called") @service_model.check def checked_call(__dbname, *args, **kwargs): return getattr(request.registry.get(model), method)(request.cr, request.uid, *args, **kwargs) return checked_call(request.db, *args, **kwargs) @http.route('/web/dataset/call', type='json', auth="user") def call(self, model, method, args, domain_id=None, context_id=None): return self._call_kw(model, method, args, {}) @http.route(['/web/dataset/call_kw', '/web/dataset/call_kw/<path:path>'], type='json', auth="user") def call_kw(self, model, method, args, kwargs, path=None): return self._call_kw(model, method, args, kwargs) @http.route('/web/dataset/call_button', type='json', auth="user") def call_button(self, model, method, args, domain_id=None, 
context_id=None): action = self._call_kw(model, method, args, {}) if isinstance(action, dict) and action.get('type') != '': return clean_action(action) return False @http.route('/web/dataset/exec_workflow', type='json', auth="user") def exec_workflow(self, model, id, signal): return request.session.exec_workflow(model, id, signal) @http.route('/web/dataset/resequence', type='json', auth="user") def resequence(self, model, ids, field='sequence', offset=0): """ Re-sequences a number of records in the model, by their ids The re-sequencing starts at the first model of ``ids``, the sequence number is incremented by one after each record and starts at ``offset`` :param ids: identifiers of the records to resequence, in the new sequence order :type ids: list(id) :param str field: field used for sequence specification, defaults to "sequence" :param int offset: sequence number for first record in ``ids``, allows starting the resequencing from an arbitrary number, defaults to ``0`` """ m = request.session.model(model) if not m.fields_get([field]): return False # python 2.6 has no start parameter for i, id in enumerate(ids): m.write(id, { field: i + offset }) return True class View(http.Controller): @http.route('/web/view/add_custom', type='json', auth="user") def add_custom(self, view_id, arch): CustomView = request.session.model('ir.ui.view.custom') CustomView.create({ 'user_id': request.session.uid, 'ref_id': view_id, 'arch': arch }, request.context) return {'result': True} @http.route('/web/view/undo_custom', type='json', auth="user") def undo_custom(self, view_id, reset=False): CustomView = request.session.model('ir.ui.view.custom') vcustom = CustomView.search([('user_id', '=', request.session.uid), ('ref_id' ,'=', view_id)], 0, False, False, request.context) if vcustom: if reset: CustomView.unlink(vcustom, request.context) else: CustomView.unlink([vcustom[0]], request.context) return {'result': True} return {'result': False} class TreeView(View): @http.route('/web/treeview/action', type='json', auth="user") def action(self, model, id): return load_actions_from_ir_values( 'action', 'tree_but_open',[(model, id)], False) class Binary(http.Controller): @http.route('/web/binary/image', type='http', auth="public") def image(self, model, id, field, **kw): last_update = '__last_update' Model = request.registry[model] cr, uid, context = request.cr, request.uid, request.context headers = [('Content-Type', 'image/png')] etag = request.httprequest.headers.get('If-None-Match') hashed_session = hashlib.md5(request.session_id).hexdigest() retag = hashed_session id = None if not id else simplejson.loads(id) if type(id) is list: id = id[0] # m2o try: if etag: if not id and hashed_session == etag: return werkzeug.wrappers.Response(status=304) else: date = Model.read(cr, uid, [id], [last_update], context)[0].get(last_update) if hashlib.md5(date).hexdigest() == etag: return werkzeug.wrappers.Response(status=304) if not id: res = Model.default_get(cr, uid, [field], context).get(field) image_base64 = res else: res = Model.read(cr, uid, [id], [last_update, field], context)[0] retag = hashlib.md5(res.get(last_update)).hexdigest() image_base64 = res.get(field) if kw.get('resize'): resize = kw.get('resize').split(',') if len(resize) == 2 and int(resize[0]) and int(resize[1]): width = int(resize[0]) height = int(resize[1]) # resize maximum 500*500 if width > 500: width = 500 if height > 500: height = 500 image_base64 = openerp.tools.image_resize_image(base64_source=image_base64, size=(width, height), encoding='base64', 
filetype='PNG') image_data = base64.b64decode(image_base64) except Exception: image_data = self.placeholder() headers.append(('ETag', retag)) headers.append(('Content-Length', len(image_data))) try: ncache = int(kw.get('cache')) headers.append(('Cache-Control', 'no-cache' if ncache == 0 else 'max-age=%s' % (ncache))) except: pass return request.make_response(image_data, headers) def placeholder(self, image='placeholder.png'): addons_path = http.addons_manifest['web']['addons_path'] return open(os.path.join(addons_path, 'web', 'static', 'src', 'img', image), 'rb').read() @http.route('/web/binary/saveas', type='http', auth="public") @serialize_exception def saveas(self, model, field, id=None, filename_field=None, **kw): """ Download link for files stored as binary fields. If the ``id`` parameter is omitted, fetches the default value for the binary field (via ``default_get``), otherwise fetches the field for that precise record. :param str model: name of the model to fetch the binary from :param str field: binary field :param str id: id of the record from which to fetch the binary :param str filename_field: field holding the file's name, if any :returns: :class:`werkzeug.wrappers.Response` """ Model = request.registry[model] cr, uid, context = request.cr, request.uid, request.context fields = [field] if filename_field: fields.append(filename_field) if id: res = Model.read(cr, uid, [int(id)], fields, context)[0] else: res = Model.default_get(cr, uid, fields, context) filecontent = base64.b64decode(res.get(field) or '') if not filecontent: return request.not_found() else: filename = '%s_%s' % (model.replace('.', '_'), id) if filename_field: filename = res.get(filename_field, '') or filename return request.make_response(filecontent, [('Content-Type', 'application/octet-stream'), ('Content-Disposition', content_disposition(filename))]) @http.route('/web/binary/saveas_ajax', type='http', auth="public") @serialize_exception def saveas_ajax(self, data, token): jdata = simplejson.loads(data) model = jdata['model'] field = jdata['field'] data = jdata['data'] id = jdata.get('id', None) filename_field = jdata.get('filename_field', None) context = jdata.get('context', {}) Model = request.session.model(model) fields = [field] if filename_field: fields.append(filename_field) if data: res = {field: data, filename_field: jdata.get('filename', None)} elif id: res = Model.read([int(id)], fields, context)[0] else: res = Model.default_get(fields, context) filecontent = base64.b64decode(res.get(field) or '') if not filecontent: raise ValueError(_("No content found for field '%s' on '%s:%s'") % (field, model, id)) else: filename = '%s_%s' % (model.replace('.', '_'), id) if filename_field: filename = res.get(filename_field, '') or filename return request.make_response(filecontent, headers=[('Content-Type', 'application/octet-stream'), ('Content-Disposition', content_disposition(filename))], cookies={'fileToken': token}) @http.route('/web/binary/upload', type='http', auth="user") @serialize_exception def upload(self, callback, ufile): # TODO: might be useful to have a configuration flag for max-length file uploads out = """<script language="javascript" type="text/javascript"> var win = window.top.window; win.jQuery(win).trigger(%s, %s); </script>""" try: data = ufile.read() args = [len(data), ufile.filename, ufile.content_type, base64.b64encode(data)] except Exception, e: args = [False, e.message] return out % (simplejson.dumps(callback), simplejson.dumps(args)) @http.route('/web/binary/upload_attachment', 
type='http', auth="user") @serialize_exception def upload_attachment(self, callback, model, id, ufile): Model = request.session.model('ir.attachment') out = """<script language="javascript" type="text/javascript"> var win = window.top.window; win.jQuery(win).trigger(%s, %s); </script>""" try: attachment_id = Model.create({ 'name': ufile.filename, 'datas': base64.encodestring(ufile.read()), 'datas_fname': ufile.filename, 'res_model': model, 'res_id': int(id) }, request.context) args = { 'filename': ufile.filename, 'id': attachment_id } except Exception: args = {'error': "Something horrible happened"} _logger.exception("Fail to upload attachment %s" % ufile.filename) return out % (simplejson.dumps(callback), simplejson.dumps(args)) @http.route([ '/web/binary/company_logo', '/logo', '/logo.png', ], type='http', auth="none", cors="*") def company_logo(self, dbname=None, **kw): imgname = 'logo.png' placeholder = functools.partial(get_module_resource, 'web', 'static', 'src', 'img') uid = None if request.session.db: dbname = request.session.db uid = request.session.uid elif dbname is None: dbname = db_monodb() if not uid: uid = openerp.SUPERUSER_ID if not dbname: response = http.send_file(placeholder(imgname)) else: try: # create an empty registry registry = openerp.modules.registry.Registry(dbname) with registry.cursor() as cr: cr.execute("""SELECT c.logo_web, c.write_date FROM res_users u LEFT JOIN res_company c ON c.id = u.company_id WHERE u.id = %s """, (uid,)) row = cr.fetchone() if row and row[0]: image_data = StringIO(str(row[0]).decode('base64')) response = http.send_file(image_data, filename=imgname, mtime=row[1]) else: response = http.send_file(placeholder('nologo.png')) except Exception: response = http.send_file(placeholder(imgname)) return response class Action(http.Controller): @http.route('/web/action/load', type='json', auth="user") def load(self, action_id, do_not_eval=False, additional_context=None): Actions = request.session.model('ir.actions.actions') value = False try: action_id = int(action_id) except ValueError: try: module, xmlid = action_id.split('.', 1) model, action_id = request.session.model('ir.model.data').get_object_reference(module, xmlid) assert model.startswith('ir.actions.') except Exception: action_id = 0 # force failed read base_action = Actions.read([action_id], ['type'], request.context) if base_action: ctx = request.context action_type = base_action[0]['type'] if action_type == 'ir.actions.report.xml': ctx.update({'bin_size': True}) if additional_context: ctx.update(additional_context) action = request.session.model(action_type).read([action_id], False, ctx) if action: value = clean_action(action[0]) return value @http.route('/web/action/run', type='json', auth="user") def run(self, action_id): return_action = request.session.model('ir.actions.server').run( [action_id], request.context) if return_action: return clean_action(return_action) else: return False class Export(http.Controller): @http.route('/web/export/formats', type='json', auth="user") def formats(self): """ Returns all valid export formats :returns: for each export format, a pair of identifier and printable name :rtype: [(str, str)] """ return [ {'tag': 'csv', 'label': 'CSV'}, {'tag': 'xls', 'label': 'Excel', 'error': None if xlwt else "XLWT required"}, ] def fields_get(self, model): Model = request.session.model(model) fields = Model.fields_get(False, request.context) return fields @http.route('/web/export/get_fields', type='json', auth="user") def get_fields(self, model, prefix='', 
parent_name= '', import_compat=True, parent_field_type=None, exclude=None): if import_compat and parent_field_type == "many2one": fields = {} else: fields = self.fields_get(model) if import_compat: fields.pop('id', None) else: fields['.id'] = fields.pop('id', {'string': 'ID'}) fields_sequence = sorted(fields.iteritems(), key=lambda field: openerp.tools.ustr(field[1].get('string', ''))) records = [] for field_name, field in fields_sequence: if import_compat: if exclude and field_name in exclude: continue if field.get('readonly'): # If none of the field's states unsets readonly, skip the field if all(dict(attrs).get('readonly', True) for attrs in field.get('states', {}).values()): continue if not field.get('exportable', True): continue id = prefix + (prefix and '/'or '') + field_name name = parent_name + (parent_name and '/' or '') + field['string'] record = {'id': id, 'string': name, 'value': id, 'children': False, 'field_type': field.get('type'), 'required': field.get('required'), 'relation_field': field.get('relation_field')} records.append(record) if len(name.split('/')) < 3 and 'relation' in field: ref = field.pop('relation') record['value'] += '/id' record['params'] = {'model': ref, 'prefix': id, 'name': name} if not import_compat or field['type'] == 'one2many': # m2m field in import_compat is childless record['children'] = True return records @http.route('/web/export/namelist', type='json', auth="user") def namelist(self, model, export_id): # TODO: namelist really has no reason to be in Python (although itertools.groupby helps) export = request.session.model("ir.exports").read([export_id])[0] export_fields_list = request.session.model("ir.exports.line").read( export['export_fields']) fields_data = self.fields_info( model, map(operator.itemgetter('name'), export_fields_list)) return [ {'name': field['name'], 'label': fields_data[field['name']]} for field in export_fields_list ] def fields_info(self, model, export_fields): info = {} fields = self.fields_get(model) if ".id" in export_fields: fields['.id'] = fields.pop('id', {'string': 'ID'}) # To make fields retrieval more efficient, fetch all sub-fields of a # given field at the same time. Because the order in the export list is # arbitrary, this requires ordering all sub-fields of a given field # together so they can be fetched at the same time # # Works the following way: # * sort the list of fields to export, the default sorting order will # put the field itself (if present, for xmlid) and all of its # sub-fields right after it # * then, group on: the first field of the path (which is the same for # a field and for its subfields and the length of splitting on the # first '/', which basically means grouping the field on one side and # all of the subfields on the other. This way, we have the field (for # the xmlid) with length 1, and all of the subfields with the same # base but a length "flag" of 2 # * if we have a normal field (length 1), just add it to the info # mapping (with its string) as-is # * otherwise, recursively call fields_info via graft_subfields. # all graft_subfields does is take the result of fields_info (on the # field's model) and prepend the current base (current field), which # rebuilds the whole sub-tree for the field # # result: because we're not fetching the fields_get for half the # database models, fetching a namelist with a dozen fields (including # relational data) falls from ~6s to ~300ms (on the leads model). # export lists with no sub-fields (e.g. 
import_compatible lists with # no o2m) are even more efficient (from the same 6s to ~170ms, as # there's a single fields_get to execute) for (base, length), subfields in itertools.groupby( sorted(export_fields), lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))): subfields = list(subfields) if length == 2: # subfields is a seq of $base/*rest, and not loaded yet info.update(self.graft_subfields( fields[base]['relation'], base, fields[base]['string'], subfields )) elif base in fields: info[base] = fields[base]['string'] return info def graft_subfields(self, model, prefix, prefix_string, fields): export_fields = [field.split('/', 1)[1] for field in fields] return ( (prefix + '/' + k, prefix_string + '/' + v) for k, v in self.fields_info(model, export_fields).iteritems()) class ExportFormat(object): raw_data = False @property def content_type(self): """ Provides the format's content type """ raise NotImplementedError() def filename(self, base): """ Creates a valid filename for the format (with extension) from the provided base name (exension-less) """ raise NotImplementedError() def from_data(self, fields, rows): """ Conversion method from OpenERP's export data to whatever the current export class outputs :params list fields: a list of fields to export :params list rows: a list of records to export :returns: :rtype: bytes """ raise NotImplementedError() def base(self, data, token): params = simplejson.loads(data) model, fields, ids, domain, import_compat = \ operator.itemgetter('model', 'fields', 'ids', 'domain', 'import_compat')( params) Model = request.session.model(model) context = dict(request.context or {}, **params.get('context', {})) ids = ids or Model.search(domain, 0, False, False, context) if not request.env[model]._is_an_ordinary_table(): fields = [field for field in fields if field['name'] != 'id'] field_names = map(operator.itemgetter('name'), fields) import_data = Model.export_data(ids, field_names, self.raw_data, context=context).get('datas',[]) if import_compat: columns_headers = field_names else: columns_headers = [val['label'].strip() for val in fields] return request.make_response(self.from_data(columns_headers, import_data), headers=[('Content-Disposition', content_disposition(self.filename(model))), ('Content-Type', self.content_type)], cookies={'fileToken': token}) class CSVExport(ExportFormat, http.Controller): @http.route('/web/export/csv', type='http', auth="user") @serialize_exception def index(self, data, token): return self.base(data, token) @property def content_type(self): return 'text/csv;charset=utf8' def filename(self, base): return base + '.csv' def from_data(self, fields, rows): fp = StringIO() writer = csv.writer(fp, quoting=csv.QUOTE_ALL) writer.writerow([name.encode('utf-8') for name in fields]) for data in rows: row = [] for d in data: if isinstance(d, basestring): d = d.replace('\n',' ').replace('\t',' ') try: d = d.encode('utf-8') except UnicodeError: pass if d is False: d = None row.append(d) writer.writerow(row) fp.seek(0) data = fp.read() fp.close() return data class ExcelExport(ExportFormat, http.Controller): # Excel needs raw data to correctly handle numbers and date values raw_data = True @http.route('/web/export/xls', type='http', auth="user") @serialize_exception def index(self, data, token): return self.base(data, token) @property def content_type(self): return 'application/vnd.ms-excel' def filename(self, base): return base + '.xls' def from_data(self, fields, rows): workbook = xlwt.Workbook() worksheet = 
workbook.add_sheet('Sheet 1') for i, fieldname in enumerate(fields): worksheet.write(0, i, fieldname) worksheet.col(i).width = 8000 # around 220 pixels base_style = xlwt.easyxf('align: wrap yes') date_style = xlwt.easyxf('align: wrap yes', num_format_str='YYYY-MM-DD') datetime_style = xlwt.easyxf('align: wrap yes', num_format_str='YYYY-MM-DD HH:mm:SS') for row_index, row in enumerate(rows): for cell_index, cell_value in enumerate(row): cell_style = base_style if isinstance(cell_value, basestring): cell_value = re.sub("\r", " ", cell_value) elif isinstance(cell_value, datetime.datetime): cell_style = datetime_style elif isinstance(cell_value, datetime.date): cell_style = date_style worksheet.write(row_index + 1, cell_index, cell_value, cell_style) fp = StringIO() workbook.save(fp) fp.seek(0) data = fp.read() fp.close() return data class Reports(http.Controller): POLLING_DELAY = 0.25 TYPES_MAPPING = { 'doc': 'application/vnd.ms-word', 'html': 'text/html', 'odt': 'application/vnd.oasis.opendocument.text', 'pdf': 'application/pdf', 'sxw': 'application/vnd.sun.xml.writer', 'xls': 'application/vnd.ms-excel', } @http.route('/web/report', type='http', auth="user") @serialize_exception def index(self, action, token): action = simplejson.loads(action) report_srv = request.session.proxy("report") context = dict(request.context) context.update(action["context"]) report_data = {} report_ids = context.get("active_ids", None) if 'report_type' in action: report_data['report_type'] = action['report_type'] if 'datas' in action: if 'ids' in action['datas']: report_ids = action['datas'].pop('ids') report_data.update(action['datas']) report_id = report_srv.report( request.session.db, request.session.uid, request.session.password, action["report_name"], report_ids, report_data, context) report_struct = None while True: report_struct = report_srv.report_get( request.session.db, request.session.uid, request.session.password, report_id) if report_struct["state"]: break time.sleep(self.POLLING_DELAY) report = base64.b64decode(report_struct['result']) if report_struct.get('code') == 'zlib': report = zlib.decompress(report) report_mimetype = self.TYPES_MAPPING.get( report_struct['format'], 'octet-stream') file_name = action.get('name', 'report') if 'name' not in action: reports = request.session.model('ir.actions.report.xml') res_id = reports.search([('report_name', '=', action['report_name']),], 0, False, False, context) if len(res_id) > 0: file_name = reports.read(res_id[0], ['name'], context)['name'] else: file_name = action['report_name'] file_name = '%s.%s' % (file_name, report_struct['format']) return request.make_response(report, headers=[ ('Content-Disposition', content_disposition(file_name)), ('Content-Type', report_mimetype), ('Content-Length', len(report))], cookies={'fileToken': token}) class Apps(http.Controller): @http.route('/apps/<app>', auth='user') def get_app_url(self, req, app): act_window_obj = request.session.model('ir.actions.act_window') ir_model_data = request.session.model('ir.model.data') try: action_id = ir_model_data.get_object_reference('base', 'open_module_tree')[1] action = act_window_obj.read(action_id, ['name', 'type', 'res_model', 'view_mode', 'view_type', 'context', 'views', 'domain']) action['target'] = 'current' except ValueError: action = False try: app_id = ir_model_data.get_object_reference('base', 'module_%s' % app)[1] except ValueError: app_id = False if action and app_id: action['res_id'] = app_id action['view_mode'] = 'form' action['views'] = [(False, u'form')] sakey = 
Session().save_session_action(action) debug = '?debug' if req.debug else '' return werkzeug.utils.redirect('/web{0}#sa={1}'.format(debug, sakey)) # vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
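# Editor's note: a hypothetical sketch (not part of this file) of how the
# helpers above are commonly combined in a custom controller. The route, class
# and CSV payload are invented; in a separate addon, serialize_exception and
# content_disposition would be imported from openerp.addons.web.controllers.main.
class ExampleDownload(http.Controller):

    @http.route('/example/report.csv', type='http', auth='user')
    @serialize_exception
    def report_csv(self, token=None, **kw):
        # serialize_exception (defined near the top of this file) turns any
        # uncaught error into the JSON payload the web client expects.
        data = "id,name\n1,demo\n"
        return request.make_response(
            data,
            headers=[('Content-Type', 'text/csv;charset=utf8'),
                     ('Content-Disposition', content_disposition('report.csv'))],
            cookies={'fileToken': token})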
agpl-3.0
antoine-de/navitia
source/jormungandr/jormungandr/realtime_schedule/__init__.py
7
1437
# encoding: utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
#     the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
#     powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
#     a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io

from __future__ import absolute_import, print_function, unicode_literals, division
from jormungandr.realtime_schedule.timeo import Timeo
from jormungandr.realtime_schedule.realtime_proxy_manager import RealtimeProxyManager
agpl-3.0
idaholab/raven
tests/framework/Samplers/DynamicEventTrees/AdaptiveDET/ideal_pump_control.py
2
3041
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE (editor): `distributions` is not imported in this file; it is expected
# to be made available in the control script's namespace by the RAVEN driver
# at run time.

def initial_function(monitored, controlled, auxiliary):
  # here we store some critical parameters that we want in the output
  auxiliary.depresSystemDistThreshold = distributions.depresSystemDist.getVariable('ProbabilityThreshold')
  auxiliary.depressurizationOnTime = distributions.depresSystemDist.inverseCdf(auxiliary.depresSystemDistThreshold)
  auxiliary.PressureFailureDistThreshold = distributions.PressureFailureDist.getVariable('ProbabilityThreshold')
  auxiliary.PressureFailureValue = distributions.PressureFailureDist.inverseCdf(auxiliary.PressureFailureDistThreshold)
  auxiliary.fakePressure = 2000.0*monitored.time + 101000.0

def restart_function(monitored, controlled, auxiliary):
  # here we store some critical parameters that we want in the output
  auxiliary.depresSystemDistThreshold = distributions.depresSystemDist.getVariable('ProbabilityThreshold')
  auxiliary.depressurizationOnTime = distributions.depresSystemDist.inverseCdf(auxiliary.depresSystemDistThreshold)
  auxiliary.PressureFailureDistThreshold = distributions.PressureFailureDist.getVariable('ProbabilityThreshold')
  auxiliary.PressureFailureValue = distributions.PressureFailureDist.inverseCdf(auxiliary.PressureFailureDistThreshold)
  auxiliary.fakePressure = 2000.0*monitored.time + 101000.0

def keep_going_function(monitored, controlled, auxiliary):
  if auxiliary.endSimulation:
    return False
  return True

def control_function(monitored, controlled, auxiliary):
  auxiliary.fakePressure = 2000.0*monitored.time + 101000.0
  if auxiliary.systemFailed:
    auxiliary.endSimulation = True
    print("SYSTEM FAILED!!!!!")
    #controlled.inlet_TDV_p_bc = 1.0e5 + 0.021*1.0e5*monitored.time
  if auxiliary.depressurizationOn:
    print("DEPRESSURIZATION SYSTEM ON!!!!!")
    controlled.inlet_TDV_p_bc = controlled.inlet_TDV_p_bc*0.99
    if controlled.inlet_TDV_p_bc < 1.0e5:
      controlled.inlet_TDV_p_bc = 1.0e5

def dynamic_event_tree(monitored, controlled, auxiliary):
  if monitored.time_step <= 1:
    return
  if distributions.PressureFailureDist.checkCdf(auxiliary.fakePressure) and (not auxiliary.systemFailed) and (not auxiliary.depressurizationOn):
    auxiliary.systemFailed = True
    return
  if distributions.depresSystemDist.checkCdf(monitored.time) and (not auxiliary.systemFailed) and (not auxiliary.depressurizationOn):
    auxiliary.depressurizationOn = True
    return
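# Editor's note: an entirely hypothetical harness showing the calling contract
# these hooks assume. In practice RAVEN drives them and injects
# `distributions`, so dynamic_event_tree is left commented out here.
if __name__ == '__main__':
  class _Bag(object):
    """Attribute bag standing in for RAVEN's monitored/controlled/auxiliary."""
    pass

  monitored, controlled, auxiliary = _Bag(), _Bag(), _Bag()
  auxiliary.systemFailed = False
  auxiliary.depressurizationOn = False
  auxiliary.endSimulation = False
  controlled.inlet_TDV_p_bc = 1.0e5
  for step, t in enumerate((0.0, 0.1, 0.2)):
    monitored.time, monitored.time_step = t, step
    control_function(monitored, controlled, auxiliary)
    # dynamic_event_tree(monitored, controlled, auxiliary)  # needs `distributions`
    if not keep_going_function(monitored, controlled, auxiliary):
      break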
apache-2.0
peastman/deepchem
deepchem/data/tests/test_reshard.py
3
2328
import deepchem as dc import numpy as np def test_reshard_with_X(): """Test resharding on a simple example""" X = np.random.rand(100, 10) dataset = dc.data.DiskDataset.from_numpy(X) assert dataset.get_number_shards() == 1 dataset.reshard(shard_size=10) assert (dataset.X == X).all() assert dataset.get_number_shards() == 10 def test_reshard_with_X_y(): """Test resharding on a simple example""" X = np.random.rand(100, 10) y = np.random.rand(100,) dataset = dc.data.DiskDataset.from_numpy(X, y) assert dataset.get_number_shards() == 1 dataset.reshard(shard_size=10) assert (dataset.X == X).all() # This is necessary since from_numpy adds in shape information assert (dataset.y.flatten() == y).all() assert dataset.get_number_shards() == 10 def test_reshard_with_X_y_generative(): """Test resharding for a hypothetical generative dataset.""" X = np.random.rand(100, 10, 10) y = np.random.rand(100, 10, 10) dataset = dc.data.DiskDataset.from_numpy(X, y) assert (dataset.X == X).all() assert (dataset.y == y).all() assert dataset.get_number_shards() == 1 dataset.reshard(shard_size=10) assert (dataset.X == X).all() assert (dataset.y == y).all() assert dataset.get_number_shards() == 10 def test_reshard_with_X_y_w(): """Test resharding on a simple example""" X = np.random.rand(100, 10) y = np.random.rand(100,) w = np.ones_like(y) dataset = dc.data.DiskDataset.from_numpy(X, y, w) assert dataset.get_number_shards() == 1 dataset.reshard(shard_size=10) assert (dataset.X == X).all() # This is necessary since from_numpy adds in shape information assert (dataset.y.flatten() == y).all() assert (dataset.w.flatten() == w).all() assert dataset.get_number_shards() == 10 def test_reshard_with_X_y_w_ids(): """Test resharding on a simple example""" X = np.random.rand(100, 10) y = np.random.rand(100,) w = np.ones_like(y) ids = np.arange(100) dataset = dc.data.DiskDataset.from_numpy(X, y, w, ids) assert dataset.get_number_shards() == 1 dataset.reshard(shard_size=10) assert (dataset.X == X).all() # This is necessary since from_numpy adds in shape information assert (dataset.y.flatten() == y).all() assert (dataset.w.flatten() == w).all() assert (dataset.ids == ids).all() assert dataset.get_number_shards() == 10
mit
bitcraft/PyTMX
pytmx/__init__.py
1
1082
# -*- coding: utf-8 -*- """ Copyright (C) 2012-2017, Leif Theden <[email protected]> This file is part of pytmx. pytmx is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. pytmx is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with pytmx. If not, see <http://www.gnu.org/licenses/>. """ import logging from .pytmx import * logger = logging.getLogger(__name__) try: from pytmx.util_pygame import load_pygame except ImportError: logger.debug('cannot import pygame tools') __version__ = (3, 23, 0) __author__ = 'bitcraft' __author_email__ = '[email protected]' __description__ = 'Map loader for TMX Files - Python 3.3 +'
lgpl-3.0
ajayaa/keystone
keystone/contrib/revoke/migrate_repo/versions/001_revoke_table.py
14
1502
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sqlalchemy as sql def upgrade(migrate_engine): # Upgrade operations go here. Don't create your own engine; bind # migrate_engine to your metadata meta = sql.MetaData() meta.bind = migrate_engine service_table = sql.Table( 'revocation_event', meta, sql.Column('id', sql.String(64), primary_key=True), sql.Column('domain_id', sql.String(64)), sql.Column('project_id', sql.String(64)), sql.Column('user_id', sql.String(64)), sql.Column('role_id', sql.String(64)), sql.Column('trust_id', sql.String(64)), sql.Column('consumer_id', sql.String(64)), sql.Column('access_token_id', sql.String(64)), sql.Column('issued_before', sql.DateTime(), nullable=False), sql.Column('expires_at', sql.DateTime()), sql.Column('revoked_at', sql.DateTime(), index=True, nullable=False)) service_table.create(migrate_engine, checkfirst=True)
apache-2.0
kmacinnis/sympy
sympy/integrals/manualintegrate.py
3
31160
"""Integration method that emulates by-hand techniques. This module also provides functionality to get the steps used to evaluate a particular integral, in the ``integral_steps`` function. This will return nested namedtuples representing the integration rules used. The ``manualintegrate`` function computes the integral using those steps given an integrand; given the steps, ``_manualintegrate`` will evaluate them. The integrator can be extended with new heuristics and evaluation techniques. To do so, write a function that accepts an ``IntegralInfo`` object and returns either a namedtuple representing a rule or ``None``. Then, write another function that accepts the namedtuple's fields and returns the antiderivative, and decorate it with ``@evaluates(namedtuple_type)``. """ from __future__ import print_function, division from collections import namedtuple import sympy from sympy.core.compatibility import reduce from sympy.functions.elementary.trigonometric import TrigonometricFunction from sympy.simplify import fraction from sympy.strategies.core import (switch, identity, do_one, null_safe, condition, tryit) def Rule(name, props=""): # GOTCHA: namedtuple class name not considered! def __eq__(self, other): return self.__class__ == other.__class__ and tuple.__eq__(self, other) __neq__ = lambda self, other: not __eq__(self, other) cls = namedtuple(name, props + " context symbol") cls.__eq__ = __eq__ cls.__ne__ = __neq__ return cls ConstantRule = Rule("ConstantRule", "constant") ConstantTimesRule = Rule("ConstantTimesRule", "constant other substep") PowerRule = Rule("PowerRule", "base exp") AddRule = Rule("AddRule", "substeps") URule = Rule("URule", "u_var u_func constant substep") PartsRule = Rule("PartsRule", "u dv v_step second_step") CyclicPartsRule = Rule("CyclicPartsRule", "parts_rules coefficient") TrigRule = Rule("TrigRule", "func arg") ExpRule = Rule("ExpRule", "base exp") LogRule = Rule("LogRule", "func") ArctanRule = Rule("ArctanRule") AlternativeRule = Rule("AlternativeRule", "alternatives") DontKnowRule = Rule("DontKnowRule") DerivativeRule = Rule("DerivativeRule") RewriteRule = Rule("RewriteRule", "rewritten substep") IntegralInfo = namedtuple('IntegralInfo', 'integrand symbol') evaluators = {} def evaluates(rule): def _evaluates(func): func.rule = rule evaluators[rule] = func return func return _evaluates def contains_dont_know(rule): if isinstance(rule, DontKnowRule): return True else: for val in rule: if isinstance(val, tuple): if contains_dont_know(val): return True elif isinstance(val, list): if any(contains_dont_know(i) for i in val): return True return False def manual_diff(f, symbol): """Derivative of f in form expected by find_substitutions SymPy's derivatives for some trig functions (like cot) aren't in a form that works well with finding substitutions; this replaces the derivatives for those particular forms with something that works better. 
""" if f.args: arg = f.args[0] if isinstance(f, sympy.tan): return arg.diff(symbol) * sympy.sec(arg)**2 elif isinstance(f, sympy.cot): return -arg.diff(symbol) * sympy.csc(arg)**2 elif isinstance(f, sympy.sec): return arg.diff(symbol) * sympy.sec(arg) * sympy.tan(arg) elif isinstance(f, sympy.csc): return -arg.diff(symbol) * sympy.csc(arg) * sympy.cot(arg) elif isinstance(f, sympy.Add): return sum([manual_diff(arg, symbol) for arg in f.args]) return f.diff(symbol) # Method based on that on SIN, described in "Symbolic Integration: The # Stormy Decade" def find_substitutions(integrand, symbol, u_var): results = [] def test_subterm(u, u_diff): substituted = integrand / u_diff if symbol not in substituted.free_symbols: # replaced everything already return False substituted = substituted.subs(u, u_var).cancel() if symbol not in substituted.free_symbols: return substituted return False def possible_subterms(term): if any(isinstance(term, cls) for cls in (sympy.sin, sympy.cos, sympy.tan, sympy.asin, sympy.acos, sympy.atan, sympy.exp, sympy.log)): return [term.args[0]] elif isinstance(term, sympy.Mul): r = [] for u in term.args: numer, denom = fraction(u) if numer == 1: r.append(denom) r.extend(possible_subterms(denom)) else: r.append(u) r.extend(possible_subterms(u)) return r elif isinstance(term, sympy.Pow): if term.args[1].is_constant(symbol): return [term.args[0]] elif term.args[0].is_constant(symbol): return [term.args[1]] elif isinstance(term, sympy.Add): return term.args return [] for u in possible_subterms(integrand): if u == symbol: continue new_integrand = test_subterm(u, manual_diff(u, symbol)) if new_integrand is not False: constant = new_integrand.as_coeff_mul()[0] substitution = (u, constant, new_integrand) if substitution not in results: results.append(substitution) return results def rewriter(condition, rewrite): """Strategy that rewrites an integrand.""" def _rewriter(integral): integrand, symbol = integral if condition(*integral): rewritten = rewrite(*integral) if rewritten != integrand: substep = integral_steps(rewritten, symbol) if not isinstance(substep, DontKnowRule): return RewriteRule( rewritten, substep, integrand, symbol) return _rewriter def proxy_rewriter(condition, rewrite): """Strategy that rewrites an integrand based on some other criteria.""" def _proxy_rewriter(criteria): criteria, integral = criteria integrand, symbol = integral args = criteria + list(integral) if condition(*args): rewritten = rewrite(*args) if rewritten != integrand: return RewriteRule( rewritten, integral_steps(rewritten, symbol), integrand, symbol) return _proxy_rewriter def multiplexer(conditions): """Apply the rule that matches the condition, else None""" def multiplexer_rl(expr): for key, rule in conditions.items(): if key(expr): return rule(expr) return multiplexer_rl def alternatives(*rules): """Strategy that makes an AlternativeRule out of multiple possible results.""" def _alternatives(integral): alts = [] for rule in rules: result = rule(integral) if result and not isinstance(result, DontKnowRule) and result != integral: alts.append(result) if len(alts) == 1: return alts[0] elif len(alts) > 1: return AlternativeRule(alts, *integral) return _alternatives def constant_rule(integral): integrand, symbol = integral return ConstantRule(integral.integrand, *integral) def power_rule(integral): integrand, symbol = integral base, exp = integrand.as_base_exp() if symbol not in exp.free_symbols and isinstance(base, sympy.Symbol): if sympy.simplify(exp + 1) == 0: return LogRule(base, integrand, 
symbol) return PowerRule(base, exp, integrand, symbol) elif symbol not in base.free_symbols and isinstance(exp, sympy.Symbol): return ExpRule(base, exp, integrand, symbol) def exp_rule(integral): integrand, symbol = integral if isinstance(integrand.args[0], sympy.Symbol): return ExpRule(sympy.E, integrand.args[0], integrand, symbol) def arctan_rule(integral): integrand, symbol = integral base, exp = integrand.as_base_exp() if sympy.simplify(exp + 1) == 0: a = sympy.Wild('a', exclude=[symbol]) b = sympy.Wild('b', exclude=[symbol]) match = base.match(a + b*symbol**2) if match: a, b = match[a], match[b] if a != 1 or b != 1: u_var = sympy.Dummy("u") rewritten = sympy.Rational(1, a) * (base / a) ** (-1) u_func = sympy.sqrt(sympy.Rational(b, a)) * symbol constant = 1 / sympy.sqrt(sympy.Rational(b, a)) substituted = rewritten.subs(u_func, u_var) if a == b: substep = ArctanRule(integrand, symbol) else: subrule = ArctanRule(substituted, u_var) if constant != 1: subrule = ConstantTimesRule( constant, substituted, subrule, substituted, symbol) substep = URule(u_var, u_func, constant, subrule, integrand, symbol) if a != 1: other = (base / a) ** (-1) return ConstantTimesRule( sympy.Rational(1, a), other, substep, integrand, symbol) return substep return ArctanRule(integrand, symbol) def add_rule(integral): integrand, symbol = integral return AddRule( [integral_steps(g, symbol) for g in integrand.as_ordered_terms()], integrand, symbol) def mul_rule(integral): integrand, symbol = integral args = integrand.args # Constant times function case coeff, f = integrand.as_independent(symbol) if coeff != 1: return ConstantTimesRule( coeff, f, integral_steps(f, symbol), integrand, symbol) def _parts_rule(integrand, symbol): # LIATE rule: # log, inverse trig, algebraic (polynomial), trigonometric, exponential def pull_out_polys(integrand): integrand = integrand.together() polys = [arg for arg in integrand.args if arg.is_polynomial(symbol)] if polys: u = sympy.Mul(*polys) dv = integrand / u return u, dv def pull_out_u(*functions): def pull_out_u_rl(integrand): if any([integrand.has(f) for f in functions]): args = [arg for arg in integrand.args if any(isinstance(arg, cls) for cls in functions)] if args: u = reduce(lambda a,b: a*b, args) dv = integrand / u return u, dv return pull_out_u_rl liate_rules = [pull_out_u(sympy.log), pull_out_u(sympy.atan), pull_out_polys, pull_out_u(sympy.sin, sympy.cos), pull_out_u(sympy.exp)] dummy = sympy.Dummy("temporary") # we can integrate log(x) and atan(x) by setting dv = 1 if isinstance(integrand, sympy.log) or isinstance(integrand, sympy.atan): integrand = dummy * integrand for index, rule in enumerate(liate_rules): result = rule(integrand) if result: u, dv = result # Don't pick u to be a constant if possible if symbol not in u.free_symbols and not u.has(dummy): return u = u.subs(dummy, 1) dv = dv.subs(dummy, 1) for rule in liate_rules[index + 1:]: r = rule(integrand) # make sure dv is amenable to integration if r and r[0].subs(dummy, 1) == dv: du = u.diff(symbol) v_step = integral_steps(dv, symbol) v = _manualintegrate(v_step) return u, dv, v, du, v_step def parts_rule(integral): integrand, symbol = integral constant, integrand = integrand.as_coeff_Mul() result = _parts_rule(integrand, symbol) steps = [] if result: u, dv, v, du, v_step = result steps.append(result) if isinstance(v, sympy.Integral): return while True: if symbol not in (integrand / (v * du)).cancel().free_symbols: coefficient = ((v * du) / integrand).cancel() rule = CyclicPartsRule( [PartsRule(u, dv, v_step, None, 
None, None) for (u, dv, v, du, v_step) in steps], (-1) ** len(steps) * coefficient, integrand, symbol ) if constant != 1: rule = ConstantTimesRule(constant, integrand, rule, constant * integrand, symbol) return rule result = _parts_rule(v * du, symbol) if result: u, dv, v, du, v_step = result steps.append(result) else: break def make_second_step(steps, integrand): if steps: u, dv, v, du, v_step = steps[0] return PartsRule(u, dv, v_step, make_second_step(steps[1:], v * du), integrand, symbol) else: return integral_steps(integrand, symbol) if steps: u, dv, v, du, v_step = steps[0] rule = PartsRule(u, dv, v_step, make_second_step(steps[1:], v * du), integrand, symbol) if constant != 1: rule = ConstantTimesRule(constant, integrand, rule, constant * integrand, symbol) return rule def trig_rule(integral): integrand, symbol = integral if isinstance(integrand, sympy.sin) or isinstance(integrand, sympy.cos): arg = integrand.args[0] if not isinstance(arg, sympy.Symbol): return # perhaps a substitution can deal with it if isinstance(integrand, sympy.sin): func = 'sin' else: func = 'cos' return TrigRule(func, arg, integrand, symbol) if isinstance(integrand, sympy.tan): rewritten = sympy.sin(*integrand.args) / sympy.cos(*integrand.args) elif isinstance(integrand, sympy.cot): rewritten = sympy.cos(*integrand.args) / sympy.sin(*integrand.args) elif isinstance(integrand, sympy.sec): arg = integrand.args[0] rewritten = ((sympy.sec(arg)**2 + sympy.tan(arg) * sympy.sec(arg)) / (sympy.sec(arg) + sympy.tan(arg))) elif isinstance(integrand, sympy.csc): arg = integrand.args[0] rewritten = ((sympy.csc(arg)**2 + sympy.cot(arg) * sympy.csc(arg)) / (sympy.csc(arg) + sympy.cot(arg))) return RewriteRule( rewritten, integral_steps(rewritten, symbol), integrand, symbol ) def trig_product_rule(integral): integrand, symbol = integral sectan = sympy.sec(symbol) * sympy.tan(symbol) q = integrand / sectan if symbol not in q.free_symbols: rule = TrigRule('sec*tan', symbol, sectan, symbol) if q != 1: rule = ConstantTimesRule(q, sectan, rule, integrand, symbol) return rule csccot = -sympy.csc(symbol) * sympy.cot(symbol) q = integrand / csccot if symbol not in q.free_symbols: rule = TrigRule('csc*cot', symbol, csccot, symbol) if q != 1: rule = ConstantTimesRule(q, csccot, rule, integrand, symbol) return rule @sympy.cacheit def make_wilds(symbol): a = sympy.Wild('a', exclude=[symbol]) b = sympy.Wild('b', exclude=[symbol]) m = sympy.Wild('m', exclude=[symbol], properties=[lambda n: isinstance(n, sympy.Integer)]) n = sympy.Wild('n', exclude=[symbol], properties=[lambda n: isinstance(n, sympy.Integer)]) return a, b, m, n @sympy.cacheit def sincos_pattern(symbol): a, b, m, n = make_wilds(symbol) pattern = sympy.sin(a*symbol)**m * sympy.cos(b*symbol)**n return pattern, a, b, m, n @sympy.cacheit def tansec_pattern(symbol): a, b, m, n = make_wilds(symbol) pattern = sympy.tan(a*symbol)**m * sympy.sec(b*symbol)**n return pattern, a, b, m, n @sympy.cacheit def cotcsc_pattern(symbol): a, b, m, n = make_wilds(symbol) pattern = sympy.cot(a*symbol)**m * sympy.csc(b*symbol)**n return pattern, a, b, m, n def uncurry(func): def uncurry_rl(args): return func(*args) return uncurry_rl def trig_rewriter(rewrite): def trig_rewriter_rl(args): a, b, m, n, integrand, symbol = args rewritten = rewrite(a, b, m, n, integrand, symbol) if rewritten != integrand: return RewriteRule( rewritten, integral_steps(rewritten, symbol), integrand, symbol) return trig_rewriter_rl sincos_botheven_condition = uncurry(lambda a, b, m, n, i, s: m.is_even and n.is_even) 
sincos_botheven = trig_rewriter( lambda a, b, m, n, i, symbol: ( (((1 - sympy.cos(2*a*symbol)) / 2) ** (m / 2)) * (((1 + sympy.cos(2*b*symbol)) / 2) ** (n / 2)) )) sincos_sinodd_condition = uncurry(lambda a, b, m, n, i, s: m.is_odd and m >= 3) sincos_sinodd = trig_rewriter( lambda a, b, m, n, i, symbol: ( (1 - sympy.cos(a*symbol)**2)**((m - 1) / 2) * sympy.sin(a*symbol) * sympy.cos(b*symbol) ** n)) sincos_cosodd_condition = uncurry(lambda a, b, m, n, i, s: n.is_odd and n >= 3) sincos_cosodd = trig_rewriter( lambda a, b, m, n, i, symbol: ( (1 - sympy.sin(b*symbol)**2)**((n - 1) / 2) * sympy.cos(b*symbol) * sympy.sin(a*symbol) ** m)) tansec_seceven_condition = uncurry(lambda a, b, m, n, i, s: n.is_even and n >= 4) tansec_seceven = trig_rewriter( lambda a, b, m, n, i, symbol: ( (1 + sympy.tan(b*symbol)**2) ** (n/2 - 1) * sympy.sec(b*symbol)**2 * sympy.tan(a*symbol) ** m )) tansec_tanodd_condition = uncurry(lambda a, b, m, n, i, s: m.is_odd) tansec_tanodd = trig_rewriter( lambda a, b, m, n, i, symbol: ( (sympy.sec(a*symbol)**2 - 1) ** ((m - 1) / 2) * sympy.tan(a*symbol) * sympy.sec(b*symbol) ** n )) cotcsc_csceven_condition = uncurry(lambda a, b, m, n, i, s: n.is_even and n >= 4) cotcsc_csceven = trig_rewriter( lambda a, b, m, n, i, symbol: ( (1 + sympy.cot(b*symbol)**2) ** (n/2 - 1) * sympy.csc(b*symbol)**2 * sympy.cot(a*symbol) ** m )) cotcsc_cotodd_condition = uncurry(lambda a, b, m, n, i, s: m.is_odd) cotcsc_cotodd = trig_rewriter( lambda a, b, m, n, i, symbol: ( (sympy.csc(a*symbol)**2 - 1) ** ((m - 1) / 2) * sympy.cot(a*symbol) * sympy.csc(b*symbol) ** n )) def trig_powers_products_rule(integral): integrand, symbol = integral if any(integrand.has(f) for f in (sympy.sin, sympy.cos)): pattern, a, b, m, n = sincos_pattern(symbol) match = integrand.match(pattern) if match: a, b, m, n = match.get(a, 0),match.get(b, 0), match.get(m, 0), match.get(n, 0) return multiplexer({ sincos_botheven_condition: sincos_botheven, sincos_sinodd_condition: sincos_sinodd, sincos_cosodd_condition: sincos_cosodd })((a, b, m, n, integrand, symbol)) integrand = integrand.subs({ 1 / sympy.cos(symbol): sympy.sec(symbol) }) if any(integrand.has(f) for f in (sympy.tan, sympy.sec)): pattern, a, b, m, n = tansec_pattern(symbol) match = integrand.match(pattern) if match: a, b, m, n = match.get(a, 0),match.get(b, 0), match.get(m, 0), match.get(n, 0) return multiplexer({ tansec_tanodd_condition: tansec_tanodd, tansec_seceven_condition: tansec_seceven })((a, b, m, n, integrand, symbol)) integrand = integrand.subs({ 1 / sympy.sin(symbol): sympy.csc(symbol), 1 / sympy.tan(symbol): sympy.cot(symbol), sympy.cos(symbol) / sympy.tan(symbol): sympy.cot(symbol) }) if any(integrand.has(f) for f in (sympy.cot, sympy.csc)): pattern, a, b, m, n = cotcsc_pattern(symbol) match = integrand.match(pattern) if match: a, b, m, n = match.get(a, 0),match.get(b, 0), match.get(m, 0), match.get(n, 0) return multiplexer({ cotcsc_cotodd_condition: cotcsc_cotodd, cotcsc_csceven_condition: cotcsc_csceven })((a, b, m, n, integrand, symbol)) def substitution_rule(integral): integrand, symbol = integral u_var = sympy.Dummy("u") substitutions = find_substitutions(integrand, symbol, u_var) if substitutions: ways = [] for u_func, c, substituted in substitutions: subrule = integral_steps(substituted / c, u_var) if contains_dont_know(subrule): continue if sympy.simplify(c - 1) != 0: subrule = ConstantTimesRule( c, substituted / c, subrule, substituted, symbol ) ways.append(URule(u_var, u_func, c, subrule, integrand, symbol)) if len(ways) > 1: return 
AlternativeRule(ways, integrand, symbol) elif ways: return ways[0] elif integrand.has(sympy.exp): u_func = sympy.exp(symbol) c = 1 substituted = integrand / u_func.diff(symbol) substituted = substituted.subs(u_func, u_var) if symbol not in substituted.free_symbols: return URule(u_var, u_func, c, integral_steps(substituted, u_var), integrand, symbol) partial_fractions_rule = rewriter( lambda integrand, symbol: integrand.is_rational_function(), lambda integrand, symbol: integrand.apart(symbol)) distribute_expand_rule = rewriter( lambda integrand, symbol: ( all(arg.is_Pow or arg.is_polynomial(symbol) for arg in integrand.args) or isinstance(integrand, sympy.Pow) or isinstance(integrand, sympy.Mul)), lambda integrand, symbol: integrand.expand()) def derivative_rule(integral): variables = integral[0].args[1:] if variables[-1] == integral.symbol: return DerivativeRule(*integral) else: return ConstantRule(integral.integrand, *integral) def fallback_rule(integral): return DontKnowRule(*integral) _integral_cache = {} def integral_steps(integrand, symbol, **options): """Returns the steps needed to compute an integral. This function attempts to mirror what a student would do by hand as closely as possible. SymPy Gamma uses this to provide a step-by-step explanation of an integral. The code it uses to format the results of this function can be found at https://github.com/sympy/sympy_gamma/blob/master/app/logic/intsteps.py. Examples ======== >>> from sympy import exp, sin, cos >>> from sympy.integrals.manualintegrate import integral_steps >>> from sympy.abc import x >>> print(repr(integral_steps(exp(x) / (1 + exp(2 * x)), x))) \ # doctest: +NORMALIZE_WHITESPACE URule(u_var=_u, u_func=exp(x), constant=1, substep=ArctanRule(context=1/(_u**2 + 1), symbol=_u), context=exp(x)/(exp(2*x) + 1), symbol=x) >>> print(repr(integral_steps(sin(x), x))) \ # doctest: +NORMALIZE_WHITESPACE TrigRule(func='sin', arg=x, context=sin(x), symbol=x) >>> print(repr(integral_steps((x**2 + 3)**2 , x))) \ # doctest: +NORMALIZE_WHITESPACE RewriteRule(rewritten=x**4 + 6*x**2 + 9, substep=AddRule(substeps=[PowerRule(base=x, exp=4, context=x**4, symbol=x), ConstantTimesRule(constant=6, other=x**2, substep=PowerRule(base=x, exp=2, context=x**2, symbol=x), context=6*x**2, symbol=x), ConstantRule(constant=9, context=9, symbol=x)], context=x**4 + 6*x**2 + 9, symbol=x), context=(x**2 + 3)**2, symbol=x) Returns ======= rule : namedtuple The first step; most rules have substeps that must also be considered. These substeps can be evaluated using ``manualintegrate`` to obtain a result. """ cachekey = (integrand, symbol) if cachekey in _integral_cache: if _integral_cache[cachekey] is None: # cyclic integral! 
null_safe will eliminate that path return None else: return _integral_cache[cachekey] else: _integral_cache[cachekey] = None integral = IntegralInfo(integrand, symbol) def key(integral): integrand = integral.integrand if isinstance(integrand, TrigonometricFunction): return TrigonometricFunction elif isinstance(integrand, sympy.Derivative): return sympy.Derivative elif symbol not in integrand.free_symbols: return sympy.Number else: for cls in (sympy.Pow, sympy.Symbol, sympy.exp, sympy.log, sympy.Add, sympy.Mul, sympy.atan): if isinstance(integrand, cls): return cls def integral_is_subclass(*klasses): def _integral_is_subclass(integral): k = key(integral) return k and issubclass(k, klasses) return _integral_is_subclass result = do_one( null_safe(switch(key, { sympy.Pow: do_one(null_safe(power_rule), null_safe(arctan_rule)), sympy.Symbol: power_rule, sympy.exp: exp_rule, sympy.Add: add_rule, sympy.Mul: do_one(null_safe(mul_rule), null_safe(trig_product_rule)), sympy.Derivative: derivative_rule, TrigonometricFunction: trig_rule, sympy.Number: constant_rule })), null_safe( alternatives( substitution_rule, condition( integral_is_subclass(sympy.Mul, sympy.log, sympy.atan), parts_rule), condition( integral_is_subclass(sympy.Mul), partial_fractions_rule), condition( integral_is_subclass(sympy.Mul, sympy.Pow), distribute_expand_rule), trig_powers_products_rule ) ), fallback_rule)(integral) _integral_cache[cachekey] = result return result @evaluates(ConstantRule) def eval_constant(constant, integrand, symbol): return constant * symbol @evaluates(ConstantTimesRule) def eval_constanttimes(constant, other, substep, integrand, symbol): return constant * _manualintegrate(substep) @evaluates(PowerRule) def eval_power(base, exp, integrand, symbol): return (base ** (exp + 1)) / (exp + 1) @evaluates(ExpRule) def eval_exp(base, exp, integrand, symbol): return integrand / sympy.ln(base) @evaluates(AddRule) def eval_add(substeps, integrand, symbol): return sum(map(_manualintegrate, substeps)) @evaluates(URule) def eval_u(u_var, u_func, constant, substep, integrand, symbol): result = _manualintegrate(substep) return result.subs(u_var, u_func) @evaluates(PartsRule) def eval_parts(u, dv, v_step, second_step, integrand, symbol): v = _manualintegrate(v_step) return u * v - _manualintegrate(second_step) @evaluates(CyclicPartsRule) def eval_cyclicparts(parts_rules, coefficient, integrand, symbol): coefficient = 1 - coefficient result = [] sign = 1 for rule in parts_rules: result.append(sign * rule.u * _manualintegrate(rule.v_step)) sign *= -1 return sympy.Add(*result) / coefficient @evaluates(TrigRule) def eval_trig(func, arg, integrand, symbol): if func == 'sin': return -sympy.cos(arg) elif func == 'cos': return sympy.sin(arg) elif func == 'sec*tan': return sympy.sec(arg) elif func == 'csc*cot': return sympy.csc(arg) @evaluates(LogRule) def eval_log(func, integrand, symbol): return sympy.ln(func) @evaluates(ArctanRule) def eval_arctan(integrand, symbol): return sympy.atan(symbol) @evaluates(AlternativeRule) def eval_alternative(alternatives, integrand, symbol): return _manualintegrate(alternatives[0]) @evaluates(RewriteRule) def eval_rewrite(rewritten, substep, integrand, symbol): return _manualintegrate(substep) @evaluates(DerivativeRule) def eval_derivativerule(integrand, symbol): # isinstance(integrand, Derivative) should be True if len(integrand.args) == 2: return integrand.args[0] else: return sympy.Derivative(integrand.args[0], *integrand.args[1:-1]) @evaluates(DontKnowRule) def eval_dontknowrule(integrand, 
symbol): return sympy.Integral(integrand, symbol) def _manualintegrate(rule): evaluator = evaluators.get(rule.__class__) if not evaluator: raise ValueError("Cannot evaluate rule %s" % rule) return evaluator(*rule) def manualintegrate(f, var): """manualintegrate(f, var) Compute indefinite integral of a single variable using an algorithm that resembles what a student would do by hand. Unlike ``integrate``, var can only be a single symbol. Examples ======== >>> from sympy import sin, cos, tan, exp, log, integrate >>> from sympy.integrals.manualintegrate import manualintegrate >>> from sympy.abc import x >>> manualintegrate(1 / x, x) log(x) >>> integrate(1/x) log(x) >>> manualintegrate(log(x), x) x*log(x) - x >>> integrate(log(x)) x*log(x) - x >>> manualintegrate(exp(x) / (1 + exp(2 * x)), x) atan(exp(x)) >>> integrate(exp(x) / (1 + exp(2 * x))) RootSum(4*_z**2 + 1, Lambda(_i, _i*log(2*_i + exp(x)))) >>> manualintegrate(cos(x)**4 * sin(x), x) -cos(x)**5/5 >>> integrate(cos(x)**4 * sin(x), x) -cos(x)**5/5 >>> manualintegrate(cos(x)**4 * sin(x)**3, x) cos(x)**7/7 - cos(x)**5/5 >>> integrate(cos(x)**4 * sin(x)**3, x) cos(x)**7/7 - cos(x)**5/5 >>> manualintegrate(tan(x), x) -log(cos(x)) >>> integrate(tan(x), x) -log(sin(x)**2 - 1)/2 See Also ======== sympy.integrals.integrals.integrate sympy.integrals.integrals.Integral.doit sympy.integrals.integrals.Integral """ return _manualintegrate(integral_steps(f, var))
bsd-3-clause
pong3489/TEST_Mission
Lib/site-packages/scipy/stats/_support.py
57
8775
from numpy import asarray import numpy as np import copy ListType = list TupleType = tuple StringType = str def abut(source, *args): # comment: except for the repetition, this is equivalent to hstack. """\nLike the |Stat abut command. It concatenates two arrays column-wise and returns the result. CAUTION: If one array is shorter, it will be repeated until it is as long as the other. Format: abut (source, args) where args=any # of arrays Returns: an array as long as the LONGEST array past, source appearing on the 'left', arrays in <args> attached on the 'right'.\n""" source = asarray(source) if len(source.shape)==1: width = 1 source = np.resize(source,[source.shape[0],width]) else: width = source.shape[1] for addon in args: if len(addon.shape)==1: width = 1 addon = np.resize(addon,[source.shape[0],width]) else: width = source.shape[1] if len(addon) < len(source): addon = np.resize(addon,[source.shape[0],addon.shape[1]]) elif len(source) < len(addon): source = np.resize(source,[addon.shape[0],source.shape[1]]) source = np.concatenate((source,addon),1) return source def unique(inarray): """Returns unique items in the FIRST dimension of the passed array. Only works on arrays NOT including string items (e.g., type 'O' or 'c'). """ inarray = asarray(inarray) uniques = np.array([inarray[0]]) if len(uniques.shape) == 1: # IF IT'S A 1D ARRAY for item in inarray[1:]: if np.add.reduce(np.equal(uniques,item).flat) == 0: try: uniques = np.concatenate([uniques,np.array[np.newaxis,:]]) except TypeError: uniques = np.concatenate([uniques,np.array([item])]) else: # IT MUST BE A 2+D ARRAY if inarray.dtype.char != 'O': # not an Object array for item in inarray[1:]: if not np.sum(np.alltrue(np.equal(uniques,item),1),axis=0): try: uniques = np.concatenate( [uniques,item[np.newaxis,:]] ) except TypeError: # the item to add isn't a list uniques = np.concatenate([uniques,np.array([item])]) else: pass # this item is already in the uniques array else: # must be an Object array, alltrue/equal functions don't work for item in inarray[1:]: newflag = 1 for unq in uniques: # NOTE: cmp --> 0=same, -1=<, 1=> test = np.sum(abs(np.array(map(cmp,item,unq))),axis=0) if test == 0: # if item identical to any 1 row in uniques newflag = 0 # then not a novel item to add break if newflag == 1: try: uniques = np.concatenate( [uniques,item[np.newaxis,:]] ) except TypeError: # the item to add isn't a list uniques = np.concatenate([uniques,np.array([item])]) return uniques def colex(a, indices, axis=1): """\nExtracts specified indices (a list) from passed array, along passed axis (column extraction is default). BEWARE: A 1D array is presumed to be a column-array (and that the whole array will be returned as a column). Returns: the columns of a specified by indices\n""" if type(indices) not in [ListType,TupleType,np.ndarray]: indices = [indices] if len(np.shape(a)) == 1: cols = np.resize(a,[a.shape[0],1]) else: cols = np.take(a,indices,axis) return cols def adm(a, criterion): """\nReturns rows from the passed list of lists that meet the criteria in the passed criterion expression (a string). Format: adm (a,criterion) where criterion is like 'x[2]==37'\n""" lines = eval('filter(lambda x: '+criterion+',a)') try: lines = np.array(lines) except: lines = np.array(lines,'O') return lines def linexand(a, columnlist, valuelist): """Returns the rows of an array where col (from columnlist) = val (from valuelist). One value is required for each column in columnlist. 
Returns: the rows of a where columnlist[i]=valuelist[i] for ALL i\n""" a = asarray(a) if type(columnlist) not in [ListType,TupleType,np.ndarray]: columnlist = [columnlist] if type(valuelist) not in [ListType,TupleType,np.ndarray]: valuelist = [valuelist] criterion = '' for i in range(len(columnlist)): if type(valuelist[i])==StringType: critval = '\'' + valuelist[i] + '\'' else: critval = str(valuelist[i]) criterion = criterion + ' x['+str(columnlist[i])+']=='+critval+' and' criterion = criterion[0:-3] # remove the "and" after the last crit return adm(a,criterion) def collapse(a, keepcols, collapsecols, stderr=0, ns=0, cfcn=None): """Averages data in collapsecol, keeping all unique items in keepcols (using unique, which keeps unique LISTS of column numbers), retaining the unique sets of values in keepcols, the mean for each. If the sterr or N of the mean are desired, set either or both parameters to 1. Returns: unique 'conditions' specified by the contents of columns specified by keepcols, abutted with the mean(s,axis=0) of column(s) specified by collapsecols Examples -------- import numpy as np from scipy import stats xx = np.array([[ 0., 0., 1.], [ 1., 1., 1.], [ 2., 2., 1.], [ 0., 3., 1.], [ 1., 4., 1.], [ 2., 5., 1.], [ 0., 6., 1.], [ 1., 7., 1.], [ 2., 8., 1.], [ 0., 9., 1.]]) >>> stats._support.collapse(xx, (0), (1,2), stderr=0, ns=0, cfcn=None) array([[ 0. , 4.5, 1. ], [ 0. , 4.5, 1. ], [ 1. , 4. , 1. ], [ 1. , 4. , 1. ], [ 2. , 5. , 1. ], [ 2. , 5. , 1. ]]) >>> stats._support.collapse(xx, (0), (1,2), stderr=1, ns=1, cfcn=None) array([[ 0. , 4.5 , 1.93649167, 4. , 1. , 0. , 4. ], [ 0. , 4.5 , 1.93649167, 4. , 1. , 0. , 4. ], [ 1. , 4. , 1.73205081, 3. , 1. , 0. , 3. ], [ 1. , 4. , 1.73205081, 3. , 1. , 0. , 3. ], [ 2. , 5. , 1.73205081, 3. , 1. , 0. , 3. ], [ 2. , 5. , 1.73205081, 3. , 1. , 0. , 3. ]]) """ if cfcn is None: cfcn = lambda(x): np.mean(x, axis=0) a = asarray(a) if keepcols == []: avgcol = colex(a,collapsecols) means = cfcn(avgcol) return means else: if type(keepcols) not in [ListType,TupleType,np.ndarray]: keepcols = [keepcols] values = colex(a,keepcols) # so that "item" can be appended (below) uniques = unique(values).tolist() # get a LIST, so .sort keeps rows intact uniques.sort() newlist = [] for item in uniques: if type(item) not in [ListType,TupleType,np.ndarray]: item =[item] tmprows = linexand(a,keepcols,item) for col in collapsecols: avgcol = colex(tmprows,col) item.append(cfcn(avgcol)) if stderr: if len(avgcol)>1: item.append(compute_stderr(avgcol)) else: item.append('N/A') if ns: item.append(len(avgcol)) newlist.append(item) try: new_a = np.array(newlist) except TypeError: new_a = np.array(newlist,'O') return new_a def _chk_asarray(a, axis): if axis is None: a = np.ravel(a) outaxis = 0 else: a = np.asarray(a) outaxis = axis return a, outaxis def _chk2_asarray(a, b, axis): if axis is None: a = np.ravel(a) b = np.ravel(b) outaxis = 0 else: a = np.asarray(a) b = np.asarray(b) outaxis = axis return a, b, outaxis def compute_stderr(a, axis=0, ddof=1): a, axis = _chk_asarray(a, axis) return np.std(a,axis,ddof=1) / float(np.sqrt(a.shape[axis]))
gpl-3.0
kalrey/swift
test/probe/common.py
4
9721
# Copyright (c) 2010-2012 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from httplib import HTTPConnection import os from subprocess import Popen, PIPE import sys from time import sleep, time from collections import defaultdict from nose import SkipTest from swiftclient import get_auth, head_account from swift.common.ring import Ring from swift.common.utils import readconf from swift.common.manager import Manager from swift.common.storage_policy import POLICIES from test.probe import CHECK_SERVER_TIMEOUT, VALIDATE_RSYNC def get_server_number(port, port2server): server_number = port2server[port] server, number = server_number[:-1], server_number[-1:] try: number = int(number) except ValueError: # probably the proxy return server_number, None return server, number def start_server(port, port2server, pids, check=True): server, number = get_server_number(port, port2server) err = Manager([server]).start(number=number, wait=False) if err: raise Exception('unable to start %s' % ( server if not number else '%s%s' % (server, number))) if check: return check_server(port, port2server, pids) return None def check_server(port, port2server, pids, timeout=CHECK_SERVER_TIMEOUT): server = port2server[port] if server[:-1] in ('account', 'container', 'object'): if int(server[-1]) > 4: return None path = '/connect/1/2' if server[:-1] == 'container': path += '/3' elif server[:-1] == 'object': path += '/3/4' try_until = time() + timeout while True: try: conn = HTTPConnection('127.0.0.1', port) conn.request('GET', path) resp = conn.getresponse() # 404 because it's a nonsense path (and mount_check is false) # 507 in case the test target is a VM using mount_check if resp.status not in (404, 507): raise Exception( 'Unexpected status %s' % resp.status) break except Exception as err: if time() > try_until: print err print 'Giving up on %s:%s after %s seconds.' % ( server, port, timeout) raise err sleep(0.1) else: try_until = time() + timeout while True: try: url, token = get_auth('http://127.0.0.1:8080/auth/v1.0', 'test:tester', 'testing') account = url.split('/')[-1] head_account(url, token) return url, token, account except Exception as err: if time() > try_until: print err print 'Giving up on proxy:8080 after 30 seconds.' 
raise err sleep(0.1) return None def kill_server(port, port2server, pids): server, number = get_server_number(port, port2server) err = Manager([server]).kill(number=number) if err: raise Exception('unable to kill %s' % (server if not number else '%s%s' % (server, number))) try_until = time() + 30 while True: try: conn = HTTPConnection('127.0.0.1', port) conn.request('GET', '/') conn.getresponse() except Exception as err: break if time() > try_until: raise Exception( 'Still answering on port %s after 30 seconds' % port) sleep(0.1) def kill_servers(port2server, pids): Manager(['all']).kill() def kill_nonprimary_server(primary_nodes, port2server, pids): primary_ports = [n['port'] for n in primary_nodes] for port, server in port2server.iteritems(): if port in primary_ports: server_type = server[:-1] break else: raise Exception('Cannot figure out server type for %r' % primary_nodes) for port, server in list(port2server.iteritems()): if server[:-1] == server_type and port not in primary_ports: kill_server(port, port2server, pids) return port def get_ring(ring_name, server=None, force_validate=None): if not server: server = ring_name ring = Ring('/etc/swift', ring_name=ring_name) if not VALIDATE_RSYNC and not force_validate: return ring # easy sanity checks if ring.replica_count != 3: print 'WARNING: %s has %s replicas instead of 3' % ( ring.serialized_path, ring.replica_count) assert 4 == len(ring.devs), '%s has %s devices instead of 4' % ( ring.serialized_path, len(ring.devs)) # map server to config by port port_to_config = {} for server_ in Manager([server]): for config_path in server_.conf_files(): conf = readconf(config_path, section_name='%s-replicator' % server_.type) port_to_config[int(conf['bind_port'])] = conf for dev in ring.devs: # verify server is exposing mounted device conf = port_to_config[dev['port']] for device in os.listdir(conf['devices']): if device == dev['device']: dev_path = os.path.join(conf['devices'], device) full_path = os.path.realpath(dev_path) assert os.path.exists(full_path), \ 'device %s in %s was not found (%s)' % ( device, conf['devices'], full_path) break else: raise AssertionError( "unable to find ring device %s under %s's devices (%s)" % ( dev['device'], server, conf['devices'])) # verify server is exposing rsync device if port_to_config[dev['port']].get('vm_test_mode', False): rsync_export = '%s%s' % (server, dev['replication_port']) else: rsync_export = server cmd = "rsync rsync://localhost/%s" % rsync_export p = Popen(cmd, shell=True, stdout=PIPE) stdout, _stderr = p.communicate() if p.returncode: raise AssertionError('unable to connect to rsync ' 'export %s (%s)' % (rsync_export, cmd)) for line in stdout.splitlines(): if line.rsplit(None, 1)[-1] == dev['device']: break else: raise AssertionError("unable to find ring device %s under rsync's " "exported devices for %s (%s)" % ( dev['device'], rsync_export, cmd)) return ring def reset_environment(): p = Popen("resetswift 2>&1", shell=True, stdout=PIPE) stdout, _stderr = p.communicate() print stdout Manager(['all']).stop() pids = {} try: account_ring = get_ring('account') container_ring = get_ring('container') policy = POLICIES.default object_ring = get_ring(policy.ring_name, 'object') Manager(['main']).start(wait=False) port2server = {} for server, port in [('account', 6002), ('container', 6001), ('object', 6000)]: for number in xrange(1, 9): port2server[port + (number * 10)] = '%s%d' % (server, number) for port in port2server: check_server(port, port2server, pids) port2server[8080] = 'proxy' url, token, 
account = check_server(8080, port2server, pids) config_dict = defaultdict(dict) for name in ('account', 'container', 'object'): for server_name in (name, '%s-replicator' % name): for server in Manager([server_name]): for i, conf in enumerate(server.conf_files(), 1): config_dict[server.server][i] = conf except BaseException: try: raise except AssertionError as e: raise SkipTest(e) finally: try: kill_servers(port2server, pids) except Exception: pass return pids, port2server, account_ring, container_ring, object_ring, \ policy, url, token, account, config_dict def get_to_final_state(): replicators = Manager(['account-replicator', 'container-replicator', 'object-replicator']) replicators.stop() updaters = Manager(['container-updater', 'object-updater']) updaters.stop() replicators.once() updaters.once() replicators.once() if __name__ == "__main__": for server in ('account', 'container'): try: get_ring(server, force_validate=True) except AssertionError as err: sys.exit('%s ERROR: %s' % (server, err)) print '%s OK' % server for policy in POLICIES: try: get_ring(policy.ring_name, server='object', force_validate=True) except AssertionError as err: sys.exit('object ERROR (%s): %s' % (policy.name, err)) print 'object OK (%s)' % policy.name
apache-2.0
zhuochenKIDD/FrameworkBenchmarks
frameworks/Python/bottle/app.py
58
5667
from functools import partial from operator import attrgetter, itemgetter from random import randint import os import sys from bottle import Bottle, route, request, run, template, response from bottle.ext import sqlalchemy from sqlalchemy import create_engine, Column, Integer, Unicode from sqlalchemy.ext.declarative import declarative_base try: import ujson as json except ImportError: import json if sys.version_info[0] == 3: xrange = range _is_pypy = hasattr(sys, 'pypy_version_info') DBDRIVER = 'mysql+pymysql' if _is_pypy else 'mysql' DBHOSTNAME = os.environ.get('DBHOST', 'localhost') DATABASE_URI = '%s://benchmarkdbuser:benchmarkdbpass@%s:3306/hello_world?charset=utf8' % (DBDRIVER, DBHOSTNAME) app = Bottle() Base = declarative_base() db_engine = create_engine(DATABASE_URI) plugin = sqlalchemy.Plugin(db_engine, keyword='db') app.install(plugin) # Engine for raw operation. Use autocommit. raw_engine = create_engine(DATABASE_URI, connect_args={'autocommit': True}, pool_reset_on_return=None) class World(Base): __tablename__ = "World" id = Column(Integer, primary_key=True) randomNumber = Column(Integer) def serialize(self): """Return object data in easily serializeable format""" return { 'id': self.id, 'randomNumber': self.randomNumber, } class Fortune(Base): __tablename__ = "Fortune" id = Column(Integer, primary_key=True) message = Column(Unicode) @app.route("/json") def hello(): response.content_type = 'application/json' resp = {"message": "Hello, World!"} return json.dumps(resp) @app.route("/db") def get_random_world_single(db): """Test Type 2: Single Database Query""" wid = randint(1, 10000) world = db.query(World).get(wid).serialize() response.content_type = 'application/json' return json.dumps(world) @app.route("/raw-db") def get_random_world_single_raw(): connection = raw_engine.connect() wid = randint(1, 10000) try: result = connection.execute("SELECT id, randomNumber FROM world WHERE id = " + str(wid)).fetchone() world = {'id': result[0], 'randomNumber': result[1]} response.content_type = 'application/json' return json.dumps(world) finally: connection.close() @app.route("/queries") def get_random_world(db): """Test Type 3: Multiple database queries""" num_queries = request.query.get('queries', 1, type=int) if num_queries < 1: num_queries = 1 if num_queries > 500: num_queries = 500 rp = partial(randint, 1, 10000) get = db.query(World).get worlds = [get(rp()).serialize() for _ in xrange(num_queries)] response.content_type = 'application/json' return json.dumps(worlds) @app.route("/raw-queries") def get_random_world_raw(): num_queries = request.query.get('queries', 1, type=int) if num_queries < 1: num_queries = 1 if num_queries > 500: num_queries = 500 worlds = [] rp = partial(randint, 1, 10000) connection = raw_engine.connect() try: for i in xrange(num_queries): result = connection.execute("SELECT id, randomNumber FROM world WHERE id = " + str(rp())).fetchone() worlds.append({'id': result[0], 'randomNumber': result[1]}) finally: connection.close() response.content_type = 'application/json' return json.dumps(worlds) @app.route("/fortune") def fortune_orm(db): fortunes=db.query(Fortune).all() fortunes.append(Fortune(id=0, message="Additional fortune added at request time.")) fortunes.sort(key=attrgetter('message')) return template('fortune-obj', fortunes=fortunes) @app.route("/raw-fortune") def fortune_raw(): connection = raw_engine.connect() try: fortunes=[(f.id, f.message) for f in connection.execute("SELECT * FROM Fortune")] fortunes.append((0, u'Additional fortune added at request 
time.')) fortunes=sorted(fortunes, key=itemgetter(1)) finally: connection.close() return template('fortune', fortunes=fortunes) @app.route("/updates") def updates(db): """Test 5: Database Updates""" num_queries = request.query.get('queries', 1, type=int) if num_queries < 1: num_queries = 1 if num_queries > 500: num_queries = 500 worlds = [] rp = partial(randint, 1, 10000) ids = [rp() for _ in xrange(num_queries)] ids.sort() # To avoid deadlock for id in ids: world = db.query(World).get(id) world.randomNumber = rp() worlds.append(world.serialize()) response.content_type = 'application/json' return json.dumps(worlds) @app.route("/raw-updates") def raw_updates(): """Test 5: Database Updates""" num_queries = request.query.get('queries', 1, type=int) if num_queries < 1: num_queries = 1 if num_queries > 500: num_queries = 500 conn = raw_engine.connect() worlds = [] rp = partial(randint, 1, 10000) for i in xrange(num_queries): world = conn.execute("SELECT * FROM World WHERE id=%s", (rp(),)).fetchone() randomNumber = rp() worlds.append({'id': world['id'], 'randomNumber': randomNumber}) conn.execute("UPDATE World SET randomNumber=%s WHERE id=%s", (randomNumber, world['id'])) conn.close() response.content_type = 'application/json' return json.dumps(worlds) @app.route('/plaintext') def plaintext(): """Test 6: Plaintext""" response.content_type = 'text/plain' return b'Hello, World!' if __name__ == "__main__": app.run(host='0.0.0.0', debug=False)
bsd-3-clause
TheStrix/android_kernel_xiaomi_armani
arch/ia64/scripts/unwcheck.py
13143
1714
#!/usr/bin/python # # Usage: unwcheck.py FILE # # This script checks the unwind info of each function in file FILE # and verifies that the sum of the region-lengths matches the total # length of the function. # # Based on a shell/awk script originally written by Harish Patil, # which was converted to Perl by Matthew Chapman, which was converted # to Python by David Mosberger. # import os import re import sys if len(sys.argv) != 2: print "Usage: %s FILE" % sys.argv[0] sys.exit(2) readelf = os.getenv("READELF", "readelf") start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]") rlen_pattern = re.compile(".*rlen=([0-9]+)") def check_func (func, slots, rlen_sum): if slots != rlen_sum: global num_errors num_errors += 1 if not func: func = "[%#x-%#x]" % (start, end) print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum) return num_funcs = 0 num_errors = 0 func = False slots = 0 rlen_sum = 0 for line in os.popen("%s -u %s" % (readelf, sys.argv[1])): m = start_pattern.match(line) if m: check_func(func, slots, rlen_sum) func = m.group(1) start = long(m.group(2), 16) end = long(m.group(3), 16) slots = 3 * (end - start) / 16 rlen_sum = 0L num_funcs += 1 else: m = rlen_pattern.match(line) if m: rlen_sum += long(m.group(1)) check_func(func, slots, rlen_sum) if num_errors == 0: print "No errors detected in %u functions." % num_funcs else: if num_errors > 1: err="errors" else: err="error" print "%u %s detected in %u functions." % (num_errors, err, num_funcs) sys.exit(1)
gpl-2.0
zimmermant/dlvo_lammps
tools/moltemplate/src/postprocess_input_script.py
19
5958
#!/usr/bin/env python """ Reorder the integer arguments to the commands in a LAMMPS input file if these arguments violate LAMMPS order requirements. We have to do this because the moltemplate.sh script will automatically assign these integers in a way which may violate these restrictions and the user has little control over this. This script: swaps the I and J integers in "pair_coeff I J ..." commands when I > J Other features may be added later """ import sys lines_orig = [] f = None fname = None num_lines_ignore = 0 # Lines from files passed as arguments are read and processed silently. # (Why? Sometimes it's necessary to read the contents of previous input scripts # in order to be able to understand a script command which appears later. # I'm assuming these files will be processed by lammps in the same order. So I # must insure that moltemplate.sh passes them to this program in that order. # I'm too lazy to read the "include" commands in input scripts correctly.) if len(sys.argv) > 1: for fname in sys.argv[1:]: f = open(fname, 'r') in_stream = f lines_orig += in_stream.readlines() num_lines_ignore += len(lines_orig) f.close() # Lines read from the standard input are read, processed, and printed to stdout in_stream = sys.stdin lines_orig += in_stream.readlines() pair_style_list=[] swap_occured = False warn_wildcard = False i=0 while i < len(lines_orig): # Read the next logical line # Any lines ending in '&' should be merged with the next line before breaking line_orig = '' while i < len(lines_orig): line_counter = 1 + i - num_lines_ignore line_orig += lines_orig[i] if ((len(line_orig) < 2) or (line_orig[-2:] != '&\n')): break i += 1 line = line_orig.replace('&\n','\n').rstrip('\n') comment = '' if '#' in line_orig: ic = line.find('#') line = line_orig[:ic] comment = line_orig[ic:] # keep track of comments (put them back later) tokens = line.strip().split() if ((len(tokens) >= 2) and (tokens[0] == 'pair_style')): pair_style_list = tokens[1:] if ((len(tokens) >= 3) and (tokens[0] == 'pair_coeff')): if ((tokens[1].isdigit() and (tokens[2].isdigit())) and (int(tokens[1]) > int(tokens[2]))): swap_occured = True tmp = tokens[2] tokens[2] = tokens[1] tokens[1] = tmp if i >= num_lines_ignore: # polite warning: sys.stderr.write('swapped pair_coeff order on line '+str(line_counter)) #if (fname != None): # sys.stderr.write(' of file \"'+fname+'\"') sys.stderr.write('\n') # Deal with the "hbond/" pair coeffs. # # The hbond/dreiding pair style designates one of the two atom types # as a donor, and the other as an acceptor (using the 'i','j' flags) # If swapped atom types eariler, we also need to swap 'i' with 'j'. # # If "hbond/dreiding.." pair style is used with "hybrid" or # "hybrid/overlay" then tokens[3] is the name of the pair style # and tokens[5] is either 'i' or 'j'. 
if len(pair_style_list) > 0: if ((pair_style_list[0] == 'hybrid') or (pair_style_list[0] == 'hybrid/overlay')): if ((len(tokens) > 5) and (tokens[5] == 'i') and (tokens[3][0:6]=='hbond/')): tokens[5] = 'j' sys.stderr.write(' (and replaced \"i\" with \"j\")\n') elif ((len(tokens) > 5) and (tokens[5] == 'j') and (tokens[3][0:6]=='hbond/')): tokens[5] = 'i' sys.stderr.write(' (and replaced \"j\" with \"i\")\n') elif (pair_style_list[0][0:6] == 'hbond/'): if ((len(tokens) > 4) and (tokens[4] == 'i')): tokens[4] = 'j' sys.stderr.write(' (and replaced \"i\" with \"j\")\n') elif ((len(tokens) > 4) and (tokens[4] == 'j')): tokens[4] = 'i' sys.stderr.write(' (and replaced \"j\" with \"i\")\n') sys.stdout.write((' '.join(tokens)+comment).replace('\n','&\n')+'\n') else: if ((('*' in tokens[1]) or ('*' in tokens[2])) and (not (('*' == tokens[1]) and ('*' == tokens[2])))): warn_wildcard = True if i >= num_lines_ignore: sys.stdout.write(line_orig) else: if i >= num_lines_ignore: sys.stdout.write(line_orig) i += 1 if swap_occured: sys.stderr.write('\n' ' WARNING: Atom order in some pair_coeff commands was swapped to pacify LAMMPS.\n' ' For some exotic pair_styles such as hbond/dreiding, this is not enough. If you\n' ' use exotic pair_styles, please verify the \"pair_coeff\" commands are correct.\n') if warn_wildcard: sys.stderr.write('\n' ' WARNING: The use of wildcard characters (\"*\") in your \"pair_coeff\"\n' ' commands is not recommended.\n' ' (It is safer to specify each interaction pair manually.\n' ' Check every pair_coeff command. Make sure that every atom type in\n' ' the first group is <= atom types in the second group.\n' ' Moltemplate does NOT do this when wildcards are used.)\n' ' If you are using a many-body pair style then ignore this warning.\n')
gpl-2.0
RDXT/geopy
geopy/geocoders/geonames.py
13
5902
""" :class:`GeoNames` geocoder. """ from geopy.compat import urlencode from geopy.geocoders.base import Geocoder, DEFAULT_TIMEOUT from geopy.location import Location from geopy.exc import ( GeocoderInsufficientPrivileges, GeocoderServiceError, ConfigurationError ) from geopy.util import logger __all__ = ("GeoNames", ) class GeoNames(Geocoder): # pylint: disable=W0223 """ GeoNames geocoder, documentation at: http://www.geonames.org/export/geonames-search.html Reverse geocoding documentation at: http://www.geonames.org/maps/us-reverse-geocoder.html """ def __init__( self, country_bias=None, username=None, timeout=DEFAULT_TIMEOUT, proxies=None, user_agent=None, ): """ :param string country_bias: :param string username: :param int timeout: Time, in seconds, to wait for the geocoding service to respond before raising a :class:`geopy.exc.GeocoderTimedOut` exception. .. versionadded:: 0.97 :param dict proxies: If specified, routes this geocoder's requests through the specified proxy. E.g., {"https": "192.0.2.0"}. For more information, see documentation on :class:`urllib2.ProxyHandler`. .. versionadded:: 0.96 """ super(GeoNames, self).__init__( scheme='http', timeout=timeout, proxies=proxies, user_agent=user_agent ) if username == None: raise ConfigurationError( 'No username given, required for api access. If you do not ' 'have a GeoNames username, sign up here: ' 'http://www.geonames.org/login' ) self.username = username self.country_bias = country_bias self.api = "%s://api.geonames.org/searchJSON" % self.scheme self.api_reverse = ( "%s://api.geonames.org/findNearbyPlaceNameJSON" % self.scheme ) def geocode(self, query, exactly_one=True, timeout=None): # pylint: disable=W0221 """ Geocode a location query. :param string query: The address or query you wish to geocode. :param bool exactly_one: Return one result or a list of results, if available. :param int timeout: Time, in seconds, to wait for the geocoding service to respond before raising a :class:`geopy.exc.GeocoderTimedOut` exception. Set this only if you wish to override, on this call only, the value set during the geocoder's initialization. .. versionadded:: 0.97 """ params = { 'q': query, 'username': self.username } if self.country_bias: params['countryBias'] = self.country_bias if exactly_one is True: params['maxRows'] = 1 url = "?".join((self.api, urlencode(params))) logger.debug("%s.geocode: %s", self.__class__.__name__, url) return self._parse_json( self._call_geocoder(url, timeout=timeout), exactly_one, ) def reverse( self, query, exactly_one=False, timeout=None, ): """ Given a point, find an address. .. versionadded:: 1.2.0 :param string query: The coordinates for which you wish to obtain the closest human-readable addresses. :type query: :class:`geopy.point.Point`, list or tuple of (latitude, longitude), or string as "%(latitude)s, %(longitude)s" :param boolean exactly_one: Return one result or a list of results, if available. :param int timeout: Time, in seconds, to wait for the geocoding service to respond before raising a :class:`geopy.exc.GeocoderTimedOut` exception. 
""" try: lat, lng = [ x.strip() for x in self._coerce_point_to_string(query).split(',') ] except ValueError: raise ValueError("Must be a coordinate pair or Point") params = { 'lat': lat, 'lng': lng, 'username': self.username } url = "?".join((self.api_reverse, urlencode(params))) logger.debug("%s.reverse: %s", self.__class__.__name__, url) return self._parse_json( self._call_geocoder(url, timeout=timeout), exactly_one ) def _parse_json(self, doc, exactly_one): """ Parse JSON response body. """ places = doc.get('geonames', []) err = doc.get('status', None) if err and 'message' in err: if err['message'].startswith("user account not enabled to use"): raise GeocoderInsufficientPrivileges(err['message']) else: raise GeocoderServiceError(err['message']) if not len(places): return None def parse_code(place): """ Parse each record. """ latitude = place.get('lat', None) longitude = place.get('lng', None) if latitude and longitude: latitude = float(latitude) longitude = float(longitude) else: return None placename = place.get('name') state = place.get('adminCode1', None) country = place.get('countryCode', None) location = ', '.join( [x for x in [placename, state, country] if x] ) return Location(location, (latitude, longitude), place) if exactly_one: return parse_code(places[0]) else: return [parse_code(place) for place in places]
mit
iqas/e2gui
lib/python/Components/Renderer/LcdPicon.py
13
6378
import os, re, unicodedata
from Renderer import Renderer
from enigma import ePixmap, ePicLoad
from Tools.Alternatives import GetWithAlternative
from Tools.Directories import pathExists, SCOPE_ACTIVE_SKIN, resolveFilename
from Components.Harddisk import harddiskmanager
from boxbranding import getBoxType
from ServiceReference import ServiceReference
from Components.SystemInfo import SystemInfo

searchPaths = []
lastLcdPiconPath = None

def initLcdPiconPaths():
    global searchPaths
    searchPaths = []
    for part in harddiskmanager.getMountedPartitions():
        onMountpointAdded(part.mountpoint)
    for mp in ('/usr/share/enigma2/', '/'):
        onMountpointAdded(mp)

def onMountpointAdded(mountpoint):
    global searchPaths
    try:
        if getBoxType() in ('vuultimo', 'et10000', 'mutant2400', 'xpeedlx3', 'quadbox2400', 'sezammarvel', 'atemionemesis', 'mbultra', 'beyonwizt4', 'dm7020hd', 'dm7080') and not SystemInfo["grautec"] or os.path.isdir(mountpoint + 'piconlcd'):
            path = os.path.join(mountpoint, 'piconlcd') + '/'
        else:
            path = os.path.join(mountpoint, 'picon') + '/'
        if os.path.isdir(path) and path not in searchPaths:
            for fn in os.listdir(path):
                if fn.endswith('.png'):
                    print "[LcdPicon] adding path:", path
                    searchPaths.append(path)
                    break
    except Exception, ex:
        print "[LcdPicon] Failed to investigate %s:" % mountpoint, ex

def onMountpointRemoved(mountpoint):
    global searchPaths
    if getBoxType() in ('vuultimo', 'et10000', 'mutant2400', 'xpeedlx3', 'quadbox2400', 'sezammarvel', 'atemionemesis', 'mbultra', 'beyonwizt4', 'dm7020hd', 'dm7080') and not SystemInfo["grautec"] or os.path.isdir(mountpoint + 'piconlcd'):
        path = os.path.join(mountpoint, 'piconlcd') + '/'
    else:
        path = os.path.join(mountpoint, 'picon') + '/'
    try:
        searchPaths.remove(path)
        print "[LcdPicon] removed path:", path
    except:
        pass

def onPartitionChange(why, part):
    if why == 'add':
        onMountpointAdded(part.mountpoint)
    elif why == 'remove':
        onMountpointRemoved(part.mountpoint)

def findLcdPicon(serviceName):
    global lastLcdPiconPath
    if lastLcdPiconPath is not None:
        pngname = lastLcdPiconPath + serviceName + ".png"
        if pathExists(pngname):
            return pngname
        else:
            return ""
    else:
        global searchPaths
        pngname = ""
        for path in searchPaths:
            if pathExists(path) and not path.startswith('/media/net'):
                pngname = path + serviceName + ".png"
                if pathExists(pngname):
                    lastLcdPiconPath = path
                    break
            elif pathExists(path):
                pngname = path + serviceName + ".png"
                if pathExists(pngname):
                    lastLcdPiconPath = path
                    break
        if pathExists(pngname):
            return pngname
        else:
            return ""

def getLcdPiconName(serviceName):
    #remove the path and name fields, and replace ':' by '_'
    sname = '_'.join(GetWithAlternative(serviceName).split(':', 10)[:10])
    pngname = findLcdPicon(sname)
    if not pngname:
        fields = sname.split('_', 3)
        if len(fields) > 2 and fields[2] != '2': #fallback to 1 for tv services with nonstandard servicetypes
            fields[2] = '1'
        if len(fields) > 0 and fields[0] == '4097': #fallback to 1 for IPTV streams
            fields[0] = '1'
        pngname = findLcdPicon('_'.join(fields))
    if not pngname: # picon by channel name
        name = ServiceReference(serviceName).getServiceName()
        name = unicodedata.normalize('NFKD', unicode(name, 'utf_8', errors='ignore')).encode('ASCII', 'ignore')
        name = re.sub('[^a-z0-9]', '', name.replace('&', 'and').replace('+', 'plus').replace('*', 'star').lower())
        if len(name) > 0:
            pngname = findLcdPicon(name)
            if not pngname and len(name) > 2 and name.endswith('hd'):
                pngname = findLcdPicon(name[:-2])
    return pngname

class LcdPicon(Renderer):
    def __init__(self):
        Renderer.__init__(self)
        self.PicLoad = ePicLoad()
        self.PicLoad.PictureData.get().append(self.updatePicon)
        self.piconsize = (0,0)
        self.pngname = ""
        self.lastPath = None
        if getBoxType() in ('vuultimo', 'et10000', 'mutant2400', 'xpeedlx3', 'quadbox2400', 'sezammarvel', 'atemionemesis', 'mbultra', 'beyonwizt4', 'dm7020hd', 'dm7080') and not SystemInfo["grautec"]:
            pngname = findLcdPicon("lcd_picon_default")
        else:
            pngname = findLcdPicon("picon_default")
        self.defaultpngname = None
        if not pngname:
            if getBoxType() in ('vuultimo', 'et10000', 'mutant2400', 'xpeedlx3', 'quadbox2400', 'sezammarvel', 'atemionemesis', 'mbultra', 'beyonwizt4', 'dm7020hd', 'dm7080') and not SystemInfo["grautec"]:
                tmp = resolveFilename(SCOPE_ACTIVE_SKIN, "lcd_picon_default.png")
            else:
                tmp = resolveFilename(SCOPE_ACTIVE_SKIN, "picon_default.png")
            if pathExists(tmp):
                pngname = tmp
            else:
                if getBoxType() in ('vuultimo', 'et10000', 'mutant2400', 'xpeedlx3', 'quadbox2400', 'sezammarvel', 'atemionemesis', 'mbultra', 'beyonwizt4', 'dm7020hd', 'dm7080') and not SystemInfo["grautec"]:
                    pngname = resolveFilename(SCOPE_ACTIVE_SKIN, "lcd_picon_default.png")
                else:
                    pngname = resolveFilename(SCOPE_ACTIVE_SKIN, "picon_default.png")
        if os.path.getsize(pngname):
            self.defaultpngname = pngname

    def addPath(self, value):
        if pathExists(value):
            global searchPaths
            if not value.endswith('/'):
                value += '/'
            if value not in searchPaths:
                searchPaths.append(value)

    def applySkin(self, desktop, parent):
        attribs = self.skinAttributes[:]
        for (attrib, value) in self.skinAttributes:
            if attrib == "path":
                self.addPath(value)
                attribs.remove((attrib,value))
            elif attrib == "size":
                self.piconsize = value
        self.skinAttributes = attribs
        return Renderer.applySkin(self, desktop, parent)

    GUI_WIDGET = ePixmap

    def postWidgetCreate(self, instance):
        self.changed((self.CHANGED_DEFAULT,))

    def updatePicon(self, picInfo=None):
        ptr = self.PicLoad.getData()
        if ptr is not None:
            self.instance.setPixmap(ptr.__deref__())
            self.instance.show()

    def changed(self, what):
        if self.instance:
            pngname = ""
            if what[0] == 1 or what[0] == 3:
                pngname = getLcdPiconName(self.source.text)
                if not pathExists(pngname): # no picon for service found
                    pngname = self.defaultpngname
                if self.pngname != pngname:
                    if pngname:
                        self.PicLoad.setPara((self.piconsize[0], self.piconsize[1], 0, 0, 1, 1, "#FF000000"))
                        self.PicLoad.startDecode(pngname)
                    else:
                        self.instance.hide()
                    self.pngname = pngname

harddiskmanager.on_partition_list_change.append(onPartitionChange)
initLcdPiconPaths()
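The channel-name fallback in getLcdPiconName reduces a display name to a picon file stem. A standalone Python 2 sketch of that normalization, using a hypothetical channel name:

# Python 2 sketch; 'Das Erste HD' is a made-up channel name.
import re, unicodedata

name = 'Das Erste HD'
name = unicodedata.normalize('NFKD', unicode(name, 'utf_8', errors='ignore')).encode('ASCII', 'ignore')
name = re.sub('[^a-z0-9]', '', name.replace('&', 'and').replace('+', 'plus').replace('*', 'star').lower())
print name  # -> 'daserstehd'; the renderer would then look for 'daserstehd.png'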
gpl-2.0
jeffdasilva/stattrack
db/player/strings.py
1
4472
import unittest

class PlayerStrings(object):

    Name = 'name'
    Position = 'position'
    Team = 'team'
    FantasyOwner = 'owner'
    Link = 'link'
    Stats = 'stats'
    GamesPlayed = 'GamesPlayed'
    ProjectedPrefix = 'Projected'

    def __init__(self, prefix=None):
        if prefix is None:
            self.prefix = ""
        else:
            self.prefix = prefix
        self.map = {}
        self.map['gp'] = PlayerStrings.GamesPlayed
        self.map['ggp'] = PlayerStrings.GamesPlayed

    def name(self):
        return PlayerStrings.Name

    def position(self):
        return PlayerStrings.Position

    def team(self):
        return PlayerStrings.Team

    def owner(self):
        return PlayerStrings.FantasyOwner

    def addprefix(self):
        if self.prefix == "":
            return ""
        else:
            return self.prefix + "."

    def link(self, sublink=None):
        if sublink is not None and sublink != "":
            sublink_string = str(sublink) + "."
        else:
            sublink_string = ""
        return self.addprefix() + sublink_string + PlayerStrings.Link

    def stats(self, statstype=None):
        if statstype is not None and statstype != "":
            sublink_string = str(statstype) + "."
        else:
            sublink_string = ""
        return self.addprefix() + sublink_string + PlayerStrings.Stats

    def statString(self, string):
        return self.addprefix() + string

    def projectedString(self, string):
        return self.addprefix() + PlayerStrings.ProjectedPrefix + '.' + string

    def gamesPlayed(self):
        return self.statString(PlayerStrings.GamesPlayed)

    def projectedGamesPlayed(self):
        return self.projectedString(PlayerStrings.GamesPlayed)

    def sanitize(self, stat_name):
        if isinstance(stat_name, list):
            sanitized_list = []
            for stat in stat_name:
                sanitized_list.append(self.sanitize(stat))
            return sanitized_list
        else:
            stat_name = stat_name.lower()
            if stat_name in self.map:
                stat_name = self.map[stat_name]
            return stat_name

class HockeyPlayerStrings(PlayerStrings):

    Goals = "Goals"
    Assists = "Assists"
    Points = "Points"
    Wins = "Wins"
    Ties = "Ties"
    Loses = "Loses"
    Shutouts = "Shutouts"

    ProjectedGoals = PlayerStrings.ProjectedPrefix + Goals
    ProjectedAssists = PlayerStrings.ProjectedPrefix + Assists
    ProjectedWins = PlayerStrings.ProjectedPrefix + Wins
    ProjectedTies = PlayerStrings.ProjectedPrefix + Ties
    ProjectedShutouts = PlayerStrings.ProjectedPrefix + Shutouts

    def __init__(self, prefix=None):
        super(HockeyPlayerStrings, self).__init__(prefix=prefix)
        self.map['g'] = HockeyPlayerStrings.Goals
        self.map['a'] = HockeyPlayerStrings.Assists
        self.map['pts'] = HockeyPlayerStrings.Points
        self.map['w'] = HockeyPlayerStrings.Wins
        self.map['so'] = HockeyPlayerStrings.Shutouts

    def goals(self):
        return self.statString(HockeyPlayerStrings.Goals)

    def projectedGoals(self):
        return self.projectedString(HockeyPlayerStrings.Goals)

    def assists(self):
        return self.statString(HockeyPlayerStrings.Assists)

    def projectedAssists(self):
        return self.projectedString(HockeyPlayerStrings.Assists)

    def points(self):
        return self.statString(HockeyPlayerStrings.Points)

    def projectedPoints(self):
        return self.projectedString(HockeyPlayerStrings.Points)

    def wins(self):
        return self.statString(HockeyPlayerStrings.Wins)

    def projectedWins(self):
        return self.projectedString(HockeyPlayerStrings.Wins)

    def ties(self):
        return self.statString(HockeyPlayerStrings.Ties)

    def projectedTies(self):
        return self.projectedString(HockeyPlayerStrings.Ties)

    def shutouts(self):
        return self.statString(HockeyPlayerStrings.Shutouts)

    def projectedShutouts(self):
        return self.projectedString(HockeyPlayerStrings.Shutouts)

class TestHockeyPlayerStrings(unittest.TestCase):

    def testHockeyPlayerStrings(self):
        s = HockeyPlayerStrings("foo")
        self.assertEqual(s.gamesPlayed(),'foo.GamesPlayed')
        self.assertEqual(s.projectedGamesPlayed(),'foo.Projected.GamesPlayed')
        self.assertEqual(s.wins(),'foo.Wins')
        self.assertEqual(s.projectedWins(),'foo.Projected.Wins')
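A quick interactive sketch of the classes above, mirroring the unit test; the 'nhl' prefix is an arbitrary example value.

s = HockeyPlayerStrings('nhl')
print(s.goals())                     # -> 'nhl.Goals'
print(s.projectedGoals())            # -> 'nhl.Projected.Goals'
print(s.sanitize(['G', 'A', 'gp']))  # -> ['Goals', 'Assists', 'GamesPlayed']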
gpl-2.0
skycucumber/Messaging-Gateway
webapp/venv/lib/python2.7/site-packages/twisted/conch/ssh/session.py
59
10817
# -*- test-case-name: twisted.conch.test.test_session -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.

"""
This module contains the implementation of SSHSession, which (by default)
allows access to a shell and a python interpreter over SSH.

Maintainer: Paul Swartz
"""

import struct
import signal
import sys
import os
from zope.interface import implements

from twisted.internet import interfaces, protocol
from twisted.python import log
from twisted.conch.interfaces import ISession
from twisted.conch.ssh import common, channel

class SSHSession(channel.SSHChannel):

    name = 'session'

    def __init__(self, *args, **kw):
        channel.SSHChannel.__init__(self, *args, **kw)
        self.buf = ''
        self.client = None
        self.session = None

    def request_subsystem(self, data):
        subsystem, ignored = common.getNS(data)
        log.msg('asking for subsystem "%s"' % subsystem)
        client = self.avatar.lookupSubsystem(subsystem, data)
        if client:
            pp = SSHSessionProcessProtocol(self)
            proto = wrapProcessProtocol(pp)
            client.makeConnection(proto)
            pp.makeConnection(wrapProtocol(client))
            self.client = pp
            return 1
        else:
            log.msg('failed to get subsystem')
            return 0

    def request_shell(self, data):
        log.msg('getting shell')
        if not self.session:
            self.session = ISession(self.avatar)
        try:
            pp = SSHSessionProcessProtocol(self)
            self.session.openShell(pp)
        except:
            log.deferr()
            return 0
        else:
            self.client = pp
            return 1

    def request_exec(self, data):
        if not self.session:
            self.session = ISession(self.avatar)
        f, data = common.getNS(data)
        log.msg('executing command "%s"' % f)
        try:
            pp = SSHSessionProcessProtocol(self)
            self.session.execCommand(pp, f)
        except:
            log.deferr()
            return 0
        else:
            self.client = pp
            return 1

    def request_pty_req(self, data):
        if not self.session:
            self.session = ISession(self.avatar)
        term, windowSize, modes = parseRequest_pty_req(data)
        log.msg('pty request: %s %s' % (term, windowSize))
        try:
            self.session.getPty(term, windowSize, modes)
        except:
            log.err()
            return 0
        else:
            return 1

    def request_window_change(self, data):
        if not self.session:
            self.session = ISession(self.avatar)
        winSize = parseRequest_window_change(data)
        try:
            self.session.windowChanged(winSize)
        except:
            log.msg('error changing window size')
            log.err()
            return 0
        else:
            return 1

    def dataReceived(self, data):
        if not self.client:
            #self.conn.sendClose(self)
            self.buf += data
            return
        self.client.transport.write(data)

    def extReceived(self, dataType, data):
        if dataType == connection.EXTENDED_DATA_STDERR:
            if self.client and hasattr(self.client.transport, 'writeErr'):
                self.client.transport.writeErr(data)
        else:
            log.msg('weird extended data: %s' % dataType)

    def eofReceived(self):
        if self.session:
            self.session.eofReceived()
        elif self.client:
            self.conn.sendClose(self)

    def closed(self):
        if self.session:
            self.session.closed()
        elif self.client:
            self.client.transport.loseConnection()

    #def closeReceived(self):
    #    self.loseConnection() # don't know what to do with this

    def loseConnection(self):
        if self.client:
            self.client.transport.loseConnection()
        channel.SSHChannel.loseConnection(self)

class _ProtocolWrapper(protocol.ProcessProtocol):
    """
    This class wraps a L{Protocol} instance in a L{ProcessProtocol} instance.
    """
    def __init__(self, proto):
        self.proto = proto

    def connectionMade(self):
        self.proto.connectionMade()

    def outReceived(self, data):
        self.proto.dataReceived(data)

    def processEnded(self, reason):
        self.proto.connectionLost(reason)

class _DummyTransport:

    def __init__(self, proto):
        self.proto = proto

    def dataReceived(self, data):
        self.proto.transport.write(data)

    def write(self, data):
        self.proto.dataReceived(data)

    def writeSequence(self, seq):
        self.write(''.join(seq))

    def loseConnection(self):
        self.proto.connectionLost(protocol.connectionDone)

def wrapProcessProtocol(inst):
    if isinstance(inst, protocol.Protocol):
        return _ProtocolWrapper(inst)
    else:
        return inst

def wrapProtocol(proto):
    return _DummyTransport(proto)

# SUPPORTED_SIGNALS is a list of signals that every session channel is supposed
# to accept. See RFC 4254
SUPPORTED_SIGNALS = ["ABRT", "ALRM", "FPE", "HUP", "ILL", "INT", "KILL",
                     "PIPE", "QUIT", "SEGV", "TERM", "USR1", "USR2"]

class SSHSessionProcessProtocol(protocol.ProcessProtocol):
    """I am both an L{IProcessProtocol} and an L{ITransport}.

    I am a transport to the remote endpoint and a process protocol to the
    local subsystem.
    """

    implements(interfaces.ITransport)

    # once initialized, a dictionary mapping signal values to strings
    # that follow RFC 4254.
    _signalValuesToNames = None

    def __init__(self, session):
        self.session = session
        self.lostOutOrErrFlag = False

    def connectionMade(self):
        if self.session.buf:
            self.transport.write(self.session.buf)
            self.session.buf = None

    def outReceived(self, data):
        self.session.write(data)

    def errReceived(self, err):
        self.session.writeExtended(connection.EXTENDED_DATA_STDERR, err)

    def outConnectionLost(self):
        """
        EOF should only be sent when both STDOUT and STDERR have been closed.
        """
        if self.lostOutOrErrFlag:
            self.session.conn.sendEOF(self.session)
        else:
            self.lostOutOrErrFlag = True

    def errConnectionLost(self):
        """
        See outConnectionLost().
        """
        self.outConnectionLost()

    def connectionLost(self, reason=None):
        self.session.loseConnection()

    def _getSignalName(self, signum):
        """
        Get a signal name given a signal number.
        """
        if self._signalValuesToNames is None:
            self._signalValuesToNames = {}
            # make sure that the POSIX ones are the defaults
            for signame in SUPPORTED_SIGNALS:
                signame = 'SIG' + signame
                sigvalue = getattr(signal, signame, None)
                if sigvalue is not None:
                    self._signalValuesToNames[sigvalue] = signame
            for k, v in signal.__dict__.items():
                # Check for platform specific signals, ignoring Python specific
                # SIG_DFL and SIG_IGN
                if k.startswith('SIG') and not k.startswith('SIG_'):
                    if v not in self._signalValuesToNames:
                        self._signalValuesToNames[v] = k + '@' + sys.platform
        return self._signalValuesToNames[signum]

    def processEnded(self, reason=None):
        """
        When we are told the process ended, try to notify the other side about
        how the process ended using the exit-signal or exit-status requests.
        Also, close the channel.
        """
        if reason is not None:
            err = reason.value
            if err.signal is not None:
                signame = self._getSignalName(err.signal)
                if (getattr(os, 'WCOREDUMP', None) is not None and
                    os.WCOREDUMP(err.status)):
                    log.msg('exitSignal: %s (core dumped)' % (signame,))
                    coreDumped = 1
                else:
                    log.msg('exitSignal: %s' % (signame,))
                    coreDumped = 0
                self.session.conn.sendRequest(self.session, 'exit-signal',
                        common.NS(signame[3:]) + chr(coreDumped) +
                        common.NS('') + common.NS(''))
            elif err.exitCode is not None:
                log.msg('exitCode: %r' % (err.exitCode,))
                self.session.conn.sendRequest(self.session, 'exit-status',
                        struct.pack('>L', err.exitCode))
        self.session.loseConnection()

    def getHost(self):
        """
        Return the host from my session's transport.
        """
        return self.session.conn.transport.getHost()

    def getPeer(self):
        """
        Return the peer from my session's transport.
        """
        return self.session.conn.transport.getPeer()

    def write(self, data):
        self.session.write(data)

    def writeSequence(self, seq):
        self.session.write(''.join(seq))

    def loseConnection(self):
        self.session.loseConnection()

class SSHSessionClient(protocol.Protocol):

    def dataReceived(self, data):
        if self.transport:
            self.transport.write(data)

# methods factored out to make life easier on server writers
def parseRequest_pty_req(data):
    """Parse the data from a pty-req request into usable data.

    @returns: a tuple of (terminal type, (rows, cols, xpixel, ypixel), modes)
    """
    term, rest = common.getNS(data)
    cols, rows, xpixel, ypixel = struct.unpack('>4L', rest[: 16])
    modes, ignored = common.getNS(rest[16:])
    winSize = (rows, cols, xpixel, ypixel)
    modes = [(ord(modes[i]), struct.unpack('>L', modes[i+1: i+5])[0])
             for i in range(0, len(modes)-1, 5)]
    return term, winSize, modes

def packRequest_pty_req(term, (rows, cols, xpixel, ypixel), modes):
    """Pack a pty-req request so that it is suitable for sending.

    NOTE: modes must be packed before being sent here.
    """
    termPacked = common.NS(term)
    winSizePacked = struct.pack('>4L', cols, rows, xpixel, ypixel)
    modesPacked = common.NS(modes) # depend on the client packing modes
    return termPacked + winSizePacked + modesPacked

def parseRequest_window_change(data):
    """Parse the data from a window-change request into usable data.

    @returns: a tuple of (rows, cols, xpixel, ypixel)
    """
    cols, rows, xpixel, ypixel = struct.unpack('>4L', data)
    return rows, cols, xpixel, ypixel

def packRequest_window_change((rows, cols, xpixel, ypixel)):
    """Pack a window-change request so that it is suitable for sending.
    """
    return struct.pack('>4L', cols, rows, xpixel, ypixel)

import connection
gpl-2.0
wasade/qiime
qiime/denoiser/cluster_utils.py
1
9533
#!/usr/bin/env python

"""Some utility functions for operating on a cluster or MP machine."""

__author__ = "Jens Reeder"
__copyright__ = "Copyright 2011, The QIIME Project"
# remember to add yourself if you make changes
__credits__ = ["Jens Reeder", "Rob Knight", "Nigel Cook", "Jai Ram Rideout"]
__license__ = "GPL"
__version__ = "1.8.0-dev"
__maintainer__ = "Jens Reeder"
__email__ = "[email protected]"

from os import remove, system
from string import join, lowercase
from os.path import exists, join
from time import sleep, time
from random import sample
from asynchat import async_chat
from socket import socket, AF_INET, SOCK_STREAM, gethostname, error

from burrito.util import ApplicationNotFoundError
from burrito.util import which
from qiime.util import load_qiime_config, get_qiime_temp_dir


def submit_jobs(commands, prefix):
    """submit jobs using exe pointed to by cluster_jobs_fp.

    commands: List of commands (strings) that should be executed

    prefix: A unique prefix used to name submit script
    """
    qiime_config = load_qiime_config()
    CLUSTER_JOBS_SCRIPT = qiime_config['cluster_jobs_fp']

    if not CLUSTER_JOBS_SCRIPT:
        raise ApplicationNotFoundError(
            "cluster_jobs_fp not set in config file!")
    if not (exists(CLUSTER_JOBS_SCRIPT) or which(CLUSTER_JOBS_SCRIPT)):
        raise ApplicationNotFoundError(
            "cluster_jobs_fp not in $PATH or provided as full path!")

    outfilename = join(get_qiime_temp_dir(), "%s_commands.txt" % prefix)
    fh = open(outfilename, "w")
    fh.write("\n".join(commands))
    fh.close()
    cmd = '%s -ms %s %s' % (CLUSTER_JOBS_SCRIPT, outfilename, prefix)
    system(cmd)
    remove(outfilename)


def setup_workers(num_cpus, outdir, server_socket,
                  verbose=True, error_profile=None):
    """Start workers waiting for data.

    num_cpus: number of cores

    outdir: directory where the workers will work in

    server_socket: an open socket to the server

    verbose: verbose flag passed to the workers

    error_profile: filepath to the error profiles, passed to workers
    """
    DENOISE_WORKER = "denoiser_worker.py"

    workers = []
    client_sockets = []
    # somewhat unique id for cluster job
    tmpname = "".join(sample(list(lowercase), 8))

    host, port = server_socket.getsockname()

    # TODO: this should be set to a defined wait time using alarm()
    for i in range(num_cpus):
        name = outdir + ("/%sworker%d" % (tmpname, i))
        workers.append(name)
        cmd = "%s -f %s -s %s -p %s" % (DENOISE_WORKER, name, host, port)

        if verbose:
            cmd += " -v"
        if error_profile:
            cmd += " -e %s" % error_profile

        submit_jobs([cmd], tmpname)
        # wait until the client connects
        # This might be a race condition -> make the client robust
        client_socket, client_address = server_socket.accept()
        client_sockets.append((client_socket, client_address))

    return workers, client_sockets


def adjust_workers(num_flows, num_cpus, worker_sockets, log_fh=None):
    """Stop workers no longer needed.

    num_flows: number of flowgrams

    num_cpus: number of CPUs currently used

    worker_sockets: list of connected sockets

    log_fh: open fh to log file

    Returns new number of CPUs
    """
    qiime_config = load_qiime_config()
    min_per_core = int(qiime_config['denoiser_min_per_core'])
    if(num_flows < (num_cpus - 1) * min_per_core):
        if log_fh:
            log_fh.write("Adjusting number of workers:\n")
            log_fh.write("flows: %d cpus:%d\n" % (num_flows, num_cpus))
        # TODO: make sure this works with __future__ division
        per_core = max(min_per_core, (num_flows / num_cpus) + 1)
        for i in range(num_cpus):
            if(i * per_core > num_flows):
                worker_sock = worker_sockets.pop()
                worker_sock.close()
                num_cpus = num_cpus - 1
                if log_fh:
                    log_fh.write("released worker %d\n" % i)
        if log_fh:
            log_fh.write("New number of cpus:%d\n" % num_cpus)

    if (num_cpus == 0 or num_cpus != len(worker_sockets)):
        raise ValueError("Adjust_workers screwed up!")

    return num_cpus


def stop_workers(worker_sockets, log_fh=None):
    """Stop all workers.

    worker_sockets: list of connected sockets

    log_fh: open fh to log file
    """
    for i, worker in enumerate(worker_sockets):
        try:
            worker.send("Server shutting down all clients")
        except error:
            # socket already closed, client dead
            if log_fh:
                log_fh.write(
                    "Worker %s seems to be dead already. Check for runaways!\n" % i)
        worker.close()


def check_workers(workers, worker_sockets, log_fh=None):
    """Check if all workers are still alive. Exit otherwise.

    workers: list of worker names

    worker_sockets: list of connected sockets

    log_fh: open fh to log file
    """
    # Do a dummy send and see if it fails
    for worker, sock in zip(workers, worker_sockets):
        try:
            sock.send("")
        except error:
            if log_fh:
                log_fh.write(
                    "FATAL ERROR\nWorker %s not alive. Aborting\n" % worker)
            stop_workers(worker_sockets, log_fh)
            return False

    return True


def setup_server(port=0, verbose=False):
    """Open a port on the server for workers to connect to.

    port: the port number to use, 0 means let OS decide

    verbose: a verbose flag
    """
    host = gethostname()
    sock = socket(AF_INET, SOCK_STREAM)
    try:
        sock.bind((host, port))
    except error as msg:
        raise error("Could not open Socket on server: " + str(msg))
    sock.listen(5)  # max num of queued connections usually [1..5]
    if verbose:
        print "Server listening on %s" % str(sock.getsockname())
    return sock


def setup_cluster(num_cpus, outdir, verbose, error_profile):
    """Setup server and clients"""
    server_socket = setup_server()
    workers, client_socks_and_adrs = setup_workers(
        num_cpus, outdir, server_socket, verbose=verbose,
        error_profile=error_profile)
    # we don't need the client addresses anywhere, so get rid of them
    client_sockets = [sock for sock, addr in client_socks_and_adrs]
    return client_sockets, workers, server_socket


def save_send(socket, data):
    """send data to a socket.

    socket: a connected socket object

    data: string to send over the socket
    """
    # We have no control over how much data the client accepts,
    # thus we send in chunks until done
    while len(data) > 0:
        try:
            send_data_size = socket.send(data)
            # remove sent portion from data
            data = data[send_data_size:]
        except error as msg:
            # most likely socket busy, buffer full or not yet ready
            sleep(0.01)


def send_flowgram_to_socket(identifier, flowgram, socket, trim=False):
    """send one flowgram over a socket.

    id: identifier of this flowgram

    flowgram: the flowgram itself

    socket: socket to write to

    trim: Boolean flag for quality trimming flowgrams
    """
    if trim:
        flowgram = flowgram.getQualityTrimmedFlowgram()

    # store space separated string representation of flowgram
    # storing this is much quicker than re-generating it every time we send it
    if (not hasattr(flowgram, "spaced_flowgram")):
        spaced_flowgram_seq = " ".join(map(str, flowgram.flowgram))
        flowgram.spaced_flowgram = spaced_flowgram_seq
    else:
        spaced_flowgram_seq = flowgram.spaced_flowgram

    data = "%s %d %s\n" % (identifier, len(flowgram), spaced_flowgram_seq)
    save_send(socket, data)


class ClientHandler(async_chat):
    """A convenience wrapper around a socket to collect incoming data"""
    # This handler is called from the main routine with an open socket.
    # It waits for the client to return its data on the socket and stores it
    # in a global variable result_array. Afterwards the handler is deleted
    # by removing it from the global asyncore map

    # Note: the incoming socket is expected to be connected upon initialization
    # and remains connected after this handler is destroyed

    def __init__(self, sock, worker_number, result_array, timing):
        async_chat.__init__(self, sock)
        self.in_buffer = []
        self.set_terminator("--END--")
        self.number = worker_number
        self.results = result_array
        self.timing = timing

    def collect_incoming_data(self, data):
        """Buffer the data"""
        # Note data might come in chunks of arbitrary size
        self.in_buffer.append(data)

    def found_terminator(self):
        """Action performed when the terminator is found."""
        # Note this function is event-triggered

        # Data on sockets comes in chunks of strings. Cat first, then split on \n
        data = "".join(self.in_buffer)
        self.results[self.number] = [map(float, (s.split()))
                                     for s in data.split("\n") if s != ""]
        self.in_buffer = []
        self.timing[self.number] = time()
        # delete this channel from the global map, but don't close the socket
        # as we will use it again in the next round.
        # Once global map is empty, asynchronous loop in server will finish
        self.del_channel()
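save_send's retry loop is the key detail: send() may deliver only part of the buffer, so the unsent remainder must be re-offered. A self-contained Python 2 sketch of the same pattern, using an in-process socket pair as a hypothetical stand-in for a worker connection (socketpair is POSIX-only):

import socket

a, b = socket.socketpair()     # hypothetical stand-in for a worker socket
payload = "3.14 2.71\n" * 100
while len(payload) > 0:
    sent = a.send(payload)     # may accept fewer bytes than offered
    payload = payload[sent:]   # keep sending the unsent remainder
print b.recv(4096)[:20]        # the receiving side reads the stream back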
gpl-2.0
sterlingbaldwin/acme_workbench
workbench-backend/workbench/urls.py
1
1920
"""workbench URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url from django.contrib import admin from index import views as index_views from file_manager import views as file_manager_views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^$', index_views.index, name='index'), url(r'^login/?$', index_views.user_login, name='login'), url(r'^logout/?$', index_views.user_logout, name='logout'), url(r'^register/?$', index_views.user_register, name='register'), url(r'^workbench/?$', index_views.workbench, name='workbench'), url(r'^get_user_list/?$', index_views.get_user_list, name='getUserList'), url(r'^file_manager/get_data_set_list/?$', file_manager_views.get_data_set_list, name='getDataSetList'), url(r'^file_manager/get_data_set/?$', file_manager_views.get_data_set, name='getDataSet'), url(r'^file_manager/get_file_info/?$', file_manager_views.get_file_info, name='getFileInfo'), url(r'^file_manager/upload_dataset/(?P<dataset_name>.+)$', file_manager_views.upload_dataset, name='uploadDataSet'), url(r'^file_manager/delete_dataset/(?P<dataset_name>.+)$', file_manager_views.delete_dataset, name='deleteDataSet'), url(r'^file_manager/change_file_permissions/?$', file_manager_views.change_file_permissions, name='changeFilePermissions') ]
bsd-2-clause
radicalbit/ambari
ambari-agent/src/test/python/ambari_agent/TestAlerts.py
2
61637
#!/usr/bin/env python ''' Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import os import socket import sys import urllib2 import tempfile import random from alerts.ams_alert import AmsAlert from ambari_agent.AlertSchedulerHandler import AlertSchedulerHandler from ambari_agent.RecoveryManager import RecoveryManager from ambari_agent.alerts.collector import AlertCollector from ambari_agent.alerts.base_alert import BaseAlert from ambari_agent.alerts.metric_alert import MetricAlert from ambari_agent.alerts.port_alert import PortAlert from ambari_agent.alerts.script_alert import ScriptAlert from ambari_agent.alerts.web_alert import WebAlert from ambari_agent.alerts.recovery_alert import RecoveryAlert from ambari_agent.apscheduler.scheduler import Scheduler from ambari_agent.ClusterConfiguration import ClusterConfiguration from ambari_commons.urllib_handlers import RefreshHeaderProcessor from collections import namedtuple from mock.mock import MagicMock, patch from unittest import TestCase from AmbariConfig import AmbariConfig class TestAlerts(TestCase): def setUp(self): # save original open() method for later use self.original_open = open self.original_osfdopen = os.fdopen self.config = AmbariConfig() def tearDown(self): sys.stdout == sys.__stdout__ @patch.object(Scheduler, "add_interval_job") @patch.object(Scheduler, "start") def test_start(self, aps_add_interval_job_mock, aps_start_mock): test_file_path = os.path.join('ambari_agent', 'dummy_files') test_stack_path = os.path.join('ambari_agent', 'dummy_files') test_common_services_path = os.path.join('ambari_agent', 'dummy_files') test_extensions_path = os.path.join('ambari_agent', 'dummy_files') test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files') cluster_configuration = self.__get_cluster_configuration() ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_extensions_path, test_host_scripts_path, cluster_configuration, self.config, None) ash.start() self.assertTrue(aps_add_interval_job_mock.called) self.assertTrue(aps_start_mock.called) @patch('time.time') @patch.object(socket.socket,"connect") def test_port_alert(self, socket_connect_mock, time_mock): definition_json = self._get_port_alert_definition() configuration = { 'hdfs-site' : { 'my-key': 'value1' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) # called 3x with 3 calls per alert # - 900ms and then a time.time() for the date from base_alert # - 2000ms and then a time.time() for the date from base_alert # - socket.timeout to simulate a timeout and then a time.time() for the date from base_alert time_mock.side_effect = [0,900,336283000000, 0,2000,336283100000, socket.timeout,336283200000] alert = PortAlert(definition_json, 
definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(6, alert.interval()) # 900ms is OK alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) # 2000ms is WARNING alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) # throws a socket.timeout exception, causes a CRITICAL alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('CRITICAL', alerts[0]['state']) @patch.object(RecoveryManager, "is_action_info_stale") @patch.object(RecoveryManager, "get_actions_copy") def test_recovery_alert(self, rm_get_actions_mock, is_stale_mock): definition_json = self._get_recovery_alert_definition() is_stale_mock.return_value = False rm_get_actions_mock.return_value = { "METRICS_COLLECTOR": { "count": 0, "lastAttempt": 1447860184, "warnedLastReset": False, "lastReset": 1447860184, "warnedThresholdReached": False, "lifetimeCount": 1, "warnedLastAttempt": False } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, {}) rm = RecoveryManager(tempfile.mktemp(), True) alert = RecoveryAlert(definition_json, definition_json['source'], self.config, rm) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(1, alert.interval()) # OK - "count": 0 alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) # WARN - "count": 1 rm_get_actions_mock.return_value = { "METRICS_COLLECTOR": { "count": 1, "lastAttempt": 1447860184, "warnedLastReset": False, "lastReset": 1447860184, "warnedThresholdReached": False, "lifetimeCount": 1, "warnedLastAttempt": False } } alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) # CRIT - "count": 5 rm_get_actions_mock.return_value = { "METRICS_COLLECTOR": { "count": 5, "lastAttempt": 1447860184, "warnedLastReset": False, "lastReset": 1447860184, "warnedThresholdReached": False, "lifetimeCount": 1, "warnedLastAttempt": False } } alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('CRITICAL', alerts[0]['state']) # OK again, after recovery manager window expired is_stale_mock.return_value = True alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) # CRIT, after recovery manager window expired, # but max_lifetime_count reached, warnedThresholdReached == True rm_get_actions_mock.return_value = { "METRICS_COLLECTOR": { "count": 5, "lastAttempt": 1447860184, "warnedLastReset": False, "lastReset": 1447860184, "warnedThresholdReached": True, "lifetimeCount": 12, "warnedLastAttempt": False } } is_stale_mock.return_value = True alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('CRITICAL', alerts[0]['state']) @patch.object(socket.socket,"connect") def test_port_alert_complex_uri(self, socket_connect_mock): definition_json = self._get_port_alert_definition() configuration = {'hdfs-site' : { 'my-key': 
'c6401.ambari.apache.org:2181,c6402.ambari.apache.org:2181,c6403.ambari.apache.org:2181'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = PortAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6402.ambari.apache.org") # use a URI that has commas to verify that we properly parse it alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(6, alert.interval()) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertTrue('(Unit Tests)' in alerts[0]['text']) self.assertTrue('response time on port 2181' in alerts[0]['text']) def test_port_alert_no_sub(self): definition_json = { "name": "namenode_process", "service": "HDFS", "component": "NAMENODE", "label": "NameNode process", "interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "PORT", "uri": "http://c6401.ambari.apache.org", "default_port": 50070, "reporting": { "ok": { "text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}" }, "critical": { "text": "(Unit Tests) Could not load process info: {0}" } } } } cluster_configuration = self.__get_cluster_configuration() alert = PortAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(AlertCollector(), cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals('http://c6401.ambari.apache.org', alert.uri) alert.collect() def test_script_alert(self): definition_json = self._get_script_alert_definition() # normally set by AlertSchedulerHandler definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files') definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services') definition_json['source']['extensions_directory'] = os.path.join('ambari_agent', 'extensions') definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts') configuration = {'foo-site' : { 'bar': 'rendered-bar', 'baz' : 'rendered-baz' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = ScriptAlert(definition_json, definition_json['source'], MagicMock()) alert.set_helpers(collector, cluster_configuration ) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(definition_json['source']['path'], alert.path) self.assertEquals(definition_json['source']['stacks_directory'], alert.stacks_dir) self.assertEquals(definition_json['source']['extensions_directory'], alert.extensions_dir) self.assertEquals(definition_json['source']['common_services_directory'], alert.common_services_dir) self.assertEquals(definition_json['source']['host_scripts_directory'], alert.host_scripts_dir) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) self.assertEquals('bar is rendered-bar, baz is rendered-baz', alerts[0]['text']) def test_script_alert_with_parameters(self): definition_json = self._get_script_alert_definition_with_parameters() # normally set by AlertSchedulerHandler definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 
'dummy_files') definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services') definition_json['source']['extensions_directory'] = os.path.join('ambari_agent', 'extensions') definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts') configuration = {'foo-site' : { 'bar': 'rendered-bar', 'baz' : 'rendered-baz' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = ScriptAlert(definition_json, definition_json['source'], MagicMock()) alert.set_helpers(collector, cluster_configuration ) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(definition_json['source']['path'], alert.path) self.assertEquals(definition_json['source']['stacks_directory'], alert.stacks_dir) self.assertEquals(definition_json['source']['common_services_directory'], alert.common_services_dir) self.assertEquals(definition_json['source']['extensions_directory'], alert.extensions_dir) self.assertEquals(definition_json['source']['host_scripts_directory'], alert.host_scripts_dir) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertEquals('Script parameter detected: foo bar baz', alerts[0]['text']) @patch.object(MetricAlert, "_load_jmx") def test_metric_alert(self, ma_load_jmx_mock): definition_json = self._get_metric_alert_definition() configuration = {'hdfs-site' : { 'dfs.datanode.http.address': 'c6401.ambari.apache.org:80'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") # trip an OK ma_load_jmx_mock.return_value = ([1, 25], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertEquals('(Unit Tests) OK: 1 25 125', alerts[0]['text']) # trip a warning ma_load_jmx_mock.return_value = ([1, 75], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) self.assertEquals('(Unit Tests) Warning: 1 75 175', alerts[0]['text']) # trip a critical now ma_load_jmx_mock.return_value = ([1, 150], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('CRITICAL', alerts[0]['state']) self.assertEquals('(Unit Tests) Critical: 1 150 250', alerts[0]['text']) del definition_json['source']['jmx']['value'] collector = AlertCollector() alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") # now try without any jmx value to compare to ma_load_jmx_mock.return_value = ([1, 25], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertEquals('(Unit Tests) OK: 1 25 None', alerts[0]['text']) @patch.object(AmsAlert, "_load_metric") def test_ams_alert(self, ma_load_metric_mock): definition_json = self._get_ams_alert_definition() configuration = {'ams-site': {'timeline.metrics.service.webapp.address': 
'0.0.0.0:6188'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = AmsAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") # trip an OK ma_load_metric_mock.return_value = ([{1:100,2:100,3:200,4:200}], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertEquals('(Unit Tests) OK: the mean used heap size is 150 MB.', alerts[0]['text']) # trip a warning ma_load_metric_mock.return_value = ([{1:800,2:800,3:900,4:900}], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) self.assertEquals('(Unit Tests) Warning: the mean used heap size is 850 MB.', alerts[0]['text']) # trip a critical now ma_load_metric_mock.return_value = ([{1:1000,2:1000,3:2000,4:2000}], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('CRITICAL', alerts[0]['state']) self.assertEquals('(Unit Tests) Critical: the mean used heap size is 1500 MB.', alerts[0]['text']) @patch.object(MetricAlert, "_load_jmx") def test_alert_uri_structure(self, ma_load_jmx_mock): definition_json = self._get_metric_alert_definition() ma_load_jmx_mock.return_value = ([0,0], None) # run the alert without specifying any keys; an exception should be thrown # indicating that there was no URI and the result is UNKNOWN collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() self.assertEquals('UNKNOWN', collector.alerts()[0]['state']) # set properties that make no sense wihtout the main URI properties configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTP_ONLY'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() self.assertEquals('UNKNOWN', collector.alerts()[0]['state']) # set an actual property key (http) configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTP_ONLY', 'dfs.datanode.http.address' : 'c6401.ambari.apache.org:80' } } self.__update_cluster_configuration(cluster_configuration, configuration) collector = AlertCollector() alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() self.assertEquals('OK', collector.alerts()[0]['state']) # set an actual property key (https) configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTP_ONLY', 'dfs.datanode.https.address' : 'c6401.ambari.apache.org:443' } } self.__update_cluster_configuration(cluster_configuration, configuration) collector = AlertCollector() alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() 
self.assertEquals('OK', collector.alerts()[0]['state']) # set both (http and https) configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTP_ONLY', 'dfs.datanode.http.address' : 'c6401.ambari.apache.org:80', 'dfs.datanode.https.address' : 'c6401.ambari.apache.org:443' } } self.__update_cluster_configuration(cluster_configuration, configuration) collector = AlertCollector() alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() self.assertEquals('OK', collector.alerts()[0]['state']) @patch.object(WebAlert, "_make_web_request") def test_web_alert(self, wa_make_web_request_mock): definition_json = self._get_web_alert_definition() WebResponse = namedtuple('WebResponse', 'status_code time_millis error_msg') wa_make_web_request_mock.return_value = WebResponse(200,1.234,None) # run the alert and check HTTP 200 configuration = {'hdfs-site' : { 'dfs.datanode.http.address' : 'c6401.ambari.apache.org:80' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = WebAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('(Unit Tests) ok: 200', alerts[0]['text']) self.assertEquals('OK', alerts[0]['state']) # run the alert and check HTTP 500 wa_make_web_request_mock.return_value = WebResponse(500,1.234,"Internal Server Error") collector = AlertCollector() alert = WebAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) self.assertEquals('(Unit Tests) warning: 500 (Internal Server Error)', alerts[0]['text']) # run the alert and check critical wa_make_web_request_mock.return_value = WebResponse(0,0,'error message') collector = AlertCollector() alert = WebAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) # http assertion indicating that we properly determined non-SSL self.assertEquals('CRITICAL', alerts[0]['state']) self.assertEquals('(Unit Tests) critical: http://c6401.ambari.apache.org:80. error message', alerts[0]['text']) configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTPS_ONLY', 'dfs.datanode.http.address' : 'c6401.ambari.apache.org:80', 'dfs.datanode.https.address' : 'c6401.ambari.apache.org:443/test/path' } } self.__update_cluster_configuration(cluster_configuration, configuration) collector = AlertCollector() alert = WebAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) # SSL assertion self.assertEquals('CRITICAL', alerts[0]['state']) self.assertEquals('(Unit Tests) critical: https://c6401.ambari.apache.org:443/test/path. 
error message', alerts[0]['text']) # test custom codes code = random.choice((600, 700, 800)) wa_make_web_request_mock.return_value = WebResponse(code, 1.234 , "Custom Code") collector = AlertCollector() alert = WebAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertEquals('(Unit Tests) ok: {code}'.format(code=code), alerts[0]['text']) def test_reschedule(self): test_file_path = os.path.join('ambari_agent', 'dummy_files') test_stack_path = os.path.join('ambari_agent', 'dummy_files') test_common_services_path = os.path.join('ambari_agent', 'dummy_files') test_extensions_path = os.path.join('ambari_agent', 'dummy_files') test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files') cluster_configuration = self.__get_cluster_configuration() ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_extensions_path, test_host_scripts_path, cluster_configuration, self.config, None) ash.start() self.assertEquals(1, ash.get_job_count()) ash.reschedule() self.assertEquals(1, ash.get_job_count()) def test_alert_collector_purge(self): definition_json = self._get_port_alert_definition() configuration = {'hdfs-site' : { 'my-key': 'value1' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = PortAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(6, alert.interval()) res = alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertTrue(alerts[0] is not None) self.assertEquals('CRITICAL', alerts[0]['state']) collector.remove_by_uuid('c1f73191-4481-4435-8dae-fd380e4c0be1') self.assertEquals(0,len(collector.alerts())) def test_disabled_definitions(self): test_file_path = os.path.join('ambari_agent', 'dummy_files') test_stack_path = os.path.join('ambari_agent', 'dummy_files') test_common_services_path = os.path.join('ambari_agent', 'dummy_files') test_extensions_path = os.path.join('ambari_agent', 'dummy_files') test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files') cluster_configuration = self.__get_cluster_configuration() ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_extensions_path, test_host_scripts_path, cluster_configuration, self.config, None) ash.start() self.assertEquals(1, ash.get_job_count()) definition_json = self._get_port_alert_definition() alert = PortAlert(definition_json, definition_json['source'], self.config) ash.schedule_definition(alert) self.assertEquals(2, ash.get_job_count()) definition_json['enabled'] = False alert = PortAlert(definition_json, definition_json['source'], self.config) ash.schedule_definition(alert) # verify disabled alert not scheduled self.assertEquals(2, ash.get_job_count()) definition_json['enabled'] = True pa = PortAlert(definition_json, definition_json['source'], self.config) ash.schedule_definition(pa) # verify enabled alert was scheduled self.assertEquals(3, ash.get_job_count()) def test_immediate_alert(self): test_file_path = os.path.join('ambari_agent', 'dummy_files') test_stack_path = os.path.join('ambari_agent', 
'dummy_files') test_common_services_path = os.path.join('ambari_agent', 'dummy_files') test_extensions_path = os.path.join('ambari_agent', 'dummy_files') test_host_scripts_path = os.path.join('ambari_agent', 'dummy_files') cluster_configuration = self.__get_cluster_configuration() ash = AlertSchedulerHandler(test_file_path, test_stack_path, test_common_services_path, test_extensions_path, test_host_scripts_path, cluster_configuration, self.config, None) ash.start() self.assertEquals(1, ash.get_job_count()) self.assertEquals(0, len(ash._collector.alerts())) execution_commands = [ { "clusterName": "c1", "hostName": "c6401.ambari.apache.org", "alertDefinition": self._get_port_alert_definition() } ] # execute the alert immediately and verify that the collector has the result ash.execute_alert(execution_commands) self.assertEquals(1, len(ash._collector.alerts())) def test_skipped_alert(self): definition_json = self._get_script_alert_definition() # normally set by AlertSchedulerHandler definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files') definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services') definition_json['source']['extensions_directory'] = os.path.join('ambari_agent', 'extensions') definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts') configuration = {'foo-site' : { 'skip': 'true' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = ScriptAlert(definition_json, definition_json['source'], self.config) # instruct the test alert script to be skipped alert.set_helpers(collector, cluster_configuration ) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() self.assertEquals(definition_json['source']['path'], alert.path) self.assertEquals(definition_json['source']['stacks_directory'], alert.stacks_dir) self.assertEquals(definition_json['source']['extensions_directory'], alert.extensions_dir) self.assertEquals(definition_json['source']['common_services_directory'], alert.common_services_dir) self.assertEquals(definition_json['source']['host_scripts_directory'], alert.host_scripts_dir) # ensure that the skipped alert was still placed into the collector; it's up to # the server to decide how to handle skipped alerts self.assertEquals(1,len(collector.alerts())) def test_default_reporting_text(self): definition_json = self._get_script_alert_definition() alert = ScriptAlert(definition_json, definition_json['source'], self.config) self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), '{0}') self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), '{0}') self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), '{0}') definition_json['source']['type'] = 'PORT' alert = PortAlert(definition_json, definition_json['source'], self.config) self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), 'TCP OK - {0:.4f} response on port {1}') self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), 'TCP OK - {0:.4f} response on port {1}') self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), 'Connection failed: {0} to {1}:{2}') definition_json['source']['type'] = 'WEB' alert = WebAlert(definition_json, definition_json['source'], self.config) self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), 'HTTP {0} response in {2:.4f} seconds') 
self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), 'HTTP {0} response in {2:.4f} seconds') self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), 'Connection failed to {1}') definition_json['source']['type'] = 'METRIC' alert = MetricAlert(definition_json, definition_json['source'], self.config) self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), '{0}') self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), '{0}') self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), '{0}') rm = RecoveryManager(tempfile.mktemp()) definition_json['source']['type'] = 'RECOVERY' alert = RecoveryAlert(definition_json, definition_json['source'], self.config, rm) self.assertEquals(alert._get_reporting_text(alert.RESULT_OK), 'No recovery operations executed for {2}{0}.') self.assertEquals(alert._get_reporting_text(alert.RESULT_WARNING), '{1} recovery operations executed for {2}{0}.') self.assertEquals(alert._get_reporting_text(alert.RESULT_CRITICAL), '{1} recovery operations executed for {2}{0}.') def test_configuration_updates(self): definition_json = self._get_script_alert_definition() # normally set by AlertSchedulerHandler definition_json['source']['stacks_directory'] = os.path.join('ambari_agent', 'dummy_files') definition_json['source']['common_services_directory'] = os.path.join('ambari_agent', 'common-services') definition_json['source']['extensions_directory'] = os.path.join('ambari_agent', 'extensions') definition_json['source']['host_scripts_directory'] = os.path.join('ambari_agent', 'host_scripts') configuration = {'foo-site' : { 'bar': 'rendered-bar', 'baz' : 'rendered-baz' } } # populate the configuration cache with the initial configs collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) # run the alert and verify the output alert = ScriptAlert(definition_json, definition_json['source'], MagicMock()) alert.set_helpers(collector, cluster_configuration ) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) self.assertEquals('bar is rendered-bar, baz is rendered-baz', alerts[0]['text']) # now update only the configs and run the same alert again and check # for different output configuration = {'foo-site' : { 'bar': 'rendered-bar2', 'baz' : 'rendered-baz2' } } # populate the configuration cache with the updated configs self.__update_cluster_configuration(cluster_configuration, configuration) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('WARNING', alerts[0]['state']) self.assertEquals('bar is rendered-bar2, baz is rendered-baz2', alerts[0]['text']) def test_uri_structure_parsing(self): uri_structure = { "http": "{{hdfs-site/dfs.namenode.http.address}}", "https": "{{hdfs-site/dfs.namenode.https.address}}", "https_property": "{{hdfs-site/dfs.http.policy}}", "https_property_value": "HTTPS_ONLY", "high_availability": { "nameservice": "{{hdfs-site/dfs.internal.nameservices}}", "alias_key" : "{{hdfs-site/dfs.ha.namenodes.{{ha-nameservice}}}}", "http_pattern" : "{{hdfs-site/dfs.namenode.http-address.{{ha-nameservice}}.{{alias}}}}", "https_pattern" : "{{hdfs-site/dfs.namenode.https-address.{{ha-nameservice}}.{{alias}}}}" } } configuration = {'hdfs-site' : { 'dfs.namenode.http.address' : 'c6401.ambari.apache.org:80', 'dfs.namenode.https.address' :
'c6401.ambari.apache.org:443' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = MockAlert() alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") uri_keys = alert._lookup_uri_property_keys(uri_structure) self.assertFalse(alert._check_uri_ssl_property(uri_keys)) uri = alert._get_uri_from_structure(uri_keys) self.assertEqual( 'c6401.ambari.apache.org:80', uri.uri ) self.assertEqual( False, uri.is_ssl_enabled ) configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTP_ONLY', 'dfs.namenode.http.address' : 'c6401.ambari.apache.org:80', 'dfs.namenode.https.address' : 'c6401.ambari.apache.org:443' } } self.__update_cluster_configuration(cluster_configuration, configuration) uri_keys = alert._lookup_uri_property_keys(uri_structure) self.assertFalse(alert._check_uri_ssl_property(uri_keys)) uri = alert._get_uri_from_structure(uri_keys) self.assertEqual( 'c6401.ambari.apache.org:80', uri.uri ) self.assertEqual( False, uri.is_ssl_enabled ) # switch to SSL configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTPS_ONLY', 'dfs.namenode.http.address' : 'c6401.ambari.apache.org:80', 'dfs.namenode.https.address' : 'c6401.ambari.apache.org:443' } } self.__update_cluster_configuration(cluster_configuration, configuration) uri_keys = alert._lookup_uri_property_keys(uri_structure) self.assertTrue(alert._check_uri_ssl_property(uri_keys)) uri = alert._get_uri_from_structure(uri_keys) self.assertEqual( 'c6401.ambari.apache.org:443', uri.uri ) self.assertEqual( True, uri.is_ssl_enabled ) # test HA configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTP_ONLY', 'dfs.namenode.http.address' : 'c6401.ambari.apache.org:80', 'dfs.namenode.https.address' : 'c6401.ambari.apache.org:443', 'dfs.internal.nameservices' : 'c1ha', 'dfs.ha.namenodes.c1ha' : 'nn1, nn2', 'dfs.namenode.http-address.c1ha.nn1' : 'c6401.ambari.apache.org:8080', 'dfs.namenode.http-address.c1ha.nn2' : 'c6402.ambari.apache.org:8080', } } self.__update_cluster_configuration(cluster_configuration, configuration) uri_keys = alert._lookup_uri_property_keys(uri_structure) self.assertFalse(alert._check_uri_ssl_property(uri_keys)) uri = alert._get_uri_from_structure(uri_keys) self.assertEqual( 'c6401.ambari.apache.org:8080', uri.uri ) self.assertEqual( False, uri.is_ssl_enabled ) # test HA SSL configuration = {'hdfs-site' : { 'dfs.http.policy' : 'HTTPS_ONLY', 'dfs.namenode.http.address' : 'c6401.ambari.apache.org:80', 'dfs.namenode.https.address' : 'c6401.ambari.apache.org:443', 'dfs.internal.nameservices' : 'c1ha', 'dfs.ha.namenodes.c1ha' : 'nn1, nn2', 'dfs.namenode.http-address.c1ha.nn1' : 'c6401.ambari.apache.org:8080', 'dfs.namenode.http-address.c1ha.nn2' : 'c6402.ambari.apache.org:8080', 'dfs.namenode.https-address.c1ha.nn1' : 'c6401.ambari.apache.org:8443', 'dfs.namenode.https-address.c1ha.nn2' : 'c6402.ambari.apache.org:8443', } } self.__update_cluster_configuration(cluster_configuration, configuration) uri_keys = alert._lookup_uri_property_keys(uri_structure) self.assertTrue(alert._check_uri_ssl_property(uri_keys)) uri = alert._get_uri_from_structure(uri_keys) self.assertEqual( 'c6401.ambari.apache.org:8443', uri.uri ) self.assertEqual( True, uri.is_ssl_enabled ) def test_uri_structure_parsing_without_namespace(self): """ Tests that we can parse an HA URI that only includes an alias and not a namespace :return: """ uri_structure = { "http": 
"{{yarn-site/yarn.resourcemanager.webapp.address}}", "https": "{{yarn-site/yarn.resourcemanager.webapp.http.address}}", "https_property": "{{yarn-site/yarn.http.policy}}", "https_property_value": "HTTPS_ONLY", "high_availability": { "alias_key" : "{{yarn-site/yarn.resourcemanager.ha.rm-ids}}", "http_pattern" : "{{yarn-site/yarn.resourcemanager.webapp.address.{{alias}}}}", "https_pattern" : "{{yarn-site/yarn.resourcemanager.webapp.https.address.{{alias}}}}" } } configuration = { 'yarn-site' : { 'yarn.http.policy' : 'HTTPS_ONLY', 'yarn.resourcemanager.webapp.address' : 'c6401.ambari.apache.org:80', 'yarn.resourcemanager.webapp.http.address' : 'c6401.ambari.apache.org:443', 'yarn.resourcemanager.webapp.address.rm1' : 'c6401.ambari.apache.org:8080', 'yarn.resourcemanager.webapp.https.address.rm1' : 'c6401.ambari.apache.org:8443', 'yarn.resourcemanager.webapp.address.rm2' : 'c6402.ambari.apache.org:8080', 'yarn.resourcemanager.webapp.https.address.rm2' : 'c6402.ambari.apache.org:8443', 'yarn.resourcemanager.ha.rm-ids' : 'rm1, rm2' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = MockAlert() alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6402.ambari.apache.org") uri_keys = alert._lookup_uri_property_keys(uri_structure) self.assertTrue(alert._check_uri_ssl_property(uri_keys)) uri = alert._get_uri_from_structure(uri_keys) self.assertEqual( 'c6402.ambari.apache.org:8443', uri.uri ) self.assertEqual( True, uri.is_ssl_enabled ) @patch('httplib.HTTPConnection') @patch.object(RefreshHeaderProcessor, 'http_response') def test_metric_alert_uses_refresh_processor(self, http_response_mock, http_connection_mock): """ Tests that the RefreshHeaderProcessor is correctly chained and called :param http_response_mock: :param http_connection_mock: :return: """ http_conn = http_connection_mock.return_value http_conn.getresponse.return_value = MagicMock(status=200) http_response_mock.return_value = MagicMock(code=200) url_opener = urllib2.build_opener(RefreshHeaderProcessor()) response = url_opener.open("http://foo.bar.baz/jmx") self.assertFalse(response is None) self.assertTrue(http_conn.request.called) self.assertTrue(http_conn.getresponse.called) self.assertTrue(http_response_mock.called) # now we know that the refresh header is intercepting, reset the mocks # and try with a METRIC alert MagicMock.reset_mock(http_response_mock) MagicMock.reset_mock(http_connection_mock) definition_json = self._get_metric_alert_definition() configuration = {'hdfs-site' : { 'dfs.datanode.http.address': 'c6401.ambari.apache.org:80'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") alert.collect() self.assertFalse(response is None) self.assertTrue(http_conn.request.called) self.assertTrue(http_conn.getresponse.called) self.assertTrue(http_response_mock.called) def test_urllib2_refresh_header_processor(self): from urllib2 import Request # setup the original request original_url = "http://foo.bar.baz/jmx?qry=someQuery" request = Request(original_url) # ensure that we get back a 200 with a refresh header to redirect us response = MagicMock(code=200) info_response = MagicMock() 
info_response.keys.return_value = ["Refresh"] info_response.getheader.return_value = "3; url=http://foobar.baz.qux:8080" response.info.return_value = info_response # add a mock parent to the refresh processor parent_mock = MagicMock() refresh_processor = RefreshHeaderProcessor() refresh_processor.parent = parent_mock # execute refresh_processor.http_response(request, response) # ensure that the parent was called with the modified URL parent_mock.open.assert_called_with("http://foobar.baz.qux:8080/jmx?qry=someQuery") # reset mocks MagicMock.reset_mock(parent_mock) # alter the refresh header to remove the time value info_response.getheader.return_value = "url=http://foobar.baz.qux:8443" # execute refresh_processor.http_response(request, response) # ensure that the parent was called with the modified URL parent_mock.open.assert_called_with("http://foobar.baz.qux:8443/jmx?qry=someQuery") # reset mocks MagicMock.reset_mock(parent_mock) # use an invalid refresh header info_response.getheader.return_value = "http://foobar.baz.qux:8443" # execute refresh_processor.http_response(request, response) # ensure that the parent was not called self.assertFalse(parent_mock.open.called) # reset mocks MagicMock.reset_mock(parent_mock) # remove the refresh header info_response.keys.return_value = ["SomeOtherHeaders"] # execute refresh_processor.http_response(request, response) # ensure that the parent was not called self.assertFalse(parent_mock.open.called) # reset mocks MagicMock.reset_mock(parent_mock) # use an invalid http code but include a refresh header response.code = 401 info_response.keys.return_value = ["Refresh"] info_response.getheader.return_value = "3; url=http://foobar.baz.qux:8080" # execute refresh_processor.http_response(request, response) # ensure that the parent was not called self.assertFalse(parent_mock.open.called) def test_uri_timeout(self): # the web alert will have a timeout value definition_json = self._get_web_alert_definition() alert = WebAlert(definition_json, definition_json['source'], self.config) self.assertEquals(5.678, alert.connection_timeout) self.assertEquals(5, alert.curl_connection_timeout) # the metric definition will not and should default to 5.0 definition_json = self._get_metric_alert_definition() alert = MetricAlert(definition_json, definition_json['source'], self.config) self.assertEquals(5.0, alert.connection_timeout) def test_get_configuration_values(self): """ Tests that we are able to extract parameters correctly from the cached configuration. :return: """ configuration = { 'foo-site' : { 'foo-key1' : 'value1', 'foo-key2' : 'value2', 'special-character-*' : 'asterisk', 'special-character-$' : 'dollar sign', 'special-character-%' : 'percent', 'special-character-#' : 'hash', 'special-character-!'
: 'bang', 'special-character-&' : 'ampersand' } } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = MockAlert() alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals("constant", alert._get_configuration_value("constant")) self.assertEquals("value1", alert._get_configuration_value("{{foo-site/foo-key1}}")) self.assertEquals("value2", alert._get_configuration_value("{{foo-site/foo-key2}}")) self.assertEquals("asterisk", alert._get_configuration_value("{{foo-site/special-character-*}}")) self.assertEquals("dollar sign", alert._get_configuration_value("{{foo-site/special-character-$}}")) self.assertEquals("hash", alert._get_configuration_value("{{foo-site/special-character-#}}")) self.assertEquals("bang", alert._get_configuration_value("{{foo-site/special-character-!}}")) self.assertEquals("ampersand", alert._get_configuration_value("{{foo-site/special-character-&}}")) # try a mix of parameter and constant self.assertEquals("http://value1/servlet", alert._get_configuration_value("http://{{foo-site/foo-key1}}/servlet")) self.assertEquals("http://value1/servlet/value2", alert._get_configuration_value("http://{{foo-site/foo-key1}}/servlet/{{foo-site/foo-key2}}")) # try to request a dictionary object instead of a property self.assertEquals(configuration["foo-site"], alert._get_configuration_value("{{foo-site}}")) @patch.object(MetricAlert, "_load_jmx") def test_metric_alert_floating_division(self, ma_load_jmx_mock): definition_json = self._get_metric_alert_definition_with_float_division() configuration = {'hdfs-site' : { 'dfs.datanode.http.address': 'c6401.ambari.apache.org:80'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = MetricAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") # 10 / 5 ma_load_jmx_mock.return_value = ([10, 5], None) alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertEquals('(Unit Tests) OK: 10 5 2.0', alerts[0]['text']) @patch.object(socket.socket,"connect") def test_alert_definition_value_error_conversion(self, socket_connect_mock): """ Tests that an alert definition with text that doesn't match the type of positional arguments can recover and retry the ValueError. 
:param socket_connect_mock: :return: """ definition_json = self._get_alert_definition_with_value_error_text() configuration = {'hdfs-site' : { 'my-key': 'c6401.ambari.apache.org:2181'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = PortAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6402.ambari.apache.org") # use a URI that has commas to verify that we properly parse it alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(6, alert.interval()) # the collect should catch the invalid text in the definition # ValueError: Unknown format code 'd' for object of type 'float' alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('OK', alerts[0]['state']) self.assertTrue('(Unit Tests) TCP OK' in alerts[0]['text']) @patch.object(socket.socket,"connect") def test_alert_definition_too_many_positional_arguments(self, socket_connect_mock): """ Tests that an alert definition with too many arguments produces an alert to collect after the exception is raised. :param socket_connect_mock: :return: """ definition_json = self._get_alert_definition_with_too_many_positional_arguments() configuration = {'hdfs-site' : { 'my-key': 'c6401.ambari.apache.org:2181'} } collector = AlertCollector() cluster_configuration = self.__get_cluster_configuration() self.__update_cluster_configuration(cluster_configuration, configuration) alert = PortAlert(definition_json, definition_json['source'], self.config) alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6402.ambari.apache.org") # use a URI that has commas to verify that we properly parse it alert.set_helpers(collector, cluster_configuration) alert.set_cluster("c1", "c6401.ambari.apache.org") self.assertEquals(6, alert.interval()) # the collect should catch the invalid text in the definition # ValueError: Unknown format code 'd' for object of type 'float' alert.collect() alerts = collector.alerts() self.assertEquals(0, len(collector.alerts())) self.assertEquals('UNKNOWN', alerts[0]['state']) self.assertTrue('There is a problem with the alert definition' in alerts[0]['text']) def __get_cluster_configuration(self): """ Gets an instance of the cluster cache where the file read and write operations have been mocked out :return: """ with patch("__builtin__.open") as open_mock: open_mock.side_effect = self.open_side_effect cluster_configuration = ClusterConfiguration("") return cluster_configuration @patch("os.open") @patch("os.fdopen") def __update_cluster_configuration(self, cluster_configuration, configuration, osfdopen_mock, osopen_mock): """ Updates the configuration cache, using a mock file as the disk based cache so that a file is not created during tests :return: """ osfdopen_mock.side_effect = self.osfdopen_side_effect cluster_configuration._update_configurations("c1", configuration) def open_side_effect(self, file, mode): if mode == 'w': file_mock = MagicMock() return file_mock else: return self.original_open(file, mode) def osfdopen_side_effect(self, fd, mode): if mode == 'w': file_mock = MagicMock() return file_mock else: return self.original_open(fd, mode) def _get_script_alert_definition(self): return { "name": "namenode_process", "service": "HDFS", "component": "NAMENODE", "label": "NameNode process",
"interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "SCRIPT", "path": "test_script.py", } } def _get_script_alert_definition_with_parameters(self): return { "name": "namenode_process", "service": "HDFS", "component": "NAMENODE", "label": "NameNode process", "interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "SCRIPT", "path": "test_script.py", "parameters": [ { "name": "script.parameter.foo", "value": "foo bar baz" } ] } } def _get_port_alert_definition(self): return { "name": "namenode_process", "service": "HDFS", "component": "NAMENODE", "label": "NameNode process", "interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "PORT", "uri": "{{hdfs-site/my-key}}", "default_port": 50070, "reporting": { "ok": { "text": "(Unit Tests) TCP OK - {0:.4f} response time on port {1}" }, "warning": { "text": "(Unit Tests) TCP WARN - {0:.4f} response time on port {1}", "value": 1.5 }, "critical": { "text": "(Unit Tests) Could not load process info: {0}", "value": 5.0 } } } } def _get_recovery_alert_definition(self): return { "componentName": "METRICS_COLLECTOR", "name": "ams_metrics_collector_autostart", "label": "Metrics Collector Recovery", "description": "This alert is triggered if the Metrics Collector has been auto-started for number of times equal to threshold.", "interval": 1, "scope": "HOST", "enabled": True, "source": { "type": "RECOVERY", "reporting": { "ok": { "text": "Metrics Collector has not been auto-started and is running normally{0}." }, "warning": { "text": "Metrics Collector has been auto-started {1} times{0}.", "count": 1 }, "critical": { "text": "Metrics Collector has been auto-started {1} times{0}.", "count": 5 } } } } def _get_metric_alert_definition(self): return { "name": "DataNode CPU Check", "service": "HDFS", "component": "DATANODE", "label": "DataNode Process", "interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "METRIC", "uri": { "http": "{{hdfs-site/dfs.datanode.http.address}}", "https": "{{hdfs-site/dfs.datanode.https.address}}", "https_property": "{{hdfs-site/dfs.http.policy}}", "https_property_value": "HTTPS_ONLY" }, "jmx": { "property_list": [ "someJmxObject/value", "someOtherJmxObject/value" ], "value": "({0} * 100) + {1}" }, "reporting": { "ok": { "text": "(Unit Tests) OK: {0} {1} {2}", }, "warning": { "text": "(Unit Tests) Warning: {0} {1} {2}", "value": 150 }, "critical": { "text": "(Unit Tests) Critical: {0} {1} {2}", "value": 200 } } } } def _get_ams_alert_definition(self): return { "ignore_host": False, "name": "namenode_mean_heapsize_used", "componentName": "NAMENODE", "interval": 1, "clusterId": 2, "uuid": "8a857295-ad11-4985-896e-d866dc27b963", "label": "NameNode Mean Used Heap Size (Hourly)", "definitionId": 28, "source": { "ams": { "compute": "mean", "interval": 30, "app_id": "NAMENODE", "value": "{0}", "metric_list": [ "jvm.JvmMetrics.MemHeapUsedM" ], "minimum_value": -1 }, "reporting": { "units": "#", "warning": { "text": "(Unit Tests) Warning: the mean used heap size is {0} MB.", "value": 768 }, "ok": { "text": "(Unit Tests) OK: the mean used heap size is {0} MB." 
}, "critical": { "text": "(Unit Tests) Critical: the mean used heap size is {0} MB.", "value": 1024 } }, "type": "AMS", "uri": { "http": "{{ams-site/timeline.metrics.service.webapp.address}}", "https_property_value": "HTTPS_ONLY", "https_property": "{{ams-site/timeline.metrics.service.http.policy}}", "https": "{{ams-site/timeline.metrics.service.webapp.address}}", "connection_timeout": 5.0 } }, } def _get_metric_alert_definition_with_float_division(self): return { "name": "DataNode CPU Check", "service": "HDFS", "component": "DATANODE", "label": "DataNode Process", "interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "METRIC", "uri": { "http": "{{hdfs-site/dfs.datanode.http.address}}", "https": "{{hdfs-site/dfs.datanode.https.address}}", "https_property": "{{hdfs-site/dfs.http.policy}}", "https_property_value": "HTTPS_ONLY" }, "jmx": { "property_list": [ "someJmxObject/value", "someOtherJmxObject/value" ], "value": "{0} / {1}" }, "reporting": { "ok": { "text": "(Unit Tests) OK: {0} {1} {2}", }, "warning": { "text": "(Unit Tests) Warning: {0} {1} {2}", "value": 150 }, "critical": { "text": "(Unit Tests) Critical: {0} {1} {2}", "value": 200 } } } } def _get_web_alert_definition(self): return { "name": "webalert_test", "service": "HDFS", "component": "DATANODE", "label": "WebAlert Test", "interval": 1, "scope": "HOST", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "WEB", "uri": { "http": "{{hdfs-site/dfs.datanode.http.address}}", "https": "{{hdfs-site/dfs.datanode.https.address}}", "https_property": "{{hdfs-site/dfs.http.policy}}", "https_property_value": "HTTPS_ONLY", "connection_timeout": 5.678, "acceptable_codes": [600, 700, 800] }, "reporting": { "ok": { "text": "(Unit Tests) ok: {0}", }, "warning": { "text": "(Unit Tests) warning: {0} ({3})", }, "critical": { "text": "(Unit Tests) critical: {1}. {3}", } } } } def _get_alert_definition_with_value_error_text(self): return { "name": "namenode_process", "service": "HDFS", "component": "NAMENODE", "label": "NameNode process", "interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "PORT", "uri": "{{hdfs-site/my-key}}", "default_port": 50070, "reporting": { "ok": { "text": "(Unit Tests) TCP OK {0:.4d}" }, "warning": { "text": "(Unit Tests) TCP Warning {0:.4d}", "value": 1.5 }, "critical": { "text": "(Unit Tests) TCP Critical {0:.4d}", "value": 5.0 } } } } def _get_alert_definition_with_too_many_positional_arguments(self): return { "name": "namenode_process", "service": "HDFS", "component": "NAMENODE", "label": "NameNode process", "interval": 6, "scope": "host", "enabled": True, "uuid": "c1f73191-4481-4435-8dae-fd380e4c0be1", "source": { "type": "PORT", "uri": "{{hdfs-site/my-key}}", "default_port": 50070, "reporting": { "ok": { "text": "Bad Syntax Going To Mess You Up {0:.4d} {1} {2} {3} {4}" }, "warning": { "text": "Bad Syntax Going To Mess You Up {0:.4d} {1} {2} {3} {4}", "value": 1.5 }, "critical": { "text": "Bad Syntax Going To Mess You Up {0:.4d} {1} {2} {3} {4}", "value": 5.0 } } } } class MockAlert(BaseAlert): """ Mock class for testing """ def __init__(self): super(MockAlert, self).__init__(None, None, AmbariConfig()) def get_name(self): return "mock_alert"
apache-2.0
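The refresh-header tests above exercise Ambari's RefreshHeaderProcessor, whose implementation is not part of this record. As a rough sketch only (the class name, parsing details, and urlparse handling below are assumptions, not Ambari's actual code), a urllib2 processor that follows "Refresh: <seconds>; url=<target>" headers on 200 responses, preserving the original path and query string as those tests expect, could look like this:

import urllib2
from urlparse import urlparse


class SimpleRefreshProcessor(urllib2.BaseHandler):
    """Hypothetical sketch: follow 'Refresh' redirects on HTTP 200 responses,
    keeping the original request's path and query string."""

    def http_response(self, request, response):
        if response.code == 200 and "Refresh" in response.info().keys():
            header = response.info().getheader("Refresh")
            # accept both '3; url=http://host:port' and 'url=http://host:port'
            parts = header.split("url=", 1)
            if len(parts) == 2 and parts[1].startswith("http"):
                original = urlparse(request.get_full_url())
                target = parts[1].rstrip("/") + original.path
                if original.query:
                    target = "%s?%s" % (target, original.query)
                # delegate the redirected request to the chained opener
                return self.parent.open(target)
        # anything else (non-200, missing or malformed header) passes through
        return response

    https_response = http_response


# Usage sketch: chain the processor into an opener, as the tests do.
# opener = urllib2.build_opener(SimpleRefreshProcessor())
# opener.open("http://host:port/jmx?qry=someQuery")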
blckshrk/Weboob
weboob/tools/test.py
4
2247
# -*- coding: utf-8 -*-

# Copyright(C) 2010-2011 Romain Bignon, Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.

from unittest import TestCase
from random import choice

from nose.plugins.skip import SkipTest

from weboob.core import Weboob

__all__ = ['TestCase', 'BackendTest']


class BackendTest(TestCase):
    BACKEND = None

    def __init__(self, *args, **kwargs):
        TestCase.__init__(self, *args, **kwargs)
        self.backends = {}
        self.backend_instance = None
        self.backend = None
        self.weboob = Weboob()

        if self.weboob.load_backends(modules=[self.BACKEND]):
            # provide the tests with all available backends
            self.backends = self.weboob.backend_instances
            # choose one backend (enough for most tests)
            self.backend_instance = choice(self.backends.keys())
            self.backend = self.backends[self.backend_instance]

    def run(self, result):
        """
        Call the parent run() for each backend instance.
        Skip the test if we have no backends.
        """
        try:
            if not len(self.backends):
                result.startTest(self)
                result.stopTest(self)
                raise SkipTest('No backends configured for this module.')
            TestCase.run(self, result)
        finally:
            self.weboob.deinit()

    def shortDescription(self):
        """
        Generate a description with the backend instance name.
        """
        # do not use TestCase.shortDescription as it returns None
        return '%s [%s]' % (str(self), self.backend_instance)
agpl-3.0
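For context, a concrete module test built on this harness would typically only set BACKEND and use self.backend; this is an illustrative sketch, and the module name 'example' below is hypothetical, not a real weboob module:

# hypothetical usage sketch of BackendTest
from weboob.tools.test import BackendTest


class ExampleTest(BackendTest):
    BACKEND = 'example'  # assumed module name, for illustration only

    def test_backend_loaded(self):
        # self.backend is one of the configured backend instances
        # chosen at random in BackendTest.__init__
        self.assertTrue(self.backend is not None)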
hwu25/AppPkg
Applications/Python/Python-2.7.2/Lib/test/test_mimetools.py
13
1831
import unittest
from test import test_support

import string
import StringIO

mimetools = test_support.import_module("mimetools", deprecated=True)

msgtext1 = mimetools.Message(StringIO.StringIO(
"""Content-Type: text/plain; charset=iso-8859-1; format=flowed
Content-Transfer-Encoding: 8bit

Foo!
"""))


class MimeToolsTest(unittest.TestCase):

    def test_decodeencode(self):
        start = string.ascii_letters + "=" + string.digits + "\n"
        for enc in ['7bit', '8bit', 'base64', 'quoted-printable',
                    'uuencode', 'x-uuencode', 'uue', 'x-uue']:
            i = StringIO.StringIO(start)
            o = StringIO.StringIO()
            mimetools.encode(i, o, enc)
            i = StringIO.StringIO(o.getvalue())
            o = StringIO.StringIO()
            mimetools.decode(i, o, enc)
            self.assertEqual(o.getvalue(), start)

    def test_boundary(self):
        s = set([""])
        for i in xrange(100):
            nb = mimetools.choose_boundary()
            self.assertNotIn(nb, s)
            s.add(nb)

    def test_message(self):
        msg = mimetools.Message(StringIO.StringIO(msgtext1))
        self.assertEqual(msg.gettype(), "text/plain")
        self.assertEqual(msg.getmaintype(), "text")
        self.assertEqual(msg.getsubtype(), "plain")
        self.assertEqual(msg.getplist(), ["charset=iso-8859-1", "format=flowed"])
        self.assertEqual(msg.getparamnames(), ["charset", "format"])
        self.assertEqual(msg.getparam("charset"), "iso-8859-1")
        self.assertEqual(msg.getparam("format"), "flowed")
        self.assertEqual(msg.getparam("spam"), None)
        self.assertEqual(msg.getencoding(), "8bit")


def test_main():
    test_support.run_unittest(MimeToolsTest)


if __name__ == "__main__":
    test_main()
bsd-2-clause
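The encode/decode round trip exercised by test_decodeencode can be reproduced outside the test; this minimal sketch uses only the mimetools and StringIO APIs already imported above (mimetools is deprecated in Python 2.6+, used here purely for illustration):

import StringIO
import mimetools

payload = "hello, world\n"

# encode to base64, then decode back
encoded = StringIO.StringIO()
mimetools.encode(StringIO.StringIO(payload), encoded, 'base64')

decoded = StringIO.StringIO()
mimetools.decode(StringIO.StringIO(encoded.getvalue()), decoded, 'base64')

assert decoded.getvalue() == payload  # the round trip is lossless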
rharrison10/openshift-ansible
playbooks/common/openshift-cluster/upgrades/library/openshift_upgrade_config.py
91
5294
#!/usr/bin/python
# -*- coding: utf-8 -*-
# vim: expandtab:tabstop=4:shiftwidth=4
"""Ansible module for modifying OpenShift configs during an upgrade"""

import os
import yaml

DOCUMENTATION = '''
---
module: openshift_upgrade_config
short_description: OpenShift Upgrade Config
author: Jason DeTiberus
requirements: [ ]
'''
EXAMPLES = '''
'''

def modify_api_levels(level_list, remove, ensure, msg_prepend='', msg_append=''):
    """ modify_api_levels """
    changed = False
    changes = []

    if not isinstance(remove, list):
        remove = []

    if not isinstance(ensure, list):
        ensure = []

    if not isinstance(level_list, list):
        new_list = []
        changed = True
        changes.append("%s created missing %s" % (msg_prepend, msg_append))
    else:
        new_list = level_list

    for level in remove:
        if level in new_list:
            new_list.remove(level)
            changed = True
            changes.append("%s removed %s %s" % (msg_prepend, level, msg_append))

    for level in ensure:
        if level not in new_list:
            new_list.append(level)
            changed = True
            changes.append("%s added %s %s" % (msg_prepend, level, msg_append))

    return {'new_list': new_list, 'changed': changed, 'changes': changes}


def upgrade_master_3_0_to_3_1(ansible_module, config_base, backup):
    """Main upgrade method for 3.0 to 3.1."""
    changes = []

    # Facts do not get transferred to the hosts where custom modules run,
    # need to make some assumptions here.
    master_config = os.path.join(config_base, 'master/master-config.yaml')

    master_cfg_file = open(master_config, 'r')
    config = yaml.safe_load(master_cfg_file.read())
    master_cfg_file.close()

    # Remove unsupported api versions and ensure supported api versions from
    # master config
    unsupported_levels = ['v1beta1', 'v1beta2', 'v1beta3']
    supported_levels = ['v1']

    result = modify_api_levels(config.get('apiLevels'), unsupported_levels,
                               supported_levels, 'master-config.yaml:', 'from apiLevels')
    if result['changed']:
        config['apiLevels'] = result['new_list']
        changes.append(result['changes'])

    if 'kubernetesMasterConfig' in config and 'apiLevels' in config['kubernetesMasterConfig']:
        config['kubernetesMasterConfig'].pop('apiLevels')
        changes.append('master-config.yaml: removed kubernetesMasterConfig.apiLevels')

    # Add masterCA to serviceAccountConfig
    if 'serviceAccountConfig' in config and 'masterCA' not in config['serviceAccountConfig']:
        config['serviceAccountConfig']['masterCA'] = config['oauthConfig'].get('masterCA', 'ca.crt')

    # Add proxyClientInfo to master-config
    if 'proxyClientInfo' not in config['kubernetesMasterConfig']:
        config['kubernetesMasterConfig']['proxyClientInfo'] = {
            'certFile': 'master.proxy-client.crt',
            'keyFile': 'master.proxy-client.key'
        }
        changes.append("master-config.yaml: added proxyClientInfo")

    if len(changes) > 0:
        if backup:
            # TODO: Check success:
            ansible_module.backup_local(master_config)

        # Write the modified config:
        out_file = open(master_config, 'w')
        out_file.write(yaml.safe_dump(config, default_flow_style=False))
        out_file.close()

    return changes


def upgrade_master(ansible_module, config_base, from_version, to_version, backup):
    """Upgrade entry point."""
    if from_version == '3.0':
        if to_version == '3.1':
            return upgrade_master_3_0_to_3_1(ansible_module, config_base, backup)


def main():
    """ main """
    # disabling pylint errors for global-variable-undefined and invalid-name
    # for 'global module' usage, since it is required to use ansible_facts
    # pylint: disable=global-variable-undefined, invalid-name,
    # redefined-outer-name
    global module

    module = AnsibleModule(
        argument_spec=dict(
            config_base=dict(required=True),
            from_version=dict(required=True, choices=['3.0']),
            to_version=dict(required=True, choices=['3.1']),
            role=dict(required=True, choices=['master']),
            backup=dict(required=False, default=True, type='bool')
        ),
        supports_check_mode=True,
    )

    from_version = module.params['from_version']
    to_version = module.params['to_version']
    role = module.params['role']
    backup = module.params['backup']
    config_base = module.params['config_base']

    try:
        changes = []
        if role == 'master':
            changes = upgrade_master(module, config_base, from_version,
                                     to_version, backup)

        changed = len(changes) > 0
        return module.exit_json(changed=changed, changes=changes)

    # ignore broad-except error to avoid stack trace to ansible user
    # pylint: disable=broad-except
    except Exception, e:
        return module.fail_json(msg=str(e))

# ignore pylint errors related to the module_utils import
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import
# import module snippets
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
apache-2.0
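As a quick illustration of modify_api_levels' contract, here is the function above applied to sample data (this call and its expected result are derived directly from the code shown, not from the playbook itself):

result = modify_api_levels(
    ['v1beta1', 'v1beta3', 'v1'],  # apiLevels as read from master-config.yaml
    remove=['v1beta1', 'v1beta2', 'v1beta3'],
    ensure=['v1'],
    msg_prepend='master-config.yaml:',
    msg_append='from apiLevels')

# result == {'new_list': ['v1'],
#            'changed': True,
#            'changes': ['master-config.yaml: removed v1beta1 from apiLevels',
#                        'master-config.yaml: removed v1beta3 from apiLevels']}

Note one design consequence visible in the source: when level_list is already a list, new_list aliases it, so the function mutates the caller's list in place rather than copying it.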
nevir/plexability
extern/depot_tools/third_party/pylint/checkers/imports.py
20
15213
# Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE). # http://www.logilab.fr/ -- mailto:[email protected] # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free Software # Foundation; either version 2 of the License, or (at your option) any later # version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., # 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. """imports checkers for Python code""" from logilab.common.graph import get_cycles, DotBackend from logilab.common.modutils import is_standard_module from logilab.common.ureports import VerbatimText, Paragraph from logilab import astng from logilab.astng import are_exclusive from pylint.interfaces import IASTNGChecker from pylint.checkers import BaseChecker, EmptyReport def get_first_import(node, context, name, base, level): """return the node where [base.]<name> is imported or None if not found """ first = None found = False for first in context.values(): if isinstance(first, astng.Import): if name in [iname[0] for iname in first.names]: found = True break elif isinstance(first, astng.From): if base == first.modname and level == first.level and \ name in [iname[0] for iname in first.names]: found = True break if found and first is not node and not are_exclusive(first, node): return first # utilities to represent import dependencies as tree and dot graph ########### def filter_dependencies_info(dep_info, package_dir, mode='external'): """filter external or internal dependencies from dep_info (return a new dictionary containing the filtered modules only) """ if mode == 'external': filter_func = lambda x: not is_standard_module(x, (package_dir,)) else: assert mode == 'internal' filter_func = lambda x: is_standard_module(x, (package_dir,)) result = {} for importee, importers in dep_info.items(): if filter_func(importee): result[importee] = importers return result def make_tree_defs(mod_files_list): """get a list of 2-uple (module, list_of_files_which_import_this_module), it will return a dictionary to represent this as a tree """ tree_defs = {} for mod, files in mod_files_list: node = (tree_defs, ()) for prefix in mod.split('.'): node = node[0].setdefault(prefix, [{}, []]) node[1] += files return tree_defs def repr_tree_defs(data, indent_str=None): """return a string which represents imports as a tree""" lines = [] nodes = data.items() for i, (mod, (sub, files)) in enumerate(sorted(nodes, key=lambda x: x[0])): if not files: files = '' else: files = '(%s)' % ','.join(files) if indent_str is None: lines.append('%s %s' % (mod, files)) sub_indent_str = ' ' else: lines.append('%s\-%s %s' % (indent_str, mod, files)) if i == len(nodes)-1: sub_indent_str = '%s ' % indent_str else: sub_indent_str = '%s| ' % indent_str if sub: lines.append(repr_tree_defs(sub, sub_indent_str)) return '\n'.join(lines) def dependencies_graph(filename, dep_info): """write dependencies as a dot (graphviz) file """ done = {} printer = DotBackend(filename[:-4], rankdir = "LR") printer.emit('URL="." 
node[shape="box"]') for modname, dependencies in dep_info.items(): done[modname] = 1 printer.emit_node(modname) for modname in dependencies: if modname not in done: done[modname] = 1 printer.emit_node(modname) for depmodname, dependencies in dep_info.items(): for modname in dependencies: printer.emit_edge(modname, depmodname) printer.generate(filename) def make_graph(filename, dep_info, sect, gtype): """generate a dependencies graph and add some information about it in the report's section """ dependencies_graph(filename, dep_info) sect.append(Paragraph('%simports graph has been written to %s' % (gtype, filename))) # the import checker itself ################################################### MSGS = { 'F0401': ('Unable to import %s', 'Used when pylint has been unable to import a module.'), 'R0401': ('Cyclic import (%s)', 'Used when a cyclic import between two or more modules is \ detected.'), 'W0401': ('Wildcard import %s', 'Used when `from module import *` is detected.'), 'W0402': ('Uses of a deprecated module %r', 'Used when a module marked as deprecated is imported.'), 'W0403': ('Relative import %r, should be %r', 'Used when an import relative to the package directory is \ detected.'), 'W0404': ('Reimport %r (imported line %s)', 'Used when a module is reimported multiple times.'), 'W0406': ('Module import itself', 'Used when a module is importing itself.'), 'W0410': ('__future__ import is not the first non docstring statement', 'Python 2.5 and greater require __future__ import to be the \ first non docstring statement in the module.'), } class ImportsChecker(BaseChecker): """checks for * external modules dependencies * relative / wildcard imports * cyclic imports * uses of deprecated modules """ __implements__ = IASTNGChecker name = 'imports' msgs = MSGS priority = -2 options = (('deprecated-modules', {'default' : ('regsub', 'string', 'TERMIOS', 'Bastion', 'rexec'), 'type' : 'csv', 'metavar' : '<modules>', 'help' : 'Deprecated modules which should not be used, \ separated by a comma'} ), ('import-graph', {'default' : '', 'type' : 'string', 'metavar' : '<file.dot>', 'help' : 'Create a graph of all (i.e. 
internal and \ external) dependencies in the given file (report RP0402 must not be disabled)'} ), ('ext-import-graph', {'default' : '', 'type' : 'string', 'metavar' : '<file.dot>', 'help' : 'Create a graph of external dependencies in the \ given file (report RP0402 must not be disabled)'} ), ('int-import-graph', {'default' : '', 'type' : 'string', 'metavar' : '<file.dot>', 'help' : 'Create a graph of internal dependencies in the \ given file (report RP0402 must not be disabled)'} ), ) def __init__(self, linter=None): BaseChecker.__init__(self, linter) self.stats = None self.import_graph = None self.__int_dep_info = self.__ext_dep_info = None self.reports = (('RP0401', 'External dependencies', self.report_external_dependencies), ('RP0402', 'Modules dependencies graph', self.report_dependencies_graph), ) def open(self): """called before visiting project (i.e. set of modules)""" self.linter.add_stats(dependencies={}) self.linter.add_stats(cycles=[]) self.stats = self.linter.stats self.import_graph = {} def close(self): """called after visiting project (i.e. set of modules)""" # don't try to compute cycles if the associated message is disabled if self.linter.is_message_enabled('R0401'): for cycle in get_cycles(self.import_graph): self.add_message('R0401', args=' -> '.join(cycle)) def visit_import(self, node): """triggered when an import statement is seen""" modnode = node.root() for name, _ in node.names: importedmodnode = self.get_imported_module(modnode, node, name) if importedmodnode is None: continue self._check_relative_import(modnode, node, importedmodnode, name) self._add_imported_module(node, importedmodnode.name) self._check_deprecated_module(node, name) self._check_reimport(node, name) def visit_from(self, node): """triggered when a from statement is seen""" basename = node.modname if basename == '__future__': # check if this is the first non-docstring statement in the module prev = node.previous_sibling() if prev: # consecutive future statements are possible if not (isinstance(prev, astng.From) and prev.modname == '__future__'): self.add_message('W0410', node=node) return modnode = node.root() importedmodnode = self.get_imported_module(modnode, node, basename) if importedmodnode is None: return self._check_relative_import(modnode, node, importedmodnode, basename) self._check_deprecated_module(node, basename) for name, _ in node.names: if name == '*': self.add_message('W0401', args=basename, node=node) continue self._add_imported_module(node, '%s.%s' % (importedmodnode.name, name)) self._check_reimport(node, name, basename, node.level) def get_imported_module(self, modnode, importnode, modname): try: return importnode.do_import_module(modname) except astng.InferenceError, ex: if str(ex) != modname: args = '%r (%s)' % (modname, ex) else: args = repr(modname) self.add_message("F0401", args=args, node=importnode) def _check_relative_import(self, modnode, importnode, importedmodnode, importedasname): """check relative import. node is either an Import or From node, modname the imported module name. """ if 'W0403' not in self.active_msgs: return if importedmodnode.file is None: return False # built-in module if modnode is importedmodnode: return False # module importing itself if modnode.absolute_import_activated() or getattr(importnode, 'level', None): return False if importedmodnode.name != importedasname: # this must be a relative import...
self.add_message('W0403', args=(importedasname, importedmodnode.name), node=importnode) def _add_imported_module(self, node, importedmodname): """notify an imported module, used to analyze dependencies""" context_name = node.root().name if context_name == importedmodname: # module importing itself ! self.add_message('W0406', node=node) elif not is_standard_module(importedmodname): # handle dependencies importedmodnames = self.stats['dependencies'].setdefault( importedmodname, set()) if not context_name in importedmodnames: importedmodnames.add(context_name) if is_standard_module( importedmodname, (self.package_dir(),) ): # update import graph mgraph = self.import_graph.setdefault(context_name, set()) if not importedmodname in mgraph: mgraph.add(importedmodname) def _check_deprecated_module(self, node, mod_path): """check if the module is deprecated""" for mod_name in self.config.deprecated_modules: if mod_path == mod_name or mod_path.startswith(mod_name + '.'): self.add_message('W0402', node=node, args=mod_path) def _check_reimport(self, node, name, basename=None, level=0): """check if the import is necessary (i.e. not already done)""" if 'W0404' not in self.active_msgs: return frame = node.frame() root = node.root() contexts = [(frame, level)] if root is not frame: contexts.append((root, 0)) for context, level in contexts: first = get_first_import(node, context, name, basename, level) if first is not None: self.add_message('W0404', node=node, args=(name, first.fromlineno)) def report_external_dependencies(self, sect, _, dummy): """return a verbatim layout for displaying dependencies""" dep_info = make_tree_defs(self._external_dependencies_info().items()) if not dep_info: raise EmptyReport() tree_str = repr_tree_defs(dep_info) sect.append(VerbatimText(tree_str)) def report_dependencies_graph(self, sect, _, dummy): """write dependencies as a dot (graphviz) file""" dep_info = self.stats['dependencies'] if not dep_info or not (self.config.import_graph or self.config.ext_import_graph or self.config.int_import_graph): raise EmptyReport() filename = self.config.import_graph if filename: make_graph(filename, dep_info, sect, '') filename = self.config.ext_import_graph if filename: make_graph(filename, self._external_dependencies_info(), sect, 'external ') filename = self.config.int_import_graph if filename: make_graph(filename, self._internal_dependencies_info(), sect, 'internal ') def _external_dependencies_info(self): """return cached external dependencies information or build and cache them """ if self.__ext_dep_info is None: self.__ext_dep_info = filter_dependencies_info( self.stats['dependencies'], self.package_dir(), 'external') return self.__ext_dep_info def _internal_dependencies_info(self): """return cached internal dependencies information or build and cache them """ if self.__int_dep_info is None: self.__int_dep_info = filter_dependencies_info( self.stats['dependencies'], self.package_dir(), 'internal') return self.__int_dep_info def register(linter): """required method to auto register this checker """ linter.register_checker(ImportsChecker(linter))
gpl-2.0
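To make the dependency-report helpers above concrete, here is what make_tree_defs and repr_tree_defs produce for a small input. The expected output in the comment was traced by hand from the code shown, so treat the exact spacing as approximate:

deps = [('logilab.astng', ['b.py', 'c.py']),
        ('logilab.common', ['a.py'])]
tree = make_tree_defs(deps)
print repr_tree_defs(tree)
# Expected shape (approximate):
# logilab
#  \-astng (b.py,c.py)
#  \-common (a.py)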
andela-ifageyinbo/django
tests/settings_tests/tests.py
74
17758
import os import sys import unittest import warnings from types import ModuleType from django.conf import ENVIRONMENT_VARIABLE, LazySettings, Settings, settings from django.core.exceptions import ImproperlyConfigured from django.http import HttpRequest from django.test import ( SimpleTestCase, TestCase, TransactionTestCase, modify_settings, override_settings, signals, ) from django.utils import six @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], ITEMS_OUTER=[1, 2, 3], TEST='override', TEST_OUTER='outer') class FullyDecoratedTranTestCase(TransactionTestCase): available_apps = [] def test_override(self): self.assertListEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertListEqual(settings.ITEMS_OUTER, [1, 2, 3]) self.assertEqual(settings.TEST, 'override') self.assertEqual(settings.TEST_OUTER, 'outer') @modify_settings(ITEMS={ 'append': ['e', 'f'], 'prepend': ['a'], 'remove': ['d', 'c'], }) def test_method_list_override(self): self.assertListEqual(settings.ITEMS, ['a', 'b', 'e', 'f']) self.assertListEqual(settings.ITEMS_OUTER, [1, 2, 3]) @modify_settings(ITEMS={ 'append': ['b'], 'prepend': ['d'], 'remove': ['a', 'c', 'e'], }) def test_method_list_override_no_ops(self): self.assertListEqual(settings.ITEMS, ['b', 'd']) @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) def test_method_list_override_strings(self): self.assertListEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) @modify_settings(ITEMS={'remove': ['b', 'd']}) @modify_settings(ITEMS={'append': ['b'], 'prepend': ['d']}) def test_method_list_override_nested_order(self): self.assertListEqual(settings.ITEMS, ['d', 'c', 'b']) @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') self.assertEqual(settings.TEST_OUTER, 'outer') def test_decorated_testcase_name(self): self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase') def test_decorated_testcase_module(self): self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__) @modify_settings(ITEMS={ 'prepend': ['b'], 'append': ['d'], 'remove': ['a', 'e'] }) @override_settings(ITEMS=['a', 'c', 'e'], TEST='override') class FullyDecoratedTestCase(TestCase): def test_override(self): self.assertListEqual(settings.ITEMS, ['b', 'c', 'd']) self.assertEqual(settings.TEST, 'override') @modify_settings(ITEMS={ 'append': 'e', 'prepend': 'a', 'remove': 'c', }) @override_settings(TEST='override2') def test_method_override(self): self.assertListEqual(settings.ITEMS, ['a', 'b', 'd', 'e']) self.assertEqual(settings.TEST, 'override2') class ClassDecoratedTestCaseSuper(TestCase): """ Dummy class for testing max recursion error in child class call to super(). Refs #17011. """ def test_max_recursion_error(self): pass @override_settings(TEST='override') class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper): @classmethod def setUpClass(cls): super(ClassDecoratedTestCase, cls).setUpClass() cls.foo = getattr(settings, 'TEST', 'BUG') def test_override(self): self.assertEqual(settings.TEST, 'override') def test_setupclass_override(self): """Test that settings are overridden within setUpClass -- refs #21281""" self.assertEqual(self.foo, 'override') @override_settings(TEST='override2') def test_method_override(self): self.assertEqual(settings.TEST, 'override2') def test_max_recursion_error(self): """ Overriding a method on a super class and then calling that method on the super class should not trigger infinite recursion. See #17011. 
""" try: super(ClassDecoratedTestCase, self).test_max_recursion_error() except RuntimeError: self.fail() @modify_settings(ITEMS={'append': 'mother'}) @override_settings(ITEMS=['father'], TEST='override-parent') class ParentDecoratedTestCase(TestCase): pass @modify_settings(ITEMS={'append': ['child']}) @override_settings(TEST='override-child') class ChildDecoratedTestCase(ParentDecoratedTestCase): def test_override_settings_inheritance(self): self.assertEqual(settings.ITEMS, ['father', 'mother', 'child']) self.assertEqual(settings.TEST, 'override-child') class SettingsTests(SimpleTestCase): def setUp(self): self.testvalue = None signals.setting_changed.connect(self.signal_callback) def tearDown(self): signals.setting_changed.disconnect(self.signal_callback) def signal_callback(self, sender, setting, value, **kwargs): if setting == 'TEST': self.testvalue = value def test_override(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) self.assertEqual('test', settings.TEST) del settings.TEST def test_override_change(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test2' self.assertEqual('test', settings.TEST) del settings.TEST def test_override_doesnt_leak(self): self.assertRaises(AttributeError, getattr, settings, 'TEST') with self.settings(TEST='override'): self.assertEqual('override', settings.TEST) settings.TEST = 'test' self.assertRaises(AttributeError, getattr, settings, 'TEST') @override_settings(TEST='override') def test_decorator(self): self.assertEqual('override', settings.TEST) def test_context_manager(self): self.assertRaises(AttributeError, getattr, settings, 'TEST') override = override_settings(TEST='override') self.assertRaises(AttributeError, getattr, settings, 'TEST') override.enable() self.assertEqual('override', settings.TEST) override.disable() self.assertRaises(AttributeError, getattr, settings, 'TEST') def test_class_decorator(self): # SimpleTestCase can be decorated by override_settings, but not ut.TestCase class SimpleTestCaseSubclass(SimpleTestCase): pass class UnittestTestCaseSubclass(unittest.TestCase): pass decorated = override_settings(TEST='override')(SimpleTestCaseSubclass) self.assertIsInstance(decorated, type) self.assertTrue(issubclass(decorated, SimpleTestCase)) with six.assertRaisesRegex(self, Exception, "Only subclasses of Django SimpleTestCase*"): decorated = override_settings(TEST='override')(UnittestTestCaseSubclass) def test_signal_callback_context_manager(self): self.assertRaises(AttributeError, getattr, settings, 'TEST') with self.settings(TEST='override'): self.assertEqual(self.testvalue, 'override') self.assertEqual(self.testvalue, None) @override_settings(TEST='override') def test_signal_callback_decorator(self): self.assertEqual(self.testvalue, 'override') # # Regression tests for #10130: deleting settings. 
# def test_settings_delete(self): settings.TEST = 'test' self.assertEqual('test', settings.TEST) del settings.TEST self.assertRaises(AttributeError, getattr, settings, 'TEST') def test_settings_delete_wrapped(self): self.assertRaises(TypeError, delattr, settings, '_wrapped') def test_override_settings_delete(self): """ Allow deletion of a setting in an overridden settings set (#18824) """ previous_i18n = settings.USE_I18N previous_l10n = settings.USE_L10N with self.settings(USE_I18N=False): del settings.USE_I18N self.assertRaises(AttributeError, getattr, settings, 'USE_I18N') # Should also work for a non-overridden setting del settings.USE_L10N self.assertRaises(AttributeError, getattr, settings, 'USE_L10N') self.assertEqual(settings.USE_I18N, previous_i18n) self.assertEqual(settings.USE_L10N, previous_l10n) def test_override_settings_nested(self): """ Test that override_settings uses the actual _wrapped attribute at runtime, not when it was instantiated. """ self.assertRaises(AttributeError, getattr, settings, 'TEST') self.assertRaises(AttributeError, getattr, settings, 'TEST2') inner = override_settings(TEST2='override') with override_settings(TEST='override'): self.assertEqual('override', settings.TEST) with inner: self.assertEqual('override', settings.TEST) self.assertEqual('override', settings.TEST2) # inner's __exit__ should have restored the settings of the outer # context manager, not those when the class was instantiated self.assertEqual('override', settings.TEST) self.assertRaises(AttributeError, getattr, settings, 'TEST2') self.assertRaises(AttributeError, getattr, settings, 'TEST') self.assertRaises(AttributeError, getattr, settings, 'TEST2') class TestComplexSettingOverride(SimpleTestCase): def setUp(self): self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy() signals.COMPLEX_OVERRIDE_SETTINGS.add('TEST_WARN') def tearDown(self): signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings self.assertNotIn('TEST_WARN', signals.COMPLEX_OVERRIDE_SETTINGS) def test_complex_override_warning(self): """Regression test for #19031""" with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") with override_settings(TEST_WARN='override'): self.assertEqual(settings.TEST_WARN, 'override') self.assertEqual(len(w), 1) # File extension may be .py, .pyc, etc. Compare only basename. self.assertEqual(os.path.splitext(w[0].filename)[0], os.path.splitext(__file__)[0]) self.assertEqual(str(w[0].message), 'Overriding setting TEST_WARN can lead to unexpected behavior.') class TrailingSlashURLTests(SimpleTestCase): """ Tests for the MEDIA_URL and STATIC_URL settings. They must end with a slash to ensure there's a deterministic way to build paths in templates. """ settings_module = settings def setUp(self): self._original_media_url = self.settings_module.MEDIA_URL self._original_static_url = self.settings_module.STATIC_URL def tearDown(self): self.settings_module.MEDIA_URL = self._original_media_url self.settings_module.STATIC_URL = self._original_static_url def test_blank(self): """ The empty string is accepted, even though it doesn't end in a slash. """ self.settings_module.MEDIA_URL = '' self.assertEqual('', self.settings_module.MEDIA_URL) self.settings_module.STATIC_URL = '' self.assertEqual('', self.settings_module.STATIC_URL) def test_end_slash(self): """ It works if the value ends in a slash.
""" self.settings_module.MEDIA_URL = '/foo/' self.assertEqual('/foo/', self.settings_module.MEDIA_URL) self.settings_module.MEDIA_URL = 'http://media.foo.com/' self.assertEqual('http://media.foo.com/', self.settings_module.MEDIA_URL) self.settings_module.STATIC_URL = '/foo/' self.assertEqual('/foo/', self.settings_module.STATIC_URL) self.settings_module.STATIC_URL = 'http://static.foo.com/' self.assertEqual('http://static.foo.com/', self.settings_module.STATIC_URL) def test_no_end_slash(self): """ An ImproperlyConfigured exception is raised if the value doesn't end in a slash. """ with self.assertRaises(ImproperlyConfigured): self.settings_module.MEDIA_URL = '/foo' with self.assertRaises(ImproperlyConfigured): self.settings_module.MEDIA_URL = 'http://media.foo.com' with self.assertRaises(ImproperlyConfigured): self.settings_module.STATIC_URL = '/foo' with self.assertRaises(ImproperlyConfigured): self.settings_module.STATIC_URL = 'http://static.foo.com' def test_double_slash(self): """ If the value ends in more than one slash, presume they know what they're doing. """ self.settings_module.MEDIA_URL = '/wrong//' self.assertEqual('/wrong//', self.settings_module.MEDIA_URL) self.settings_module.MEDIA_URL = 'http://media.foo.com/wrong//' self.assertEqual('http://media.foo.com/wrong//', self.settings_module.MEDIA_URL) self.settings_module.STATIC_URL = '/wrong//' self.assertEqual('/wrong//', self.settings_module.STATIC_URL) self.settings_module.STATIC_URL = 'http://static.foo.com/wrong//' self.assertEqual('http://static.foo.com/wrong//', self.settings_module.STATIC_URL) class SecureProxySslHeaderTest(SimpleTestCase): settings_module = settings def setUp(self): self._original_setting = self.settings_module.SECURE_PROXY_SSL_HEADER def tearDown(self): self.settings_module.SECURE_PROXY_SSL_HEADER = self._original_setting def test_none(self): self.settings_module.SECURE_PROXY_SSL_HEADER = None req = HttpRequest() self.assertEqual(req.is_secure(), False) def test_set_without_xheader(self): self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https') req = HttpRequest() self.assertEqual(req.is_secure(), False) def test_set_with_xheader_wrong(self): self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https') req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'wrongvalue' self.assertEqual(req.is_secure(), False) def test_set_with_xheader_right(self): self.settings_module.SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https') req = HttpRequest() req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'https' self.assertEqual(req.is_secure(), True) class IsOverriddenTest(SimpleTestCase): def test_configure(self): s = LazySettings() s.configure(SECRET_KEY='foo') self.assertTrue(s.is_overridden('SECRET_KEY')) def test_module(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' sys.modules['fake_settings_module'] = settings_module try: s = Settings('fake_settings_module') self.assertTrue(s.is_overridden('SECRET_KEY')) self.assertFalse(s.is_overridden('ALLOWED_HOSTS')) finally: del sys.modules['fake_settings_module'] def test_override(self): self.assertFalse(settings.is_overridden('ALLOWED_HOSTS')) with override_settings(ALLOWED_HOSTS=[]): self.assertTrue(settings.is_overridden('ALLOWED_HOSTS')) def test_unevaluated_lazysettings_repr(self): lazy_settings = LazySettings() expected = '<LazySettings [Unevaluated]>' self.assertEqual(repr(lazy_settings), expected) def test_evaluated_lazysettings_repr(self): lazy_settings 
= LazySettings() module = os.environ.get(ENVIRONMENT_VARIABLE) expected = '<LazySettings "%s">' % module # Force evaluation of the lazy object. lazy_settings.APPEND_SLASH self.assertEqual(repr(lazy_settings), expected) def test_usersettingsholder_repr(self): lazy_settings = LazySettings() lazy_settings.configure(APPEND_SLASH=False) expected = '<UserSettingsHolder>' self.assertEqual(repr(lazy_settings._wrapped), expected) def test_settings_repr(self): module = os.environ.get(ENVIRONMENT_VARIABLE) lazy_settings = Settings(module) expected = '<Settings "%s">' % module self.assertEqual(repr(lazy_settings), expected) class TestListSettings(unittest.TestCase): """ Make sure settings that should be lists or tuples throw ImproperlyConfigured if they are set to a string instead of a list or tuple. """ list_or_tuple_settings = ( "INSTALLED_APPS", "TEMPLATE_DIRS", "LOCALE_PATHS", ) def test_tuple_settings(self): settings_module = ModuleType('fake_settings_module') settings_module.SECRET_KEY = 'foo' for setting in self.list_or_tuple_settings: setattr(settings_module, setting, ('non_list_or_tuple_value')) sys.modules['fake_settings_module'] = settings_module try: with self.assertRaises(ImproperlyConfigured): Settings('fake_settings_module') finally: del sys.modules['fake_settings_module'] delattr(settings_module, setting)
bsd-3-clause
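The tests above drive LazySettings by hand; in ordinary application test code the supported pattern is override_settings, used as a decorator or a context manager. A minimal sketch (setting values are illustrative):

from django.conf import settings
from django.test import SimpleTestCase, override_settings


class OverrideExampleTest(SimpleTestCase):
    @override_settings(MEDIA_URL='http://media.example.com/')
    def test_decorator_form(self):
        # The override applies only inside this test method.
        self.assertEqual(settings.MEDIA_URL, 'http://media.example.com/')

    def test_context_manager_form(self):
        with override_settings(STATIC_URL='/assets/'):
            self.assertTrue(settings.is_overridden('STATIC_URL'))
        # Rolled back automatically on exit.
        self.assertFalse(settings.is_overridden('STATIC_URL'))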
MediaKraken/MediaKraken_Deployment
source/common/common_libblueray.py
1
1325
""" Copyright (C) 2017 Quinn D Granfor <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License version 2, as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License version 2 for more details. You should have received a copy of the GNU General Public License version 2 along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. """ import bluread # help(bluread) # https://github.com/cmlburnett/PyBluRead/blob/master/setup.py # sudo apt-get install libbluray-dev def com_bray_read_titles(drive_name): track_data = [] with bluread.Bluray(drive_name) as b: b.Open() # print(("Volume ID: %s" % b.VolumeId), flush=True) # print(("Org ID: %s" % b.OrgId), flush=True) for i in range(b.NumberOfTitles): t = b.GetTitle(i) track_data.append((i, t.NumberOfAngles, t.NumberOfChapters, t.NumberOfClips, t.LengthFancy)) return track_data
gpl-3.0
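A minimal sketch of calling com_bray_read_titles; the import path and device node are assumptions, and libbluray plus the bluread bindings must be installed:

from common import common_libblueray  # import path assumed from the repo layout

# '/dev/sr0' is a typical Linux optical drive node; adjust for your system.
for title in common_libblueray.com_bray_read_titles('/dev/sr0'):
    index, angles, chapters, clips, length = title
    print('title %d: %d angle(s), %d chapter(s), %d clip(s), %s' %
          (index, angles, chapters, clips, length))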
wong2/sentry
tests/sentry/api/endpoints/test_release_files.py
26
2450
from __future__ import absolute_import

from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse

from sentry.models import File, Release, ReleaseFile
from sentry.testutils import APITestCase


class ReleaseFilesListTest(APITestCase):
    def test_simple(self):
        project = self.create_project(name='foo')

        release = Release.objects.create(
            project=project,
            version='1',
        )

        releasefile = ReleaseFile.objects.create(
            project=project,
            release=release,
            file=File.objects.create(
                path='http://example.com',
                name='application.js',
                type='release.file',
            ),
            name='http://example.com/application.js'
        )

        url = reverse('sentry-api-0-release-files', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
            'version': release.version,
        })

        self.login_as(user=self.user)

        response = self.client.get(url)

        assert response.status_code == 200, response.content
        assert len(response.data) == 1
        assert response.data[0]['id'] == str(releasefile.id)


class ReleaseFileCreateTest(APITestCase):
    def test_simple(self):
        project = self.create_project(name='foo')

        release = Release.objects.create(
            project=project,
            version='1',
        )

        url = reverse('sentry-api-0-release-files', kwargs={
            'organization_slug': project.organization.slug,
            'project_slug': project.slug,
            'version': release.version,
        })

        self.login_as(user=self.user)

        response = self.client.post(url, {
            'name': 'http://example.com/application.js',
            'header': 'X-SourceMap: http://example.com',
            'file': SimpleUploadedFile('application.js', 'function() { }',
                                       content_type='application/javascript'),
        }, format='multipart')

        assert response.status_code == 201, response.content

        releasefile = ReleaseFile.objects.get(release=release)
        assert releasefile.name == 'http://example.com/application.js'
        assert releasefile.file.headers == {
            'Content-Type': 'application/javascript',
            'X-SourceMap': 'http://example.com',
        }
bsd-3-clause
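Both tests above fake the uploaded file with Django's SimpleUploadedFile helper; a standalone sketch of just that helper, with illustrative values:

from django.core.files.uploadedfile import SimpleUploadedFile

# An in-memory stand-in for a file arriving via request.FILES.
upload = SimpleUploadedFile('application.js', b'function() { }',
                            content_type='application/javascript')
print(upload.name, upload.size, upload.content_type)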
sasukeh/neutron
neutron/tests/unit/agent/metadata/test_driver.py
16
6206
# Copyright 2014 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import mock
from oslo_config import cfg
from oslo_utils import uuidutils

from neutron.agent.common import config as agent_config
from neutron.agent.l3 import agent as l3_agent
from neutron.agent.l3 import config as l3_config
from neutron.agent.l3 import ha as l3_ha_agent
from neutron.agent.metadata import config
from neutron.agent.metadata import driver as metadata_driver
from neutron.common import constants
from neutron.tests import base

_uuid = uuidutils.generate_uuid


class TestMetadataDriverRules(base.BaseTestCase):

    def test_metadata_nat_rules(self):
        rules = ('PREROUTING', '-d 169.254.169.254/32 -i qr-+ '
                 '-p tcp -m tcp --dport 80 -j REDIRECT --to-port 8775')
        self.assertEqual(
            [rules],
            metadata_driver.MetadataDriver.metadata_nat_rules(8775))

    def test_metadata_filter_rules(self):
        rules = [('INPUT', '-m mark --mark 0x1/%s -j ACCEPT' %
                  constants.ROUTER_MARK_MASK),
                 ('INPUT', '-p tcp -m tcp --dport 8775 -j DROP')]
        self.assertEqual(
            rules,
            metadata_driver.MetadataDriver.metadata_filter_rules(8775, '0x1'))

    def test_metadata_mangle_rules(self):
        rule = ('PREROUTING', '-d 169.254.169.254/32 -i qr-+ '
                '-p tcp -m tcp --dport 80 '
                '-j MARK --set-xmark 0x1/%s' %
                constants.ROUTER_MARK_MASK)
        self.assertEqual(
            [rule],
            metadata_driver.MetadataDriver.metadata_mangle_rules('0x1'))


class TestMetadataDriverProcess(base.BaseTestCase):

    EUID = 123
    EGID = 456
    EUNAME = 'neutron'

    def setUp(self):
        super(TestMetadataDriverProcess, self).setUp()
        mock.patch('eventlet.spawn').start()
        agent_config.register_interface_driver_opts_helper(cfg.CONF)
        cfg.CONF.set_override('interface_driver',
                              'neutron.agent.linux.interface.NullDriver')
        agent_config.register_use_namespaces_opts_helper(cfg.CONF)

        mock.patch('neutron.agent.l3.agent.L3PluginApi').start()
        mock.patch('neutron.agent.l3.ha.AgentMixin'
                   '._init_ha_conf_path').start()

        cfg.CONF.register_opts(l3_config.OPTS)
        cfg.CONF.register_opts(l3_ha_agent.OPTS)
        cfg.CONF.register_opts(config.SHARED_OPTS)
        cfg.CONF.register_opts(config.DRIVER_OPTS)

    def _test_spawn_metadata_proxy(self, expected_user, expected_group,
                                   user='', group='', watch_log=True):
        router_id = _uuid()
        router_ns = 'qrouter-%s' % router_id
        metadata_port = 8080
        ip_class_path = 'neutron.agent.linux.ip_lib.IPWrapper'
        is_effective_user = 'neutron.agent.linux.utils.is_effective_user'
        fake_is_effective_user = lambda x: x in [self.EUNAME, str(self.EUID)]

        cfg.CONF.set_override('metadata_proxy_user', user)
        cfg.CONF.set_override('metadata_proxy_group', group)
        cfg.CONF.set_override('log_file', 'test.log')
        cfg.CONF.set_override('debug', True)

        agent = l3_agent.L3NATAgent('localhost')
        with mock.patch('os.geteuid', return_value=self.EUID),\
                mock.patch('os.getegid', return_value=self.EGID),\
                mock.patch(is_effective_user,
                           side_effect=fake_is_effective_user),\
                mock.patch(ip_class_path) as ip_mock:
            agent.metadata_driver.spawn_monitored_metadata_proxy(
                agent.process_monitor,
                router_ns,
                metadata_port,
                agent.conf,
                router_id=router_id)
            netns_execute_args = [
                'neutron-ns-metadata-proxy',
                mock.ANY,
                mock.ANY,
                '--router_id=%s' % router_id,
                mock.ANY,
                '--metadata_port=%s' % metadata_port,
                '--metadata_proxy_user=%s' % expected_user,
                '--metadata_proxy_group=%s' % expected_group,
                '--debug',
                '--verbose',
                '--log-file=neutron-ns-metadata-proxy-%s.log' %
                router_id]
            if not watch_log:
                netns_execute_args.append(
                    '--nometadata_proxy_watch_log')
            ip_mock.assert_has_calls([
                mock.call(namespace=router_ns),
                mock.call().netns.execute(netns_execute_args, addl_env=None,
                                          run_as_root=False)
            ])

    def test_spawn_metadata_proxy_with_agent_user(self):
        self._test_spawn_metadata_proxy(
            self.EUNAME, str(self.EGID), user=self.EUNAME)

    def test_spawn_metadata_proxy_with_nonagent_user(self):
        self._test_spawn_metadata_proxy(
            'notneutron', str(self.EGID), user='notneutron', watch_log=False)

    def test_spawn_metadata_proxy_with_agent_uid(self):
        self._test_spawn_metadata_proxy(
            str(self.EUID), str(self.EGID), user=str(self.EUID))

    def test_spawn_metadata_proxy_with_nonagent_uid(self):
        self._test_spawn_metadata_proxy(
            '321', str(self.EGID), user='321', watch_log=False)

    def test_spawn_metadata_proxy_with_group(self):
        self._test_spawn_metadata_proxy(str(self.EUID), 'group', group='group')

    def test_spawn_metadata_proxy_with_gid(self):
        self._test_spawn_metadata_proxy(str(self.EUID), '654', group='654')

    def test_spawn_metadata_proxy(self):
        self._test_spawn_metadata_proxy(str(self.EUID), str(self.EGID))
apache-2.0
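TestMetadataDriverRules above pins down the exact iptables fragments the driver emits; the same classmethods can be called directly, as in this minimal sketch (port and mark values taken from the tests):

from neutron.agent.metadata import driver as metadata_driver

# NAT rule redirecting metadata traffic (169.254.169.254:80) to the proxy port.
for chain, rule in metadata_driver.MetadataDriver.metadata_nat_rules(8775):
    print(chain, rule)

# Filter rules accepting marked traffic and dropping direct access to the port.
for chain, rule in metadata_driver.MetadataDriver.metadata_filter_rules(8775, '0x1'):
    print(chain, rule)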
wathen/PhD
MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/ParamTests/MHDmatrixPrecondSetup.py
7
6639
from dolfin import *
import petsc4py
import sys

petsc4py.init(sys.argv)

from petsc4py import PETSc
import HiptmairSetup
import time
import MatrixOperations as MO
import NSprecondSetup


def FluidLinearSetup(Pressure, mu):
    MO.PrintStr("Preconditioning Fluid linear setup", 3, "=", "\n\n")
    parameters['linear_algebra_backend'] = 'uBLAS'
    p = TrialFunction(Pressure)
    q = TestFunction(Pressure)

    N = FacetNormal(Pressure.mesh())
    h = CellSize(Pressure.mesh())
    h_avg = avg(h)
    alpha = 10.0
    gamma = 10.0

    tic()
    L = assemble(mu*(inner(grad(q), grad(p))*dx(Pressure.mesh())
                 - inner(avg(grad(q)), outer(p('+'), N('+')) + outer(p('-'), N('-')))*dS(Pressure.mesh())
                 - inner(outer(q('+'), N('+')) + outer(q('-'), N('-')), avg(grad(p)))*dS(Pressure.mesh())
                 + alpha/h_avg*inner(outer(q('+'), N('+')) + outer(q('-'), N('-')),
                                     outer(p('+'), N('+')) + outer(p('-'), N('-')))*dS(Pressure.mesh())
                 - inner(outer(q, N), grad(p))*ds(Pressure.mesh())
                 - inner(grad(q), outer(p, N))*ds(Pressure.mesh())
                 + gamma/h*inner(q, p)*ds(Pressure.mesh())))
    L = PETSc.Mat().createAIJ(size=L.sparray().shape,
                              csr=(L.sparray().indptr, L.sparray().indices, L.sparray().data))
    print ("{:40}").format("DG scalar Laplacian assemble, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    tic()
    Q = assemble((1/mu)*inner(p, q)*dx)
    Q = PETSc.Mat().createAIJ(size=Q.sparray().shape,
                              csr=(Q.sparray().indptr, Q.sparray().indices, Q.sparray().data))
    print ("{:40}").format("DG scalar mass matrix assemble, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    tic()
    kspA, kspQ = NSprecondSetup.PCDKSPlinear(Q, L)
    print ("{:40}").format("Linear fluid precond setup, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    return [kspA, kspQ], [L, Q]


def FluidNonLinearSetup(Pressure, mu, u_k):
    MO.PrintStr("Preconditioning Fluid nonlinear setup", 3, "=")
    parameters['linear_algebra_backend'] = 'uBLAS'
    p = TrialFunction(Pressure)
    q = TestFunction(Pressure)
    mesh = Pressure.mesh()

    N = FacetNormal(Pressure.mesh())
    h = CellSize(Pressure.mesh())
    h_avg = avg(h)
    alpha = 10.0
    gamma = 10.0

    tic()
    # 0.5 is used instead of (1/2), which is 0 under Python 2 integer division.
    Fp = assemble(mu*(inner(grad(q), grad(p))*dx(mesh)
                  - inner(avg(grad(q)), outer(p('+'), N('+')) + outer(p('-'), N('-')))*dS(Pressure.mesh())
                  - inner(outer(q('+'), N('+')) + outer(q('-'), N('-')), avg(grad(p)))*dS(Pressure.mesh())
                  + alpha/h_avg*inner(outer(q('+'), N('+')) + outer(q('-'), N('-')),
                                      outer(p('+'), N('+')) + outer(p('-'), N('-')))*dS(Pressure.mesh())
                  - inner(outer(q, N), grad(p))*ds(mesh)
                  - inner(grad(q), outer(p, N))*ds(mesh)
                  + gamma/h*inner(q, p)*ds(mesh))
                  + inner(inner(grad(p), u_k), q)*dx(mesh)
                  - 0.5*inner(u_k, N)*inner(q, p)*ds(mesh)
                  - 0.5*(inner(u_k('+'), N('+')) + inner(u_k('-'), N('-')))*avg(inner(q, p))*ds(mesh)
                  - dot(avg(q), dot(outer(p('+'), N('+')) + outer(p('-'), N('-')), avg(u_k)))*dS(Pressure.mesh()))
    Fp = PETSc.Mat().createAIJ(size=Fp.sparray().shape,
                               csr=(Fp.sparray().indptr, Fp.sparray().indices, Fp.sparray().data))
    print ("{:40}").format("DG convection-diffusion assemble, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    tic()
    kspFp = NSprecondSetup.PCDKSPnonlinear(Fp)
    print ("{:40}").format("Non-linear fluid precond, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])
    print "\n\n"

    return kspFp, Fp


def MagneticSetup(Magnetic, Lagrange, u0, p0, CGtol, params):
    MO.PrintStr("Preconditioning Magnetic setup", 3, "=")
    parameters['linear_algebra_backend'] = 'uBLAS'

    C, P = HiptmairSetup.HiptmairMatrixSetupBoundary(Magnetic.mesh(), Magnetic.dim(), Lagrange.dim(), Magnetic.mesh().geometry().dim())
    G, P = HiptmairSetup.HiptmairBCsetupBoundary(C, P, Magnetic.mesh())

    u = TrialFunction(Magnetic)
    v = TestFunction(Magnetic)
    p = TrialFunction(Lagrange)
    q = TestFunction(Lagrange)

    def boundary(x, on_boundary):
        return on_boundary

    bcp = DirichletBC(Lagrange, p0, boundary)
    bcu = DirichletBC(Magnetic, u0, boundary)

    tic()
    ScalarLaplacian, b1 = assemble_system(inner(grad(p), grad(q))*dx, inner(p0, q)*dx, bcp)
    VectorLaplacian, b2 = assemble_system(inner(grad(p), grad(q))*dx + inner(p, q)*dx, inner(p0, q)*dx, bcp)
    del b1, b2
    print ("{:40}").format("Hiptmair Laplacians BC assembled, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    tic()
    VectorLaplacian = PETSc.Mat().createAIJ(size=VectorLaplacian.sparray().shape,
                                            csr=(VectorLaplacian.sparray().indptr, VectorLaplacian.sparray().indices, VectorLaplacian.sparray().data))
    ScalarLaplacian = PETSc.Mat().createAIJ(size=ScalarLaplacian.sparray().shape,
                                            csr=(ScalarLaplacian.sparray().indptr, ScalarLaplacian.sparray().indices, ScalarLaplacian.sparray().data))
    print ("{:40}").format("PETSc Laplacians assembled, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    tic()
    CurlCurlShift, b2 = assemble_system(params[0]*params[1]*inner(curl(u), curl(v))*dx + inner(u, v)*dx, inner(u0, v)*dx, bcu)
    CurlCurlShift = PETSc.Mat().createAIJ(size=CurlCurlShift.sparray().shape,
                                          csr=(CurlCurlShift.sparray().indptr, CurlCurlShift.sparray().indices, CurlCurlShift.sparray().data))
    print ("{:40}").format("Shifted Curl-Curl assembled, time: "), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    tic()
    kspVector, kspScalar, kspCGScalar, diag = HiptmairSetup.HiptmairKSPsetup(VectorLaplacian, ScalarLaplacian, CurlCurlShift, CGtol)
    del VectorLaplacian, ScalarLaplacian
    print ("{:40}").format("Hiptmair Setup time:"), " ==> ", ("{:4f}").format(toc()), ("{:9}").format(" time: "), ("{:4}").format(time.strftime('%X %x %Z')[0:5])

    return [G, P, kspVector, kspScalar, kspCGScalar, diag, CurlCurlShift]
mit
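A sketch of how FluidLinearSetup above might be driven; the mesh, space, and viscosity are illustrative assumptions, and the module's legacy DOLFIN/petsc4py environment is assumed to be importable:

from dolfin import UnitSquareMesh, FunctionSpace, Constant
import MHDmatrixPrecondSetup as PrecondSetup

# Illustrative problem setup; the module assembles DG operators,
# so a discontinuous pressure space is used.
mesh = UnitSquareMesh(16, 16)
Pressure = FunctionSpace(mesh, 'DG', 1)
mu = Constant(1.0)  # viscosity (assumed value)

# Returns ([kspA, kspQ], [L, Q]): two PETSc KSP solvers plus the
# assembled Laplacian and mass matrices.
(kspA, kspQ), (L, Q) = PrecondSetup.FluidLinearSetup(Pressure, mu)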
ghmajx/asuswrt-merlin
release/src/router/samba3/source/python/examples/spoolss/psec.py
55
2139
#!/usr/bin/env python
#
# Get or set the security descriptor on a printer
#

import sys, re, string
from samba import spoolss

if len(sys.argv) != 3:
    print "Usage: psec.py getsec|setsec printername"
    sys.exit(1)

op = sys.argv[1]
printername = sys.argv[2]

# Display security descriptor

if op == "getsec":

    try:
        hnd = spoolss.openprinter(printername)
    except:
        print "error opening printer %s" % printername
        sys.exit(1)

    secdesc = hnd.getprinter(level = 3)["security_descriptor"]

    print secdesc["owner_sid"]
    print secdesc["group_sid"]

    for acl in secdesc["dacl"]["ace_list"]:
        print "%d %d 0x%08x %s" % (acl["type"], acl["flags"], acl["mask"],
                                   acl["trustee"])

    spoolss.closeprinter(hnd)
    sys.exit(0)

# Set security descriptor

if op == "setsec":

    # Open printer

    try:
        hnd = spoolss.openprinter(printername,
                                  creds = {"domain": "NPSD-TEST2",
                                           "username": "Administrator",
                                           "password": "penguin"})
    except:
        print "error opening printer %s" % printername
        sys.exit(1)

    # Read lines from standard input and build security descriptor

    lines = sys.stdin.readlines()

    secdesc = {}

    secdesc["owner_sid"] = lines[0]
    secdesc["group_sid"] = lines[1]

    secdesc["revision"] = 1
    secdesc["dacl"] = {}
    secdesc["dacl"]["revision"] = 2
    secdesc["dacl"]["ace_list"] = []

    for acl in lines[2:]:
        match = re.match("(\d+) (\d+) (0[xX][\dA-Fa-f]+) (\S+)", acl)
        secdesc["dacl"]["ace_list"].append(
            {"type": int(match.group(1)),
             "flags": int(match.group(2)),
             "mask": string.atoi(match.group(3), 0),
             "trustee": match.group(4)})

    # Build info3 structure

    info3 = {}
    info3["flags"] = 0x8004  # self-relative, dacl present
    info3["level"] = 3
    info3["security_descriptor"] = secdesc

    hnd.setprinter(info3)

    spoolss.closeprinter(hnd)
    sys.exit(0)

print "invalid operation %s" % op
sys.exit(1)
gpl-2.0
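The setsec branch above expects a descriptor on standard input: owner SID on the first line, group SID on the second, then one ACE per line as "type flags mask trustee". A hedged sketch that generates such input (all SIDs and the mask are placeholders):

# Owner SID, group SID, then one ACE per line: "type flags mask trustee".
secdesc_lines = [
    'S-1-5-32-544',            # owner SID (placeholder)
    'S-1-5-32-544',            # group SID (placeholder)
    '0 0 0x000f000c S-1-1-0',  # allow ACE for Everyone (placeholder mask)
]
print('\n'.join(secdesc_lines))
# Then pipe the output into the script, e.g.:
#   python gen_secdesc.py | python psec.py setsec myprinter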
xiaom/zenodo
zenodo/modules/theme/views.py
7
1315
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.

"""Theme blueprint in order for template and static files to be loaded."""

from __future__ import absolute_import, print_function

from flask import Blueprint

blueprint = Blueprint(
    'zenodo_theme',
    __name__,
    template_folder='templates',
    static_folder='static',
)
"""Theme blueprint used to define template and static folders."""
gpl-2.0
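A minimal sketch of mounting such a blueprint on a Flask app (the factory shown is illustrative, not Zenodo's actual wiring):

from flask import Flask

from zenodo.modules.theme.views import blueprint


def create_app():
    app = Flask(__name__)
    # Exposes the blueprint's templates/ and static/ directories to Flask.
    app.register_blueprint(blueprint)
    return app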
zepto/musio
musio/mp4v2/_mp4v2_wrapper.py
1
10422
#!/usr/bin/env python
# vim: sw=4:ts=4:sts=4:fdm=indent:fdl=0:
# -*- coding: UTF8 -*-
#
# mp4v2 object oriented wrapper module.
# Copyright (C) 2010 Josiah Gordon <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

""" Wrap various mp4v2 functions into python classes. """

from functools import partial

from . import _mp4v2


class Mp4Handle(object):
    """ Wraps the mp4v2 mp4 file handle object. """

    def __init__(self, filename):
        """ Initializes and creates partial functions. """
        self._mp4_handle = _mp4v2.MP4FileHandle(_mp4v2.MP4Read(filename, 0))

        self.track_read_sample = partial(_mp4v2.MP4ReadSample, self._mp4_handle)
        self.track_es_configuration = partial(_mp4v2.MP4GetTrackESConfiguration, self._mp4_handle)
        self.track_type = partial(_mp4v2.MP4GetTrackType, self._mp4_handle)
        self.track_esds_object_type = partial(_mp4v2.MP4GetTrackEsdsObjectTypeId, self._mp4_handle)
        self.track_audio_mpeg4_type = partial(_mp4v2.MP4GetTrackAudioMpeg4Type, self._mp4_handle)
        self.track_sample_count = partial(_mp4v2.MP4GetTrackNumberOfSamples, self._mp4_handle)
        self.track_rate = partial(_mp4v2.MP4GetTrackBitRate, self._mp4_handle)
        self.track_name = partial(_mp4v2.MP4GetTrackName, self._mp4_handle)

        self._closed = False
        self._number_of_tracks = _mp4v2.MP4GetNumberOfTracks(self._mp4_handle, None, 0)

        self._tags = _mp4v2.MP4TagsAlloc()
        _mp4v2.MP4TagsFetch(self._tags, self._mp4_handle)

    def close(self):
        """ Close the mp4 handle. """
        if not self._closed:
            _mp4v2.MP4Close(self._mp4_handle, _mp4v2.MP4_CLOSE_DO_NOT_COMPUTE_BITRATE)
            _mp4v2.MP4TagsFree(self._tags)
            self._closed = True

    @property
    def tags(self):
        """ The mp4 tags. """
        return self._tags

    @property
    def track_count(self):
        """ The number of tracks in this mp4. """
        return self._number_of_tracks

    def tracks(self):
        """ Yield a track object for each track. """
        for track in range(1, self.track_count + 1):
            yield Mp4Track(self, track)

    def get_aac_track(self):
        """ Returns the AAC track in the mp4 if there is any, otherwise
        None. """
        # Tracks start at 1.
        for track in self.tracks():
            track_type = track.type.decode()

            # Only use audio tracks.
            if not track_type or not _mp4v2.MP4_IS_AUDIO_TRACK_TYPE(track_type):
                continue

            object_type = track.object_type

            # Only return audio if it is AAC encoded.
            if object_type == _mp4v2.MP4_MPEG4_AUDIO_TYPE:
                object_type = track.audio_mpeg4_type

                # Check for AAC encoding.
                if _mp4v2.MP4_IS_MPEG4_AAC_AUDIO_TYPE(object_type):
                    return track
            elif _mp4v2.MP4_IS_AAC_AUDIO_TYPE(object_type):
                return track

        # An invalid track.
        return None


class Mp4Track(object):
    """ Wraps the mp4v2 track object. """

    def __init__(self, mp4_handle, track=1):
        """ Initializes the track and creates partial functions. """
        self._mp4_handle = mp4_handle
        self._mp4_track = track
        self._mp4_track_id = _mp4v2.MP4TrackId(track)

        self._read_sample = partial(mp4_handle.track_read_sample, self._mp4_track_id)
        self._es_configuration = partial(mp4_handle.track_es_configuration, self._mp4_track_id)

        self._type = mp4_handle.track_type(self._mp4_track_id)
        self._esds_object_type = mp4_handle.track_esds_object_type(self._mp4_track_id)
        self._audio_mpeg4_type = mp4_handle.track_audio_mpeg4_type(self._mp4_track_id)
        self._sample_count = mp4_handle.track_sample_count(self._mp4_track_id)
        self._rate = mp4_handle.track_rate(self._mp4_track_id)

        name = _mp4v2.c_char_p()
        ret = mp4_handle.track_name(self._mp4_track_id, name)
        self._name = name

    @property
    def track_id(self):
        """ The mp4 track id. """
        return self._mp4_track_id

    def read_sample(self, sample_id):
        """ Return the sample and its size. """
        # Is this the last sample.
        last = (sample_id == self._sample_count)

        data_buffer = _mp4v2.POINTER(_mp4v2.c_uint8)()
        buffer_size = _mp4v2.c_uint32()

        # Don't read past the end of the file.
        if sample_id <= self._sample_count:
            self._read_sample(sample_id, _mp4v2.byref(data_buffer),
                              _mp4v2.byref(buffer_size), None, None, None,
                              None)

        # Return a sample object.
        return Mp4Sample(sample_id, data_buffer, buffer_size, last)

    def get_configuration(self):
        """ Return a buffer and size to use with faad init functions to find
        the sample rate and channels. """
        data_buffer = _mp4v2.POINTER(_mp4v2.c_ubyte)()
        buffer_size = _mp4v2.c_uint32()

        ret = self._es_configuration(_mp4v2.byref(data_buffer), buffer_size)

        # Reset the buffer and size if there was no configuration data.
        if not ret:
            data_buffer = _mp4v2.POINTER(_mp4v2.c_ubyte)()
            buffer_size = _mp4v2.c_uint32()

        return (data_buffer, _mp4v2.c_ulong(buffer_size.value))

    @property
    def sample_count(self):
        """ The number of samples in the track. """
        return self._sample_count

    @property
    def type(self):
        """ The type of the current track. """
        return self._type

    @property
    def object_type(self):
        """ The track object type. """
        return self._esds_object_type

    @property
    def audio_mpeg4_type(self):
        """ The type of mpeg4 audio for the track. """
        return self._audio_mpeg4_type


class Mp4Sample(object):
    """ An mp4 sample contains the data and size. """

    def __init__(self, sample_id, data, size, last=False):
        """ Initialize the sample. """
        self._mp4_sample_id = _mp4v2.MP4SampleId(sample_id)
        self._data = data
        self._size = size
        self._id = sample_id
        self._last = last

    @property
    def sample_id(self):
        """ The mp4 sample id. """
        return self._mp4_sample_id

    def islast(self):
        """ True if this is the last sample. """
        return self._last

    @property
    def id(self):
        """ The current sample id. """
        return self._id

    @property
    def data(self):
        """ The sample data. """
        return self._data

    @property
    def size(self):
        """ The size of the sample. """
        return self._size


class Mp4(object):
    """ Provides easy access to the AAC audio in mp4s. """

    def __init__(self, filename):
        """ Initialize class variables. """
        self._mp4_handle = Mp4Handle(filename)

        self._aac_track = self._mp4_handle.get_aac_track()
        if not self._aac_track:
            raise Exception("No AAC track in %s" % filename)

        self._sample_count = self._aac_track.sample_count
        self._current_sample = 1

    def close(self):
        """ Close the mp4. """
        self._mp4_handle.close()
        self._mp4_handle = None

    def get_tag_dict(self):
        """ Returns a dictionary of tags from the mp4 or an empty dict. """
        tag_dict = {}

        tags = self._mp4_handle.tags

        for i in dir(tags.contents):
            item = getattr(tags.contents, i)
            if item:
                if hasattr(item, 'contents'):
                    tag_dict[i] = item.contents
                elif hasattr(item, 'value'):
                    tag_dict[i] = item.value
                else:
                    tag_dict[i] = item
            # try:
            #     if item.contents:
            #         tag_dict[i] = item.contents
            # except Exception as err:
            #     try:
            #         if item.value:
            #             tag_dict[i] = item.value
            #     except Exception as err:
            #         if item:
            #             tag_dict[i] = item

        return tag_dict

    def get_configuration(self):
        """ Return a buffer and size to use with faad init functions to find
        the sample rate and channels. """
        return self._aac_track.get_configuration()

    def read(self):
        """ Read the next sample from the aac audio in the open mp4. """
        if not self._mp4_handle:
            return (b'', 0)

        sample = self._aac_track.read_sample(self._current_sample)
        self._current_sample += 1

        return sample

    @property
    def current_sample(self):
        """ The next sample to read. """
        return self._current_sample

    @current_sample.setter
    def current_sample(self, value):
        """ The next sample to read. """
        if value in range(1, self._sample_count):
            self._current_sample = value
        else:
            self._current_sample = 1

    @property
    def sample_count(self):
        """ Number of samples in the aac track. """
        return self._sample_count
gpl-3.0
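A sketch of the read loop the Mp4 class above is designed for (the filename is a placeholder; the bundled _mp4v2 ctypes bindings must be importable):

from musio.mp4v2._mp4v2_wrapper import Mp4

mp4 = Mp4('song.m4a')  # placeholder path; raises if no AAC track is found
try:
    print(mp4.get_tag_dict())
    # Feed to a faad-style decoder init to recover sample rate and channels.
    config_buffer, config_size = mp4.get_configuration()
    while mp4.current_sample <= mp4.sample_count:
        sample = mp4.read()
        # sample.data / sample.size hold the raw AAC frame for decoding.
        if sample.islast():
            break
finally:
    mp4.close()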
Medigate/cutiuta-server
cutiuta-server/env/lib/python3.4/site-packages/django/contrib/gis/geos/prototypes/misc.py
483
1300
""" This module is for the miscellaneous GEOS routines, particularly the ones that return the area, distance, and length. """ from ctypes import POINTER, c_double, c_int from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOSFuncFactory from django.contrib.gis.geos.prototypes.errcheck import check_dbl, check_string from django.contrib.gis.geos.prototypes.geom import geos_char_p from django.utils.six.moves import range __all__ = ['geos_area', 'geos_distance', 'geos_length', 'geos_isvalidreason'] class DblFromGeom(GEOSFuncFactory): """ Argument is a Geometry, return type is double that is passed in by reference as the last argument. """ restype = c_int # Status code returned errcheck = staticmethod(check_dbl) def get_func(self, num_geom=1): argtypes = [GEOM_PTR for i in range(num_geom)] argtypes += [POINTER(c_double)] self.argtypes = argtypes return super(DblFromGeom, self).get_func() # ### ctypes prototypes ### # Area, distance, and length prototypes. geos_area = DblFromGeom('GEOSArea') geos_distance = DblFromGeom('GEOSDistance', num_geom=2) geos_length = DblFromGeom('GEOSLength') geos_isvalidreason = GEOSFuncFactory( 'GEOSisValidReason', restype=geos_char_p, errcheck=check_string, argtypes=[GEOM_PTR] )
gpl-3.0
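These prototypes are consumed internally by GEOSGeometry rather than called directly; a small sketch of the public properties they back (requires the GEOS library that Django's GIS support wraps; coordinates are illustrative):

from django.contrib.gis.geos import Polygon

square = Polygon(((0, 0), (0, 2), (2, 2), (2, 0), (0, 0)))
other = Polygon(((5, 5), (5, 6), (6, 6), (6, 5), (5, 5)))

print(square.area)            # backed by geos_area
print(square.length)          # backed by geos_length
print(square.distance(other))  # backed by geos_distance
print(square.valid_reason)    # backed by geos_isvalidreason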
d3trax/kubernetes
cluster/juju/charms/trusty/kubernetes/unit_tests/lib/test_registrator.py
232
2215
#!/usr/bin/env python

# Copyright 2015 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
from mock import MagicMock, patch, call
from path import Path
import pytest
import sys

d = Path('__file__').parent.abspath() / 'hooks'
sys.path.insert(0, d.abspath())

from lib.registrator import Registrator


class TestRegistrator():

    def setup_method(self, method):
        self.r = Registrator()

    def test_data_type(self):
        if type(self.r.data) is not dict:
            pytest.fail("Invalid type")

    @patch('json.loads')
    @patch('httplib.HTTPConnection')
    def test_register(self, httplibmock, jsonmock):
        result = self.r.register('foo', 80, '/v1/test')

        httplibmock.assert_called_with('foo', 80)
        requestmock = httplibmock().request
        requestmock.assert_called_with(
            "POST", "/v1/test",
            json.dumps(self.r.data),
            {"Content-type": "application/json",
             "Accept": "application/json"})

    def test_command_succeeded(self):
        response = MagicMock()
        result = json.loads('{"status": "Failure", "kind": "Status", "code": 409, "apiVersion": "v1", "reason": "AlreadyExists", "details": {"kind": "node", "name": "10.200.147.200"}, "message": "node \\"10.200.147.200\\" already exists", "creationTimestamp": null}')

        response.status = 200
        self.r.command_succeeded(response, result)

        response.status = 500
        with pytest.raises(RuntimeError):
            self.r.command_succeeded(response, result)

        response.status = 409
        with pytest.raises(ValueError):
            self.r.command_succeeded(response, result)
apache-2.0
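A hedged sketch of the call contract the mocks above imply; the host, port, and path are placeholders, and the surrounding charm hook environment is assumed to be on sys.path:

from lib.registrator import Registrator

r = Registrator()
# POSTs r.data as JSON to the given host, port, and API path.
result = r.register('10.0.0.1', 8080, '/api/v1/nodes')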
ClaudeZoo/volatility
volatility/plugins/gui/gditimers.py
44
2791
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
# Copyright (C) 2010,2011,2012 Michael Hale Ligh <[email protected]>
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#

import volatility.plugins.common as common
import volatility.utils as utils
import volatility.plugins.gui.sessions as sessions


class GDITimers(common.AbstractWindowsCommand, sessions.SessionsMixin):
    """Print installed GDI timers and callbacks"""

    def calculate(self):
        kernel_as = utils.load_as(self._config)

        for session in self.session_spaces(kernel_as):
            shared_info = session.find_shared_info()
            if not shared_info:
                continue

            filters = [lambda x: str(x.bType) == "TYPE_TIMER"]

            for handle in shared_info.handles(filters):
                timer = handle.reference_object()
                yield session, handle, timer

    def render_text(self, outfd, data):
        self.table_header(outfd,
                          [("Sess", "^6"),
                           ("Handle", "[addr]"),
                           ("Object", "[addrpad]"),
                           ("Thread", "8"),
                           ("Process", "20"),
                           ("nID", "[addr]"),
                           ("Rate(ms)", "10"),
                           ("Countdown(ms)", "10"),
                           ("Func", "[addrpad]"),
                           ])

        for session, handle, timer in data:
            # Get the process info from the object handle header if
            # available, otherwise from the timer object itself.
            p = handle.Process or timer.pti.ppi.Process
            process = "{0}:{1}".format(p.ImageFileName, p.UniqueProcessId)

            self.table_row(outfd,
                           session.SessionId,
                           handle.phead.h,
                           timer.obj_offset,
                           timer.pti.pEThread.Cid.UniqueThread,
                           process,
                           timer.nID,
                           timer.cmsRate,
                           timer.cmsCountdown,
                           timer.pfn)
gpl-2.0
danlrobertson/servo
components/script/dom/bindings/codegen/CodegenRust.py
1
301911
# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at https://mozilla.org/MPL/2.0/. # Common codegen classes. from collections import defaultdict from itertools import groupby import operator import os import re import string import textwrap import functools from WebIDL import ( BuiltinTypes, IDLBuiltinType, IDLEmptySequenceValue, IDLInterfaceMember, IDLNullableType, IDLNullValue, IDLObject, IDLPromiseType, IDLType, IDLUndefinedValue, IDLWrapperType, ) from Configuration import ( MakeNativeName, MemberIsUnforgeable, getModuleFromObject, getTypesFromCallback, getTypesFromDescriptor, getTypesFromDictionary, iteratorNativeType ) AUTOGENERATED_WARNING_COMMENT = \ "/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n" FINALIZE_HOOK_NAME = '_finalize' TRACE_HOOK_NAME = '_trace' CONSTRUCT_HOOK_NAME = '_constructor' HASINSTANCE_HOOK_NAME = '_hasInstance' RUST_KEYWORDS = {"abstract", "alignof", "as", "become", "box", "break", "const", "continue", "else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in", "let", "loop", "macro", "match", "mod", "move", "mut", "offsetof", "override", "priv", "proc", "pub", "pure", "ref", "return", "static", "self", "sizeof", "struct", "super", "true", "trait", "type", "typeof", "unsafe", "unsized", "use", "virtual", "where", "while", "yield"} def replaceFileIfChanged(filename, newContents): """ Read a copy of the old file, so that we don't touch it if it hasn't changed. Returns True if the file was updated, false otherwise. """ # XXXjdm This doesn't play well with make right now. # Force the file to always be updated, or else changing CodegenRust.py # will cause many autogenerated bindings to be regenerated perpetually # until the result is actually different. 
# oldFileContents = "" # try: # with open(filename, 'rb') as oldFile: # oldFileContents = ''.join(oldFile.readlines()) # except: # pass # if newContents == oldFileContents: # return False with open(filename, 'wb') as f: f.write(newContents) return True def toStringBool(arg): return str(not not arg).lower() def toBindingNamespace(arg): return re.sub("((_workers)?$)", "Binding\\1", MakeNativeName(arg)) def stripTrailingWhitespace(text): tail = '\n' if text.endswith('\n') else '' lines = text.splitlines() for i in range(len(lines)): lines[i] = lines[i].rstrip() return '\n'.join(lines) + tail def innerContainerType(type): assert type.isSequence() or type.isRecord() return type.inner.inner if type.nullable() else type.inner def wrapInNativeContainerType(type, inner): if type.isSequence(): containerType = "Vec" elif type.isRecord(): containerType = "MozMap" else: raise TypeError("Unexpected container type %s", type) return CGWrapper(inner, pre=containerType + "<", post=">") builtinNames = { IDLType.Tags.bool: 'bool', IDLType.Tags.int8: 'i8', IDLType.Tags.int16: 'i16', IDLType.Tags.int32: 'i32', IDLType.Tags.int64: 'i64', IDLType.Tags.uint8: 'u8', IDLType.Tags.uint16: 'u16', IDLType.Tags.uint32: 'u32', IDLType.Tags.uint64: 'u64', IDLType.Tags.unrestricted_float: 'f32', IDLType.Tags.float: 'Finite<f32>', IDLType.Tags.unrestricted_double: 'f64', IDLType.Tags.double: 'Finite<f64>' } numericTags = [ IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16, IDLType.Tags.uint16, IDLType.Tags.int32, IDLType.Tags.uint32, IDLType.Tags.int64, IDLType.Tags.uint64, IDLType.Tags.unrestricted_float, IDLType.Tags.unrestricted_double ] # We'll want to insert the indent at the beginnings of lines, but we # don't want to indent empty lines. So only indent lines that have a # non-newline character on them. lineStartDetector = re.compile("^(?=[^\n#])", re.MULTILINE) def indent(s, indentLevel=2): """ Indent C++ code. Weird secret feature: this doesn't indent lines that start with # (such as #include lines or #ifdef/#endif). """ if s == "": return s return re.sub(lineStartDetector, indentLevel * " ", s) # dedent() and fill() are often called on the same string multiple # times. We want to memoize their return values so we don't keep # recomputing them all the time. def memoize(fn): """ Decorator to memoize a function of one argument. The cache just grows without bound. """ cache = {} @functools.wraps(fn) def wrapper(arg): retval = cache.get(arg) if retval is None: retval = cache[arg] = fn(arg) return retval return wrapper @memoize def dedent(s): """ Remove all leading whitespace from s, and remove a blank line at the beginning. """ if s.startswith('\n'): s = s[1:] return textwrap.dedent(s) # This works by transforming the fill()-template to an equivalent # string.Template. fill_multiline_substitution_re = re.compile(r"( *)\$\*{(\w+)}(\n)?") @memoize def compile_fill_template(template): """ Helper function for fill(). Given the template string passed to fill(), do the reusable part of template processing and return a pair (t, argModList) that can be used every time fill() is called with that template argument. argsModList is list of tuples that represent modifications to be made to args. Each modification has, in order: i) the arg name, ii) the modified name, iii) the indent depth. """ t = dedent(template) assert t.endswith("\n") or "\n" not in t argModList = [] def replace(match): """ Replaces a line like ' $*{xyz}\n' with '${xyz_n}', where n is the indent depth, and add a corresponding entry to argModList. 
Note that this needs to close over argModList, so it has to be defined inside compile_fill_template(). """ indentation, name, nl = match.groups() depth = len(indentation) # Check that $*{xyz} appears by itself on a line. prev = match.string[:match.start()] if (prev and not prev.endswith("\n")) or nl is None: raise ValueError("Invalid fill() template: $*{%s} must appear by itself on a line" % name) # Now replace this whole line of template with the indented equivalent. modified_name = name + "_" + str(depth) argModList.append((name, modified_name, depth)) return "${" + modified_name + "}" t = re.sub(fill_multiline_substitution_re, replace, t) return (string.Template(t), argModList) def fill(template, **args): """ Convenience function for filling in a multiline template. `fill(template, name1=v1, name2=v2)` is a lot like `string.Template(template).substitute({"name1": v1, "name2": v2})`. However, it's shorter, and has a few nice features: * If `template` is indented, fill() automatically dedents it! This makes code using fill() with Python's multiline strings much nicer to look at. * If `template` starts with a blank line, fill() strips it off. (Again, convenient with multiline strings.) * fill() recognizes a special kind of substitution of the form `$*{name}`. Use this to paste in, and automatically indent, multiple lines. (Mnemonic: The `*` is for "multiple lines"). A `$*` substitution must appear by itself on a line, with optional preceding indentation (spaces only). The whole line is replaced by the corresponding keyword argument, indented appropriately. If the argument is an empty string, no output is generated, not even a blank line. """ t, argModList = compile_fill_template(template) # Now apply argModList to args for (name, modified_name, depth) in argModList: if not (args[name] == "" or args[name].endswith("\n")): raise ValueError("Argument %s with value %r is missing a newline" % (name, args[name])) args[modified_name] = indent(args[name], depth) return t.substitute(args) class CGThing(): """ Abstract base class for things that spit out code. """ def __init__(self): pass # Nothing for now def define(self): """Produce code for a Rust file.""" raise NotImplementedError # Override me! class CGMethodCall(CGThing): """ A class to generate selection of a method signature from a set of signatures and generation of a call to that signature. """ def __init__(self, argsPre, nativeMethodName, static, descriptor, method): CGThing.__init__(self) methodName = '\\"%s.%s\\"' % (descriptor.interface.identifier.name, method.identifier.name) def requiredArgCount(signature): arguments = signature[1] if len(arguments) == 0: return 0 requiredArgs = len(arguments) while requiredArgs and arguments[requiredArgs - 1].optional: requiredArgs -= 1 return requiredArgs signatures = method.signatures() def getPerSignatureCall(signature, argConversionStartsAt=0): signatureIndex = signatures.index(signature) return CGPerSignatureCall(signature[0], argsPre, signature[1], nativeMethodName + '_' * signatureIndex, static, descriptor, method, argConversionStartsAt) if len(signatures) == 1: # Special case: we can just do a per-signature method call # here for our one signature and not worry about switching # on anything. 
signature = signatures[0] self.cgRoot = CGList([getPerSignatureCall(signature)]) requiredArgs = requiredArgCount(signature) if requiredArgs > 0: code = ( "if argc < %d {\n" " throw_type_error(cx, \"Not enough arguments to %s.\");\n" " return false;\n" "}" % (requiredArgs, methodName)) self.cgRoot.prepend( CGWrapper(CGGeneric(code), pre="\n", post="\n")) return # Need to find the right overload maxArgCount = method.maxArgCount allowedArgCounts = method.allowedArgCounts argCountCases = [] for argCount in allowedArgCounts: possibleSignatures = method.signaturesForArgCount(argCount) if len(possibleSignatures) == 1: # easy case! signature = possibleSignatures[0] argCountCases.append(CGCase(str(argCount), getPerSignatureCall(signature))) continue distinguishingIndex = method.distinguishingIndexForArgCount(argCount) # We can't handle unions of non-object values at the distinguishing index. for (returnType, args) in possibleSignatures: type = args[distinguishingIndex].type if type.isUnion(): if type.nullable(): type = type.inner for type in type.flatMemberTypes: if not (type.isObject() or type.isNonCallbackInterface()): raise TypeError("No support for unions with non-object variants " "as distinguishing arguments yet: %s", args[distinguishingIndex].location) # Convert all our arguments up to the distinguishing index. # Doesn't matter which of the possible signatures we use, since # they all have the same types up to that point; just use # possibleSignatures[0] caseBody = [ CGArgumentConverter(possibleSignatures[0][1][i], i, "args", "argc", descriptor) for i in range(0, distinguishingIndex)] # Select the right overload from our set. distinguishingArg = "HandleValue::from_raw(args.get(%d))" % distinguishingIndex def pickFirstSignature(condition, filterLambda): sigs = filter(filterLambda, possibleSignatures) assert len(sigs) < 2 if len(sigs) > 0: call = getPerSignatureCall(sigs[0], distinguishingIndex) if condition is None: caseBody.append(call) else: caseBody.append(CGGeneric("if " + condition + " {")) caseBody.append(CGIndenter(call)) caseBody.append(CGGeneric("}")) return True return False # First check for null or undefined pickFirstSignature("%s.get().is_null_or_undefined()" % distinguishingArg, lambda s: (s[1][distinguishingIndex].type.nullable() or s[1][distinguishingIndex].type.isDictionary())) # Now check for distinguishingArg being an object that implements a # non-callback interface. That includes typed arrays and # arraybuffers. interfacesSigs = [ s for s in possibleSignatures if (s[1][distinguishingIndex].type.isObject() or s[1][distinguishingIndex].type.isUnion() or s[1][distinguishingIndex].type.isNonCallbackInterface())] # There might be more than one of these; we need to check # which ones we unwrap to. if len(interfacesSigs) > 0: # The spec says that we should check for "platform objects # implementing an interface", but it's enough to guard on these # being an object. The code for unwrapping non-callback # interfaces and typed arrays will just bail out and move on to # the next overload if the object fails to unwrap correctly. We # could even not do the isObject() check up front here, but in # cases where we have multiple object overloads it makes sense # to do it only once instead of for each overload. That will # also allow the unwrapping test to skip having to do codegen # for the null-or-undefined case, which we already handled # above. 
caseBody.append(CGGeneric("if %s.get().is_object() {" % (distinguishingArg))) for idx, sig in enumerate(interfacesSigs): caseBody.append(CGIndenter(CGGeneric("loop {"))) type = sig[1][distinguishingIndex].type # The argument at index distinguishingIndex can't possibly # be unset here, because we've already checked that argc is # large enough that we can examine this argument. info = getJSToNativeConversionInfo( type, descriptor, failureCode="break;", isDefinitelyObject=True) template = info.template declType = info.declType testCode = instantiateJSToNativeConversionTemplate( template, {"val": distinguishingArg}, declType, "arg%d" % distinguishingIndex, needsAutoRoot=type_needs_auto_root(type)) # Indent by 4, since we need to indent further than our "do" statement caseBody.append(CGIndenter(testCode, 4)) # If we got this far, we know we unwrapped to the right # interface, so just do the call. Start conversion with # distinguishingIndex + 1, since we already converted # distinguishingIndex. caseBody.append(CGIndenter( getPerSignatureCall(sig, distinguishingIndex + 1), 4)) caseBody.append(CGIndenter(CGGeneric("}"))) caseBody.append(CGGeneric("}")) # XXXbz Now we're supposed to check for distinguishingArg being # an array or a platform object that supports indexed # properties... skip that last for now. It's a bit of a pain. pickFirstSignature("%s.get().is_object() && is_array_like(cx, %s)" % (distinguishingArg, distinguishingArg), lambda s: (s[1][distinguishingIndex].type.isSequence() or s[1][distinguishingIndex].type.isObject())) # Check for Date objects # XXXbz Do we need to worry about security wrappers around the Date? pickFirstSignature("%s.get().is_object() && " "{ rooted!(in(cx) let obj = %s.get().to_object()); " "let mut is_date = false; " "assert!(JS_ObjectIsDate(cx, obj.handle(), &mut is_date)); " "is_date }" % (distinguishingArg, distinguishingArg), lambda s: (s[1][distinguishingIndex].type.isDate() or s[1][distinguishingIndex].type.isObject())) # Check for vanilla JS objects # XXXbz Do we need to worry about security wrappers? pickFirstSignature("%s.get().is_object() && !is_platform_object(%s.get().to_object())" % (distinguishingArg, distinguishingArg), lambda s: (s[1][distinguishingIndex].type.isCallback() or s[1][distinguishingIndex].type.isCallbackInterface() or s[1][distinguishingIndex].type.isDictionary() or s[1][distinguishingIndex].type.isObject())) # The remaining cases are mutually exclusive. The # pickFirstSignature calls are what change caseBody # Check for strings or enums if pickFirstSignature(None, lambda s: (s[1][distinguishingIndex].type.isString() or s[1][distinguishingIndex].type.isEnum())): pass # Check for primitives elif pickFirstSignature(None, lambda s: s[1][distinguishingIndex].type.isPrimitive()): pass # Check for "any" elif pickFirstSignature(None, lambda s: s[1][distinguishingIndex].type.isAny()): pass else: # Just throw; we have no idea what we're supposed to # do with this. 
caseBody.append(CGGeneric("throw_internal_error(cx, \"Could not convert JavaScript argument\");\n" "return false;")) argCountCases.append(CGCase(str(argCount), CGList(caseBody, "\n"))) overloadCGThings = [] overloadCGThings.append( CGGeneric("let argcount = cmp::min(argc, %d);" % maxArgCount)) overloadCGThings.append( CGSwitch("argcount", argCountCases, CGGeneric("throw_type_error(cx, \"Not enough arguments to %s.\");\n" "return false;" % methodName))) # XXXjdm Avoid unreachable statement warnings # overloadCGThings.append( # CGGeneric('panic!("We have an always-returning default case");\n' # 'return false;')) self.cgRoot = CGWrapper(CGList(overloadCGThings, "\n"), pre="\n") def define(self): return self.cgRoot.define() def dictionaryHasSequenceMember(dictionary): return (any(typeIsSequenceOrHasSequenceMember(m.type) for m in dictionary.members) or (dictionary.parent and dictionaryHasSequenceMember(dictionary.parent))) def typeIsSequenceOrHasSequenceMember(type): if type.nullable(): type = type.inner if type.isSequence(): return True if type.isDictionary(): return dictionaryHasSequenceMember(type.inner) if type.isUnion(): return any(typeIsSequenceOrHasSequenceMember(m.type) for m in type.flatMemberTypes) return False def union_native_type(t): name = t.unroll().name return 'UnionTypes::%s' % name class JSToNativeConversionInfo(): """ An object representing information about a JS-to-native conversion. """ def __init__(self, template, default=None, declType=None): """ template: A string representing the conversion code. This will have template substitution performed on it as follows: ${val} is a handle to the JS::Value in question default: A string or None representing rust code for default value(if any). declType: A CGThing representing the native C++ type we're converting to. This is allowed to be None if the conversion code is supposed to be used as-is. """ assert isinstance(template, str) assert declType is None or isinstance(declType, CGThing) self.template = template self.default = default self.declType = declType def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None, isDefinitelyObject=False, isMember=False, isArgument=False, isAutoRooted=False, invalidEnumValueFatal=True, defaultValue=None, treatNullAs="Default", isEnforceRange=False, isClamp=False, exceptionCode=None, allowTreatNonObjectAsNull=False, isCallbackReturnValue=False, sourceDescription="value"): """ Get a template for converting a JS value to a native object based on the given type and descriptor. If failureCode is given, then we're actually testing whether we can convert the argument to the desired type. That means that failures to convert due to the JS value being the wrong type of value need to use failureCode instead of throwing exceptions. Failures to convert that are due to JS exceptions (from toString or valueOf methods) or out of memory conditions need to throw exceptions no matter what failureCode is. If isDefinitelyObject is True, that means we know the value isObject() and we have no need to recheck that. isMember is `False`, "Dictionary", "Union" or "Variadic", and affects whether this function returns code suitable for an on-stack rooted binding or suitable for storing in an appropriate larger structure. invalidEnumValueFatal controls whether an invalid enum value conversion attempt will throw (if true) or simply return without doing anything (if false). 
If defaultValue is not None, it's the IDL default value for this conversion If isEnforceRange is true, we're converting an integer and throwing if the value is out of range. If isClamp is true, we're converting an integer and clamping if the value is out of range. If allowTreatNonObjectAsNull is true, then [TreatNonObjectAsNull] extended attributes on nullable callback functions will be honored. The return value from this function is an object of JSToNativeConversionInfo consisting of four things: 1) A string representing the conversion code. This will have template substitution performed on it as follows: ${val} replaced by an expression for the JS::Value in question 2) A string or None representing Rust code for the default value (if any). 3) A CGThing representing the native C++ type we're converting to (declType). This is allowed to be None if the conversion code is supposed to be used as-is. 4) A boolean indicating whether the caller has to root the result. """ # We should not have a defaultValue if we know we're an object assert not isDefinitelyObject or defaultValue is None # If exceptionCode is not set, we'll just rethrow the exception we got. # Note that we can't just set failureCode to exceptionCode, because setting # failureCode will prevent pending exceptions from being set in cases when # they really should be! if exceptionCode is None: exceptionCode = "return false;\n" if failureCode is None: failOrPropagate = "throw_type_error(cx, &error);\n%s" % exceptionCode else: failOrPropagate = failureCode def handleOptional(template, declType, default): assert (defaultValue is None) == (default is None) return JSToNativeConversionInfo(template, default, declType) # Unfortunately, .capitalize() on a string will lowercase things inside the # string, which we do not want. def firstCap(string): return string[0].upper() + string[1:] # Helper functions for dealing with failures due to the JS value being the # wrong type of value. def onFailureNotAnObject(failureCode): return CGWrapper( CGGeneric( failureCode or ('throw_type_error(cx, "%s is not an object.");\n' '%s' % (firstCap(sourceDescription), exceptionCode))), post="\n") def onFailureInvalidEnumValue(failureCode, passedVarName): return CGGeneric( failureCode or ('throw_type_error(cx, &format!("\'{}\' is not a valid enum value for enumeration \'%s\'.", %s)); %s' % (type.name, passedVarName, exceptionCode))) def onFailureNotCallable(failureCode): return CGGeneric( failureCode or ('throw_type_error(cx, \"%s is not callable.\");\n' '%s' % (firstCap(sourceDescription), exceptionCode))) # A helper function for handling default values. def handleDefault(nullValue): if defaultValue is None: return None if isinstance(defaultValue, IDLNullValue): assert type.nullable() or type.isDictionary() return nullValue elif isinstance(defaultValue, IDLEmptySequenceValue): assert type.isSequence() return "Vec::new()" raise TypeError("Can't handle non-null or non-empty sequence default value here") # A helper function for wrapping up the template body for # possibly-nullable objecty stuff def wrapObjectTemplate(templateBody, nullValue, isDefinitelyObject, type, failureCode=None): if not isDefinitelyObject: # Handle the non-object cases by wrapping up the whole # thing in an if cascade. 
templateBody = ( "if ${val}.get().is_object() {\n" + CGIndenter(CGGeneric(templateBody)).define() + "\n") if type.nullable(): templateBody += ( "} else if ${val}.get().is_null_or_undefined() {\n" " %s\n") % nullValue templateBody += ( "} else {\n" + CGIndenter(onFailureNotAnObject(failureCode)).define() + "}") return templateBody assert not (isEnforceRange and isClamp) # These are mutually exclusive if type.isSequence() or type.isRecord(): innerInfo = getJSToNativeConversionInfo(innerContainerType(type), descriptorProvider, isMember=isMember, isAutoRooted=isAutoRooted) declType = wrapInNativeContainerType(type, innerInfo.declType) config = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs) if type.nullable(): declType = CGWrapper(declType, pre="Option<", post=" >") templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n" " Ok(ConversionResult::Success(value)) => value,\n" " Ok(ConversionResult::Failure(error)) => {\n" "%s\n" " }\n" " _ => { %s },\n" "}" % (config, indent(failOrPropagate, 8), exceptionCode)) return handleOptional(templateBody, declType, handleDefault("None")) if type.isUnion(): declType = CGGeneric(union_native_type(type)) if type.nullable(): declType = CGWrapper(declType, pre="Option<", post=" >") templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n" " Ok(ConversionResult::Success(value)) => value,\n" " Ok(ConversionResult::Failure(error)) => {\n" "%s\n" " }\n" " _ => { %s },\n" "}" % (indent(failOrPropagate, 8), exceptionCode)) dictionaries = [ memberType for memberType in type.unroll().flatMemberTypes if memberType.isDictionary() ] if dictionaries: if defaultValue: assert isinstance(defaultValue, IDLNullValue) dictionary, = dictionaries default = "%s::%s(%s::%s::empty())" % ( union_native_type(type), dictionary.name, CGDictionary.makeModuleName(dictionary.inner), CGDictionary.makeDictionaryName(dictionary.inner)) else: default = None else: default = handleDefault("None") return handleOptional(templateBody, declType, default) if type.isPromise(): assert not type.nullable() # Per spec, what we're supposed to do is take the original # Promise.resolve and call it with the original Promise as this # value to make a Promise out of whatever value we actually have # here. The question is which global we should use. There are # a couple cases to consider: # # 1) Normal call to API with a Promise argument. This is a case the # spec covers, and we should be using the current Realm's # Promise. That means the current compartment. # 2) Promise return value from a callback or callback interface. # This is in theory a case the spec covers but in practice it # really doesn't define behavior here because it doesn't define # what Realm we're in after the callback returns, which is when # the argument conversion happens. We will use the current # compartment, which is the compartment of the callable (which # may itself be a cross-compartment wrapper itself), which makes # as much sense as anything else. In practice, such an API would # once again be providing a Promise to signal completion of an # operation, which would then not be exposed to anyone other than # our own implementation code. templateBody = fill( """ { // Scope for our JSAutoCompartment. 
rooted!(in(cx) let globalObj = CurrentGlobalOrNull(cx)); let promiseGlobal = GlobalScope::from_object_maybe_wrapped(globalObj.handle().get()); rooted!(in(cx) let mut valueToResolve = $${val}.get()); if !JS_WrapValue(cx, valueToResolve.handle_mut()) { $*{exceptionCode} } match Promise::new_resolved(&promiseGlobal, cx, valueToResolve.handle()) { Ok(value) => value, Err(error) => { throw_dom_exception(cx, &promiseGlobal, error); $*{exceptionCode} } } } """, exceptionCode=exceptionCode) if isArgument: declType = CGGeneric("&Promise") else: declType = CGGeneric("Rc<Promise>") return handleOptional(templateBody, declType, handleDefault("None")) if type.isGeckoInterface(): assert not isEnforceRange and not isClamp descriptor = descriptorProvider.getDescriptor( type.unroll().inner.identifier.name) if descriptor.interface.isCallback(): name = descriptor.nativeType declType = CGWrapper(CGGeneric(name), pre="Rc<", post=">") template = "%s::new(cx, ${val}.get().to_object())" % name if type.nullable(): declType = CGWrapper(declType, pre="Option<", post=">") template = wrapObjectTemplate("Some(%s)" % template, "None", isDefinitelyObject, type, failureCode) return handleOptional(template, declType, handleDefault("None")) conversionFunction = "root_from_handlevalue" descriptorType = descriptor.returnType if isMember == "Variadic": conversionFunction = "native_from_handlevalue" descriptorType = descriptor.nativeType elif isArgument: descriptorType = descriptor.argumentType if descriptor.interface.isConsequential(): raise TypeError("Consequential interface %s being used as an " "argument" % descriptor.interface.identifier.name) if failureCode is None: substitutions = { "sourceDescription": sourceDescription, "interface": descriptor.interface.identifier.name, "exceptionCode": exceptionCode, } unwrapFailureCode = string.Template( 'throw_type_error(cx, "${sourceDescription} does not ' 'implement interface ${interface}.");\n' '${exceptionCode}').substitute(substitutions) else: unwrapFailureCode = failureCode templateBody = fill( """ match ${function}($${val}) { Ok(val) => val, Err(()) => { $*{failureCode} } } """, failureCode=unwrapFailureCode + "\n", function=conversionFunction) declType = CGGeneric(descriptorType) if type.nullable(): templateBody = "Some(%s)" % templateBody declType = CGWrapper(declType, pre="Option<", post=">") templateBody = wrapObjectTemplate(templateBody, "None", isDefinitelyObject, type, failureCode) return handleOptional(templateBody, declType, handleDefault("None")) if is_typed_array(type): if failureCode is None: substitutions = { "sourceDescription": sourceDescription, "exceptionCode": exceptionCode, } unwrapFailureCode = string.Template( 'throw_type_error(cx, "${sourceDescription} is not a typed array.");\n' '${exceptionCode}').substitute(substitutions) else: unwrapFailureCode = failureCode typeName = type.unroll().name # unroll because it may be nullable if isMember == "Union": typeName = "Heap" + typeName templateBody = fill( """ match typedarray::${ty}::from($${val}.get().to_object()) { Ok(val) => val, Err(()) => { $*{failureCode} } } """, ty=typeName, failureCode=unwrapFailureCode + "\n", ) if isMember == "Union": templateBody = "RootedTraceableBox::new(%s)" % templateBody declType = CGGeneric("typedarray::%s" % typeName) if type.nullable(): templateBody = "Some(%s)" % templateBody declType = CGWrapper(declType, pre="Option<", post=">") templateBody = wrapObjectTemplate(templateBody, "None", isDefinitelyObject, type, failureCode) return handleOptional(templateBody, declType, 
handleDefault("None")) elif type.isSpiderMonkeyInterface(): raise TypeError("Can't handle SpiderMonkey interface arguments other than typed arrays yet") if type.isDOMString(): nullBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs) conversionCode = ( "match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n" " Ok(ConversionResult::Success(strval)) => strval,\n" " Ok(ConversionResult::Failure(error)) => {\n" "%s\n" " }\n" " _ => { %s },\n" "}" % (nullBehavior, indent(failOrPropagate, 8), exceptionCode)) if defaultValue is None: default = None elif isinstance(defaultValue, IDLNullValue): assert type.nullable() default = "None" else: assert defaultValue.type.tag() == IDLType.Tags.domstring default = 'DOMString::from("%s")' % defaultValue.value if type.nullable(): default = "Some(%s)" % default declType = "DOMString" if type.nullable(): declType = "Option<%s>" % declType return handleOptional(conversionCode, CGGeneric(declType), default) if type.isUSVString(): assert not isEnforceRange and not isClamp conversionCode = ( "match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n" " Ok(ConversionResult::Success(strval)) => strval,\n" " Ok(ConversionResult::Failure(error)) => {\n" "%s\n" " }\n" " _ => { %s },\n" "}" % (indent(failOrPropagate, 8), exceptionCode)) if defaultValue is None: default = None elif isinstance(defaultValue, IDLNullValue): assert type.nullable() default = "None" else: assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.usvstring) default = 'USVString("%s".to_owned())' % defaultValue.value if type.nullable(): default = "Some(%s)" % default declType = "USVString" if type.nullable(): declType = "Option<%s>" % declType return handleOptional(conversionCode, CGGeneric(declType), default) if type.isByteString(): assert not isEnforceRange and not isClamp conversionCode = ( "match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n" " Ok(ConversionResult::Success(strval)) => strval,\n" " Ok(ConversionResult::Failure(error)) => {\n" "%s\n" " }\n" " _ => { %s },\n" "}" % (indent(failOrPropagate, 8), exceptionCode)) if defaultValue is None: default = None elif isinstance(defaultValue, IDLNullValue): assert type.nullable() default = "None" else: assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.bytestring) default = 'ByteString::new(b"%s".to_vec())' % defaultValue.value if type.nullable(): default = "Some(%s)" % default declType = "ByteString" if type.nullable(): declType = "Option<%s>" % declType return handleOptional(conversionCode, CGGeneric(declType), default) if type.isEnum(): assert not isEnforceRange and not isClamp if type.nullable(): raise TypeError("We don't support nullable enumerated arguments " "yet") enum = type.inner.identifier.name if invalidEnumValueFatal: handleInvalidEnumValueCode = onFailureInvalidEnumValue(failureCode, 'search').define() else: handleInvalidEnumValueCode = "return true;" template = ( "match find_enum_value(cx, ${val}, %(pairs)s) {\n" " Err(_) => { %(exceptionCode)s },\n" " Ok((None, search)) => { %(handleInvalidEnumValueCode)s },\n" " Ok((Some(&value), _)) => value,\n" "}" % {"pairs": enum + "Values::pairs", "exceptionCode": exceptionCode, "handleInvalidEnumValueCode": handleInvalidEnumValueCode}) if defaultValue is not None: assert defaultValue.type.tag() == IDLType.Tags.domstring default = "%s::%s" % (enum, getEnumValueName(defaultValue.value)) else: default = None return handleOptional(template, CGGeneric(enum), default) if type.isCallback(): assert not isEnforceRange and 
not isClamp assert not type.treatNonCallableAsNull() assert not type.treatNonObjectAsNull() or type.nullable() assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull() callback = type.unroll().callback declType = CGGeneric(callback.identifier.name) finalDeclType = CGTemplatedType("Rc", declType) conversion = CGCallbackTempRoot(declType.define()) if type.nullable(): declType = CGTemplatedType("Option", declType) finalDeclType = CGTemplatedType("Option", finalDeclType) conversion = CGWrapper(conversion, pre="Some(", post=")") if allowTreatNonObjectAsNull and type.treatNonObjectAsNull(): if not isDefinitelyObject: haveObject = "${val}.get().is_object()" template = CGIfElseWrapper(haveObject, conversion, CGGeneric("None")).define() else: template = conversion else: template = CGIfElseWrapper("IsCallable(${val}.get().to_object())", conversion, onFailureNotCallable(failureCode)).define() template = wrapObjectTemplate( template, "None", isDefinitelyObject, type, failureCode) if defaultValue is not None: assert allowTreatNonObjectAsNull assert type.treatNonObjectAsNull() assert type.nullable() assert isinstance(defaultValue, IDLNullValue) default = "None" else: default = None return JSToNativeConversionInfo(template, default, finalDeclType) if type.isAny(): assert not isEnforceRange and not isClamp assert isMember != "Union" if isMember == "Dictionary" or isAutoRooted: templateBody = "${val}.get()" if defaultValue is None: default = None elif isinstance(defaultValue, IDLNullValue): default = "NullValue()" elif isinstance(defaultValue, IDLUndefinedValue): default = "UndefinedValue()" else: raise TypeError("Can't handle non-null, non-undefined default value here") if isMember == "Dictionary": templateBody = "RootedTraceableBox::from_box(Heap::boxed(%s))" % templateBody if default is not None: default = "RootedTraceableBox::from_box(Heap::boxed(%s))" % default declType = CGGeneric("RootedTraceableBox<Heap<JSVal>>") # AutoRooter can trace properly inner raw GC thing pointers else: declType = CGGeneric("JSVal") return handleOptional(templateBody, declType, default) declType = CGGeneric("HandleValue") if defaultValue is None: default = None elif isinstance(defaultValue, IDLNullValue): default = "HandleValue::null()" elif isinstance(defaultValue, IDLUndefinedValue): default = "HandleValue::undefined()" else: raise TypeError("Can't handle non-null, non-undefined default value here") return handleOptional("${val}", declType, default) if type.isObject(): assert not isEnforceRange and not isClamp templateBody = "${val}.get().to_object()" default = "ptr::null_mut()" if isMember in ("Dictionary", "Union"): templateBody = "RootedTraceableBox::from_box(Heap::boxed(%s))" % templateBody default = "RootedTraceableBox::new(Heap::default())" declType = CGGeneric("RootedTraceableBox<Heap<*mut JSObject>>") else: # TODO: Need to root somehow # https://github.com/servo/servo/issues/6382 declType = CGGeneric("*mut JSObject") templateBody = wrapObjectTemplate(templateBody, default, isDefinitelyObject, type, failureCode) return handleOptional(templateBody, declType, handleDefault(default)) if type.isDictionary(): # There are no nullable dictionaries assert not type.nullable() typeName = "%s::%s" % (CGDictionary.makeModuleName(type.inner), CGDictionary.makeDictionaryName(type.inner)) declType = CGGeneric(typeName) empty = "%s::empty()" % typeName if type_needs_tracing(type): declType = CGTemplatedType("RootedTraceableBox", declType) template = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n" " 
Ok(ConversionResult::Success(dictionary)) => dictionary,\n" " Ok(ConversionResult::Failure(error)) => {\n" "%s\n" " }\n" " _ => { %s },\n" "}" % (indent(failOrPropagate, 8), exceptionCode)) return handleOptional(template, declType, handleDefault(empty)) if type.isVoid(): # This one only happens for return values, and it's easy: just # ignore the jsval. return JSToNativeConversionInfo("", None, None) if not type.isPrimitive(): raise TypeError("Need conversion for argument type '%s'" % str(type)) conversionBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs) if failureCode is None: failureCode = 'return false' declType = CGGeneric(builtinNames[type.tag()]) if type.nullable(): declType = CGWrapper(declType, pre="Option<", post=">") template = ( "match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n" " Ok(ConversionResult::Success(v)) => v,\n" " Ok(ConversionResult::Failure(error)) => {\n" "%s\n" " }\n" " _ => { %s }\n" "}" % (conversionBehavior, indent(failOrPropagate, 8), exceptionCode)) if defaultValue is not None: if isinstance(defaultValue, IDLNullValue): assert type.nullable() defaultStr = "None" else: tag = defaultValue.type.tag() if tag in [IDLType.Tags.float, IDLType.Tags.double]: defaultStr = "Finite::wrap(%s)" % defaultValue.value elif tag in numericTags: defaultStr = str(defaultValue.value) else: assert tag == IDLType.Tags.bool defaultStr = toStringBool(defaultValue.value) if type.nullable(): defaultStr = "Some(%s)" % defaultStr else: defaultStr = None return handleOptional(template, declType, defaultStr) def instantiateJSToNativeConversionTemplate(templateBody, replacements, declType, declName, needsAutoRoot=False): """ Take the templateBody and declType as returned by getJSToNativeConversionInfo, a set of replacements as required by the strings in such a templateBody, and a declName, and generate code to convert into a stack Rust binding with that name. """ result = CGList([], "\n") conversion = CGGeneric(string.Template(templateBody).substitute(replacements)) if declType is not None: newDecl = [ CGGeneric("let "), CGGeneric(declName), CGGeneric(": "), declType, CGGeneric(" = "), conversion, CGGeneric(";"), ] result.append(CGList(newDecl)) else: result.append(conversion) if needsAutoRoot: result.append(CGGeneric("auto_root!(in(cx) let %s = %s);" % (declName, declName))) # Add an empty CGGeneric to get an extra newline after the argument # conversion. result.append(CGGeneric("")) return result def convertConstIDLValueToJSVal(value): if isinstance(value, IDLNullValue): return "ConstantVal::NullVal" tag = value.type.tag() if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16, IDLType.Tags.uint16, IDLType.Tags.int32]: return "ConstantVal::IntVal(%s)" % (value.value) if tag == IDLType.Tags.uint32: return "ConstantVal::UintVal(%s)" % (value.value) if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]: return "ConstantVal::DoubleVal(%s as f64)" % (value.value) if tag == IDLType.Tags.bool: return "ConstantVal::BoolVal(true)" if value.value else "ConstantVal::BoolVal(false)" if tag in [IDLType.Tags.unrestricted_float, IDLType.Tags.float, IDLType.Tags.unrestricted_double, IDLType.Tags.double]: return "ConstantVal::DoubleVal(%s as f64)" % (value.value) raise TypeError("Const value of unhandled type: " + str(value.type)) class CGArgumentConverter(CGThing): """ A class that takes an IDL argument object, its index in the argument list, and the argv and argc strings and generates code to unwrap the argument to the right native type.
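    As an illustrative sketch (assuming a hypothetical non-optional `long`
    argument at index 0, mapped to i32; the exact emitted text may differ),
    the generated Rust looks roughly like:

        let arg0: i32 = match FromJSValConvertible::from_jsval(
                cx, HandleValue::from_raw(args.get(0)),
                ConversionBehavior::Default) {
            Ok(ConversionResult::Success(v)) => v,
            Ok(ConversionResult::Failure(error)) => {
                throw_type_error(cx, &error);
                return false;
            }
            _ => { return false; }
        };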
""" def __init__(self, argument, index, args, argc, descriptorProvider, invalidEnumValueFatal=True): CGThing.__init__(self) assert not argument.defaultValue or argument.optional replacer = { "index": index, "argc": argc, "args": args } replacementVariables = { "val": string.Template("HandleValue::from_raw(${args}.get(${index}))").substitute(replacer), } info = getJSToNativeConversionInfo( argument.type, descriptorProvider, invalidEnumValueFatal=invalidEnumValueFatal, defaultValue=argument.defaultValue, treatNullAs=argument.treatNullAs, isEnforceRange=argument.enforceRange, isClamp=argument.clamp, isMember="Variadic" if argument.variadic else False, isAutoRooted=type_needs_auto_root(argument.type), allowTreatNonObjectAsNull=argument.allowTreatNonCallableAsNull()) template = info.template default = info.default declType = info.declType if not argument.variadic: if argument.optional: condition = "{args}.get({index}).is_undefined()".format(**replacer) if argument.defaultValue: assert default template = CGIfElseWrapper(condition, CGGeneric(default), CGGeneric(template)).define() else: assert not default declType = CGWrapper(declType, pre="Option<", post=">") template = CGIfElseWrapper(condition, CGGeneric("None"), CGGeneric("Some(%s)" % template)).define() else: assert not default arg = "arg%d" % index self.converter = instantiateJSToNativeConversionTemplate( template, replacementVariables, declType, arg, needsAutoRoot=type_needs_auto_root(argument.type)) else: assert argument.optional variadicConversion = { "val": string.Template("HandleValue::from_raw(${args}.get(variadicArg))").substitute(replacer), } innerConverter = [instantiateJSToNativeConversionTemplate( template, variadicConversion, declType, "slot")] arg = "arg%d" % index if argument.type.isGeckoInterface(): init = "rooted_vec!(let mut %s)" % arg innerConverter.append(CGGeneric("%s.push(Dom::from_ref(&*slot));" % arg)) else: init = "let mut %s = vec![]" % arg innerConverter.append(CGGeneric("%s.push(slot);" % arg)) inner = CGIndenter(CGList(innerConverter, "\n"), 8).define() self.converter = CGGeneric("""\ %(init)s; if %(argc)s > %(index)s { %(arg)s.reserve(%(argc)s as usize - %(index)s); for variadicArg in %(index)s..%(argc)s { %(inner)s } }""" % {'arg': arg, 'argc': argc, 'index': index, 'inner': inner, 'init': init}) def define(self): return self.converter.define() def wrapForType(jsvalRef, result='result', successCode='return true;', pre=''): """ Reflect a Rust value into JS. * 'jsvalRef': a MutableHandleValue in which to store the result of the conversion; * 'result': the name of the variable in which the Rust value is stored; * 'successCode': the code to run once we have done the conversion. 
* 'pre': code to run before the conversion if rooting is necessary """ wrap = "%s\n(%s).to_jsval(cx, %s);" % (pre, result, jsvalRef) if successCode: wrap += "\n%s" % successCode return wrap def typeNeedsCx(type, retVal=False): if type is None: return False if type.nullable(): type = type.inner if type.isSequence(): type = type.inner if type.isUnion(): return any(typeNeedsCx(t) for t in type.unroll().flatMemberTypes) if retVal and type.isSpiderMonkeyInterface(): return True return type.isAny() or type.isObject() # Returns a conversion behavior suitable for a type def getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs): if type.isSequence() or type.isRecord(): return getConversionConfigForType(innerContainerType(type), isEnforceRange, isClamp, treatNullAs) if type.isDOMString(): assert not isEnforceRange and not isClamp treatAs = { "Default": "StringificationBehavior::Default", "EmptyString": "StringificationBehavior::Empty", } if treatNullAs not in treatAs: raise TypeError("We don't support [TreatNullAs=%s]" % treatNullAs) if type.nullable(): # Note: the actual behavior passed here doesn't matter for nullable # strings. return "StringificationBehavior::Default" else: return treatAs[treatNullAs] if type.isPrimitive() and type.isInteger(): if isEnforceRange: return "ConversionBehavior::EnforceRange" elif isClamp: return "ConversionBehavior::Clamp" else: return "ConversionBehavior::Default" assert not isEnforceRange and not isClamp return "()" # Returns a CGThing containing the type of the return value. def getRetvalDeclarationForType(returnType, descriptorProvider): if returnType is None or returnType.isVoid(): # Nothing to declare return CGGeneric("()") if returnType.isPrimitive() and returnType.tag() in builtinNames: result = CGGeneric(builtinNames[returnType.tag()]) if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isDOMString(): result = CGGeneric("DOMString") if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isUSVString(): result = CGGeneric("USVString") if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isByteString(): result = CGGeneric("ByteString") if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isEnum(): result = CGGeneric(returnType.unroll().inner.identifier.name) if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isPromise(): assert not returnType.nullable() return CGGeneric("Rc<Promise>") if returnType.isGeckoInterface(): descriptor = descriptorProvider.getDescriptor( returnType.unroll().inner.identifier.name) result = CGGeneric(descriptor.returnType) if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isCallback(): callback = returnType.unroll().callback result = CGGeneric('Rc<%s::%s>' % (getModuleFromObject(callback), callback.identifier.name)) if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isUnion(): result = CGGeneric(union_native_type(returnType)) if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result # TODO: Return the value through a MutableHandleValue outparam # https://github.com/servo/servo/issues/6307 if returnType.isAny(): return CGGeneric("JSVal") if returnType.isObject() or returnType.isSpiderMonkeyInterface(): result = 
CGGeneric("NonNull<JSObject>") if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isSequence() or returnType.isRecord(): result = getRetvalDeclarationForType(innerContainerType(returnType), descriptorProvider) result = wrapInNativeContainerType(returnType, result) if returnType.nullable(): result = CGWrapper(result, pre="Option<", post=">") return result if returnType.isDictionary(): nullable = returnType.nullable() dictName = returnType.inner.name if nullable else returnType.name result = CGGeneric(dictName) if type_needs_tracing(returnType): result = CGWrapper(result, pre="RootedTraceableBox<", post=">") if nullable: result = CGWrapper(result, pre="Option<", post=">") return result raise TypeError("Don't know how to declare return value for %s" % returnType) def MemberCondition(pref, func): """ A string representing the condition for a member to actually be exposed. Any of the arguments can be None. If not None, they should have the following types: pref: The name of the preference. func: The name of the function. """ assert pref is None or isinstance(pref, str) assert func is None or isinstance(func, str) assert func is None or pref is None if pref: return 'Condition::Pref("%s")' % pref if func: return 'Condition::Func(%s)' % func return "Condition::Satisfied" class PropertyDefiner: """ A common superclass for defining things on prototype objects. Subclasses should implement generateArray to generate the actual arrays of things we're defining. They should also set self.regular to the list of things exposed to web pages. """ def __init__(self, descriptor, name): self.descriptor = descriptor self.name = name def variableName(self): return "s" + self.name def length(self): return len(self.regular) def __str__(self): # We only need to generate id arrays for things that will end # up used via ResolveProperty or EnumerateProperties. return self.generateArray(self.regular, self.variableName()) @staticmethod def getStringAttr(member, name): attr = member.getExtendedAttribute(name) if attr is None: return None # It's a list of strings assert len(attr) == 1 assert attr[0] is not None return attr[0] @staticmethod def getControllingCondition(interfaceMember, descriptor): return MemberCondition( PropertyDefiner.getStringAttr(interfaceMember, "Pref"), PropertyDefiner.getStringAttr(interfaceMember, "Func")) def generateGuardedArray(self, array, name, specTemplate, specTerminator, specType, getCondition, getDataTuple): """ This method generates our various arrays. array is an array of interface members as passed to generateArray name is the name as passed to generateArray specTemplate is a template for each entry of the spec array specTerminator is a terminator for the spec array (inserted at the end of the array), or None specType is the actual typename of our spec getDataTuple is a callback function that takes an array entry and returns a tuple suitable for substitution into specTemplate. """ # We generate an all-encompassing list of lists of specs, with each sublist # representing a group of members that share a common pref name. That will # make sure the order of the properties as exposed on the interface and # interface prototype objects does not change when pref control is added to # members while still allowing us to define all the members in the smallest # number of JSAPI calls. 
assert len(array) != 0 specs = [] prefableSpecs = [] prefableTemplate = ' Guard::new(%s, %s[%d])' for cond, members in groupby(array, lambda m: getCondition(m, self.descriptor)): currentSpecs = [specTemplate % getDataTuple(m) for m in members] if specTerminator: currentSpecs.append(specTerminator) specs.append("&[\n" + ",\n".join(currentSpecs) + "]\n") prefableSpecs.append( prefableTemplate % (cond, name + "_specs", len(specs) - 1)) specsArray = ("const %s_specs: &'static [&'static[%s]] = &[\n" + ",\n".join(specs) + "\n" + "];\n") % (name, specType) prefArray = ("const %s: &'static [Guard<&'static [%s]>] = &[\n" + ",\n".join(prefableSpecs) + "\n" + "];\n") % (name, specType) return specsArray + prefArray # The length of a method is the minimum of the lengths of the # argument lists of all its overloads. def methodLength(method): signatures = method.signatures() return min( len([arg for arg in arguments if not arg.optional and not arg.variadic]) for (_, arguments) in signatures) class MethodDefiner(PropertyDefiner): """ A class for defining methods on a prototype object. """ def __init__(self, descriptor, name, static, unforgeable): assert not (static and unforgeable) PropertyDefiner.__init__(self, descriptor, name) # FIXME https://bugzilla.mozilla.org/show_bug.cgi?id=772822 # We should be able to check for special operations without an # identifier. For now we check if the name starts with __ # Ignore non-static methods for callback interfaces if not descriptor.interface.isCallback() or static: methods = [m for m in descriptor.interface.members if m.isMethod() and m.isStatic() == static and not m.isIdentifierLess() and MemberIsUnforgeable(m, descriptor) == unforgeable] else: methods = [] self.regular = [{"name": m.identifier.name, "methodInfo": not m.isStatic(), "length": methodLength(m), "condition": PropertyDefiner.getControllingCondition(m, descriptor)} for m in methods] # FIXME Check for an existing iterator on the interface first. if any(m.isGetter() and m.isIndexed() for m in methods): self.regular.append({"name": '@@iterator', "methodInfo": False, "selfHostedName": "ArrayValues", "length": 0, "condition": "Condition::Satisfied"}) # Generate the keys/values/entries aliases for value iterables. 
maplikeOrSetlikeOrIterable = descriptor.interface.maplikeOrSetlikeOrIterable if (not static and not unforgeable and (maplikeOrSetlikeOrIterable and maplikeOrSetlikeOrIterable.isIterable() and maplikeOrSetlikeOrIterable.isValueIterator())): # Add our keys/values/entries/forEach self.regular.append({ "name": "keys", "methodInfo": False, "selfHostedName": "ArrayKeys", "length": 0, "condition": PropertyDefiner.getControllingCondition(m, descriptor) }) self.regular.append({ "name": "values", "methodInfo": False, "selfHostedName": "ArrayValues", "length": 0, "condition": PropertyDefiner.getControllingCondition(m, descriptor) }) self.regular.append({ "name": "entries", "methodInfo": False, "selfHostedName": "ArrayEntries", "length": 0, "condition": PropertyDefiner.getControllingCondition(m, descriptor) }) self.regular.append({ "name": "forEach", "methodInfo": False, "selfHostedName": "ArrayForEach", "length": 1, "condition": PropertyDefiner.getControllingCondition(m, descriptor) }) isUnforgeableInterface = bool(descriptor.interface.getExtendedAttribute("Unforgeable")) if not static and unforgeable == isUnforgeableInterface: stringifier = descriptor.operations['Stringifier'] if stringifier: self.regular.append({ "name": "toString", "nativeName": stringifier.identifier.name, "length": 0, "condition": PropertyDefiner.getControllingCondition(stringifier, descriptor) }) self.unforgeable = unforgeable def generateArray(self, array, name): if len(array) == 0: return "" def condition(m, d): return m["condition"] flags = "JSPROP_ENUMERATE" if self.unforgeable: flags += " | JSPROP_PERMANENT | JSPROP_READONLY" def specData(m): # TODO: Use something like JS_FNSPEC # https://github.com/servo/servo/issues/6391 if "selfHostedName" in m: selfHostedName = '%s as *const u8 as *const libc::c_char' % str_to_const_array(m["selfHostedName"]) assert not m.get("methodInfo", True) accessor = "None" jitinfo = "0 as *const JSJitInfo" else: selfHostedName = "0 as *const libc::c_char" if m.get("methodInfo", True): identifier = m.get("nativeName", m["name"]) # Go through an intermediate type here, because it's not # easy to tell whether the methodinfo is a JSJitInfo or # a JSTypedMethodJitInfo here. The compiler knows, though, # so let it do the work. 
jitinfo = "&%s_methodinfo as *const _ as *const JSJitInfo" % identifier accessor = "Some(generic_method)" else: jitinfo = "0 as *const JSJitInfo" accessor = 'Some(%s)' % m.get("nativeName", m["name"]) if m["name"].startswith("@@"): return ('(SymbolCode::%s as i32 + 1)' % m["name"][2:], accessor, jitinfo, m["length"], flags, selfHostedName) return (str_to_const_array(m["name"]), accessor, jitinfo, m["length"], flags, selfHostedName) return self.generateGuardedArray( array, name, ' JSFunctionSpec {\n' ' name: %s as *const u8 as *const libc::c_char,\n' ' call: JSNativeWrapper { op: %s, info: %s },\n' ' nargs: %s,\n' ' flags: (%s) as u16,\n' ' selfHostedName: %s\n' ' }', ' JSFunctionSpec {\n' ' name: 0 as *const libc::c_char,\n' ' call: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n' ' nargs: 0,\n' ' flags: 0,\n' ' selfHostedName: 0 as *const libc::c_char\n' ' }', 'JSFunctionSpec', condition, specData) class AttrDefiner(PropertyDefiner): def __init__(self, descriptor, name, static, unforgeable): assert not (static and unforgeable) PropertyDefiner.__init__(self, descriptor, name) self.name = name self.descriptor = descriptor self.regular = [ m for m in descriptor.interface.members if m.isAttr() and m.isStatic() == static and MemberIsUnforgeable(m, descriptor) == unforgeable ] self.static = static self.unforgeable = unforgeable def generateArray(self, array, name): if len(array) == 0: return "" flags = "JSPROP_ENUMERATE" if self.unforgeable: flags += " | JSPROP_PERMANENT" def getter(attr): if self.static: accessor = 'get_' + self.descriptor.internalNameFor(attr.identifier.name) jitinfo = "0 as *const JSJitInfo" else: if attr.hasLenientThis(): accessor = "generic_lenient_getter" else: accessor = "generic_getter" jitinfo = "&%s_getterinfo" % self.descriptor.internalNameFor(attr.identifier.name) return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }" % {"info": jitinfo, "native": accessor}) def setter(attr): if (attr.readonly and not attr.getExtendedAttribute("PutForwards") and not attr.getExtendedAttribute("Replaceable")): return "JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }" if self.static: accessor = 'set_' + self.descriptor.internalNameFor(attr.identifier.name) jitinfo = "0 as *const JSJitInfo" else: if attr.hasLenientThis(): accessor = "generic_lenient_setter" else: accessor = "generic_setter" jitinfo = "&%s_setterinfo" % self.descriptor.internalNameFor(attr.identifier.name) return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }" % {"info": jitinfo, "native": accessor}) def specData(attr): return (str_to_const_array(attr.identifier.name), flags, getter(attr), setter(attr)) return self.generateGuardedArray( array, name, ' JSPropertySpec {\n' ' name: %s as *const u8 as *const libc::c_char,\n' ' flags: (%s) as u8,\n' ' __bindgen_anon_1: JSPropertySpec__bindgen_ty_1 {\n' ' accessors: JSPropertySpec__bindgen_ty_1__bindgen_ty_1 {\n' ' getter: JSPropertySpec__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1 {\n' ' native: %s,\n' ' },\n' ' setter: JSPropertySpec__bindgen_ty_1__bindgen_ty_1__bindgen_ty_2 {\n' ' native: %s,\n' ' }\n' ' }\n' ' }\n' ' }', ' JSPropertySpec::ZERO', 'JSPropertySpec', PropertyDefiner.getControllingCondition, specData) class ConstDefiner(PropertyDefiner): """ A class for definining constants on the interface object """ def __init__(self, descriptor, name): PropertyDefiner.__init__(self, descriptor, name) self.name = name self.regular = [m for m in descriptor.interface.members if m.isConst()] def generateArray(self, array, name): if 
len(array) == 0: return "" def specData(const): return (str_to_const_array(const.identifier.name), convertConstIDLValueToJSVal(const.value)) return self.generateGuardedArray( array, name, ' ConstantSpec { name: %s, value: %s }', None, 'ConstantSpec', PropertyDefiner.getControllingCondition, specData) # We'll want to insert the indent at the beginnings of lines, but we # don't want to indent empty lines. So only indent lines that have a # non-newline character on them. lineStartDetector = re.compile("^(?=[^\n])", re.MULTILINE) class CGIndenter(CGThing): """ A class that takes another CGThing and generates code that indents that CGThing by some number of spaces. The default indent is four spaces. """ def __init__(self, child, indentLevel=4): CGThing.__init__(self) self.child = child self.indent = " " * indentLevel def define(self): defn = self.child.define() if defn != "": return re.sub(lineStartDetector, self.indent, defn) else: return defn class CGWrapper(CGThing): """ Generic CGThing that wraps other CGThings with pre and post text. """ def __init__(self, child, pre="", post="", reindent=False): CGThing.__init__(self) self.child = child self.pre = pre self.post = post self.reindent = reindent def define(self): defn = self.child.define() if self.reindent: # We don't use lineStartDetector because we don't want to # insert whitespace at the beginning of our _first_ line. defn = stripTrailingWhitespace( defn.replace("\n", "\n" + (" " * len(self.pre)))) return self.pre + defn + self.post class CGImports(CGWrapper): """ Generates the appropriate import/use statements. """ def __init__(self, child, descriptors, callbacks, dictionaries, enums, typedefs, imports, config, ignored_warnings=None): """ Adds a set of imports. """ if ignored_warnings is None: ignored_warnings = [ 'non_camel_case_types', 'non_upper_case_globals', 'unused_imports', 'unused_variables', 'unused_assignments', 'unused_mut', ] def componentTypes(type): if type.isType() and type.nullable(): type = type.unroll() if type.isUnion(): return type.flatMemberTypes if type.isDictionary(): return [type] + getTypesFromDictionary(type) if type.isSequence(): return componentTypes(type.inner) return [type] def isImportable(type): if not type.isType(): assert (type.isInterface() or type.isDictionary() or type.isEnum() or type.isNamespace()) return True return not (type.builtin or type.isSequence() or type.isUnion()) def relatedTypesForSignatures(method): types = [] for (returnType, arguments) in method.signatures(): types += componentTypes(returnType) for arg in arguments: types += componentTypes(arg.type) return types def getIdentifier(t): if t.isType(): if t.nullable(): t = t.inner if t.isCallback(): return t.callback.identifier return t.identifier assert t.isInterface() or t.isDictionary() or t.isEnum() or t.isNamespace() return t.identifier def removeWrapperAndNullableTypes(types): normalized = [] for t in types: while (t.isType() and t.nullable()) or isinstance(t, IDLWrapperType): t = t.inner if isImportable(t): normalized += [t] return normalized types = [] for d in descriptors: if not d.interface.isCallback(): types += [d.interface] if d.interface.isIteratorInterface(): types += [d.interface.iterableInterface] members = d.interface.members + d.interface.namedConstructors constructor = d.interface.ctor() if constructor: members += [constructor] if d.proxy: members += [o for o in d.operations.values() if o] for m in members: if m.isMethod(): types += relatedTypesForSignatures(m) elif m.isAttr(): types += componentTypes(m.type) # Import
the type names used in the callbacks that are being defined. for c in callbacks: types += relatedTypesForSignatures(c) # Import the type names used in the dictionaries that are being defined. for d in dictionaries: types += componentTypes(d) # Import the type names used in the typedefs that are being defined. for t in typedefs: if not t.innerType.isCallback(): types += componentTypes(t.innerType) # Normalize the types we've collected and remove any ones which can't be imported. types = removeWrapperAndNullableTypes(types) descriptorProvider = config.getDescriptorProvider() extras = [] for t in types: # Importing these types in the same module that defines them is an error. if t in dictionaries or t in enums: continue if t.isInterface() or t.isNamespace(): name = getIdentifier(t).name descriptor = descriptorProvider.getDescriptor(name) if name != 'GlobalScope': extras += [descriptor.path] parentName = descriptor.getParentName() if parentName: descriptor = descriptorProvider.getDescriptor(parentName) extras += [descriptor.path, descriptor.bindingPath] elif t.isType() and t.isRecord(): extras += ['crate::dom::bindings::mozmap::MozMap'] elif isinstance(t, IDLPromiseType): extras += ['crate::dom::promise::Promise'] else: if t.isEnum(): extras += [getModuleFromObject(t) + '::' + getIdentifier(t).name + 'Values'] extras += [getModuleFromObject(t) + '::' + getIdentifier(t).name] statements = [] if len(ignored_warnings) > 0: statements.append('#![allow(%s)]' % ','.join(ignored_warnings)) statements.extend('use %s;' % i for i in sorted(set(imports + extras))) CGWrapper.__init__(self, child, pre='\n'.join(statements) + '\n\n') class CGIfWrapper(CGWrapper): def __init__(self, condition, child): pre = CGWrapper(CGGeneric(condition), pre="if ", post=" {\n", reindent=True) CGWrapper.__init__(self, CGIndenter(child), pre=pre.define(), post="\n}") class CGTemplatedType(CGWrapper): def __init__(self, templateName, child): CGWrapper.__init__(self, child, pre=templateName + "<", post=">") class CGNamespace(CGWrapper): def __init__(self, namespace, child, public=False): pre = "%smod %s {\n" % ("pub " if public else "", namespace) post = "} // mod %s" % namespace CGWrapper.__init__(self, child, pre=pre, post=post) @staticmethod def build(namespaces, child, public=False): """ Static helper method to build multiple wrapped namespaces. """ if not namespaces: return child inner = CGNamespace.build(namespaces[1:], child, public=public) return CGNamespace(namespaces[0], inner, public=public) def DOMClassTypeId(desc): protochain = desc.prototypeChain inner = "" if desc.hasDescendants(): if desc.interface.getExtendedAttribute("Abstract"): return "crate::dom::bindings::codegen::InheritTypes::TopTypeId { abstract_: () }" name = desc.interface.identifier.name inner = "(crate::dom::bindings::codegen::InheritTypes::%sTypeId::%s)" % (name, name) elif len(protochain) == 1: return "crate::dom::bindings::codegen::InheritTypes::TopTypeId { alone: () }" reversed_protochain = list(reversed(protochain)) for (child, parent) in zip(reversed_protochain, reversed_protochain[1:]): inner = "(crate::dom::bindings::codegen::InheritTypes::%sTypeId::%s%s)" % (parent, child, inner) return "crate::dom::bindings::codegen::InheritTypes::TopTypeId { %s: %s }" % (protochain[0].lower(), inner) def DOMClass(descriptor): protoList = ['PrototypeList::ID::' + proto for proto in descriptor.prototypeChain] # Pad out the list to the right length with ID::Last so we # guarantee that all the lists are the same length. 
ID::Last # is never the ID of any prototype, so it's safe to use as # padding. protoList.extend(['PrototypeList::ID::Last'] * (descriptor.config.maxProtoChainLength - len(protoList))) prototypeChainString = ', '.join(protoList) mallocSizeOf = 'malloc_size_of_including_raw_self::<%s>' % descriptor.concreteType if descriptor.isGlobal(): globals_ = camel_to_upper_snake(descriptor.name) else: globals_ = 'EMPTY' return """\ DOMClass { interface_chain: [ %s ], type_id: %s, malloc_size_of: %s as unsafe fn(&mut _, _) -> _, global: InterfaceObjectMap::Globals::%s, }""" % (prototypeChainString, DOMClassTypeId(descriptor), mallocSizeOf, globals_) class CGDOMJSClass(CGThing): """ Generate a DOMJSClass for a given descriptor """ def __init__(self, descriptor): CGThing.__init__(self) self.descriptor = descriptor def define(self): parentName = self.descriptor.getParentName() if not parentName: parentName = "crate::dom::bindings::reflector::Reflector" args = { "domClass": DOMClass(self.descriptor), "enumerateHook": "None", "finalizeHook": FINALIZE_HOOK_NAME, "flags": "JSCLASS_FOREGROUND_FINALIZE", "name": str_to_const_array(self.descriptor.interface.identifier.name), "resolveHook": "None", "slots": "1", "traceHook": TRACE_HOOK_NAME, } if self.descriptor.isGlobal(): assert not self.descriptor.weakReferenceable args["enumerateHook"] = "Some(enumerate_global)" args["flags"] = "JSCLASS_IS_GLOBAL | JSCLASS_DOM_GLOBAL | JSCLASS_FOREGROUND_FINALIZE" args["slots"] = "JSCLASS_GLOBAL_SLOT_COUNT + 1" args["resolveHook"] = "Some(resolve_global)" args["traceHook"] = "js::jsapi::JS_GlobalObjectTraceHook" elif self.descriptor.weakReferenceable: args["slots"] = "2" return """\ static CLASS_OPS: js::jsapi::JSClassOps = js::jsapi::JSClassOps { addProperty: None, delProperty: None, enumerate: %(enumerateHook)s, newEnumerate: None, resolve: %(resolveHook)s, mayResolve: None, finalize: Some(%(finalizeHook)s), call: None, hasInstance: None, construct: None, trace: Some(%(traceHook)s), }; static Class: DOMJSClass = DOMJSClass { base: js::jsapi::JSClass { name: %(name)s as *const u8 as *const libc::c_char, flags: JSCLASS_IS_DOMJSCLASS | %(flags)s | (((%(slots)s) & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT) /* JSCLASS_HAS_RESERVED_SLOTS(%(slots)s) */, cOps: &CLASS_OPS, reserved: [0 as *mut _; 3], }, dom_class: %(domClass)s }; """ % args class CGAssertInheritance(CGThing): """ Generate a type assertion for inheritance """ def __init__(self, descriptor): CGThing.__init__(self) self.descriptor = descriptor def define(self): parent = self.descriptor.interface.parent parentName = "" if parent: parentName = parent.identifier.name else: parentName = "crate::dom::bindings::reflector::Reflector" selfName = self.descriptor.interface.identifier.name if selfName == "PaintRenderingContext2D": # PaintRenderingContext2D embeds a CanvasRenderingContext2D # instead of a Reflector as an optimization, # but this is fine since CanvasRenderingContext2D # also has a reflector # # FIXME *RenderingContext2D should use Inline parentName = "crate::dom::canvasrenderingcontext2d::CanvasRenderingContext2D" args = { "parentName": parentName, "selfName": selfName, } return """\ impl %(selfName)s { fn __assert_parent_type(&self) { use crate::dom::bindings::inheritance::HasParent; // If this type assertion fails, make sure the first field of your // DOM struct is of the correct type -- it must be the parent class. 
let _: &%(parentName)s = self.as_parent(); } } """ % args def str_to_const_array(s): return "b\"%s\\0\"" % s class CGPrototypeJSClass(CGThing): def __init__(self, descriptor): CGThing.__init__(self) self.descriptor = descriptor def define(self): name = str_to_const_array(self.descriptor.interface.identifier.name + "Prototype") slotCount = 0 if self.descriptor.hasUnforgeableMembers: slotCount += 1 return """\ static PrototypeClass: JSClass = JSClass { name: %(name)s as *const u8 as *const libc::c_char, flags: // JSCLASS_HAS_RESERVED_SLOTS(%(slotCount)s) (%(slotCount)s & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT, cOps: 0 as *const _, reserved: [0 as *mut os::raw::c_void; 3] }; """ % {'name': name, 'slotCount': slotCount} class CGInterfaceObjectJSClass(CGThing): def __init__(self, descriptor): assert descriptor.interface.hasInterfaceObject() and not descriptor.interface.isCallback() CGThing.__init__(self) self.descriptor = descriptor def define(self): if self.descriptor.interface.isNamespace(): classString = self.descriptor.interface.getExtendedAttribute("ClassString") if classString: classString = classString[0] else: classString = "Object" return """\ static NAMESPACE_OBJECT_CLASS: NamespaceObjectClass = unsafe { NamespaceObjectClass::new(%s) }; """ % str_to_const_array(classString) if self.descriptor.interface.ctor(): constructorBehavior = "InterfaceConstructorBehavior::call(%s)" % CONSTRUCT_HOOK_NAME else: constructorBehavior = "InterfaceConstructorBehavior::throw()" name = self.descriptor.interface.identifier.name args = { "constructorBehavior": constructorBehavior, "id": name, "representation": 'b"function %s() {\\n [native code]\\n}"' % name, "depth": self.descriptor.prototypeDepth } return """\ static INTERFACE_OBJECT_CLASS: NonCallbackInterfaceObjectClass = NonCallbackInterfaceObjectClass::new( { // Intermediate `const` because as of nightly-2018-10-05, // rustc is conservative in promotion to `'static` of the return values of `const fn`s: // https://github.com/rust-lang/rust/issues/54846 // https://github.com/rust-lang/rust/pull/53851 const BEHAVIOR: InterfaceConstructorBehavior = %(constructorBehavior)s; &BEHAVIOR }, %(representation)s, PrototypeList::ID::%(id)s, %(depth)s); """ % args class CGList(CGThing): """ Generate code for a list of GCThings. Just concatenates them together, with an optional joiner string. "\n" is a common joiner. """ def __init__(self, children, joiner=""): CGThing.__init__(self) # Make a copy of the kids into a list, because if someone passes in a # generator we won't be able to both declare and define ourselves, or # define ourselves more than once! self.children = list(children) self.joiner = joiner def append(self, child): self.children.append(child) def prepend(self, child): self.children.insert(0, child) def join(self, iterable): return self.joiner.join(s for s in iterable if len(s) > 0) def define(self): return self.join(child.define() for child in self.children if child is not None) def __len__(self): return len(self.children) class CGIfElseWrapper(CGList): def __init__(self, condition, ifTrue, ifFalse): kids = [CGIfWrapper(condition, ifTrue), CGWrapper(CGIndenter(ifFalse), pre=" else {\n", post="\n}")] CGList.__init__(self, kids) class CGGeneric(CGThing): """ A class that spits out a fixed string into the codegen. Can spit out a separate string for the declaration too. 
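    A minimal usage sketch:

        CGGeneric("return true;\n").define()  # evaluates to "return true;\n"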
""" def __init__(self, text): self.text = text def define(self): return self.text class CGCallbackTempRoot(CGGeneric): def __init__(self, name): CGGeneric.__init__(self, "%s::new(cx, ${val}.get().to_object())" % name) def getAllTypes(descriptors, dictionaries, callbacks, typedefs): """ Generate all the types we're dealing with. For each type, a tuple containing type, descriptor, dictionary is yielded. The descriptor and dictionary can be None if the type does not come from a descriptor or dictionary; they will never both be non-None. """ for d in descriptors: for t in getTypesFromDescriptor(d): yield (t, d, None) for dictionary in dictionaries: for t in getTypesFromDictionary(dictionary): yield (t, None, dictionary) for callback in callbacks: for t in getTypesFromCallback(callback): yield (t, None, None) for typedef in typedefs: yield (typedef.innerType, None, None) def UnionTypes(descriptors, dictionaries, callbacks, typedefs, config): """ Returns a CGList containing CGUnionStructs for every union. """ imports = [ 'crate::dom', 'crate::dom::bindings::codegen::PrototypeList', 'crate::dom::bindings::conversions::ConversionResult', 'crate::dom::bindings::conversions::FromJSValConvertible', 'crate::dom::bindings::conversions::ToJSValConvertible', 'crate::dom::bindings::conversions::ConversionBehavior', 'crate::dom::bindings::conversions::StringificationBehavior', 'crate::dom::bindings::conversions::root_from_handlevalue', 'std::ptr::NonNull', 'crate::dom::bindings::mozmap::MozMap', 'crate::dom::bindings::root::DomRoot', 'crate::dom::bindings::str::ByteString', 'crate::dom::bindings::str::DOMString', 'crate::dom::bindings::str::USVString', 'crate::dom::bindings::trace::RootedTraceableBox', 'crate::dom::types::*', 'js::error::throw_type_error', 'js::rust::HandleValue', 'js::jsapi::Heap', 'js::jsapi::JSContext', 'js::jsapi::JSObject', 'js::rust::MutableHandleValue', 'js::jsval::JSVal', 'js::typedarray' ] # Now find all the things we'll need as arguments and return values because # we need to wrap or unwrap them. unionStructs = dict() for (t, descriptor, dictionary) in getAllTypes(descriptors, dictionaries, callbacks, typedefs): if dictionary: imports.append("%s::%s" % (CGDictionary.makeModuleName(dictionary), CGDictionary.makeDictionaryName(dictionary))) t = t.unroll() if not t.isUnion(): continue name = str(t) if name not in unionStructs: provider = descriptor or config.getDescriptorProvider() unionStructs[name] = CGList([ CGUnionStruct(t, provider), CGUnionConversionStruct(t, provider) ]) # Sort unionStructs by key, retrieve value unionStructs = (i[1] for i in sorted(unionStructs.items(), key=operator.itemgetter(0))) return CGImports(CGList(unionStructs, "\n\n"), descriptors=[], callbacks=[], dictionaries=[], enums=[], typedefs=[], imports=imports, config=config, ignored_warnings=[]) class Argument(): """ A class for outputting the type and name of an argument """ def __init__(self, argType, name, default=None, mutable=False): self.argType = argType self.name = name self.default = default self.mutable = mutable def declare(self): string = ('mut ' if self.mutable else '') + self.name + ((': ' + self.argType) if self.argType else '') # XXXjdm Support default arguments somehow :/ # if self.default is not None: # string += " = " + self.default return string def define(self): return self.argType + ' ' + self.name class CGAbstractMethod(CGThing): """ An abstract class for generating code for a method. Subclasses should override definition_body to create the actual code. 
descriptor is the descriptor for the interface the method is associated with name is the name of the method as a string returnType is the IDLType of the return value args is a list of Argument objects inline is accepted for signature compatibility but is currently unused by this generator. alwaysInline should be True to annotate the generated method with #[inline]. If templateArgs is not None it should be a list of strings containing template arguments, and the function will be templatized using those arguments. docs is None or documentation for the method in a string. unsafe is used to add the decorator 'unsafe' to a function, giving as a result an 'unsafe fn()' declaration. """ def __init__(self, descriptor, name, returnType, args, inline=False, alwaysInline=False, extern=False, unsafe=False, pub=False, templateArgs=None, docs=None, doesNotPanic=False): CGThing.__init__(self) self.descriptor = descriptor self.name = name self.returnType = returnType self.args = args self.alwaysInline = alwaysInline self.extern = extern self.unsafe = extern or unsafe self.templateArgs = templateArgs self.pub = pub self.docs = docs self.catchPanic = self.extern and not doesNotPanic def _argstring(self): return ', '.join([a.declare() for a in self.args]) def _template(self): if self.templateArgs is None: return '' return '<%s>\n' % ', '.join(self.templateArgs) def _docs(self): if self.docs is None: return '' lines = self.docs.splitlines() return ''.join('/// %s\n' % line for line in lines) def _decorators(self): decorators = [] if self.alwaysInline: decorators.append('#[inline]') if self.pub: decorators.append('pub') if self.unsafe: decorators.append('unsafe') if self.extern: decorators.append('extern') if not decorators: return '' return ' '.join(decorators) + ' ' def _returnType(self): return (" -> %s" % self.returnType) if self.returnType != "void" else "" def define(self): body = self.definition_body() if self.catchPanic: body = CGWrapper(CGIndenter(body), pre="return wrap_panic(panic::AssertUnwindSafe(|| {\n", post=("""\n}), %s);""" % ("()" if self.returnType == "void" else "false"))) return CGWrapper(CGIndenter(body), pre=self.definition_prologue(), post=self.definition_epilogue()).define() def definition_prologue(self): return "%s%sfn %s%s(%s)%s {\n" % (self._docs(), self._decorators(), self.name, self._template(), self._argstring(), self._returnType()) def definition_epilogue(self): return "\n}\n" def definition_body(self): raise NotImplementedError # Override me! class CGConstructorEnabled(CGAbstractMethod): """ A method for testing whether we should be exposing this interface object. This can perform various tests depending on what conditions are specified on the interface.
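    As an illustrative sketch (assuming a hypothetical interface exposed
    only on Window and gated on a pref named "dom.example.enabled"), the
    generated body comes out roughly as:

        is_exposed_in(aObj, InterfaceObjectMap::Globals::WINDOW) &&
        PREFS.get("dom.example.enabled").as_boolean().unwrap_or(false)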
""" def __init__(self, descriptor): CGAbstractMethod.__init__(self, descriptor, 'ConstructorEnabled', 'bool', [Argument("*mut JSContext", "aCx"), Argument("HandleObject", "aObj")], unsafe=True) def definition_body(self): conditions = [] iface = self.descriptor.interface bits = " | ".join(sorted( "InterfaceObjectMap::Globals::" + camel_to_upper_snake(i) for i in iface.exposureSet )) conditions.append("is_exposed_in(aObj, %s)" % bits) pref = iface.getExtendedAttribute("Pref") if pref: assert isinstance(pref, list) and len(pref) == 1 conditions.append('PREFS.get("%s").as_boolean().unwrap_or(false)' % pref[0]) func = iface.getExtendedAttribute("Func") if func: assert isinstance(func, list) and len(func) == 1 conditions.append("%s(aCx, aObj)" % func[0]) return CGList((CGGeneric(cond) for cond in conditions), " &&\n") def CreateBindingJSObject(descriptor, parent=None): assert not descriptor.isGlobal() create = "let raw = Box::into_raw(object);\nlet _rt = RootedTraceable::new(&*raw);\n" if descriptor.proxy: create += """ let handler = RegisterBindings::PROXY_HANDLERS[PrototypeList::Proxies::%s as usize]; rooted!(in(cx) let private = PrivateValue(raw as *const libc::c_void)); let obj = NewProxyObject(cx, handler, Handle::from_raw(UndefinedHandleValue), proto.get(), %s.get(), ptr::null_mut(), ptr::null_mut()); assert!(!obj.is_null()); SetProxyReservedSlot(obj, 0, &private.get()); rooted!(in(cx) let obj = obj);\ """ % (descriptor.name, parent) else: create += ("rooted!(in(cx) let obj = JS_NewObjectWithGivenProto(\n" " cx, &Class.base as *const JSClass, proto.handle()));\n" "assert!(!obj.is_null());\n" "\n" "let val = PrivateValue(raw as *const libc::c_void);\n" "\n" "JS_SetReservedSlot(obj.get(), DOM_OBJECT_SLOT, &val);") if descriptor.weakReferenceable: create += """ let val = PrivateValue(ptr::null()); JS_SetReservedSlot(obj.get(), DOM_WEAK_SLOT, &val);""" return create def InitUnforgeablePropertiesOnHolder(descriptor, properties): """ Define the unforgeable properties on the unforgeable holder for the interface represented by descriptor. properties is a PropertyArrays instance. """ unforgeables = [] defineUnforgeableAttrs = "define_guarded_properties(cx, unforgeable_holder.handle(), %s);" defineUnforgeableMethods = "define_guarded_methods(cx, unforgeable_holder.handle(), %s);" unforgeableMembers = [ (defineUnforgeableAttrs, properties.unforgeable_attrs), (defineUnforgeableMethods, properties.unforgeable_methods), ] for template, array in unforgeableMembers: if array.length() > 0: unforgeables.append(CGGeneric(template % array.variableName())) return CGList(unforgeables, "\n") def CopyUnforgeablePropertiesToInstance(descriptor): """ Copy the unforgeable properties from the unforgeable holder for this interface to the instance object we have. """ if not descriptor.hasUnforgeableMembers: return "" copyCode = "" # For proxies, we want to define on the expando object, not directly on the # reflector, so we can make sure we don't get confused by named getters. if descriptor.proxy: copyCode += """\ rooted!(in(cx) let mut expando = ptr::null_mut::<JSObject>()); ensure_expando_object(cx, obj.handle().into(), expando.handle_mut()); """ obj = "expando" else: obj = "obj" # We can't do the fast copy for globals, because we can't allocate the # unforgeable holder for those with the right JSClass. Luckily, there # aren't too many globals being created. 
if descriptor.isGlobal(): copyFunc = "JS_CopyPropertiesFrom" else: copyFunc = "JS_InitializePropertiesFromCompatibleNativeObject" copyCode += """\ let mut slot = UndefinedValue(); JS_GetReservedSlot(proto.get(), DOM_PROTO_UNFORGEABLE_HOLDER_SLOT, &mut slot); rooted!(in(cx) let mut unforgeable_holder = ptr::null_mut::<JSObject>()); unforgeable_holder.handle_mut().set(slot.to_object()); assert!(%(copyFunc)s(cx, %(obj)s.handle(), unforgeable_holder.handle())); """ % {'copyFunc': copyFunc, 'obj': obj} return copyCode class CGWrapMethod(CGAbstractMethod): """ Class that generates the FooBinding::Wrap function for non-callback interfaces. """ def __init__(self, descriptor): assert not descriptor.interface.isCallback() assert not descriptor.isGlobal() args = [Argument('*mut JSContext', 'cx'), Argument('&GlobalScope', 'scope'), Argument("Box<%s>" % descriptor.concreteType, 'object')] retval = 'DomRoot<%s>' % descriptor.concreteType CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args, pub=True, unsafe=True) def definition_body(self): unforgeable = CopyUnforgeablePropertiesToInstance(self.descriptor) create = CreateBindingJSObject(self.descriptor, "scope") return CGGeneric("""\ let scope = scope.reflector().get_jsobject(); assert!(!scope.get().is_null()); assert!(((*get_object_class(scope.get())).flags & JSCLASS_IS_GLOBAL) != 0); rooted!(in(cx) let mut proto = ptr::null_mut::<JSObject>()); let _ac = JSAutoCompartment::new(cx, scope.get()); GetProtoObject(cx, scope, proto.handle_mut()); assert!(!proto.is_null()); %(createObject)s %(copyUnforgeable)s (*raw).init_reflector(obj.get()); DomRoot::from_ref(&*raw)""" % {'copyUnforgeable': unforgeable, 'createObject': create}) class CGWrapGlobalMethod(CGAbstractMethod): """ Class that generates the FooBinding::Wrap function for global interfaces. """ def __init__(self, descriptor, properties): assert not descriptor.interface.isCallback() assert descriptor.isGlobal() args = [Argument('*mut JSContext', 'cx'), Argument("Box<%s>" % descriptor.concreteType, 'object')] retval = 'DomRoot<%s>' % descriptor.concreteType CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args, pub=True, unsafe=True) self.properties = properties def definition_body(self): values = { "unforgeable": CopyUnforgeablePropertiesToInstance(self.descriptor) } pairs = [ ("define_guarded_properties", self.properties.attrs), ("define_guarded_methods", self.properties.methods), ("define_guarded_constants", self.properties.consts) ] members = ["%s(cx, obj.handle(), %s);" % (function, array.variableName()) for (function, array) in pairs if array.length() > 0] values["members"] = "\n".join(members) return CGGeneric("""\ let raw = Box::into_raw(object); let _rt = RootedTraceable::new(&*raw); rooted!(in(cx) let mut obj = ptr::null_mut::<JSObject>()); create_global_object( cx, &Class.base, raw as *const libc::c_void, _trace, obj.handle_mut()); assert!(!obj.is_null()); (*raw).init_reflector(obj.get()); let _ac = JSAutoCompartment::new(cx, obj.get()); rooted!(in(cx) let mut proto = ptr::null_mut::<JSObject>()); GetProtoObject(cx, obj.handle(), proto.handle_mut()); assert!(JS_SplicePrototype(cx, obj.handle(), proto.handle())); let mut immutable = false; assert!(JS_SetImmutablePrototype(cx, obj.handle(), &mut immutable)); assert!(immutable); %(members)s %(unforgeable)s DomRoot::from_ref(&*raw)\ """ % values) class CGIDLInterface(CGThing): """ Class for codegen of an implementation of the IDLInterface trait. 
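    For a leaf interface (no concrete or proxy descendants, not a proxy),
    the generated `derives` reduces to a pointer comparison, roughly:

        class as *const _ == &Class.dom_class as *const _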
""" def __init__(self, descriptor): CGThing.__init__(self) self.descriptor = descriptor def define(self): interface = self.descriptor.interface name = self.descriptor.concreteType if (interface.getUserData("hasConcreteDescendant", False) or interface.getUserData("hasProxyDescendant", False)): depth = self.descriptor.prototypeDepth check = "class.interface_chain[%s] == PrototypeList::ID::%s" % (depth, name) elif self.descriptor.proxy: check = "class as *const _ == &Class as *const _" else: check = "class as *const _ == &Class.dom_class as *const _" return """\ impl IDLInterface for %(name)s { #[inline] fn derives(class: &'static DOMClass) -> bool { %(check)s } } impl PartialEq for %(name)s { fn eq(&self, other: &%(name)s) -> bool { self as *const %(name)s == &*other } } """ % {'check': check, 'name': name} class CGAbstractExternMethod(CGAbstractMethod): """ Abstract base class for codegen of implementation-only (no declaration) static methods. """ def __init__(self, descriptor, name, returnType, args, doesNotPanic=False): CGAbstractMethod.__init__(self, descriptor, name, returnType, args, inline=False, extern=True, doesNotPanic=doesNotPanic) class PropertyArrays(): def __init__(self, descriptor): self.static_methods = MethodDefiner(descriptor, "StaticMethods", static=True, unforgeable=False) self.static_attrs = AttrDefiner(descriptor, "StaticAttributes", static=True, unforgeable=False) self.methods = MethodDefiner(descriptor, "Methods", static=False, unforgeable=False) self.unforgeable_methods = MethodDefiner(descriptor, "UnforgeableMethods", static=False, unforgeable=True) self.attrs = AttrDefiner(descriptor, "Attributes", static=False, unforgeable=False) self.unforgeable_attrs = AttrDefiner(descriptor, "UnforgeableAttributes", static=False, unforgeable=True) self.consts = ConstDefiner(descriptor, "Constants") pass @staticmethod def arrayNames(): return [ "static_methods", "static_attrs", "methods", "unforgeable_methods", "attrs", "unforgeable_attrs", "consts", ] def variableNames(self): names = {} for array in self.arrayNames(): names[array] = getattr(self, array).variableName() return names def __str__(self): define = "" for array in self.arrayNames(): define += str(getattr(self, array)) return define class CGCreateInterfaceObjectsMethod(CGAbstractMethod): """ Generate the CreateInterfaceObjects method for an interface descriptor. properties should be a PropertyArrays instance. 
""" def __init__(self, descriptor, properties, haveUnscopables): args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global'), Argument('*mut ProtoOrIfaceArray', 'cache')] CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', 'void', args, unsafe=True) self.properties = properties self.haveUnscopables = haveUnscopables def definition_body(self): name = self.descriptor.interface.identifier.name if self.descriptor.interface.isNamespace(): if self.descriptor.interface.getExtendedAttribute("ProtoObjectHack"): proto = "JS_GetObjectPrototype(cx, global)" else: proto = "JS_NewPlainObject(cx)" if self.properties.static_methods.length(): methods = self.properties.static_methods.variableName() else: methods = "&[]" return CGGeneric("""\ rooted!(in(cx) let proto = %(proto)s); assert!(!proto.is_null()); rooted!(in(cx) let mut namespace = ptr::null_mut::<JSObject>()); create_namespace_object(cx, global, proto.handle(), &NAMESPACE_OBJECT_CLASS, %(methods)s, %(name)s, namespace.handle_mut()); assert!(!namespace.is_null()); assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null()); (*cache)[PrototypeList::Constructor::%(id)s as usize] = namespace.get(); <*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize), ptr::null_mut(), namespace.get()); """ % {"id": MakeNativeName(name), "methods": methods, "name": str_to_const_array(name), "proto": proto}) if self.descriptor.interface.isCallback(): assert not self.descriptor.interface.ctor() and self.descriptor.interface.hasConstants() return CGGeneric("""\ rooted!(in(cx) let mut interface = ptr::null_mut::<JSObject>()); create_callback_interface_object(cx, global, sConstants, %(name)s, interface.handle_mut()); assert!(!interface.is_null()); assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null()); (*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.get(); <*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize), ptr::null_mut(), interface.get()); """ % {"id": name, "name": str_to_const_array(name)}) parentName = self.descriptor.getParentName() if not parentName: if self.descriptor.interface.getExtendedAttribute("ExceptionClass"): getPrototypeProto = "prototype_proto.set(JS_GetErrorPrototype(cx))" elif self.descriptor.interface.isIteratorInterface(): getPrototypeProto = "prototype_proto.set(JS_GetIteratorPrototype(cx))" else: getPrototypeProto = "prototype_proto.set(JS_GetObjectPrototype(cx, global))" else: getPrototypeProto = ("%s::GetProtoObject(cx, global, prototype_proto.handle_mut())" % toBindingNamespace(parentName)) code = [CGGeneric("""\ rooted!(in(cx) let mut prototype_proto = ptr::null_mut::<JSObject>()); %s; assert!(!prototype_proto.is_null());""" % getPrototypeProto)] properties = { "id": name, "unscopables": "unscopable_names" if self.haveUnscopables else "&[]" } for arrayName in self.properties.arrayNames(): array = getattr(self.properties, arrayName) if array.length(): properties[arrayName] = array.variableName() else: properties[arrayName] = "&[]" if self.descriptor.isGlobal(): assert not self.haveUnscopables proto_properties = { "attrs": "&[]", "consts": "&[]", "id": name, "methods": "&[]", "unscopables": "&[]", } else: proto_properties = properties code.append(CGGeneric(""" rooted!(in(cx) let mut prototype = ptr::null_mut::<JSObject>()); create_interface_prototype_object(cx, prototype_proto.handle().into(), &PrototypeClass, %(methods)s, %(attrs)s, %(consts)s, %(unscopables)s, 
prototype.handle_mut().into()); assert!(!prototype.is_null()); assert!((*cache)[PrototypeList::ID::%(id)s as usize].is_null()); (*cache)[PrototypeList::ID::%(id)s as usize] = prototype.get(); <*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::ID::%(id)s as isize), ptr::null_mut(), prototype.get()); """ % proto_properties)) if self.descriptor.interface.hasInterfaceObject(): properties["name"] = str_to_const_array(name) if self.descriptor.interface.ctor(): properties["length"] = methodLength(self.descriptor.interface.ctor()) else: properties["length"] = 0 parentName = self.descriptor.getParentName() if parentName: parentName = toBindingNamespace(parentName) code.append(CGGeneric(""" rooted!(in(cx) let mut interface_proto = ptr::null_mut::<JSObject>()); %s::GetConstructorObject(cx, global, interface_proto.handle_mut());""" % parentName)) else: code.append(CGGeneric(""" rooted!(in(cx) let interface_proto = JS_GetFunctionPrototype(cx, global));""")) code.append(CGGeneric("""\ assert!(!interface_proto.is_null()); rooted!(in(cx) let mut interface = ptr::null_mut::<JSObject>()); create_noncallback_interface_object(cx, global.into(), interface_proto.handle(), &INTERFACE_OBJECT_CLASS, %(static_methods)s, %(static_attrs)s, %(consts)s, prototype.handle(), %(name)s, %(length)s, interface.handle_mut()); assert!(!interface.is_null());""" % properties)) if self.descriptor.shouldCacheConstructor(): code.append(CGGeneric("""\ assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null()); (*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.get(); <*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize), ptr::null_mut(), interface.get()); """ % properties)) aliasedMembers = [m for m in self.descriptor.interface.members if m.isMethod() and m.aliases] if aliasedMembers: def defineAlias(alias): if alias == "@@iterator": symbolJSID = "RUST_SYMBOL_TO_JSID(GetWellKnownSymbol(cx, SymbolCode::iterator))" getSymbolJSID = CGGeneric(fill("rooted!(in(cx) let iteratorId = ${symbolJSID});", symbolJSID=symbolJSID)) defineFn = "JS_DefinePropertyById2" prop = "iteratorId.handle()" elif alias.startswith("@@"): raise TypeError("Can't handle any well-known Symbol other than @@iterator") else: getSymbolJSID = None defineFn = "JS_DefineProperty" prop = '"%s"' % alias return CGList([ getSymbolJSID, # XXX If we ever create non-enumerable properties that can # be aliased, we should consider making the aliases # match the enumerability of the property being aliased. CGGeneric(fill( """ assert!(${defineFn}(cx, prototype.handle(), ${prop}, aliasedVal.handle(), JSPROP_ENUMERATE as u32)); """, defineFn=defineFn, prop=prop)) ], "\n") def defineAliasesFor(m): return CGList([ CGGeneric(fill( """ assert!(JS_GetProperty(cx, prototype.handle(), ${prop} as *const u8 as *const _, aliasedVal.handle_mut())); """, prop=str_to_const_array(m.identifier.name))) ] + [defineAlias(alias) for alias in sorted(m.aliases)]) defineAliases = CGList([ CGGeneric(fill(""" // Set up aliases on the interface prototype object we just created. 
""")), CGGeneric("rooted!(in(cx) let mut aliasedVal = UndefinedValue());\n\n") ] + [defineAliasesFor(m) for m in sorted(aliasedMembers)]) code.append(defineAliases) constructors = self.descriptor.interface.namedConstructors if constructors: decl = "let named_constructors: [(ConstructorClassHook, &'static [u8], u32); %d]" % len(constructors) specs = [] for constructor in constructors: hook = CONSTRUCT_HOOK_NAME + "_" + constructor.identifier.name name = str_to_const_array(constructor.identifier.name) length = methodLength(constructor) specs.append(CGGeneric("(%s as ConstructorClassHook, %s, %d)" % (hook, name, length))) values = CGIndenter(CGList(specs, "\n"), 4) code.append(CGWrapper(values, pre="%s = [\n" % decl, post="\n];")) code.append(CGGeneric("create_named_constructors(cx, global, &named_constructors, prototype.handle());")) if self.descriptor.hasUnforgeableMembers: # We want to use the same JSClass and prototype as the object we'll # end up defining the unforgeable properties on in the end, so that # we can use JS_InitializePropertiesFromCompatibleNativeObject to do # a fast copy. In the case of proxies that's null, because the # expando object is a vanilla object, but in the case of other DOM # objects it's whatever our class is. # # Also, for a global we can't use the global's class; just use # nullpr and when we do the copy off the holder we'll take a slower # path. This also means that we don't need to worry about matching # the prototype. if self.descriptor.proxy or self.descriptor.isGlobal(): holderClass = "ptr::null()" holderProto = "HandleObject::null()" else: holderClass = "&Class.base as *const JSClass" holderProto = "prototype.handle()" code.append(CGGeneric(""" rooted!(in(cx) let mut unforgeable_holder = ptr::null_mut::<JSObject>()); unforgeable_holder.handle_mut().set( JS_NewObjectWithoutMetadata(cx, %(holderClass)s, %(holderProto)s)); assert!(!unforgeable_holder.is_null()); """ % {'holderClass': holderClass, 'holderProto': holderProto})) code.append(InitUnforgeablePropertiesOnHolder(self.descriptor, self.properties)) code.append(CGGeneric("""\ let val = ObjectValue(unforgeable_holder.get()); JS_SetReservedSlot(prototype.get(), DOM_PROTO_UNFORGEABLE_HOLDER_SLOT, &val)""")) return CGList(code, "\n") class CGGetPerInterfaceObject(CGAbstractMethod): """ A method for getting a per-interface object (a prototype object or interface constructor object). """ def __init__(self, descriptor, name, idPrefix="", pub=False): args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global'), Argument('MutableHandleObject', 'mut rval')] CGAbstractMethod.__init__(self, descriptor, name, 'void', args, pub=pub, unsafe=True) self.id = idPrefix + "::" + MakeNativeName(self.descriptor.name) def definition_body(self): return CGGeneric(""" assert!(((*get_object_class(global.get())).flags & JSCLASS_DOM_GLOBAL) != 0); /* Check to see whether the interface objects are already installed */ let proto_or_iface_array = get_proto_or_iface_array(global.get()); rval.set((*proto_or_iface_array)[%(id)s as usize]); if !rval.get().is_null() { return; } CreateInterfaceObjects(cx, global, proto_or_iface_array); rval.set((*proto_or_iface_array)[%(id)s as usize]); assert!(!rval.get().is_null()); """ % {"id": self.id}) class CGGetProtoObjectMethod(CGGetPerInterfaceObject): """ A method for getting the interface prototype object. 
""" def __init__(self, descriptor): CGGetPerInterfaceObject.__init__(self, descriptor, "GetProtoObject", "PrototypeList::ID", pub=True) def definition_body(self): return CGList([ CGGeneric("""\ /* Get the interface prototype object for this class. This will create the object as needed. */"""), CGGetPerInterfaceObject.definition_body(self), ]) class CGGetConstructorObjectMethod(CGGetPerInterfaceObject): """ A method for getting the interface constructor object. """ def __init__(self, descriptor): CGGetPerInterfaceObject.__init__(self, descriptor, "GetConstructorObject", "PrototypeList::Constructor", pub=True) def definition_body(self): return CGList([ CGGeneric("""\ /* Get the interface object for this class. This will create the object as needed. */"""), CGGetPerInterfaceObject.definition_body(self), ]) class CGDefineProxyHandler(CGAbstractMethod): """ A method to create and cache the proxy trap for a given interface. """ def __init__(self, descriptor): assert descriptor.proxy CGAbstractMethod.__init__(self, descriptor, 'DefineProxyHandler', '*const libc::c_void', [], pub=True, unsafe=True) def define(self): return CGAbstractMethod.define(self) def definition_body(self): customDefineProperty = 'proxyhandler::define_property' if self.descriptor.operations['IndexedSetter'] or self.descriptor.operations['NamedSetter']: customDefineProperty = 'defineProperty' customDelete = 'proxyhandler::delete' if self.descriptor.operations['NamedDeleter']: customDelete = 'delete' getOwnEnumerablePropertyKeys = "own_property_keys" if self.descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"): getOwnEnumerablePropertyKeys = "getOwnEnumerablePropertyKeys" args = { "defineProperty": customDefineProperty, "delete": customDelete, "getOwnEnumerablePropertyKeys": getOwnEnumerablePropertyKeys, "trace": TRACE_HOOK_NAME, "finalize": FINALIZE_HOOK_NAME, } return CGGeneric("""\ let traps = ProxyTraps { enter: None, getOwnPropertyDescriptor: Some(getOwnPropertyDescriptor), defineProperty: Some(%(defineProperty)s), ownPropertyKeys: Some(own_property_keys), delete_: Some(%(delete)s), enumerate: None, getPrototypeIfOrdinary: Some(proxyhandler::get_prototype_if_ordinary), preventExtensions: Some(proxyhandler::prevent_extensions), isExtensible: Some(proxyhandler::is_extensible), has: None, get: Some(get), set: None, call: None, construct: None, getPropertyDescriptor: Some(get_property_descriptor), hasOwn: Some(hasOwn), getOwnEnumerablePropertyKeys: Some(%(getOwnEnumerablePropertyKeys)s), nativeCall: None, hasInstance: None, objectClassIs: None, className: Some(className), fun_toString: None, boxedValue_unbox: None, defaultValue: None, trace: Some(%(trace)s), finalize: Some(%(finalize)s), objectMoved: None, isCallable: None, isConstructor: None, }; CreateProxyHandler(&traps, Class.as_void_ptr())\ """ % args) class CGDefineDOMInterfaceMethod(CGAbstractMethod): """ A method for resolve hooks to try to lazily define the interface object for a given interface. 
""" def __init__(self, descriptor): assert descriptor.interface.hasInterfaceObject() args = [ Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global'), ] CGAbstractMethod.__init__(self, descriptor, 'DefineDOMInterface', 'void', args, pub=True, unsafe=True) def define(self): return CGAbstractMethod.define(self) def definition_body(self): if self.descriptor.interface.isCallback() or self.descriptor.interface.isNamespace(): function = "GetConstructorObject" else: function = "GetProtoObject" return CGGeneric("""\ assert!(!global.get().is_null()); if !ConstructorEnabled(cx, global) { return; } rooted!(in(cx) let mut proto = ptr::null_mut::<JSObject>()); %s(cx, global, proto.handle_mut()); assert!(!proto.is_null());""" % (function,)) def needCx(returnType, arguments, considerTypes): return (considerTypes and (typeNeedsCx(returnType, True) or any(typeNeedsCx(a.type) for a in arguments))) class CGCallGenerator(CGThing): """ A class to generate an actual call to a C++ object. Assumes that the C++ object is stored in a variable whose name is given by the |object| argument. errorResult should be a string for the value to return in case of an exception from the native code, or None if no error reporting is needed. """ def __init__(self, errorResult, arguments, argsPre, returnType, extendedAttributes, descriptor, nativeMethodName, static, object="this", hasCEReactions=False): CGThing.__init__(self) assert errorResult is None or isinstance(errorResult, str) isFallible = errorResult is not None result = getRetvalDeclarationForType(returnType, descriptor) if isFallible: result = CGWrapper(result, pre="Result<", post=", Error>") args = CGList([CGGeneric(arg) for arg in argsPre], ", ") for (a, name) in arguments: # XXXjdm Perhaps we should pass all nontrivial types by borrowed pointer if a.type.isDictionary() and not type_needs_tracing(a.type): name = "&" + name args.append(CGGeneric(name)) needsCx = needCx(returnType, (a for (a, _) in arguments), True) if "cx" not in argsPre and needsCx: args.prepend(CGGeneric("cx")) # Build up our actual call self.cgRoot = CGList([], "\n") call = CGGeneric(nativeMethodName) if static: call = CGWrapper(call, pre="%s::" % MakeNativeName(descriptor.interface.identifier.name)) else: call = CGWrapper(call, pre="%s." % object) call = CGList([call, CGWrapper(args, pre="(", post=")")]) if hasCEReactions: self.cgRoot.append(CGGeneric("push_new_element_queue();\n")) self.cgRoot.append(CGList([ CGGeneric("let result: "), result, CGGeneric(" = "), call, CGGeneric(";"), ])) if hasCEReactions: self.cgRoot.append(CGGeneric("pop_current_element_queue();\n")) if isFallible: if static: glob = "global.upcast::<GlobalScope>()" else: glob = "&this.global()" self.cgRoot.append(CGGeneric( "let result = match result {\n" " Ok(result) => result,\n" " Err(e) => {\n" " throw_dom_exception(cx, %s, e);\n" " return%s;\n" " },\n" "};" % (glob, errorResult))) def define(self): return self.cgRoot.define() class CGPerSignatureCall(CGThing): """ This class handles the guts of generating code for a particular call signature. A call signature consists of four things: 1) A return type, which can be None to indicate that there is no actual return value (e.g. this is an attribute setter) or an IDLType if there's an IDL type involved (including |void|). 2) An argument list, which is allowed to be empty. 3) A name of a native method to call. 4) Whether or not this method is static. We also need to know whether this is a method or a getter/setter to do error reporting correctly. 
The idlNode parameter can be either a method or an attr. We can query |idlNode.identifier| in both cases, so we can be agnostic between the two. """ # XXXbz For now each entry in the argument list is either an # IDLArgument or a FakeArgument, but longer-term we may want to # have ways of flagging things like JSContext* or optional_argc in # there. def __init__(self, returnType, argsPre, arguments, nativeMethodName, static, descriptor, idlNode, argConversionStartsAt=0, getter=False, setter=False): CGThing.__init__(self) self.returnType = returnType self.descriptor = descriptor self.idlNode = idlNode self.extendedAttributes = descriptor.getExtendedAttributes(idlNode, getter=getter, setter=setter) self.argsPre = argsPre self.arguments = arguments self.argCount = len(arguments) cgThings = [] cgThings.extend([CGArgumentConverter(arguments[i], i, self.getArgs(), self.getArgc(), self.descriptor, invalidEnumValueFatal=not setter) for i in range(argConversionStartsAt, self.argCount)]) errorResult = None if self.isFallible(): errorResult = " false" if idlNode.isMethod() and idlNode.isMaplikeOrSetlikeOrIterableMethod(): if idlNode.maplikeOrSetlikeOrIterable.isMaplike() or \ idlNode.maplikeOrSetlikeOrIterable.isSetlike(): raise TypeError('Maplike/Setlike methods are not supported yet') else: cgThings.append(CGIterableMethodGenerator(descriptor, idlNode.maplikeOrSetlikeOrIterable, idlNode.identifier.name)) else: hasCEReactions = idlNode.getExtendedAttribute("CEReactions") cgThings.append(CGCallGenerator( errorResult, self.getArguments(), self.argsPre, returnType, self.extendedAttributes, descriptor, nativeMethodName, static, hasCEReactions=hasCEReactions)) self.cgRoot = CGList(cgThings, "\n") def getArgs(self): return "args" if self.argCount > 0 else "" def getArgc(self): return "argc" def getArguments(self): return [(a, process_arg("arg" + str(i), a)) for (i, a) in enumerate(self.arguments)] def isFallible(self): return 'infallible' not in self.extendedAttributes def wrap_return_value(self): return wrapForType('MutableHandleValue::from_raw(args.rval())') def define(self): return (self.cgRoot.define() + "\n" + self.wrap_return_value()) class CGSwitch(CGList): """ A class to generate code for a switch statement. Takes three constructor arguments: an expression, a list of cases, and an optional default. Each case is a CGCase. The default is a CGThing for the body of the default case, if any. """ def __init__(self, expression, cases, default=None): CGList.__init__(self, [CGIndenter(c) for c in cases], "\n") self.prepend(CGWrapper(CGGeneric(expression), pre="match ", post=" {")) if default is not None: self.append( CGIndenter( CGWrapper( CGIndenter(default), pre="_ => {\n", post="\n}" ) ) ) self.append(CGGeneric("}")) class CGCase(CGList): """ A class to generate code for a case statement. Takes three constructor arguments: an expression, a CGThing for the body (allowed to be None if there is no body), and an optional argument (defaulting to False) for whether to fall through. """ def __init__(self, expression, body, fallThrough=False): CGList.__init__(self, [], "\n") self.append(CGWrapper(CGGeneric(expression), post=" => {")) bodyList = CGList([body], "\n") if fallThrough: raise TypeError("fall through required but unsupported") # bodyList.append(CGGeneric('panic!("fall through unsupported"); /* Fall through */')) self.append(CGIndenter(bodyList)) self.append(CGGeneric("}")) class CGGetterCall(CGPerSignatureCall): """ A class to generate a native object getter call for a particular IDL getter. 
""" def __init__(self, argsPre, returnType, nativeMethodName, descriptor, attr): CGPerSignatureCall.__init__(self, returnType, argsPre, [], nativeMethodName, attr.isStatic(), descriptor, attr, getter=True) class FakeArgument(): """ A class that quacks like an IDLArgument. This is used to make setters look like method calls or for special operations. """ def __init__(self, type, interfaceMember, allowTreatNonObjectAsNull=False): self.type = type self.optional = False self.variadic = False self.defaultValue = None self._allowTreatNonObjectAsNull = allowTreatNonObjectAsNull self.treatNullAs = interfaceMember.treatNullAs self.enforceRange = False self.clamp = False def allowTreatNonCallableAsNull(self): return self._allowTreatNonObjectAsNull class CGSetterCall(CGPerSignatureCall): """ A class to generate a native object setter call for a particular IDL setter. """ def __init__(self, argsPre, argType, nativeMethodName, descriptor, attr): CGPerSignatureCall.__init__(self, None, argsPre, [FakeArgument(argType, attr, allowTreatNonObjectAsNull=True)], nativeMethodName, attr.isStatic(), descriptor, attr, setter=True) def wrap_return_value(self): # We have no return value return "\nreturn true;" def getArgc(self): return "1" class CGAbstractStaticBindingMethod(CGAbstractMethod): """ Common class to generate the JSNatives for all our static methods, getters and setters. This will generate the function declaration and unwrap the global object. Subclasses are expected to override the generate_code function to do the rest of the work. This function should return a CGThing which is already properly indented. """ def __init__(self, descriptor, name): args = [ Argument('*mut JSContext', 'cx'), Argument('libc::c_uint', 'argc'), Argument('*mut JSVal', 'vp'), ] CGAbstractMethod.__init__(self, descriptor, name, "bool", args, extern=True) self.exposureSet = descriptor.interface.exposureSet def definition_body(self): preamble = """\ let args = CallArgs::from_vp(vp, argc); let global = GlobalScope::from_object(args.callee()); """ if len(self.exposureSet) == 1: preamble += ("let global = DomRoot::downcast::<dom::types::%s>(global).unwrap();\n" % list(self.exposureSet)[0]) return CGList([CGGeneric(preamble), self.generate_code()]) def generate_code(self): raise NotImplementedError # Override me! class CGSpecializedMethod(CGAbstractExternMethod): """ A class for generating the C++ code for a specialized method that the JIT can call with lower overhead. """ def __init__(self, descriptor, method): self.method = method name = method.identifier.name args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_obj'), Argument('*const %s' % descriptor.concreteType, 'this'), Argument('*const JSJitMethodCallArgs', 'args')] CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args) def definition_body(self): nativeName = CGSpecializedMethod.makeNativeName(self.descriptor, self.method) return CGWrapper(CGMethodCall([], nativeName, self.method.isStatic(), self.descriptor, self.method), pre="let this = &*this;\n" "let args = &*args;\n" "let argc = args.argc_;\n") @staticmethod def makeNativeName(descriptor, method): name = method.identifier.name nativeName = descriptor.binaryNameFor(name) if nativeName == name: nativeName = descriptor.internalNameFor(name) return MakeNativeName(nativeName) class CGStaticMethod(CGAbstractStaticBindingMethod): """ A class for generating the Rust code for an IDL static method. 
""" def __init__(self, descriptor, method): self.method = method name = method.identifier.name CGAbstractStaticBindingMethod.__init__(self, descriptor, name) def generate_code(self): nativeName = CGSpecializedMethod.makeNativeName(self.descriptor, self.method) setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n") call = CGMethodCall(["&global"], nativeName, True, self.descriptor, self.method) return CGList([setupArgs, call]) class CGSpecializedGetter(CGAbstractExternMethod): """ A class for generating the code for a specialized attribute getter that the JIT can call with lower overhead. """ def __init__(self, descriptor, attr): self.attr = attr name = 'get_' + descriptor.internalNameFor(attr.identifier.name) args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_obj'), Argument('*const %s' % descriptor.concreteType, 'this'), Argument('JSJitGetterCallArgs', 'args')] CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args) def definition_body(self): nativeName = CGSpecializedGetter.makeNativeName(self.descriptor, self.attr) return CGWrapper(CGGetterCall([], self.attr.type, nativeName, self.descriptor, self.attr), pre="let this = &*this;\n") @staticmethod def makeNativeName(descriptor, attr): name = attr.identifier.name nativeName = descriptor.binaryNameFor(name) if nativeName == name: nativeName = descriptor.internalNameFor(name) nativeName = MakeNativeName(nativeName) infallible = ('infallible' in descriptor.getExtendedAttributes(attr, getter=True)) if attr.type.nullable() or not infallible: return "Get" + nativeName return nativeName class CGStaticGetter(CGAbstractStaticBindingMethod): """ A class for generating the C++ code for an IDL static attribute getter. """ def __init__(self, descriptor, attr): self.attr = attr name = 'get_' + attr.identifier.name CGAbstractStaticBindingMethod.__init__(self, descriptor, name) def generate_code(self): nativeName = CGSpecializedGetter.makeNativeName(self.descriptor, self.attr) setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n") call = CGGetterCall(["&global"], self.attr.type, nativeName, self.descriptor, self.attr) return CGList([setupArgs, call]) class CGSpecializedSetter(CGAbstractExternMethod): """ A class for generating the code for a specialized attribute setter that the JIT can call with lower overhead. """ def __init__(self, descriptor, attr): self.attr = attr name = 'set_' + descriptor.internalNameFor(attr.identifier.name) args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'obj'), Argument('*const %s' % descriptor.concreteType, 'this'), Argument('JSJitSetterCallArgs', 'args')] CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args) def definition_body(self): nativeName = CGSpecializedSetter.makeNativeName(self.descriptor, self.attr) return CGWrapper(CGSetterCall([], self.attr.type, nativeName, self.descriptor, self.attr), pre="let this = &*this;\n") @staticmethod def makeNativeName(descriptor, attr): name = attr.identifier.name nativeName = descriptor.binaryNameFor(name) if nativeName == name: nativeName = descriptor.internalNameFor(name) return "Set" + MakeNativeName(nativeName) class CGStaticSetter(CGAbstractStaticBindingMethod): """ A class for generating the C++ code for an IDL static attribute setter. 
""" def __init__(self, descriptor, attr): self.attr = attr name = 'set_' + attr.identifier.name CGAbstractStaticBindingMethod.__init__(self, descriptor, name) def generate_code(self): nativeName = CGSpecializedSetter.makeNativeName(self.descriptor, self.attr) checkForArg = CGGeneric( "let args = CallArgs::from_vp(vp, argc);\n" "if argc == 0 {\n" " throw_type_error(cx, \"Not enough arguments to %s setter.\");\n" " return false;\n" "}" % self.attr.identifier.name) call = CGSetterCall(["&global"], self.attr.type, nativeName, self.descriptor, self.attr) return CGList([checkForArg, call]) class CGSpecializedForwardingSetter(CGSpecializedSetter): """ A class for generating the code for an IDL attribute forwarding setter. """ def __init__(self, descriptor, attr): CGSpecializedSetter.__init__(self, descriptor, attr) def definition_body(self): attrName = self.attr.identifier.name forwardToAttrName = self.attr.getExtendedAttribute("PutForwards")[0] # JS_GetProperty and JS_SetProperty can only deal with ASCII assert all(ord(c) < 128 for c in attrName) assert all(ord(c) < 128 for c in forwardToAttrName) return CGGeneric("""\ rooted!(in(cx) let mut v = UndefinedValue()); if !JS_GetProperty(cx, obj, %s as *const u8 as *const libc::c_char, v.handle_mut()) { return false; } if !v.is_object() { throw_type_error(cx, "Value.%s is not an object."); return false; } rooted!(in(cx) let target_obj = v.to_object()); JS_SetProperty(cx, target_obj.handle(), %s as *const u8 as *const libc::c_char, HandleValue::from_raw(args.get(0))) """ % (str_to_const_array(attrName), attrName, str_to_const_array(forwardToAttrName))) class CGSpecializedReplaceableSetter(CGSpecializedSetter): """ A class for generating the code for an IDL replaceable attribute setter. """ def __init__(self, descriptor, attr): CGSpecializedSetter.__init__(self, descriptor, attr) def definition_body(self): assert self.attr.readonly name = str_to_const_array(self.attr.identifier.name) # JS_DefineProperty can only deal with ASCII. assert all(ord(c) < 128 for c in name) return CGGeneric("""\ JS_DefineProperty(cx, obj, %s as *const u8 as *const libc::c_char, HandleValue::from_raw(args.get(0)), JSPROP_ENUMERATE as u32)""" % name) class CGMemberJITInfo(CGThing): """ A class for generating the JITInfo for a property that points to our specialized getter and setter. """ def __init__(self, descriptor, member): self.member = member self.descriptor = descriptor def defineJitInfo(self, infoName, opName, opType, infallible, movable, aliasSet, alwaysInSlot, lazilyInSlot, slotIndex, returnTypes, args): """ aliasSet is a JSJitInfo_AliasSet value, without the "JSJitInfo_AliasSet::" bit. args is None if we don't want to output argTypes for some reason (e.g. we have overloads or we're not a method) and otherwise an iterable of the arguments for this method. 
""" assert not movable or aliasSet != "AliasEverything" # Can't move write-aliasing things assert not alwaysInSlot or movable # Things always in slots had better be movable def jitInfoInitializer(isTypedMethod): initializer = fill( """ JSJitInfo { call: ${opName} as *const os::raw::c_void, protoID: PrototypeList::ID::${name} as u16, depth: ${depth}, _bitfield_1: new_jsjitinfo_bitfield_1!( JSJitInfo_OpType::${opType} as u8, JSJitInfo_AliasSet::${aliasSet} as u8, JSValueType::${returnType} as u8, ${isInfallible}, ${isMovable}, ${isEliminatable}, ${isAlwaysInSlot}, ${isLazilyCachedInSlot}, ${isTypedMethod}, ${slotIndex}, ), } """, opName=opName, name=self.descriptor.name, depth=self.descriptor.interface.inheritanceDepth(), opType=opType, aliasSet=aliasSet, returnType=reduce(CGMemberJITInfo.getSingleReturnType, returnTypes, ""), isInfallible=toStringBool(infallible), isMovable=toStringBool(movable), # FIXME(nox): https://github.com/servo/servo/issues/10991 isEliminatable=toStringBool(False), isAlwaysInSlot=toStringBool(alwaysInSlot), isLazilyCachedInSlot=toStringBool(lazilyInSlot), isTypedMethod=toStringBool(isTypedMethod), slotIndex=slotIndex) return initializer.rstrip() if args is not None: argTypes = "%s_argTypes" % infoName args = [CGMemberJITInfo.getJSArgType(arg.type) for arg in args] args.append("JSJitInfo_ArgType::ArgTypeListEnd as i32") argTypesDecl = ( "const %s: [i32; %d] = [ %s ];\n" % (argTypes, len(args), ", ".join(args))) return fill( """ $*{argTypesDecl} const ${infoName}: JSTypedMethodJitInfo = JSTypedMethodJitInfo { base: ${jitInfo}, argTypes: &${argTypes} as *const _ as *const JSJitInfo_ArgType, }; """, argTypesDecl=argTypesDecl, infoName=infoName, jitInfo=indent(jitInfoInitializer(True)), argTypes=argTypes) return ("\n" "const %s: JSJitInfo = %s;\n" % (infoName, jitInfoInitializer(False))) def define(self): if self.member.isAttr(): internalMemberName = self.descriptor.internalNameFor(self.member.identifier.name) getterinfo = ("%s_getterinfo" % internalMemberName) getter = ("get_%s" % internalMemberName) getterinfal = "infallible" in self.descriptor.getExtendedAttributes(self.member, getter=True) movable = self.mayBeMovable() and getterinfal aliasSet = self.aliasSet() isAlwaysInSlot = self.member.getExtendedAttribute("StoreInSlot") if self.member.slotIndices is not None: assert isAlwaysInSlot or self.member.getExtendedAttribute("Cached") isLazilyCachedInSlot = not isAlwaysInSlot slotIndex = memberReservedSlot(self.member) # noqa:FIXME: memberReservedSlot is not defined # We'll statically assert that this is not too big in # CGUpdateMemberSlotsMethod, in the case when # isAlwaysInSlot is true. else: isLazilyCachedInSlot = False slotIndex = "0" result = self.defineJitInfo(getterinfo, getter, "Getter", getterinfal, movable, aliasSet, isAlwaysInSlot, isLazilyCachedInSlot, slotIndex, [self.member.type], None) if (not self.member.readonly or self.member.getExtendedAttribute("PutForwards") or self.member.getExtendedAttribute("Replaceable")): setterinfo = ("%s_setterinfo" % internalMemberName) setter = ("set_%s" % internalMemberName) # Setters are always fallible, since they have to do a typed unwrap. 
                result += self.defineJitInfo(setterinfo, setter, "Setter",
                                             False, False, "AliasEverything",
                                             False, False, "0",
                                             [BuiltinTypes[IDLBuiltinType.Types.void]],
                                             None)
            return result
        if self.member.isMethod():
            methodinfo = ("%s_methodinfo" % self.member.identifier.name)
            method = ("%s" % self.member.identifier.name)

            # Methods are treated as infallible only when the native
            # implementation is infallible, there are no arguments to unwrap,
            # and the return type is infallible to wrap up for return.
            sigs = self.member.signatures()
            if len(sigs) != 1:
                # Don't handle overloading. If there's more than one signature,
                # one of them must take arguments.
                methodInfal = False
                args = None
                movable = False
            else:
                sig = sigs[0]
                # For methods that affect nothing, it's OK to set movable to our
                # notion of infallible on the native side, without considering
                # argument conversions, since argument conversions that can
                # reliably throw would be effectful anyway and the jit doesn't
                # move effectful things.
                hasInfallibleImpl = "infallible" in self.descriptor.getExtendedAttributes(self.member)
                movable = self.mayBeMovable() and hasInfallibleImpl
                # XXXbz can we move the smarts about fallibility due to arg
                # conversions into the JIT, using our new args stuff?
                if (len(sig[1]) != 0):
                    # We have arguments or our return-value boxing can fail
                    methodInfal = False
                else:
                    methodInfal = hasInfallibleImpl
                # For now, only bother to output args if we're side-effect-free.
                if self.member.affects == "Nothing":
                    args = sig[1]
                else:
                    args = None

            aliasSet = self.aliasSet()
            result = self.defineJitInfo(methodinfo, method, "Method",
                                        methodInfal, movable, aliasSet,
                                        False, False, "0",
                                        [s[0] for s in sigs], args)
            return result
        raise TypeError("Illegal member type to CGMemberJITInfo")

    def mayBeMovable(self):
        """
        Returns whether this attribute or method may be movable, just
        based on Affects/DependsOn annotations.
        """
        affects = self.member.affects
        dependsOn = self.member.dependsOn
        assert affects in IDLInterfaceMember.AffectsValues
        assert dependsOn in IDLInterfaceMember.DependsOnValues
        # Things that are DependsOn=DeviceState are not movable, because we
        # don't want them coalesced with each other or loop-hoisted, since
        # their return value can change even if nothing is going on from our
        # point of view.
        return (affects == "Nothing" and
                (dependsOn != "Everything" and dependsOn != "DeviceState"))

    def aliasSet(self):
        """Returns the alias set to store in the jitinfo. This may not be the
        effective alias set the JIT uses, depending on whether we have enough
        information about our args to allow the JIT to prove that effectful
        argument conversions won't happen.
""" dependsOn = self.member.dependsOn assert dependsOn in IDLInterfaceMember.DependsOnValues if dependsOn == "Nothing" or dependsOn == "DeviceState": assert self.member.affects == "Nothing" return "AliasNone" if dependsOn == "DOMState": assert self.member.affects == "Nothing" return "AliasDOMSets" return "AliasEverything" @staticmethod def getJSReturnTypeTag(t): if t.nullable(): # Sometimes it might return null, sometimes not return "JSVAL_TYPE_UNKNOWN" if t.isVoid(): # No return, every time return "JSVAL_TYPE_UNDEFINED" if t.isSequence(): return "JSVAL_TYPE_OBJECT" if t.isRecord(): return "JSVAL_TYPE_OBJECT" if t.isPromise(): return "JSVAL_TYPE_OBJECT" if t.isGeckoInterface(): return "JSVAL_TYPE_OBJECT" if t.isString(): return "JSVAL_TYPE_STRING" if t.isEnum(): return "JSVAL_TYPE_STRING" if t.isCallback(): return "JSVAL_TYPE_OBJECT" if t.isAny(): # The whole point is to return various stuff return "JSVAL_TYPE_UNKNOWN" if t.isObject(): return "JSVAL_TYPE_OBJECT" if t.isSpiderMonkeyInterface(): return "JSVAL_TYPE_OBJECT" if t.isUnion(): u = t.unroll() if u.hasNullableType: # Might be null or not return "JSVAL_TYPE_UNKNOWN" return reduce(CGMemberJITInfo.getSingleReturnType, u.flatMemberTypes, "") if t.isDictionary(): return "JSVAL_TYPE_OBJECT" if t.isDate(): return "JSVAL_TYPE_OBJECT" if not t.isPrimitive(): raise TypeError("No idea what type " + str(t) + " is.") tag = t.tag() if tag == IDLType.Tags.bool: return "JSVAL_TYPE_BOOLEAN" if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16, IDLType.Tags.uint16, IDLType.Tags.int32]: return "JSVAL_TYPE_INT32" if tag in [IDLType.Tags.int64, IDLType.Tags.uint64, IDLType.Tags.unrestricted_float, IDLType.Tags.float, IDLType.Tags.unrestricted_double, IDLType.Tags.double]: # These all use JS_NumberValue, which can return int or double. # But TI treats "double" as meaning "int or double", so we're # good to return JSVAL_TYPE_DOUBLE here. return "JSVAL_TYPE_DOUBLE" if tag != IDLType.Tags.uint32: raise TypeError("No idea what type " + str(t) + " is.") # uint32 is sometimes int and sometimes double. 
return "JSVAL_TYPE_DOUBLE" @staticmethod def getSingleReturnType(existingType, t): type = CGMemberJITInfo.getJSReturnTypeTag(t) if existingType == "": # First element of the list; just return its type return type if type == existingType: return existingType if ((type == "JSVAL_TYPE_DOUBLE" and existingType == "JSVAL_TYPE_INT32") or (existingType == "JSVAL_TYPE_DOUBLE" and type == "JSVAL_TYPE_INT32")): # Promote INT32 to DOUBLE as needed return "JSVAL_TYPE_DOUBLE" # Different types return "JSVAL_TYPE_UNKNOWN" @staticmethod def getJSArgType(t): assert not t.isVoid() if t.nullable(): # Sometimes it might return null, sometimes not return "JSJitInfo_ArgType::Null as i32 | %s" % CGMemberJITInfo.getJSArgType(t.inner) if t.isSequence(): return "JSJitInfo_ArgType::Object as i32" if t.isGeckoInterface(): return "JSJitInfo_ArgType::Object as i32" if t.isString(): return "JSJitInfo_ArgType::String as i32" if t.isEnum(): return "JSJitInfo_ArgType::String as i32" if t.isCallback(): return "JSJitInfo_ArgType::Object as i32" if t.isAny(): # The whole point is to return various stuff return "JSJitInfo_ArgType::Any as i32" if t.isObject(): return "JSJitInfo_ArgType::Object as i32" if t.isSpiderMonkeyInterface(): return "JSJitInfo_ArgType::Object as i32" if t.isUnion(): u = t.unroll() type = "JSJitInfo::Null as i32" if u.hasNullableType else "" return reduce(CGMemberJITInfo.getSingleArgType, u.flatMemberTypes, type) if t.isDictionary(): return "JSJitInfo_ArgType::Object as i32" if t.isDate(): return "JSJitInfo_ArgType::Object as i32" if not t.isPrimitive(): raise TypeError("No idea what type " + str(t) + " is.") tag = t.tag() if tag == IDLType.Tags.bool: return "JSJitInfo_ArgType::Boolean as i32" if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16, IDLType.Tags.uint16, IDLType.Tags.int32]: return "JSJitInfo_ArgType::Integer as i32" if tag in [IDLType.Tags.int64, IDLType.Tags.uint64, IDLType.Tags.unrestricted_float, IDLType.Tags.float, IDLType.Tags.unrestricted_double, IDLType.Tags.double]: # These all use JS_NumberValue, which can return int or double. # But TI treats "double" as meaning "int or double", so we're # good to return JSVAL_TYPE_DOUBLE here. return "JSJitInfo_ArgType::Double as i32" if tag != IDLType.Tags.uint32: raise TypeError("No idea what type " + str(t) + " is.") # uint32 is sometimes int and sometimes double. return "JSJitInfo_ArgType::Double as i32" @staticmethod def getSingleArgType(existingType, t): type = CGMemberJITInfo.getJSArgType(t) if existingType == "": # First element of the list; just return its type return type if type == existingType: return existingType return "%s | %s" % (existingType, type) def getEnumValueName(value): # Some enum values can be empty strings. Others might have weird # characters in them. Deal with the former by returning "_empty", # deal with possible name collisions from that by throwing if the # enum value is actually "_empty", and throw on any value # containing non-ASCII chars for now. Replace all chars other than # [0-9A-Za-z_] with '_'. 
    if re.match("[^\x20-\x7E]", value):
        raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters')
    if re.match("^[0-9]", value):
        raise SyntaxError('Enum value "' + value + '" starts with a digit')
    value = re.sub(r'[^0-9A-Za-z_]', '_', value)
    if re.match("^_[A-Z]|__", value):
        raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec')
    if value == "_empty":
        raise SyntaxError('"_empty" is not an IDL enum value we support yet')
    if value == "":
        return "_empty"
    return MakeNativeName(value)


class CGEnum(CGThing):
    def __init__(self, enum):
        CGThing.__init__(self)

        ident = enum.identifier.name
        decl = """\
#[repr(usize)]
#[derive(Copy, Clone, Debug, JSTraceable, MallocSizeOf, PartialEq)]
pub enum %s {
    %s
}
""" % (ident, ",\n    ".join(map(getEnumValueName, enum.values())))

        pairs = ",\n    ".join(['("%s", super::%s::%s)'
                                % (val, ident, getEnumValueName(val)) for val in enum.values()])

        inner = string.Template("""\
use crate::dom::bindings::conversions::ToJSValConvertible;
use js::jsapi::JSContext;
use js::rust::MutableHandleValue;
use js::jsval::JSVal;

pub const pairs: &'static [(&'static str, super::${ident})] = &[
    ${pairs},
];

impl super::${ident} {
    pub fn as_str(&self) -> &'static str {
        pairs[*self as usize].0
    }
}

impl Default for super::${ident} {
    fn default() -> super::${ident} {
        pairs[0].1
    }
}

impl ToJSValConvertible for super::${ident} {
    unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
        pairs[*self as usize].0.to_jsval(cx, rval);
    }
}
""").substitute({
            'ident': ident,
            'pairs': pairs
        })
        self.cgRoot = CGList([
            CGGeneric(decl),
            CGNamespace.build([ident + "Values"],
                              CGIndenter(CGGeneric(inner)), public=True),
        ])

    def define(self):
        return self.cgRoot.define()


def convertConstIDLValueToRust(value):
    tag = value.type.tag()
    if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
               IDLType.Tags.int16, IDLType.Tags.uint16,
               IDLType.Tags.int32, IDLType.Tags.uint32,
               IDLType.Tags.int64, IDLType.Tags.uint64,
               IDLType.Tags.unrestricted_float, IDLType.Tags.float,
               IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
        return str(value.value)

    if tag == IDLType.Tags.bool:
        return toStringBool(value.value)

    raise TypeError("Const value of unhandled type: " + value.type)


class CGConstant(CGThing):
    def __init__(self, constant):
        CGThing.__init__(self)
        self.constant = constant

    def define(self):
        name = self.constant.identifier.name
        value = convertConstIDLValueToRust(self.constant.value)

        tag = self.constant.value.type.tag()
        const_type = builtinNames[self.constant.value.type.tag()]
        # Finite<f32> or Finite<f64> cannot be used in a constant declaration.
        # Remove the Finite type from restricted float and double tag declarations.
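        # For example (hypothetical IDL), `const float GAMMA = 2.2;` comes out
        # roughly as: pub const GAMMA: f32 = 2.2;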
if tag == IDLType.Tags.float: const_type = "f32" elif tag == IDLType.Tags.double: const_type = "f64" return "pub const %s: %s = %s;\n" % (name, const_type, value) def getUnionTypeTemplateVars(type, descriptorProvider): if type.isGeckoInterface(): name = type.inner.identifier.name typeName = descriptorProvider.getDescriptor(name).returnType elif type.isEnum(): name = type.inner.identifier.name typeName = name elif type.isDictionary(): name = type.name typeName = name elif type.isSequence() or type.isRecord(): name = type.name inner = getUnionTypeTemplateVars(innerContainerType(type), descriptorProvider) typeName = wrapInNativeContainerType(type, CGGeneric(inner["typeName"])).define() elif type.isByteString(): name = type.name typeName = "ByteString" elif type.isDOMString(): name = type.name typeName = "DOMString" elif type.isUSVString(): name = type.name typeName = "USVString" elif type.isPrimitive(): name = type.name typeName = builtinNames[type.tag()] elif type.isObject(): name = type.name typeName = "Heap<*mut JSObject>" elif is_typed_array(type): name = type.name typeName = "typedarray::Heap" + name else: raise TypeError("Can't handle %s in unions yet" % type) info = getJSToNativeConversionInfo( type, descriptorProvider, failureCode="return Ok(None);", exceptionCode='return Err(());', isDefinitelyObject=True, isMember="Union") template = info.template jsConversion = string.Template(template).substitute({ "val": "value", }) jsConversion = CGWrapper(CGGeneric(jsConversion), pre="Ok(Some(", post="))") return { "name": name, "typeName": typeName, "jsConversion": jsConversion, } class CGUnionStruct(CGThing): def __init__(self, type, descriptorProvider): assert not type.nullable() assert not type.hasNullableType CGThing.__init__(self) self.type = type self.descriptorProvider = descriptorProvider def membersNeedTracing(self): for t in self.type.flatMemberTypes: if type_needs_tracing(t): return True return False def define(self): templateVars = map(lambda t: (getUnionTypeTemplateVars(t, self.descriptorProvider), type_needs_tracing(t)), self.type.flatMemberTypes) enumValues = [ " %s(%s)," % (v["name"], "RootedTraceableBox<%s>" % v["typeName"] if trace else v["typeName"]) for (v, trace) in templateVars ] enumConversions = [ " %s::%s(ref inner) => inner.to_jsval(cx, rval)," % (self.type, v["name"]) for (v, _) in templateVars ] return ("""\ #[derive(JSTraceable)] pub enum %s { %s } impl ToJSValConvertible for %s { unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) { match *self { %s } } } """) % (self.type, "\n".join(enumValues), self.type, "\n".join(enumConversions)) class CGUnionConversionStruct(CGThing): def __init__(self, type, descriptorProvider): assert not type.nullable() assert not type.hasNullableType CGThing.__init__(self) self.type = type self.descriptorProvider = descriptorProvider def membersNeedTracing(self): for t in self.type.flatMemberTypes: if type_needs_tracing(t): return True return False def from_jsval(self): memberTypes = self.type.flatMemberTypes names = [] conversions = [] def get_name(memberType): if self.type.isGeckoInterface(): return memberType.inner.identifier.name return memberType.name def get_match(name): return ( "match %s::TryConvertTo%s(cx, value) {\n" " Err(_) => return Err(()),\n" " Ok(Some(value)) => return Ok(ConversionResult::Success(%s::%s(value))),\n" " Ok(None) => (),\n" "}\n") % (self.type, name, self.type, name) interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes) if len(interfaceMemberTypes) > 0: 
typeNames = [get_name(memberType) for memberType in interfaceMemberTypes] interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames) names.extend(typeNames) else: interfaceObject = None arrayObjectMemberTypes = filter(lambda t: t.isSequence(), memberTypes) if len(arrayObjectMemberTypes) > 0: assert len(arrayObjectMemberTypes) == 1 typeName = arrayObjectMemberTypes[0].name arrayObject = CGGeneric(get_match(typeName)) names.append(typeName) else: arrayObject = None dateObjectMemberTypes = filter(lambda t: t.isDate(), memberTypes) if len(dateObjectMemberTypes) > 0: assert len(dateObjectMemberTypes) == 1 raise TypeError("Can't handle dates in unions.") else: dateObject = None callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes) if len(callbackMemberTypes) > 0: assert len(callbackMemberTypes) == 1 raise TypeError("Can't handle callbacks in unions.") else: callbackObject = None dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes) if len(dictionaryMemberTypes) > 0: assert len(dictionaryMemberTypes) == 1 typeName = dictionaryMemberTypes[0].name dictionaryObject = CGGeneric(get_match(typeName)) names.append(typeName) else: dictionaryObject = None objectMemberTypes = filter(lambda t: t.isObject(), memberTypes) if len(objectMemberTypes) > 0: assert len(objectMemberTypes) == 1 typeName = objectMemberTypes[0].name object = CGGeneric(get_match(typeName)) names.append(typeName) else: object = None mozMapMemberTypes = filter(lambda t: t.isRecord(), memberTypes) if len(mozMapMemberTypes) > 0: assert len(mozMapMemberTypes) == 1 typeName = mozMapMemberTypes[0].name mozMapObject = CGGeneric(get_match(typeName)) names.append(typeName) else: mozMapObject = None hasObjectTypes = object or interfaceObject or arrayObject or dateObject or mozMapObject if hasObjectTypes: # "object" is not distinguishable from other types assert not object or not (interfaceObject or arrayObject or dateObject or callbackObject or mozMapObject) templateBody = CGList([], "\n") if object: templateBody.append(object) if interfaceObject: templateBody.append(interfaceObject) if arrayObject: templateBody.append(arrayObject) if mozMapObject: templateBody.append(mozMapObject) conversions.append(CGIfWrapper("value.get().is_object()", templateBody)) if dictionaryObject: assert not hasObjectTypes conversions.append(dictionaryObject) stringTypes = [t for t in memberTypes if t.isString() or t.isEnum()] numericTypes = [t for t in memberTypes if t.isNumeric()] booleanTypes = [t for t in memberTypes if t.isBoolean()] if stringTypes or numericTypes or booleanTypes: assert len(stringTypes) <= 1 assert len(numericTypes) <= 1 assert len(booleanTypes) <= 1 def getStringOrPrimitiveConversion(memberType): typename = get_name(memberType) return CGGeneric(get_match(typename)) other = [] stringConversion = map(getStringOrPrimitiveConversion, stringTypes) numericConversion = map(getStringOrPrimitiveConversion, numericTypes) booleanConversion = map(getStringOrPrimitiveConversion, booleanTypes) if stringConversion: if booleanConversion: other.append(CGIfWrapper("value.get().is_boolean()", booleanConversion[0])) if numericConversion: other.append(CGIfWrapper("value.get().is_number()", numericConversion[0])) other.append(stringConversion[0]) elif numericConversion: if booleanConversion: other.append(CGIfWrapper("value.get().is_boolean()", booleanConversion[0])) other.append(numericConversion[0]) else: assert booleanConversion other.append(booleanConversion[0]) 
conversions.append(CGList(other, "\n\n")) conversions.append(CGGeneric( "Ok(ConversionResult::Failure(\"argument could not be converted to any of: %s\".into()))" % ", ".join(names) )) method = CGWrapper( CGIndenter(CGList(conversions, "\n\n")), pre="unsafe fn from_jsval(cx: *mut JSContext,\n" " value: HandleValue,\n" " _option: ())\n" " -> Result<ConversionResult<%s>, ()> {\n" % self.type, post="\n}") return CGWrapper( CGIndenter(CGList([ CGGeneric("type Config = ();"), method, ], "\n")), pre="impl FromJSValConvertible for %s {\n" % self.type, post="\n}") def try_method(self, t): templateVars = getUnionTypeTemplateVars(t, self.descriptorProvider) actualType = templateVars["typeName"] if type_needs_tracing(t): actualType = "RootedTraceableBox<%s>" % actualType returnType = "Result<Option<%s>, ()>" % actualType jsConversion = templateVars["jsConversion"] return CGWrapper( CGIndenter(jsConversion, 4), pre="unsafe fn TryConvertTo%s(cx: *mut JSContext, value: HandleValue) -> %s {\n" % (t.name, returnType), post="\n}") def define(self): from_jsval = self.from_jsval() methods = CGIndenter(CGList([ self.try_method(t) for t in self.type.flatMemberTypes ], "\n\n")) return """ %s impl %s { %s } """ % (from_jsval.define(), self.type, methods.define()) class ClassItem: """ Use with CGClass """ def __init__(self, name, visibility): self.name = name self.visibility = visibility def declare(self, cgClass): assert False def define(self, cgClass): assert False class ClassBase(ClassItem): def __init__(self, name, visibility='pub'): ClassItem.__init__(self, name, visibility) def declare(self, cgClass): return '%s %s' % (self.visibility, self.name) def define(self, cgClass): # Only in the header return '' class ClassMethod(ClassItem): def __init__(self, name, returnType, args, inline=False, static=False, virtual=False, const=False, bodyInHeader=False, templateArgs=None, visibility='public', body=None, breakAfterReturnDecl="\n", unsafe=False, breakAfterSelf="\n", override=False): """ override indicates whether to flag the method as MOZ_OVERRIDE """ assert not override or virtual assert not (override and static) self.returnType = returnType self.args = args self.inline = False self.static = static self.virtual = virtual self.const = const self.bodyInHeader = True self.templateArgs = templateArgs self.body = body self.breakAfterReturnDecl = breakAfterReturnDecl self.breakAfterSelf = breakAfterSelf self.override = override self.unsafe = unsafe ClassItem.__init__(self, name, visibility) def getDecorators(self, declaring): decorators = [] if self.inline: decorators.append('inline') if declaring: if self.static: decorators.append('static') if self.virtual: decorators.append('virtual') if decorators: return ' '.join(decorators) + ' ' return '' def getBody(self): # Override me or pass a string to constructor assert self.body is not None return self.body def declare(self, cgClass): templateClause = '<%s>' % ', '.join(self.templateArgs) \ if self.bodyInHeader and self.templateArgs else '' args = ', '.join([a.declare() for a in self.args]) if self.bodyInHeader: body = CGIndenter(CGGeneric(self.getBody())).define() body = ' {\n' + body + '\n}' else: body = ';' return string.Template( "${decorators}%s" "${visibility}${unsafe}fn ${name}${templateClause}(${args})${returnType}${const}${override}${body}%s" % (self.breakAfterReturnDecl, self.breakAfterSelf) ).substitute({ 'templateClause': templateClause, 'decorators': self.getDecorators(True), 'returnType': (" -> %s" % self.returnType) if self.returnType else "", 'name': 
self.name,
            'const': ' const' if self.const else '',
            'override': ' MOZ_OVERRIDE' if self.override else '',
            'args': args,
            'body': body,
            'visibility': self.visibility + ' ' if self.visibility != 'priv' else '',
            'unsafe': "unsafe " if self.unsafe else "",
        })

    def define(self, cgClass):
        pass


class ClassConstructor(ClassItem):
    """
    Used for adding a constructor to a CGClass.

    args is a list of Argument objects that are the arguments taken by the
    constructor.

    inline should be True if the constructor should be marked inline.

    bodyInHeader should be True if the body should be placed in the class
    declaration in the header.

    visibility determines the visibility of the constructor (public,
    protected, private), defaults to private.

    explicit should be True if the constructor should be marked explicit.

    baseConstructors is a list of strings containing calls to base
    constructors, defaults to None.

    body contains a string with the code for the constructor, defaults to
    empty.
    """
    def __init__(self, args, inline=False, bodyInHeader=False,
                 visibility="priv", explicit=False, baseConstructors=None,
                 body=""):
        self.args = args
        self.inline = False  # note: the inline argument is currently ignored
        self.bodyInHeader = bodyInHeader
        self.explicit = explicit
        self.baseConstructors = baseConstructors or []
        self.body = body
        ClassItem.__init__(self, None, visibility)

    def getDecorators(self, declaring):
        decorators = []
        if self.explicit:
            decorators.append('explicit')
        if self.inline and declaring:
            decorators.append('inline')
        if decorators:
            return ' '.join(decorators) + ' '
        return ''

    def getInitializationList(self, cgClass):
        items = [str(c) for c in self.baseConstructors]
        for m in cgClass.members:
            if not m.static:
                initialize = m.body
                if initialize:
                    items.append(m.name + "(" + initialize + ")")

        if len(items) > 0:
            return '\n  : ' + ',\n    '.join(items)
        return ''

    def getBody(self, cgClass):
        initializers = ["    parent: %s" % str(self.baseConstructors[0])]
        return (self.body + (
                "let mut ret = Rc::new(%s {\n"
                "%s\n"
                "});\n"
                "// Note: callback cannot be moved after calling init.\n"
                "match Rc::get_mut(&mut ret) {\n"
                "    Some(ref mut callback) => callback.parent.init(%s, %s),\n"
                "    None => unreachable!(),\n"
                "};\n"
                "ret") % (cgClass.name, '\n'.join(initializers),
                          self.args[0].name, self.args[1].name))

    def declare(self, cgClass):
        args = ', '.join([a.declare() for a in self.args])
        body = '    ' + self.getBody(cgClass)
        body = stripTrailingWhitespace(body.replace('\n', '\n    '))
        if len(body) > 0:
            body += '\n'
        body = ' {\n' + body + '}'

        return string.Template("""\
pub unsafe fn ${decorators}new(${args}) -> Rc<${className}>${body}
""").substitute({'decorators': self.getDecorators(True),
                 'className': cgClass.getNameString(),
                 'args': args,
                 'body': body})

    def define(self, cgClass):
        if self.bodyInHeader:
            return ''

        args = ', '.join([a.define() for a in self.args])

        # getBody takes the class, so pass it along.
        body = '    ' + self.getBody(cgClass)
        body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n    '))
        if len(body) > 0:
            body += '\n'

        return string.Template("""\
${decorators}
${className}::${className}(${args})${initializationList}
{${body}}
""").substitute({'decorators': self.getDecorators(False),
                 'className': cgClass.getNameString(),
                 'args': args,
                 'initializationList': self.getInitializationList(cgClass),
                 'body': body})


class ClassMember(ClassItem):
    def __init__(self, name, type, visibility="priv", static=False,
                 body=None):
        self.type = type
        self.static = static
        self.body = body
        ClassItem.__init__(self, name, visibility)

    def declare(self, cgClass):
        return '%s %s: %s,\n' % (self.visibility, self.name, self.type)

    def define(self, cgClass):
        if not self.static:
            return ''
        if self.body:
            body = " = " + self.body
        else:
            body = ""
        return '%s %s::%s%s;\n' % (self.type, cgClass.getNameString(),
                                   self.name, body)


class CGClass(CGThing):
    def __init__(self, name, bases=[], members=[], constructors=[],
                 destructor=None, methods=[], typedefs=[], enums=[],
                 unions=[], templateArgs=[], templateSpecialization=[],
                 disallowCopyConstruction=False, indent='', decorators='',
                 extradeclarations=''):
        CGThing.__init__(self)
        self.name = name
        self.bases = bases
        self.members = members
        self.constructors = constructors
        # We store our single destructor in a list, since all of our
        # code wants lists of members.
        self.destructors = [destructor] if destructor else []
        self.methods = methods
        self.typedefs = typedefs
        self.enums = enums
        self.unions = unions
        self.templateArgs = templateArgs
        self.templateSpecialization = templateSpecialization
        self.disallowCopyConstruction = disallowCopyConstruction
        self.indent = indent
        self.decorators = decorators
        self.extradeclarations = extradeclarations

    def getNameString(self):
        className = self.name
        if self.templateSpecialization:
            className = className + \
                '<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
        return className

    def define(self):
        result = ''
        if self.templateArgs:
            templateArgs = [a.declare() for a in self.templateArgs]
            templateArgs = templateArgs[len(self.templateSpecialization):]
            result = result + self.indent + 'template <%s>\n' \
                % ','.join([str(a) for a in templateArgs])

        if self.templateSpecialization:
            specialization = \
                '<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
        else:
            specialization = ''

        myself = ''
        if self.decorators != '':
            myself += self.decorators + '\n'
        myself += '%spub struct %s%s' % (self.indent, self.name, specialization)
        result += myself

        assert len(self.bases) == 1  # XXjdm Can we support multiple inheritance?

        result += ' {\n'

        if self.bases:
            self.members = [ClassMember("parent", self.bases[0].name, "pub")] + self.members

        result += CGIndenter(CGGeneric(self.extradeclarations),
                             len(self.indent)).define()

        def declareMembers(cgClass, memberList):
            result = ''

            for member in memberList:
                declaration = member.declare(cgClass)
                declaration = CGIndenter(CGGeneric(declaration)).define()
                result = result + declaration

            return result

        if self.disallowCopyConstruction:
            class DisallowedCopyConstructor(object):
                def __init__(self):
                    self.visibility = "private"

                def declare(self, cgClass):
                    name = cgClass.getNameString()
                    return ("%s(const %s&) MOZ_DELETE;\n"
                            "void operator=(const %s) MOZ_DELETE;\n" % (name, name, name))

            disallowedCopyConstructors = [DisallowedCopyConstructor()]
        else:
            disallowedCopyConstructors = []

        # The separator values in these orderings are currently unused by
        # declareMembers.
        order = [(self.enums, ''), (self.unions, ''),
                 (self.typedefs, ''), (self.members, '')]

        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString

        result += self.indent + '}\n\n'
        result += 'impl %s {\n' % self.name

        order = [(self.constructors + disallowedCopyConstructors, '\n'),
                 (self.destructors, '\n'), (self.methods, '\n')]

        for (memberList, separator) in order:
            memberString = declareMembers(self, memberList)
            if self.indent:
                memberString = CGIndenter(CGGeneric(memberString),
                                          len(self.indent)).define()
            result = result + memberString

        result += "}"
        return result


class CGProxySpecialOperation(CGPerSignatureCall):
    """
    Base class for classes for calling an indexed or named special operation
    (don't use this directly, use the derived classes below).
    """
    def __init__(self, descriptor, operation):
        nativeName = MakeNativeName(descriptor.binaryNameFor(operation))
        operation = descriptor.operations[operation]
        assert len(operation.signatures()) == 1
        signature = operation.signatures()[0]

        (returnType, arguments) = signature
        if operation.isGetter() and not returnType.nullable():
            returnType = IDLNullableType(returnType.location, returnType)

        # We pass len(arguments) as the final argument so that the
        # CGPerSignatureCall won't do any argument conversion of its own.
        CGPerSignatureCall.__init__(self, returnType, "", arguments, nativeName,
                                    False, descriptor, operation,
                                    len(arguments))

        if operation.isSetter():
            # arguments[0] is the index or name of the item that we're setting.
            argument = arguments[1]
            info = getJSToNativeConversionInfo(
                argument.type, descriptor,
                treatNullAs=argument.treatNullAs,
                exceptionCode="return false;")
            template = info.template
            declType = info.declType

            templateValues = {
                "val": "value.handle()",
            }
            self.cgRoot.prepend(instantiateJSToNativeConversionTemplate(
                template, templateValues, declType, argument.identifier.name))
            self.cgRoot.prepend(CGGeneric("rooted!(in(cx) let value = desc.value);"))

    def getArguments(self):
        args = [(a, process_arg(a.identifier.name, a)) for a in self.arguments]
        return args

    def wrap_return_value(self):
        if not self.idlNode.isGetter() or self.templateValues is None:
            return ""

        wrap = CGGeneric(wrapForType(**self.templateValues))
        wrap = CGIfWrapper("let Some(result) = result", wrap)
        return "\n" + wrap.define()


class CGProxyIndexedGetter(CGProxySpecialOperation):
    """
    Class to generate a call to an indexed getter. If templateValues is not
    None the returned value will be wrapped with wrapForType using
    templateValues.
    """
    def __init__(self, descriptor, templateValues=None):
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedGetter')


class CGProxyIndexedSetter(CGProxySpecialOperation):
    """
    Class to generate a call to an indexed setter.
    """
    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'IndexedSetter')


class CGProxyNamedOperation(CGProxySpecialOperation):
    """
    Class to generate a call to a named operation.
    """
    def __init__(self, descriptor, name):
        CGProxySpecialOperation.__init__(self, descriptor, name)

    def define(self):
        # Our first argument is the id we're getting.
        argName = self.arguments[0].identifier.name
        return ("let %s = jsid_to_string(cx, Handle::from_raw(id)).expect(\"Not a string-convertible JSID?\");\n"
                "let this = UnwrapProxy(proxy);\n"
                "let this = &*this;\n" % argName +
                CGProxySpecialOperation.define(self))


class CGProxyNamedGetter(CGProxyNamedOperation):
    """
    Class to generate a call to a named getter. If templateValues is not
    None the returned value will be wrapped with wrapForType using
    templateValues.
    """
    def __init__(self, descriptor, templateValues=None):
        self.templateValues = templateValues
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedGetter')


class CGProxyNamedPresenceChecker(CGProxyNamedGetter):
    """
    Class to generate a call that checks whether a named property exists.
    For now, we just delegate to CGProxyNamedGetter.
    """
    def __init__(self, descriptor):
        CGProxyNamedGetter.__init__(self, descriptor)


class CGProxyNamedSetter(CGProxyNamedOperation):
    """
    Class to generate a call to a named setter.
    """
    def __init__(self, descriptor):
        CGProxySpecialOperation.__init__(self, descriptor, 'NamedSetter')


class CGProxyNamedDeleter(CGProxyNamedOperation):
    """
    Class to generate a call to a named deleter.
""" def __init__(self, descriptor): CGProxySpecialOperation.__init__(self, descriptor, 'NamedDeleter') class CGProxyUnwrap(CGAbstractMethod): def __init__(self, descriptor): args = [Argument('RawHandleObject', 'obj')] CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy", '*const ' + descriptor.concreteType, args, alwaysInline=True, unsafe=True) def definition_body(self): return CGGeneric("""\ /*if (xpc::WrapperFactory::IsXrayWrapper(obj)) { obj = js::UnwrapObject(obj); }*/ //MOZ_ASSERT(IsProxy(obj)); let mut slot = UndefinedValue(); GetProxyReservedSlot(obj.get(), 0, &mut slot); let box_ = slot.to_private() as *const %s; return box_;""" % self.descriptor.concreteType) class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod): def __init__(self, descriptor): args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', 'proxy'), Argument('RawHandleId', 'id'), Argument('RawMutableHandle<PropertyDescriptor>', 'mut desc')] CGAbstractExternMethod.__init__(self, descriptor, "getOwnPropertyDescriptor", "bool", args) self.descriptor = descriptor # https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty def getBody(self): indexedGetter = self.descriptor.operations['IndexedGetter'] get = "" if indexedGetter: get = "let index = get_array_index_from_id(cx, Handle::from_raw(id));\n" attrs = "JSPROP_ENUMERATE" if self.descriptor.operations['IndexedSetter'] is None: attrs += " | JSPROP_READONLY" # FIXME(#11868) Should assign to desc.value, desc.get() is a copy. fillDescriptor = ("desc.get().value = result_root.get();\n" "fill_property_descriptor(MutableHandle::from_raw(desc), proxy.get(), (%s) as u32);\n" "return true;" % attrs) templateValues = { 'jsvalRef': 'result_root.handle_mut()', 'successCode': fillDescriptor, 'pre': 'rooted!(in(cx) let mut result_root = UndefinedValue());' } get += ("if let Some(index) = index {\n" + " let this = UnwrapProxy(proxy);\n" + " let this = &*this;\n" + CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define() + "\n" + "}\n") namedGetter = self.descriptor.operations['NamedGetter'] if namedGetter: attrs = [] if not self.descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"): attrs.append("JSPROP_ENUMERATE") if self.descriptor.operations['NamedSetter'] is None: attrs.append("JSPROP_READONLY") if attrs: attrs = " | ".join(attrs) else: attrs = "0" # FIXME(#11868) Should assign to desc.value, desc.get() is a copy. fillDescriptor = ("desc.get().value = result_root.get();\n" "fill_property_descriptor(MutableHandle::from_raw(desc), proxy.get(), (%s) as u32);\n" "return true;" % attrs) templateValues = { 'jsvalRef': 'result_root.handle_mut()', 'successCode': fillDescriptor, 'pre': 'rooted!(in(cx) let mut result_root = UndefinedValue());' } # See the similar-looking in CGDOMJSProxyHandler_get for the spec quote. condition = "RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id)" if indexedGetter: condition = "index.is_none() && (%s)" % condition # Once we start supporting OverrideBuiltins we need to make # ResolveOwnProperty or EnumerateOwnProperties filter out named # properties that shadow prototype properties. 
namedGet = """ if %s { let mut has_on_proto = false; if !has_property_on_prototype(cx, proxy_lt, id_lt, &mut has_on_proto) { return false; } if !has_on_proto { %s } } """ % (condition, CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues), 8).define()) else: namedGet = "" return get + """\ rooted!(in(cx) let mut expando = ptr::null_mut::<JSObject>()); get_expando_object(proxy, expando.handle_mut()); //if (!xpc::WrapperFactory::IsXrayWrapper(proxy) && (expando = GetExpandoObject(proxy))) { let proxy_lt = Handle::from_raw(proxy); let id_lt = Handle::from_raw(id); if !expando.is_null() { if !JS_GetPropertyDescriptorById(cx, expando.handle().into(), id, desc) { return false; } if !desc.obj.is_null() { // Pretend the property lives on the wrapper. desc.obj = proxy.get(); return true; } } """ + namedGet + """\ desc.get().obj = ptr::null_mut(); return true;""" def definition_body(self): return CGGeneric(self.getBody()) class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod): def __init__(self, descriptor): args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', 'proxy'), Argument('RawHandleId', 'id'), Argument('RawHandle<PropertyDescriptor>', 'desc'), Argument('*mut ObjectOpResult', 'opresult')] CGAbstractExternMethod.__init__(self, descriptor, "defineProperty", "bool", args) self.descriptor = descriptor def getBody(self): set = "" indexedSetter = self.descriptor.operations['IndexedSetter'] if indexedSetter: set += ("let index = get_array_index_from_id(cx, Handle::from_raw(id));\n" + "if let Some(index) = index {\n" + " let this = UnwrapProxy(proxy);\n" + " let this = &*this;\n" + CGIndenter(CGProxyIndexedSetter(self.descriptor)).define() + " return (*opresult).succeed();\n" + "}\n") elif self.descriptor.operations['IndexedGetter']: set += ("if get_array_index_from_id(cx, Handle::from_raw(id)).is_some() {\n" + " return (*opresult).failNoIndexedSetter();\n" + "}\n") namedSetter = self.descriptor.operations['NamedSetter'] if namedSetter: if self.descriptor.hasUnforgeableMembers: raise TypeError("Can't handle a named setter on an interface that has " "unforgeables. Figure out how that should work!") set += ("if RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id) {\n" + CGIndenter(CGProxyNamedSetter(self.descriptor)).define() + " return (*opresult).succeed();\n" + "}\n") else: set += ("if RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id) {\n" + CGIndenter(CGProxyNamedGetter(self.descriptor)).define() + " if result.is_some() {\n" " return (*opresult).failNoNamedSetter();\n" " }\n" "}\n") set += "return proxyhandler::define_property(%s);" % ", ".join(a.name for a in self.args) return set def definition_body(self): return CGGeneric(self.getBody()) class CGDOMJSProxyHandler_delete(CGAbstractExternMethod): def __init__(self, descriptor): args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', 'proxy'), Argument('RawHandleId', 'id'), Argument('*mut ObjectOpResult', 'res')] CGAbstractExternMethod.__init__(self, descriptor, "delete", "bool", args) self.descriptor = descriptor def getBody(self): set = "" if self.descriptor.operations['NamedDeleter']: if self.descriptor.hasUnforgeableMembers: raise TypeError("Can't handle a deleter on an interface that has " "unforgeables. 
Figure out how that should work!") set += CGProxyNamedDeleter(self.descriptor).define() set += "return proxyhandler::delete(%s);" % ", ".join(a.name for a in self.args) return set def definition_body(self): return CGGeneric(self.getBody()) class CGDOMJSProxyHandler_ownPropertyKeys(CGAbstractExternMethod): def __init__(self, descriptor): args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', 'proxy'), Argument('*mut AutoIdVector', 'props')] CGAbstractExternMethod.__init__(self, descriptor, "own_property_keys", "bool", args) self.descriptor = descriptor def getBody(self): body = dedent( """ let unwrapped_proxy = UnwrapProxy(proxy); """) if self.descriptor.operations['IndexedGetter']: body += dedent( """ for i in 0..(*unwrapped_proxy).Length() { rooted!(in(cx) let rooted_jsid = int_to_jsid(i as i32)); AppendToAutoIdVector(props, rooted_jsid.handle().get()); } """) if self.descriptor.operations['NamedGetter']: body += dedent( """ for name in (*unwrapped_proxy).SupportedPropertyNames() { let cstring = CString::new(name).unwrap(); let jsstring = JS_AtomizeAndPinString(cx, cstring.as_ptr()); rooted!(in(cx) let rooted = jsstring); let jsid = INTERNED_STRING_TO_JSID(cx, rooted.handle().get()); rooted!(in(cx) let rooted_jsid = jsid); AppendToAutoIdVector(props, rooted_jsid.handle().get()); } """) body += dedent( """ rooted!(in(cx) let mut expando = ptr::null_mut::<JSObject>()); get_expando_object(proxy, expando.handle_mut()); if !expando.is_null() { GetPropertyKeys(cx, expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props); } return true; """) return body def definition_body(self): return CGGeneric(self.getBody()) class CGDOMJSProxyHandler_getOwnEnumerablePropertyKeys(CGAbstractExternMethod): def __init__(self, descriptor): assert (descriptor.operations["IndexedGetter"] and descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties")) args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', 'proxy'), Argument('*mut AutoIdVector', 'props')] CGAbstractExternMethod.__init__(self, descriptor, "getOwnEnumerablePropertyKeys", "bool", args) self.descriptor = descriptor def getBody(self): body = dedent( """ let unwrapped_proxy = UnwrapProxy(proxy); """) if self.descriptor.operations['IndexedGetter']: body += dedent( """ for i in 0..(*unwrapped_proxy).Length() { rooted!(in(cx) let rooted_jsid = int_to_jsid(i as i32)); AppendToAutoIdVector(props, rooted_jsid.handle().get()); } """) body += dedent( """ rooted!(in(cx) let mut expando = ptr::null_mut::<JSObject>()); get_expando_object(proxy, expando.handle_mut()); if !expando.is_null() { GetPropertyKeys(cx, expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props); } return true; """) return body def definition_body(self): return CGGeneric(self.getBody()) class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod): def __init__(self, descriptor): args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', 'proxy'), Argument('RawHandleId', 'id'), Argument('*mut bool', 'bp')] CGAbstractExternMethod.__init__(self, descriptor, "hasOwn", "bool", args) self.descriptor = descriptor def getBody(self): indexedGetter = self.descriptor.operations['IndexedGetter'] if indexedGetter: indexed = ("let index = get_array_index_from_id(cx, Handle::from_raw(id));\n" + "if let Some(index) = index {\n" + " let this = UnwrapProxy(proxy);\n" + " let this = &*this;\n" + CGIndenter(CGProxyIndexedGetter(self.descriptor)).define() + "\n" + " *bp = result.is_some();\n" + " return true;\n" + "}\n\n") 
else: indexed = "" namedGetter = self.descriptor.operations['NamedGetter'] condition = "RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id)" if indexedGetter: condition = "index.is_none() && (%s)" % condition if namedGetter: named = """\ if %s { let mut has_on_proto = false; if !has_property_on_prototype(cx, proxy_lt, id_lt, &mut has_on_proto) { return false; } if !has_on_proto { %s *bp = result.is_some(); return true; } } """ % (condition, CGIndenter(CGProxyNamedGetter(self.descriptor), 8).define()) else: named = "" return indexed + """\ rooted!(in(cx) let mut expando = ptr::null_mut::<JSObject>()); let proxy_lt = Handle::from_raw(proxy); let id_lt = Handle::from_raw(id); get_expando_object(proxy, expando.handle_mut()); if !expando.is_null() { let ok = JS_HasPropertyById(cx, expando.handle().into(), id, bp); if !ok || *bp { return ok; } } """ + named + """\ *bp = false; return true;""" def definition_body(self): return CGGeneric(self.getBody()) class CGDOMJSProxyHandler_get(CGAbstractExternMethod): def __init__(self, descriptor): args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', 'proxy'), Argument('RawHandleValue', 'receiver'), Argument('RawHandleId', 'id'), Argument('RawMutableHandleValue', 'vp')] CGAbstractExternMethod.__init__(self, descriptor, "get", "bool", args) self.descriptor = descriptor # https://heycam.github.io/webidl/#LegacyPlatformObjectGetOwnProperty def getBody(self): getFromExpando = """\ rooted!(in(cx) let mut expando = ptr::null_mut::<JSObject>()); get_expando_object(proxy, expando.handle_mut()); if !expando.is_null() { let mut hasProp = false; if !JS_HasPropertyById(cx, expando.handle().into(), id, &mut hasProp) { return false; } if hasProp { return JS_ForwardGetPropertyTo(cx, expando.handle().into(), id, receiver, vp); } }""" templateValues = { 'jsvalRef': 'vp_lt', 'successCode': 'return true;', } indexedGetter = self.descriptor.operations['IndexedGetter'] if indexedGetter: getIndexedOrExpando = ("let index = get_array_index_from_id(cx, id_lt);\n" + "if let Some(index) = index {\n" + " let this = UnwrapProxy(proxy);\n" + " let this = &*this;\n" + CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define()) getIndexedOrExpando += """\ // Even if we don't have this index, we don't forward the // get on to our expando object. } else { %s } """ % (stripTrailingWhitespace(getFromExpando.replace('\n', '\n '))) else: getIndexedOrExpando = getFromExpando + "\n" namedGetter = self.descriptor.operations['NamedGetter'] if namedGetter: condition = "RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id)" # From step 1: # If O supports indexed properties and P is an array index, then: # # 3. Set ignoreNamedProps to true. 
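            # Editorial note: the spec's "ignoreNamedProps" is realized below by
            # guarding the named-getter path with `index.is_none()` whenever an
            # indexed getter also exists, i.e. the emitted condition is roughly
            #
            #     index.is_none() && (RUST_JSID_IS_STRING(id) || RUST_JSID_IS_INT(id))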
if indexedGetter: condition = "index.is_none() && (%s)" % condition getNamed = ("if %s {\n" + CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() + "}\n") % condition else: getNamed = "" return """\ //MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy), //"Should not have a XrayWrapper here"); let proxy_lt = Handle::from_raw(proxy); let vp_lt = MutableHandle::from_raw(vp); let id_lt = Handle::from_raw(id); let receiver_lt = Handle::from_raw(receiver); %s let mut found = false; if !get_property_on_prototype(cx, proxy_lt, receiver_lt, id_lt, &mut found, vp_lt) { return false; } if found { return true; } %s vp.set(UndefinedValue()); return true;""" % (getIndexedOrExpando, getNamed) def definition_body(self): return CGGeneric(self.getBody()) class CGDOMJSProxyHandler_className(CGAbstractExternMethod): def __init__(self, descriptor): args = [Argument('*mut JSContext', 'cx'), Argument('RawHandleObject', '_proxy')] CGAbstractExternMethod.__init__(self, descriptor, "className", "*const i8", args, doesNotPanic=True) self.descriptor = descriptor def getBody(self): return '%s as *const u8 as *const i8' % str_to_const_array(self.descriptor.name) def definition_body(self): return CGGeneric(self.getBody()) class CGAbstractClassHook(CGAbstractExternMethod): """ Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw 'this' unwrapping as it assumes that the unwrapped type is always known. """ def __init__(self, descriptor, name, returnType, args, doesNotPanic=False): CGAbstractExternMethod.__init__(self, descriptor, name, returnType, args) def definition_body_prologue(self): return CGGeneric(""" let this = native_from_object::<%s>(obj).unwrap(); """ % self.descriptor.concreteType) def definition_body(self): return CGList([ self.definition_body_prologue(), self.generate_code(), ]) def generate_code(self): raise NotImplementedError # Override me! def finalizeHook(descriptor, hookName, context): release = "" if descriptor.isGlobal(): release += """\ finalize_global(obj); """ elif descriptor.weakReferenceable: release += """\ let mut slot = UndefinedValue(); JS_GetReservedSlot(obj, DOM_WEAK_SLOT, &mut slot); let weak_box_ptr = slot.to_private() as *mut WeakBox<%s>; if !weak_box_ptr.is_null() { let count = { let weak_box = &*weak_box_ptr; assert!(weak_box.value.get().is_some()); assert!(weak_box.count.get() > 0); weak_box.value.set(None); let count = weak_box.count.get() - 1; weak_box.count.set(count); count }; if count == 0 { mem::drop(Box::from_raw(weak_box_ptr)); } } """ % descriptor.concreteType release += """\ if !this.is_null() { // The pointer can be null if the object is the unforgeable holder of that interface. 
let _ = Box::from_raw(this as *mut %s); } debug!("%s finalize: {:p}", this);\ """ % (descriptor.concreteType, descriptor.concreteType) return release class CGClassTraceHook(CGAbstractClassHook): """ A hook to trace through our native object; used for GC and CC """ def __init__(self, descriptor): args = [Argument('*mut JSTracer', 'trc'), Argument('*mut JSObject', 'obj')] CGAbstractClassHook.__init__(self, descriptor, TRACE_HOOK_NAME, 'void', args, doesNotPanic=True) self.traceGlobal = descriptor.isGlobal() def generate_code(self): body = [CGGeneric("if this.is_null() { return; } // GC during obj creation\n" "(*this).trace(%s);" % self.args[0].name)] if self.traceGlobal: body += [CGGeneric("trace_global(trc, obj);")] return CGList(body, "\n") class CGClassConstructHook(CGAbstractExternMethod): """ JS-visible constructor for our objects """ def __init__(self, descriptor, constructor=None): args = [Argument('*mut JSContext', 'cx'), Argument('u32', 'argc'), Argument('*mut JSVal', 'vp')] name = CONSTRUCT_HOOK_NAME if constructor: name += "_" + constructor.identifier.name else: constructor = descriptor.interface.ctor() assert constructor CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args) self.constructor = constructor self.exposureSet = descriptor.interface.exposureSet def definition_body(self): preamble = """let global = GlobalScope::from_object(JS_CALLEE(cx, vp).to_object());\n""" if len(self.exposureSet) == 1: preamble += """\ let global = DomRoot::downcast::<dom::types::%s>(global).unwrap(); """ % list(self.exposureSet)[0] preamble += """let args = CallArgs::from_vp(vp, argc);\n""" preamble = CGGeneric(preamble) if self.constructor.isHTMLConstructor(): signatures = self.constructor.signatures() assert len(signatures) == 1 constructorCall = CGGeneric("""\ // Step 2 https://html.spec.whatwg.org/multipage/#htmlconstructor // The custom element definition cannot use an element interface as its constructor // The new_target might be a cross-compartment wrapper. Get the underlying object // so we can do the spec's object-identity checks. rooted!(in(cx) let new_target = UnwrapObject(args.new_target().to_object(), 1)); if new_target.is_null() { throw_dom_exception(cx, global.upcast::<GlobalScope>(), Error::Type("new.target is null".to_owned())); return false; } if args.callee() == new_target.get() { throw_dom_exception(cx, global.upcast::<GlobalScope>(), Error::Type("new.target must not be the active function object".to_owned())); return false; } // Step 6 rooted!(in(cx) let mut prototype = ptr::null_mut::<JSObject>()); { rooted!(in(cx) let mut proto_val = UndefinedValue()); let _ac = JSAutoCompartment::new(cx, new_target.get()); if !JS_GetProperty(cx, new_target.handle(), b"prototype\\0".as_ptr() as *const _, proto_val.handle_mut()) { return false; } if !proto_val.is_object() { // Step 7 of https://html.spec.whatwg.org/multipage/#htmlconstructor. // This fallback behavior is designed to match analogous behavior for the // JavaScript built-ins. So we enter the compartment of our underlying // newTarget object and fall back to the prototype object from that global. // XXX The spec says to use GetFunctionRealm(), which is not actually // the same thing as what we have here (e.g. in the case of scripted callable proxies // whose target is not same-compartment with the proxy, or bound functions, etc). 
// https://bugzilla.mozilla.org/show_bug.cgi?id=1317658 rooted!(in(cx) let global_object = CurrentGlobalOrNull(cx)); GetProtoObject(cx, global_object.handle(), prototype.handle_mut()); } else { // Step 6 prototype.set(proto_val.to_object()); }; } // Wrap prototype in this context since it is from the newTarget compartment if !JS_WrapObject(cx, prototype.handle_mut()) { return false; } let result: Result<DomRoot<%s>, Error> = html_constructor(&global, &args); let result = match result { Ok(result) => result, Err(e) => { throw_dom_exception(cx, global.upcast::<GlobalScope>(), e); return false; }, }; rooted!(in(cx) let mut element = result.reflector().get_jsobject().get()); if !JS_WrapObject(cx, element.handle_mut()) { return false; } JS_SetPrototype(cx, element.handle(), prototype.handle()); (result).to_jsval(cx, MutableHandleValue::from_raw(args.rval())); return true; """ % self.descriptor.name) else: name = self.constructor.identifier.name nativeName = MakeNativeName(self.descriptor.binaryNameFor(name)) constructorCall = CGMethodCall(["&global"], nativeName, True, self.descriptor, self.constructor) return CGList([preamble, constructorCall]) class CGClassFinalizeHook(CGAbstractClassHook): """ A hook for finalize, used to release our native object. """ def __init__(self, descriptor): args = [Argument('*mut JSFreeOp', '_fop'), Argument('*mut JSObject', 'obj')] CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME, 'void', args) def generate_code(self): return CGGeneric(finalizeHook(self.descriptor, self.name, self.args[0].name)) class CGDOMJSProxyHandlerDOMClass(CGThing): def __init__(self, descriptor): CGThing.__init__(self) self.descriptor = descriptor def define(self): return "static Class: DOMClass = " + DOMClass(self.descriptor) + ";\n" class CGInterfaceTrait(CGThing): def __init__(self, descriptor): CGThing.__init__(self) def attribute_arguments(needCx, argument=None): if needCx: yield "cx", "*mut JSContext" if argument: yield "value", argument_type(descriptor, argument) def members(): for m in descriptor.interface.members: if (m.isMethod() and not m.isStatic() and not m.isMaplikeOrSetlikeOrIterableMethod() and (not m.isIdentifierLess() or m.isStringifier())): name = CGSpecializedMethod.makeNativeName(descriptor, m) infallible = 'infallible' in descriptor.getExtendedAttributes(m) for idx, (rettype, arguments) in enumerate(m.signatures()): arguments = method_arguments(descriptor, rettype, arguments) rettype = return_type(descriptor, rettype, infallible) yield name + ('_' * idx), arguments, rettype elif m.isAttr() and not m.isStatic(): name = CGSpecializedGetter.makeNativeName(descriptor, m) infallible = 'infallible' in descriptor.getExtendedAttributes(m, getter=True) yield (name, attribute_arguments(typeNeedsCx(m.type, True)), return_type(descriptor, m.type, infallible)) if not m.readonly: name = CGSpecializedSetter.makeNativeName(descriptor, m) infallible = 'infallible' in descriptor.getExtendedAttributes(m, setter=True) if infallible: rettype = "()" else: rettype = "ErrorResult" yield name, attribute_arguments(typeNeedsCx(m.type, False), m.type), rettype if descriptor.proxy: for name, operation in descriptor.operations.iteritems(): if not operation or operation.isStringifier(): continue assert len(operation.signatures()) == 1 rettype, arguments = operation.signatures()[0] infallible = 'infallible' in descriptor.getExtendedAttributes(operation) if operation.isGetter(): if not rettype.nullable(): rettype = IDLNullableType(rettype.location, rettype) arguments = 
method_arguments(descriptor, rettype, arguments) # If this interface 'supports named properties', then we # should be able to access 'supported property names' # # WebIDL, Second Draft, section 3.2.4.5 # https://heycam.github.io/webidl/#idl-named-properties if operation.isNamed(): yield "SupportedPropertyNames", [], "Vec<DOMString>" else: arguments = method_arguments(descriptor, rettype, arguments) rettype = return_type(descriptor, rettype, infallible) yield name, arguments, rettype def fmt(arguments): keywords = {"async"} return "".join( ", %s: %s" % (name if name not in keywords else "r#" + name, type_) for name, type_ in arguments ) def contains_unsafe_arg(arguments): if not arguments or len(arguments) == 0: return False return reduce((lambda x, y: x or y[1] == '*mut JSContext'), arguments, False) methods = [] for name, arguments, rettype in members(): arguments = list(arguments) methods.append(CGGeneric("%sfn %s(&self%s) -> %s;\n" % ( 'unsafe ' if contains_unsafe_arg(arguments) else '', name, fmt(arguments), rettype)) ) if methods: self.cgRoot = CGWrapper(CGIndenter(CGList(methods, "")), pre="pub trait %sMethods {\n" % descriptor.interface.identifier.name, post="}") else: self.cgRoot = CGGeneric("") self.empty = not methods def define(self): return self.cgRoot.define() class CGWeakReferenceableTrait(CGThing): def __init__(self, descriptor): CGThing.__init__(self) assert descriptor.weakReferenceable self.code = "impl WeakReferenceable for %s {}" % descriptor.interface.identifier.name def define(self): return self.code def generate_imports(config, cgthings, descriptors, callbacks=None, dictionaries=None, enums=None, typedefs=None): if not callbacks: callbacks = [] if not dictionaries: dictionaries = [] if not enums: enums = [] if not typedefs: typedefs = [] return CGImports(cgthings, descriptors, callbacks, dictionaries, enums, typedefs, [ 'js', 'js::JSCLASS_GLOBAL_SLOT_COUNT', 'js::JSCLASS_IS_DOMJSCLASS', 'js::JSCLASS_IS_GLOBAL', 'js::JSCLASS_RESERVED_SLOTS_MASK', 'js::JS_CALLEE', 'js::error::throw_type_error', 'js::error::throw_internal_error', 'js::jsapi::AutoIdVector', 'js::rust::wrappers::Call', 'js::jsapi::CallArgs', 'js::jsapi::CurrentGlobalOrNull', 'js::jsapi::FreeOp', 'js::rust::wrappers::GetPropertyKeys', 'js::jsapi::GetWellKnownSymbol', 'js::rust::Handle', 'js::jsapi::Handle as RawHandle', 'js::rust::HandleId', 'js::jsapi::HandleId as RawHandleId', 'js::rust::HandleObject', 'js::jsapi::HandleObject as RawHandleObject', 'js::rust::HandleValue', 'js::jsapi::HandleValue as RawHandleValue', 'js::jsapi::HandleValueArray', 'js::jsapi::Heap', 'js::jsapi::INTERNED_STRING_TO_JSID', 'js::jsapi::IsCallable', 'js::jsapi::JSAutoCompartment', 'js::jsapi::JSCLASS_FOREGROUND_FINALIZE', 'js::jsapi::JSCLASS_RESERVED_SLOTS_SHIFT', 'js::jsapi::JSClass', 'js::jsapi::JSContext', 'js::jsapi::JSFreeOp', 'js::jsapi::JSFunctionSpec', 'js::jsapi::JSITER_HIDDEN', 'js::jsapi::JSITER_OWNONLY', 'js::jsapi::JSITER_SYMBOLS', 'js::jsapi::JSJitGetterCallArgs', 'js::jsapi::JSJitInfo', 'js::jsapi::JSJitInfo_AliasSet', 'js::jsapi::JSJitInfo_ArgType', 'js::jsapi::JSJitInfo_OpType', 'js::jsapi::JSJitMethodCallArgs', 'js::jsapi::JSJitSetterCallArgs', 'js::jsapi::JSNative', 'js::jsapi::JSNativeWrapper', 'js::jsapi::JSObject', 'js::jsapi::JSPROP_ENUMERATE', 'js::jsapi::JSPROP_PERMANENT', 'js::jsapi::JSPROP_READONLY', 'js::jsapi::JSPropertySpec', 'js::jsapi::JSPropertySpec__bindgen_ty_1', 'js::jsapi::JSPropertySpec__bindgen_ty_1__bindgen_ty_1', 'js::jsapi::JSPropertySpec__bindgen_ty_1__bindgen_ty_1__bindgen_ty_1', 
'js::jsapi::JSPropertySpec__bindgen_ty_1__bindgen_ty_1__bindgen_ty_2', 'js::jsapi::JSString', 'js::jsapi::JSTracer', 'js::jsapi::JSType', 'js::jsapi::JSTypedMethodJitInfo', 'js::jsapi::JSValueType', 'js::jsapi::JS_AtomizeAndPinString', 'js::rust::wrappers::JS_CallFunctionValue', 'js::rust::wrappers::JS_CopyPropertiesFrom', 'js::rust::wrappers::JS_DefineProperty', 'js::rust::wrappers::JS_DefinePropertyById2', 'js::jsapi::JS_ForwardGetPropertyTo', 'js::jsapi::JS_GetErrorPrototype', 'js::rust::wrappers::JS_GetFunctionPrototype', 'js::jsapi::JS_GetGlobalForObject', 'js::jsapi::JS_GetIteratorPrototype', 'js::rust::wrappers::JS_GetObjectPrototype', 'js::rust::wrappers::JS_GetProperty', 'js::jsapi::JS_GetPropertyById', 'js::jsapi::JS_GetPropertyDescriptorById', 'js::glue::JS_GetReservedSlot', 'js::jsapi::JS_HasProperty', 'js::jsapi::JS_HasPropertyById', 'js::rust::wrappers::JS_InitializePropertiesFromCompatibleNativeObject', 'js::jsapi::JS_NewObject', 'js::rust::wrappers::JS_NewObjectWithGivenProto', 'js::rust::wrappers::JS_NewObjectWithoutMetadata', 'js::rust::wrappers::JS_ObjectIsDate', 'js::rust::wrappers::JS_SetImmutablePrototype', 'js::rust::wrappers::JS_SetProperty', 'js::rust::wrappers::JS_SetPrototype', 'js::jsapi::JS_SetReservedSlot', 'js::rust::wrappers::JS_SplicePrototype', 'js::rust::wrappers::JS_WrapValue', 'js::rust::wrappers::JS_WrapObject', 'js::rust::MutableHandle', 'js::jsapi::MutableHandle as RawMutableHandle', 'js::rust::MutableHandleObject', 'js::jsapi::MutableHandleObject as RawMutableHandleObject', 'js::rust::MutableHandleValue', 'js::jsapi::MutableHandleValue as RawMutableHandleValue', 'js::jsapi::ObjectOpResult', 'js::jsapi::PropertyDescriptor', 'js::jsapi::Rooted', 'js::jsapi::RootedId', 'js::jsapi::RootedObject', 'js::jsapi::RootedString', 'js::jsapi::SymbolCode', 'js::jsapi::jsid', 'js::jsval::JSVal', 'js::jsval::NullValue', 'js::jsval::ObjectValue', 'js::jsval::ObjectOrNullValue', 'js::jsval::PrivateValue', 'js::jsval::UndefinedValue', 'js::jsapi::UndefinedHandleValue', 'js::glue::AppendToAutoIdVector', 'js::glue::CallJitGetterOp', 'js::glue::CallJitMethodOp', 'js::glue::CallJitSetterOp', 'js::glue::CreateProxyHandler', 'js::glue::GetProxyReservedSlot', 'js::glue::SetProxyReservedSlot', 'js::rust::wrappers::NewProxyObject', 'js::glue::ProxyTraps', 'js::glue::RUST_JSID_IS_INT', 'js::glue::RUST_JSID_IS_STRING', 'js::glue::RUST_SYMBOL_TO_JSID', 'js::glue::int_to_jsid', 'js::glue::UnwrapObject', 'js::panic::maybe_resume_unwind', 'js::panic::wrap_panic', 'js::rust::GCMethods', 'js::rust::CustomAutoRooterGuard', 'js::rust::define_methods', 'js::rust::define_properties', 'js::rust::get_object_class', 'js::typedarray', 'crate::dom', 'crate::dom::bindings', 'crate::dom::bindings::codegen::InterfaceObjectMap', 'crate::dom::bindings::constant::ConstantSpec', 'crate::dom::bindings::constant::ConstantVal', 'crate::dom::bindings::interface::ConstructorClassHook', 'crate::dom::bindings::interface::InterfaceConstructorBehavior', 'crate::dom::bindings::interface::NonCallbackInterfaceObjectClass', 'crate::dom::bindings::interface::create_global_object', 'crate::dom::bindings::interface::create_callback_interface_object', 'crate::dom::bindings::interface::create_interface_prototype_object', 'crate::dom::bindings::interface::create_named_constructors', 'crate::dom::bindings::interface::create_noncallback_interface_object', 'crate::dom::bindings::interface::define_guarded_constants', 'crate::dom::bindings::interface::define_guarded_methods', 
'crate::dom::bindings::interface::define_guarded_properties', 'crate::dom::bindings::htmlconstructor::html_constructor', 'crate::dom::bindings::interface::is_exposed_in', 'crate::dom::bindings::htmlconstructor::pop_current_element_queue', 'crate::dom::bindings::htmlconstructor::push_new_element_queue', 'crate::dom::bindings::iterable::Iterable', 'crate::dom::bindings::iterable::IteratorType', 'crate::dom::bindings::namespace::NamespaceObjectClass', 'crate::dom::bindings::namespace::create_namespace_object', 'crate::dom::bindings::reflector::MutDomObject', 'crate::dom::bindings::reflector::DomObject', 'crate::dom::bindings::root::Dom', 'crate::dom::bindings::root::DomRoot', 'crate::dom::bindings::root::OptionalHeapSetter', 'crate::dom::bindings::root::RootedReference', 'crate::dom::bindings::utils::AsVoidPtr', 'crate::dom::bindings::utils::DOMClass', 'crate::dom::bindings::utils::DOMJSClass', 'crate::dom::bindings::utils::DOM_PROTO_UNFORGEABLE_HOLDER_SLOT', 'crate::dom::bindings::utils::JSCLASS_DOM_GLOBAL', 'crate::dom::bindings::utils::ProtoOrIfaceArray', 'crate::dom::bindings::utils::enumerate_global', 'crate::dom::bindings::utils::finalize_global', 'crate::dom::bindings::utils::find_enum_value', 'crate::dom::bindings::utils::generic_getter', 'crate::dom::bindings::utils::generic_lenient_getter', 'crate::dom::bindings::utils::generic_lenient_setter', 'crate::dom::bindings::utils::generic_method', 'crate::dom::bindings::utils::generic_setter', 'crate::dom::bindings::utils::get_array_index_from_id', 'crate::dom::bindings::utils::get_dictionary_property', 'crate::dom::bindings::utils::get_property_on_prototype', 'crate::dom::bindings::utils::get_proto_or_iface_array', 'crate::dom::bindings::utils::has_property_on_prototype', 'crate::dom::bindings::utils::is_platform_object', 'crate::dom::bindings::utils::resolve_global', 'crate::dom::bindings::utils::set_dictionary_property', 'crate::dom::bindings::utils::trace_global', 'crate::dom::bindings::trace::JSTraceable', 'crate::dom::bindings::trace::RootedTraceable', 'crate::dom::bindings::trace::RootedTraceableBox', 'crate::dom::bindings::callback::CallSetup', 'crate::dom::bindings::callback::CallbackContainer', 'crate::dom::bindings::callback::CallbackInterface', 'crate::dom::bindings::callback::CallbackFunction', 'crate::dom::bindings::callback::CallbackObject', 'crate::dom::bindings::callback::ExceptionHandling', 'crate::dom::bindings::callback::wrap_call_this_object', 'crate::dom::bindings::conversions::ConversionBehavior', 'crate::dom::bindings::conversions::ConversionResult', 'crate::dom::bindings::conversions::DOM_OBJECT_SLOT', 'crate::dom::bindings::conversions::FromJSValConvertible', 'crate::dom::bindings::conversions::IDLInterface', 'crate::dom::bindings::conversions::StringificationBehavior', 'crate::dom::bindings::conversions::ToJSValConvertible', 'crate::dom::bindings::conversions::is_array_like', 'crate::dom::bindings::conversions::native_from_handlevalue', 'crate::dom::bindings::conversions::native_from_object', 'crate::dom::bindings::conversions::private_from_object', 'crate::dom::bindings::conversions::root_from_handleobject', 'crate::dom::bindings::conversions::root_from_handlevalue', 'crate::dom::bindings::conversions::root_from_object', 'crate::dom::bindings::conversions::jsid_to_string', 'crate::dom::bindings::codegen::PrototypeList', 'crate::dom::bindings::codegen::RegisterBindings', 'crate::dom::bindings::codegen::UnionTypes', 'crate::dom::bindings::error::Error', 'crate::dom::bindings::error::ErrorResult', 
'crate::dom::bindings::error::Fallible', 'crate::dom::bindings::error::Error::JSFailed', 'crate::dom::bindings::error::throw_dom_exception', 'crate::dom::bindings::guard::Condition', 'crate::dom::bindings::guard::Guard', 'crate::dom::bindings::inheritance::Castable', 'crate::dom::bindings::proxyhandler', 'crate::dom::bindings::proxyhandler::ensure_expando_object', 'crate::dom::bindings::proxyhandler::fill_property_descriptor', 'crate::dom::bindings::proxyhandler::get_expando_object', 'crate::dom::bindings::proxyhandler::get_property_descriptor', 'crate::dom::bindings::mozmap::MozMap', 'std::ptr::NonNull', 'crate::dom::bindings::num::Finite', 'crate::dom::bindings::str::ByteString', 'crate::dom::bindings::str::DOMString', 'crate::dom::bindings::str::USVString', 'crate::dom::bindings::weakref::DOM_WEAK_SLOT', 'crate::dom::bindings::weakref::WeakBox', 'crate::dom::bindings::weakref::WeakReferenceable', 'crate::dom::windowproxy::WindowProxy', 'crate::dom::globalscope::GlobalScope', 'crate::mem::malloc_size_of_including_raw_self', 'libc', 'servo_config::prefs::PREFS', 'std::borrow::ToOwned', 'std::cmp', 'std::mem', 'std::num', 'std::os', 'std::panic', 'std::ptr', 'std::str', 'std::rc', 'std::rc::Rc', 'std::default::Default', 'std::ffi::CString', ], config) class CGDescriptor(CGThing): def __init__(self, descriptor, config, soleDescriptor): CGThing.__init__(self) assert not descriptor.concrete or not descriptor.interface.isCallback() reexports = [] def reexportedName(name): if name.startswith(descriptor.name): return name if not soleDescriptor: return '%s as %s%s' % (name, descriptor.name, name) return name cgThings = [] unscopableNames = [] for m in descriptor.interface.members: if (m.isMethod() and (not m.isIdentifierLess() or m == descriptor.operations["Stringifier"])): if m.getExtendedAttribute("Unscopable"): assert not m.isStatic() unscopableNames.append(m.identifier.name) if m.isStatic(): assert descriptor.interface.hasInterfaceObject() cgThings.append(CGStaticMethod(descriptor, m)) elif not descriptor.interface.isCallback(): cgThings.append(CGSpecializedMethod(descriptor, m)) cgThings.append(CGMemberJITInfo(descriptor, m)) elif m.isAttr(): if m.stringifier: raise TypeError("Stringifier attributes not supported yet. 
" "See https://github.com/servo/servo/issues/7590\n" "%s" % m.location) if m.getExtendedAttribute("Unscopable"): assert not m.isStatic() unscopableNames.append(m.identifier.name) if m.isStatic(): assert descriptor.interface.hasInterfaceObject() cgThings.append(CGStaticGetter(descriptor, m)) elif not descriptor.interface.isCallback(): cgThings.append(CGSpecializedGetter(descriptor, m)) if not m.readonly: if m.isStatic(): assert descriptor.interface.hasInterfaceObject() cgThings.append(CGStaticSetter(descriptor, m)) elif not descriptor.interface.isCallback(): cgThings.append(CGSpecializedSetter(descriptor, m)) elif m.getExtendedAttribute("PutForwards"): cgThings.append(CGSpecializedForwardingSetter(descriptor, m)) elif m.getExtendedAttribute("Replaceable"): cgThings.append(CGSpecializedReplaceableSetter(descriptor, m)) if (not m.isStatic() and not descriptor.interface.isCallback()): cgThings.append(CGMemberJITInfo(descriptor, m)) if descriptor.concrete: cgThings.append(CGClassFinalizeHook(descriptor)) cgThings.append(CGClassTraceHook(descriptor)) # If there are no constant members, don't make a module for constants constMembers = [CGConstant(m) for m in descriptor.interface.members if m.isConst()] if constMembers: cgThings.append(CGNamespace.build([descriptor.name + "Constants"], CGIndenter(CGList(constMembers)), public=True)) reexports.append(descriptor.name + 'Constants') if descriptor.proxy: cgThings.append(CGDefineProxyHandler(descriptor)) properties = PropertyArrays(descriptor) if descriptor.concrete: if descriptor.proxy: # cgThings.append(CGProxyIsProxy(descriptor)) cgThings.append(CGProxyUnwrap(descriptor)) cgThings.append(CGDOMJSProxyHandlerDOMClass(descriptor)) cgThings.append(CGDOMJSProxyHandler_ownPropertyKeys(descriptor)) if descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"): cgThings.append(CGDOMJSProxyHandler_getOwnEnumerablePropertyKeys(descriptor)) cgThings.append(CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor)) cgThings.append(CGDOMJSProxyHandler_className(descriptor)) cgThings.append(CGDOMJSProxyHandler_get(descriptor)) cgThings.append(CGDOMJSProxyHandler_hasOwn(descriptor)) if descriptor.operations['IndexedSetter'] or descriptor.operations['NamedSetter']: cgThings.append(CGDOMJSProxyHandler_defineProperty(descriptor)) # We want to prevent indexed deleters from compiling at all. 
assert not descriptor.operations['IndexedDeleter'] if descriptor.operations['NamedDeleter']: cgThings.append(CGDOMJSProxyHandler_delete(descriptor)) # cgThings.append(CGDOMJSProxyHandler(descriptor)) # cgThings.append(CGIsMethod(descriptor)) pass else: cgThings.append(CGDOMJSClass(descriptor)) if not descriptor.interface.isIteratorInterface(): cgThings.append(CGAssertInheritance(descriptor)) pass if descriptor.isGlobal(): cgThings.append(CGWrapGlobalMethod(descriptor, properties)) else: cgThings.append(CGWrapMethod(descriptor)) reexports.append('Wrap') haveUnscopables = False if not descriptor.interface.isCallback() and not descriptor.interface.isNamespace(): if unscopableNames: haveUnscopables = True cgThings.append( CGList([CGGeneric("const unscopable_names: &'static [&'static [u8]] = &["), CGIndenter(CGList([CGGeneric(str_to_const_array(name)) for name in unscopableNames], ",\n")), CGGeneric("];\n")], "\n")) if descriptor.concrete or descriptor.hasDescendants(): cgThings.append(CGIDLInterface(descriptor)) interfaceTrait = CGInterfaceTrait(descriptor) cgThings.append(interfaceTrait) if not interfaceTrait.empty: reexports.append('%sMethods' % descriptor.name) if descriptor.weakReferenceable: cgThings.append(CGWeakReferenceableTrait(descriptor)) cgThings.append(CGGeneric(str(properties))) if not descriptor.interface.getExtendedAttribute("Inline"): if not descriptor.interface.isCallback() and not descriptor.interface.isNamespace(): cgThings.append(CGGetProtoObjectMethod(descriptor)) reexports.append('GetProtoObject') cgThings.append(CGPrototypeJSClass(descriptor)) if descriptor.interface.hasInterfaceObject(): if descriptor.interface.ctor(): cgThings.append(CGClassConstructHook(descriptor)) for ctor in descriptor.interface.namedConstructors: cgThings.append(CGClassConstructHook(descriptor, ctor)) if not descriptor.interface.isCallback(): cgThings.append(CGInterfaceObjectJSClass(descriptor)) if descriptor.shouldHaveGetConstructorObjectMethod(): cgThings.append(CGGetConstructorObjectMethod(descriptor)) reexports.append('GetConstructorObject') if descriptor.register: cgThings.append(CGDefineDOMInterfaceMethod(descriptor)) reexports.append('DefineDOMInterface') cgThings.append(CGConstructorEnabled(descriptor)) cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties, haveUnscopables)) cgThings = generate_imports(config, CGList(cgThings, '\n'), [descriptor]) cgThings = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name), cgThings, public=True), post='\n') if reexports: reexports = ', '.join(map(lambda name: reexportedName(name), reexports)) cgThings = CGList([CGGeneric('pub use self::%s::{%s};' % (toBindingNamespace(descriptor.name), reexports)), cgThings], '\n') self.cgRoot = cgThings def define(self): return self.cgRoot.define() class CGNonNamespacedEnum(CGThing): def __init__(self, enumName, names, first, comment="", deriving="", repr=""): # Account for first value entries = ["%s = %s" % (names[0], first)] + names[1:] # Append a Last. entries.append('#[allow(dead_code)] Last = ' + str(first + len(entries))) # Indent. entries = [' ' + e for e in entries] # Build the enum body. enumstr = comment + 'pub enum %s {\n%s\n}\n' % (enumName, ',\n'.join(entries)) if repr: enumstr = ('#[repr(%s)]\n' % repr) + enumstr if deriving: enumstr = ('#[derive(%s)]\n' % deriving) + enumstr curr = CGGeneric(enumstr) # Add some whitespace padding. 
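        # Editorial sketch: for a hypothetical call such as
        #     CGNonNamespacedEnum('ProtoID', ['A', 'B'], 0, deriving='PartialEq')
        # the `enumstr` assembled above comes out roughly as
        #     #[derive(PartialEq)]
        #     pub enum ProtoID {
        #         A = 0,
        #         B,
        #         #[allow(dead_code)] Last = 2
        #     }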
curr = CGWrapper(curr, pre='\n', post='\n')

        # Add the typedef
        # typedef = '\ntypedef %s::%s %s;\n\n' % (namespace, enumName, enumName)
        # curr = CGList([curr, CGGeneric(typedef)])

        # Save the result.
        self.node = curr

    def define(self):
        return self.node.define()


class CGDictionary(CGThing):
    def __init__(self, dictionary, descriptorProvider):
        self.dictionary = dictionary
        if all(CGDictionary(d, descriptorProvider).generatable for
               d in CGDictionary.getDictionaryDependencies(dictionary)):
            self.generatable = True
        else:
            self.generatable = False
            # Nothing else to do here
            return

        self.memberInfo = [
            (member,
             getJSToNativeConversionInfo(member.type,
                                         descriptorProvider,
                                         isMember="Dictionary",
                                         defaultValue=member.defaultValue,
                                         exceptionCode="return Err(());\n"))
            for member in dictionary.members]

    def define(self):
        if not self.generatable:
            return ""
        return self.struct() + "\n" + self.impl()

    def struct(self):
        d = self.dictionary
        if d.parent:
            inheritance = "    pub parent: %s::%s,\n" % (self.makeModuleName(d.parent),
                                                         self.makeClassName(d.parent))
        else:
            inheritance = ""
        memberDecls = ["    pub %s: %s," %
                       (self.makeMemberName(m[0].identifier.name), self.getMemberType(m))
                       for m in self.memberInfo]

        derive = ["JSTraceable"]
        mustRoot = ""
        if self.membersNeedTracing():
            mustRoot = "#[must_root]\n"
            derive += ["Default"]

        return (string.Template(
                "#[derive(${derive})]\n"
                "${mustRoot}" +
                "pub struct ${selfName} {\n" +
                "${inheritance}" +
                "\n".join(memberDecls) + "\n" +
                "}").substitute({"selfName": self.makeClassName(d),
                                 "inheritance": inheritance,
                                 "mustRoot": mustRoot,
                                 "derive": ', '.join(derive)}))

    def impl(self):
        d = self.dictionary
        if d.parent:
            initParent = ("{\n"
                          "    match r#try!(%s::%s::new(cx, val)) {\n"
                          "        ConversionResult::Success(v) => v,\n"
                          "        ConversionResult::Failure(error) => {\n"
                          "            throw_type_error(cx, &error);\n"
                          "            return Err(());\n"
                          "        }\n"
                          "    }\n"
                          "}" % (self.makeModuleName(d.parent),
                                 self.makeClassName(d.parent)))
        else:
            initParent = ""

        def memberInit(memberInfo, isInitial):
            member, _ = memberInfo
            name = self.makeMemberName(member.identifier.name)
            conversion = self.getMemberConversion(memberInfo, member.type)
            if isInitial:
                return CGGeneric("%s: %s,\n" % (name, conversion.define()))
            return CGGeneric("dictionary.%s = %s;\n" % (name, conversion.define()))

        def varInsert(varName, dictionaryName):
            insertion = ("rooted!(in(cx) let mut %s_js = UndefinedValue());\n"
                         "%s.to_jsval(cx, %s_js.handle_mut());\n"
                         "set_dictionary_property(cx, obj.handle(), \"%s\", %s_js.handle()).unwrap();"
                         % (varName, varName, varName, dictionaryName, varName))
            return CGGeneric(insertion)

        def memberInsert(memberInfo):
            member, _ = memberInfo
            name = self.makeMemberName(member.identifier.name)
            if member.optional and not member.defaultValue:
                insertion = CGIfWrapper("let Some(ref %s) = self.%s" % (name, name),
                                        varInsert(name, member.identifier.name))
            else:
                insertion = CGGeneric("let %s = &self.%s;\n%s" %
                                      (name, name, varInsert(name, member.identifier.name).define()))
            return CGGeneric("%s\n" % insertion.define())

        memberInserts = CGList([memberInsert(m) for m in self.memberInfo])

        selfName = self.makeClassName(d)
        if self.membersNeedTracing():
            actualType = "RootedTraceableBox<%s>" % selfName
            preInitial = "let mut dictionary = RootedTraceableBox::new(%s::default());\n" % selfName
            initParent = ("dictionary.parent = %s;\n" % initParent) if initParent else ""
            memberInits = CGList([memberInit(m, False) for m in self.memberInfo])
            postInitial = ""
        else:
            actualType = selfName
            preInitial = "let dictionary = %s {\n" % selfName
            postInitial = "};\n"
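            # Editorial note: in this non-tracing branch the generated Rust
            # builds the dictionary with a single struct literal, so the parent
            # and each member are emitted as `field: value,` initializers
            # between `preInitial` and `postInitial` above.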
initParent = ("parent: %s,\n" % initParent) if initParent else "" memberInits = CGList([memberInit(m, True) for m in self.memberInfo]) return string.Template( "impl ${selfName} {\n" "${empty}\n" " pub unsafe fn new(cx: *mut JSContext, val: HandleValue) \n" " -> Result<ConversionResult<${actualType}>, ()> {\n" " let object = if val.get().is_null_or_undefined() {\n" " ptr::null_mut()\n" " } else if val.get().is_object() {\n" " val.get().to_object()\n" " } else {\n" " return Ok(ConversionResult::Failure(\"Value is not an object.\".into()));\n" " };\n" " rooted!(in(cx) let object = object);\n" "${preInitial}" "${initParent}" "${initMembers}" "${postInitial}" " Ok(ConversionResult::Success(dictionary))\n" " }\n" "}\n" "\n" "impl FromJSValConvertible for ${actualType} {\n" " type Config = ();\n" " unsafe fn from_jsval(cx: *mut JSContext, value: HandleValue, _option: ())\n" " -> Result<ConversionResult<${actualType}>, ()> {\n" " ${selfName}::new(cx, value)\n" " }\n" "}\n" "\n" "impl ToJSValConvertible for ${selfName} {\n" " unsafe fn to_jsval(&self, cx: *mut JSContext, mut rval: MutableHandleValue) {\n" " rooted!(in(cx) let obj = JS_NewObject(cx, ptr::null()));\n" "${insertMembers}" " rval.set(ObjectOrNullValue(obj.get()))\n" " }\n" "}\n").substitute({ "selfName": selfName, "actualType": actualType, "empty": CGIndenter(CGGeneric(self.makeEmpty()), indentLevel=4).define(), "initParent": CGIndenter(CGGeneric(initParent), indentLevel=12).define(), "initMembers": CGIndenter(memberInits, indentLevel=12).define(), "insertMembers": CGIndenter(memberInserts, indentLevel=8).define(), "preInitial": CGIndenter(CGGeneric(preInitial), indentLevel=12).define(), "postInitial": CGIndenter(CGGeneric(postInitial), indentLevel=12).define(), }) def membersNeedTracing(self): for member, _ in self.memberInfo: if type_needs_tracing(member.type): return True return False @staticmethod def makeDictionaryName(dictionary): return dictionary.identifier.name def makeClassName(self, dictionary): return self.makeDictionaryName(dictionary) @staticmethod def makeModuleName(dictionary): return getModuleFromObject(dictionary) def getMemberType(self, memberInfo): member, info = memberInfo declType = info.declType if member.optional and not member.defaultValue: declType = CGWrapper(info.declType, pre="Option<", post=">") return declType.define() def getMemberConversion(self, memberInfo, memberType): def indent(s): return CGIndenter(CGGeneric(s), 12).define() member, info = memberInfo templateBody = info.template default = info.default replacements = {"val": "rval.handle()"} conversion = string.Template(templateBody).substitute(replacements) assert (member.defaultValue is None) == (default is None) if not member.optional: assert default is None default = ("throw_type_error(cx, \"Missing required member \\\"%s\\\".\");\n" "return Err(());") % member.identifier.name elif not default: default = "None" conversion = "Some(%s)" % conversion conversion = ( "{\n" " rooted!(in(cx) let mut rval = UndefinedValue());\n" " match r#try!(get_dictionary_property(cx, object.handle(), \"%s\", rval.handle_mut())) {\n" " true => {\n" "%s\n" " },\n" " false => {\n" "%s\n" " },\n" " }\n" "}") % (member.identifier.name, indent(conversion), indent(default)) return CGGeneric(conversion) def makeEmpty(self): if self.hasRequiredFields(self.dictionary): return "" parentTemplate = "parent: %s::%s::empty(),\n" fieldTemplate = "%s: %s,\n" functionTemplate = ( "pub fn empty() -> Self {\n" " Self {\n" "%s" " }\n" "}" ) if self.membersNeedTracing(): parentTemplate = 
"dictionary.parent = %s::%s::empty();\n" fieldTemplate = "dictionary.%s = %s;\n" functionTemplate = ( "pub fn empty() -> RootedTraceableBox<Self> {\n" " let mut dictionary = RootedTraceableBox::new(Self::default());\n" "%s" " dictionary\n" "}" ) s = "" if self.dictionary.parent: s += parentTemplate % (self.makeModuleName(self.dictionary.parent), self.makeClassName(self.dictionary.parent)) for member, info in self.memberInfo: if not member.optional: return "" default = info.default if not default: default = "None" s += fieldTemplate % (self.makeMemberName(member.identifier.name), default) return functionTemplate % CGIndenter(CGGeneric(s), 12).define() def hasRequiredFields(self, dictionary): if dictionary.parent: if self.hasRequiredFields(dictionary.parent): return True for member in dictionary.members: if not member.optional: return True return False @staticmethod def makeMemberName(name): # Can't use Rust keywords as member names. if name in RUST_KEYWORDS: return name + "_" return name @staticmethod def getDictionaryDependencies(dictionary): deps = set() if dictionary.parent: deps.add(dictionary.parent) for member in dictionary.members: if member.type.isDictionary(): deps.add(member.type.unroll().inner) return deps class CGRegisterProxyHandlersMethod(CGAbstractMethod): def __init__(self, descriptors): docs = "Create the global vtables used by the generated DOM bindings to implement JS proxies." CGAbstractMethod.__init__(self, None, 'RegisterProxyHandlers', 'void', [], unsafe=True, pub=True, docs=docs) self.descriptors = descriptors def definition_body(self): return CGList([ CGGeneric("PROXY_HANDLERS[Proxies::%s as usize] = Bindings::%s::DefineProxyHandler();" % (desc.name, '::'.join([desc.name + 'Binding'] * 2))) for desc in self.descriptors ], "\n") class CGRegisterProxyHandlers(CGThing): def __init__(self, config): descriptors = config.getDescriptors(proxy=True) length = len(descriptors) self.root = CGList([ CGGeneric("pub static mut PROXY_HANDLERS: [*const libc::c_void; %d] = [0 as *const libc::c_void; %d];" % (length, length)), CGRegisterProxyHandlersMethod(descriptors), ], "\n") def define(self): return self.root.define() class CGBindingRoot(CGThing): """ DomRoot codegen class for binding generation. Instantiate the class, and call declare or define to generate header or cpp code (respectively). """ def __init__(self, config, prefix, webIDLFile): descriptors = config.getDescriptors(webIDLFile=webIDLFile, hasInterfaceObject=True) # We also want descriptors that have an interface prototype object # (isCallback=False), but we don't want to include a second copy # of descriptors that we also matched in the previous line # (hence hasInterfaceObject=False). descriptors.extend(config.getDescriptors(webIDLFile=webIDLFile, hasInterfaceObject=False, isCallback=False, register=True)) dictionaries = config.getDictionaries(webIDLFile=webIDLFile) mainCallbacks = config.getCallbacks(webIDLFile=webIDLFile) callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile, isCallback=True) enums = config.getEnums(webIDLFile) typedefs = config.getTypedefs(webIDLFile) if not (descriptors or dictionaries or mainCallbacks or callbackDescriptors or enums): self.root = None return # Do codegen for all the enums. 
cgthings = [CGEnum(e) for e in enums] # Do codegen for all the typedefs for t in typedefs: typeName = getRetvalDeclarationForType(t.innerType, config.getDescriptorProvider()) substs = { "name": t.identifier.name, "type": typeName.define(), } if t.innerType.isUnion() and not t.innerType.nullable(): # Allow using the typedef's name for accessing variants. template = "pub use self::%(type)s as %(name)s;" else: template = "pub type %(name)s = %(type)s;" cgthings.append(CGGeneric(template % substs)) # Do codegen for all the dictionaries. cgthings.extend([CGDictionary(d, config.getDescriptorProvider()) for d in dictionaries]) # Do codegen for all the callbacks. cgthings.extend(CGList([CGCallbackFunction(c, config.getDescriptorProvider()), CGCallbackFunctionImpl(c)], "\n") for c in mainCallbacks) # Do codegen for all the descriptors cgthings.extend([CGDescriptor(x, config, len(descriptors) == 1) for x in descriptors]) # Do codegen for all the callback interfaces. cgthings.extend(CGList([CGCallbackInterface(x), CGCallbackFunctionImpl(x.interface)], "\n") for x in callbackDescriptors) # And make sure we have the right number of newlines at the end curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n") # Add imports curr = generate_imports(config, curr, callbackDescriptors, mainCallbacks, dictionaries, enums, typedefs) # Add the auto-generated comment. curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT) # Store the final result. self.root = curr def define(self): if not self.root: return None return stripTrailingWhitespace(self.root.define()) def type_needs_tracing(t): assert isinstance(t, IDLObject), (t, type(t)) if t.isType(): if isinstance(t, IDLWrapperType): return type_needs_tracing(t.inner) if t.nullable(): return type_needs_tracing(t.inner) if t.isAny(): return True if t.isObject(): return True if t.isSequence(): return type_needs_tracing(t.inner) if t.isUnion(): return any(type_needs_tracing(member) for member in t.flatMemberTypes) if is_typed_array(t): return True return False if t.isDictionary(): if t.parent and type_needs_tracing(t.parent): return True if any(type_needs_tracing(member.type) for member in t.members): return True return False if t.isInterface(): return False if t.isEnum(): return False assert False, (t, type(t)) def is_typed_array(t): assert isinstance(t, IDLObject), (t, type(t)) return t.isTypedArray() or t.isArrayBuffer() or t.isArrayBufferView() or t.isSharedArrayBuffer() def type_needs_auto_root(t): """ Certain IDL types, such as `sequence<any>` or `sequence<object>` need to be traced and wrapped via (Custom)AutoRooter """ assert isinstance(t, IDLObject), (t, type(t)) if t.isType(): if t.isSequence() and (t.inner.isAny() or t.inner.isObject()): return True # SpiderMonkey interfaces, we currently don't support any other except typed arrays if is_typed_array(t): return True return False def argument_type(descriptorProvider, ty, optional=False, defaultValue=None, variadic=False): info = getJSToNativeConversionInfo( ty, descriptorProvider, isArgument=True, isAutoRooted=type_needs_auto_root(ty)) declType = info.declType if variadic: if ty.isGeckoInterface(): declType = CGWrapper(declType, pre="&[", post="]") else: declType = CGWrapper(declType, pre="Vec<", post=">") elif optional and not defaultValue: declType = CGWrapper(declType, pre="Option<", post=">") if ty.isDictionary() and not type_needs_tracing(ty): declType = CGWrapper(declType, pre="&") if type_needs_auto_root(ty): declType = CGTemplatedType("CustomAutoRooterGuard", declType) return declType.define() def 
method_arguments(descriptorProvider, returnType, arguments, passJSBits=True, trailing=None): if needCx(returnType, arguments, passJSBits): yield "cx", "*mut JSContext" for argument in arguments: ty = argument_type(descriptorProvider, argument.type, argument.optional, argument.defaultValue, argument.variadic) yield CGDictionary.makeMemberName(argument.identifier.name), ty if trailing: yield trailing def return_type(descriptorProvider, rettype, infallible): result = getRetvalDeclarationForType(rettype, descriptorProvider) if not infallible: result = CGWrapper(result, pre="Fallible<", post=">") return result.define() class CGNativeMember(ClassMethod): def __init__(self, descriptorProvider, member, name, signature, extendedAttrs, breakAfter=True, passJSBitsAsNeeded=True, visibility="public", unsafe=False): """ If passJSBitsAsNeeded is false, we don't automatically pass in a JSContext* or a JSObject* based on the return and argument types. """ self.descriptorProvider = descriptorProvider self.member = member self.extendedAttrs = extendedAttrs self.passJSBitsAsNeeded = passJSBitsAsNeeded breakAfterSelf = "\n" if breakAfter else "" ClassMethod.__init__(self, name, self.getReturnType(signature[0]), self.getArgs(signature[0], signature[1]), static=member.isStatic(), # Mark our getters, which are attrs that # have a non-void return type, as const. const=(not member.isStatic() and member.isAttr() and not signature[0].isVoid()), breakAfterSelf=breakAfterSelf, unsafe=unsafe, visibility=visibility) def getReturnType(self, type): infallible = 'infallible' in self.extendedAttrs typeDecl = return_type(self.descriptorProvider, type, infallible) return typeDecl def getArgs(self, returnType, argList): return [Argument(arg[1], arg[0]) for arg in method_arguments(self.descriptorProvider, returnType, argList, self.passJSBitsAsNeeded)] class CGCallback(CGClass): def __init__(self, idlObject, descriptorProvider, baseName, methods): self.baseName = baseName self._deps = idlObject.getDeps() name = idlObject.identifier.name # For our public methods that needThisHandling we want most of the # same args and the same return type as what CallbackMember # generates. So we want to take advantage of all its # CGNativeMember infrastructure, but that infrastructure can't deal # with templates and most especially template arguments. So just # cheat and have CallbackMember compute all those things for us. realMethods = [] for method in methods: if not method.needThisHandling: realMethods.append(method) else: realMethods.extend(self.getMethodImpls(method)) CGClass.__init__(self, name, bases=[ClassBase(baseName)], constructors=self.getConstructors(), methods=realMethods, decorators="#[derive(JSTraceable, PartialEq)]\n#[allow_unrooted_interior]") def getConstructors(self): return [ClassConstructor( [Argument("*mut JSContext", "aCx"), Argument("*mut JSObject", "aCallback")], bodyInHeader=True, visibility="pub", explicit=False, baseConstructors=[ "%s::new()" % self.baseName ])] def getMethodImpls(self, method): assert method.needThisHandling args = list(method.args) # Strip out the JSContext*/JSObject* args # that got added. assert args[0].name == "cx" and args[0].argType == "*mut JSContext" assert args[1].name == "aThisObj" and args[1].argType == "HandleObject" args = args[2:] # Record the names of all the arguments, so we can use them when we call # the private method. 
argnames = [arg.name for arg in args] argnamesWithThis = ["s.get_context()", "thisObjJS.handle()"] + argnames argnamesWithoutThis = ["s.get_context()", "thisObjJS.handle()"] + argnames # Now that we've recorded the argnames for our call to our private # method, insert our optional argument for deciding whether the # CallSetup should re-throw exceptions on aRv. args.append(Argument("ExceptionHandling", "aExceptionHandling", "ReportExceptions")) # And now insert our template argument. argsWithoutThis = list(args) args.insert(0, Argument("&T", "thisObj")) # And the self argument method.args.insert(0, Argument(None, "&self")) args.insert(0, Argument(None, "&self")) argsWithoutThis.insert(0, Argument(None, "&self")) setupCall = "let s = CallSetup::new(self, aExceptionHandling);\n" bodyWithThis = string.Template( setupCall + "rooted!(in(s.get_context()) let mut thisObjJS = ptr::null_mut::<JSObject>());\n" "wrap_call_this_object(s.get_context(), thisObj, thisObjJS.handle_mut());\n" "if thisObjJS.is_null() {\n" " return Err(JSFailed);\n" "}\n" "unsafe { ${methodName}(${callArgs}) }").substitute({ "callArgs": ", ".join(argnamesWithThis), "methodName": 'self.' + method.name, }) bodyWithoutThis = string.Template( setupCall + "rooted!(in(s.get_context()) let thisObjJS = ptr::null_mut::<JSObject>());\n" "unsafe { ${methodName}(${callArgs}) }").substitute({ "callArgs": ", ".join(argnamesWithoutThis), "methodName": 'self.' + method.name, }) return [ClassMethod(method.name + '_', method.returnType, args, bodyInHeader=True, templateArgs=["T: DomObject"], body=bodyWithThis, visibility='pub'), ClassMethod(method.name + '__', method.returnType, argsWithoutThis, bodyInHeader=True, body=bodyWithoutThis, visibility='pub'), method] def deps(self): return self._deps # We're always fallible def callbackGetterName(attr, descriptor): return "Get" + MakeNativeName( descriptor.binaryNameFor(attr.identifier.name)) def callbackSetterName(attr, descriptor): return "Set" + MakeNativeName( descriptor.binaryNameFor(attr.identifier.name)) class CGCallbackFunction(CGCallback): def __init__(self, callback, descriptorProvider): CGCallback.__init__(self, callback, descriptorProvider, "CallbackFunction", methods=[CallCallback(callback, descriptorProvider)]) def getConstructors(self): return CGCallback.getConstructors(self) class CGCallbackFunctionImpl(CGGeneric): def __init__(self, callback): impl = string.Template("""\ impl CallbackContainer for ${type} { unsafe fn new(cx: *mut JSContext, callback: *mut JSObject) -> Rc<${type}> { ${type}::new(cx, callback) } fn callback_holder(&self) -> &CallbackObject { self.parent.callback_holder() } } impl ToJSValConvertible for ${type} { unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) { self.callback().to_jsval(cx, rval); } }\ """).substitute({"type": callback.identifier.name}) CGGeneric.__init__(self, impl) class CGCallbackInterface(CGCallback): def __init__(self, descriptor): iface = descriptor.interface attrs = [m for m in iface.members if m.isAttr() and not m.isStatic()] assert not attrs methods = [m for m in iface.members if m.isMethod() and not m.isStatic() and not m.isIdentifierLess()] methods = [CallbackOperation(m, sig, descriptor) for m in methods for sig in m.signatures()] assert not iface.isJSImplemented() or not iface.ctor() CGCallback.__init__(self, iface, descriptor, "CallbackInterface", methods) class FakeMember(): def __init__(self): self.treatNullAs = "Default" def isStatic(self): return False def isAttr(self): return False def isMethod(self): return 
False def getExtendedAttribute(self, name): return None class CallbackMember(CGNativeMember): def __init__(self, sig, name, descriptorProvider, needThisHandling): """ needThisHandling is True if we need to be able to accept a specified thisObj, False otherwise. """ self.retvalType = sig[0] self.originalSig = sig args = sig[1] self.argCount = len(args) if self.argCount > 0: # Check for variadic arguments lastArg = args[self.argCount - 1] if lastArg.variadic: self.argCountStr = ( "(%d - 1) + %s.len()" % (self.argCount, lastArg.identifier.name)) else: self.argCountStr = "%d" % self.argCount self.needThisHandling = needThisHandling # If needThisHandling, we generate ourselves as private and the caller # will handle generating public versions that handle the "this" stuff. visibility = "priv" if needThisHandling else "pub" # We don't care, for callback codegen, whether our original member was # a method or attribute or whatnot. Just always pass FakeMember() # here. CGNativeMember.__init__(self, descriptorProvider, FakeMember(), name, (self.retvalType, args), extendedAttrs={}, passJSBitsAsNeeded=False, unsafe=needThisHandling, visibility=visibility) # We have to do all the generation of our body now, because # the caller relies on us throwing if we can't manage it. self.exceptionCode = "return Err(JSFailed);" self.body = self.getImpl() def getImpl(self): replacements = { "declRval": self.getRvalDecl(), "returnResult": self.getResultConversion(), "convertArgs": self.getArgConversions(), "doCall": self.getCall(), "setupCall": self.getCallSetup(), } if self.argCount > 0: replacements["argCount"] = self.argCountStr replacements["argvDecl"] = string.Template( "rooted_vec!(let mut argv);\n" "argv.extend((0..${argCount}).map(|_| Heap::default()));\n" ).substitute(replacements) else: # Avoid weird 0-sized arrays replacements["argvDecl"] = "" # Newlines and semicolons are in the values pre = string.Template( "${setupCall}" "${declRval}" "${argvDecl}").substitute(replacements) body = string.Template( "${convertArgs}" "${doCall}" "${returnResult}").substitute(replacements) return pre + "\n" + body def getResultConversion(self): replacements = { "val": "rval.handle()", } info = getJSToNativeConversionInfo( self.retvalType, self.descriptorProvider, exceptionCode=self.exceptionCode, isCallbackReturnValue="Callback", # XXXbz we should try to do better here sourceDescription="return value") template = info.template declType = info.declType convertType = instantiateJSToNativeConversionTemplate( template, replacements, declType, "rvalDecl") if self.retvalType is None or self.retvalType.isVoid(): retval = "()" elif self.retvalType.isAny(): retval = "rvalDecl.get()" else: retval = "rvalDecl" return "%s\nOk(%s)\n" % (convertType.define(), retval) def getArgConversions(self): # Just reget the arglist from self.originalSig, because our superclasses # just have way too many members they like to clobber, so I can't find a # safe member name to store it in. argConversions = [self.getArgConversion(i, arg) for (i, arg) in enumerate(self.originalSig[1])] # Do them back to front, so our argc modifications will work # correctly, because we examine trailing arguments first. argConversions.reverse() argConversions = [CGGeneric(c) for c in argConversions] if self.argCount > 0: argConversions.insert(0, self.getArgcDecl()) # And slap them together.
return CGList(argConversions, "\n\n").define() + "\n\n" def getArgConversion(self, i, arg): argval = arg.identifier.name if arg.variadic: argval = argval + "[idx].get()" jsvalIndex = "%d + idx" % i else: jsvalIndex = "%d" % i if arg.optional and not arg.defaultValue: argval += ".clone().unwrap()" conversion = wrapForType( "argv_root.handle_mut()", result=argval, successCode=("{\n" + "let arg = &mut argv[%s];\n" + "*arg = Heap::default();\n" + "arg.set(argv_root.get());\n" + "}") % jsvalIndex, pre="rooted!(in(cx) let mut argv_root = UndefinedValue());") if arg.variadic: conversion = string.Template( "for idx in 0..${arg}.len() {\n" + CGIndenter(CGGeneric(conversion)).define() + "\n" "}" ).substitute({"arg": arg.identifier.name}) elif arg.optional and not arg.defaultValue: conversion = ( CGIfWrapper("%s.is_some()" % arg.identifier.name, CGGeneric(conversion)).define() + " else if argc == %d {\n" " // This is our current trailing argument; reduce argc\n" " argc -= 1;\n" "} else {\n" " argv[%d] = Heap::default();\n" "}" % (i + 1, i)) return conversion def getArgs(self, returnType, argList): args = CGNativeMember.getArgs(self, returnType, argList) if not self.needThisHandling: # Since we don't need this handling, we're the actual method that # will be called, so we need an aRethrowExceptions argument. args.append(Argument("ExceptionHandling", "aExceptionHandling", "ReportExceptions")) return args # We want to allow the caller to pass in a "this" object, as # well as a JSContext. return [Argument("*mut JSContext", "cx"), Argument("HandleObject", "aThisObj")] + args def getCallSetup(self): if self.needThisHandling: # It's been done for us already return "" return ( "CallSetup s(CallbackPreserveColor(), aRv, aExceptionHandling);\n" "JSContext* cx = s.get_context();\n" "if (!cx) {\n" " return Err(JSFailed);\n" "}\n") def getArgcDecl(self): if self.argCount <= 1: return CGGeneric("let argc = %s;" % self.argCountStr) return CGGeneric("let mut argc = %s;" % self.argCountStr) @staticmethod def ensureASCIIName(idlObject): type = "attribute" if idlObject.isAttr() else "operation" if re.match("[^\x20-\x7E]", idlObject.identifier.name): raise SyntaxError('Callback %s name "%s" contains non-ASCII ' "characters. We can't handle that. %s" % (type, idlObject.identifier.name, idlObject.location)) if re.match('"', idlObject.identifier.name): raise SyntaxError("Callback %s name '%s' contains " "double-quote character. We can't handle " "that. 
%s" % (type, idlObject.identifier.name, idlObject.location)) class CallbackMethod(CallbackMember): def __init__(self, sig, name, descriptorProvider, needThisHandling): CallbackMember.__init__(self, sig, name, descriptorProvider, needThisHandling) def getRvalDecl(self): return "rooted!(in(cx) let mut rval = UndefinedValue());\n" def getCall(self): replacements = { "thisObj": self.getThisObj(), "getCallable": self.getCallableDecl(), "callGuard": self.getCallGuard(), } if self.argCount > 0: replacements["argv"] = "argv.as_ptr() as *const JSVal" replacements["argc"] = "argc" else: replacements["argv"] = "ptr::null_mut()" replacements["argc"] = "0" return string.Template( "${getCallable}" "rooted!(in(cx) let rootedThis = ${thisObj});\n" "let ok = ${callGuard}JS_CallFunctionValue(\n" " cx, rootedThis.handle(), callable.handle(),\n" " &HandleValueArray {\n" " length_: ${argc} as ::libc::size_t,\n" " elements_: ${argv}\n" " }, rval.handle_mut());\n" "maybe_resume_unwind();\n" "if !ok {\n" " return Err(JSFailed);\n" "}\n").substitute(replacements) class CallCallback(CallbackMethod): def __init__(self, callback, descriptorProvider): self.callback = callback CallbackMethod.__init__(self, callback.signatures()[0], "Call", descriptorProvider, needThisHandling=True) def getThisObj(self): return "aThisObj.get()" def getCallableDecl(self): return "rooted!(in(cx) let callable = ObjectValue(self.callback()));\n" def getCallGuard(self): if self.callback._treatNonObjectAsNull: return "!IsCallable(self.callback()) || " return "" class CallbackOperationBase(CallbackMethod): """ Common class for implementing various callback operations. """ def __init__(self, signature, jsName, nativeName, descriptor, singleOperation): self.singleOperation = singleOperation self.methodName = jsName CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation) def getThisObj(self): if not self.singleOperation: return "self.callback()" # This relies on getCallableDecl declaring a boolean # isCallable in the case when we're a single-operation # interface. return "if isCallable { aThisObj.get() } else { self.callback() }" def getCallableDecl(self): replacements = { "methodName": self.methodName } getCallableFromProp = string.Template( 'r#try!(self.parent.get_callable_property(cx, "${methodName}"))' ).substitute(replacements) if not self.singleOperation: return 'rooted!(in(cx) let callable =\n' + getCallableFromProp + ');\n' return ( 'let isCallable = IsCallable(self.callback());\n' 'rooted!(in(cx) let callable =\n' + CGIndenter( CGIfElseWrapper('isCallable', CGGeneric('ObjectValue(self.callback())'), CGGeneric(getCallableFromProp))).define() + ');\n') def getCallGuard(self): return "" class CallbackOperation(CallbackOperationBase): """ Codegen actual WebIDL operations on callback interfaces. """ def __init__(self, method, signature, descriptor): self.ensureASCIIName(method) jsName = method.identifier.name CallbackOperationBase.__init__(self, signature, jsName, MakeNativeName(descriptor.binaryNameFor(jsName)), descriptor, descriptor.interface.isSingleOperationInterface()) class CGIterableMethodGenerator(CGGeneric): """ Creates methods for iterable interfaces. Unwrapping/wrapping will be taken care of by the usual method generation machinery in CGMethodCall/CGPerSignatureCall. Functionality is filled in here instead of using CGCallGenerator. 
""" def __init__(self, descriptor, iterable, methodName): if methodName == "forEach": CGGeneric.__init__(self, fill( """ if !IsCallable(arg0) { throw_type_error(cx, "Argument 1 of ${ifaceName}.forEach is not callable."); return false; } rooted!(in(cx) let arg0 = ObjectValue(arg0)); rooted!(in(cx) let mut call_arg1 = UndefinedValue()); rooted!(in(cx) let mut call_arg2 = UndefinedValue()); let mut call_args = vec![UndefinedValue(), UndefinedValue(), ObjectValue(*_obj)]; rooted!(in(cx) let mut ignoredReturnVal = UndefinedValue()); for i in 0..(*this).get_iterable_length() { (*this).get_value_at_index(i).to_jsval(cx, call_arg1.handle_mut()); (*this).get_key_at_index(i).to_jsval(cx, call_arg2.handle_mut()); call_args[0] = call_arg1.handle().get(); call_args[1] = call_arg2.handle().get(); let call_args = HandleValueArray { length_: 3, elements_: call_args.as_ptr() }; if !Call(cx, arg1, arg0.handle(), &call_args, ignoredReturnVal.handle_mut()) { return false; } } let result = (); """, ifaceName=descriptor.interface.identifier.name)) return CGGeneric.__init__(self, fill( """ let result = ${iterClass}::new(&*this, IteratorType::${itrMethod}, super::${ifaceName}IteratorBinding::Wrap); """, iterClass=iteratorNativeType(descriptor, True), ifaceName=descriptor.interface.identifier.name, itrMethod=methodName.title())) def camel_to_upper_snake(s): return "_".join(m.group(0).upper() for m in re.finditer("[A-Z][a-z]*", s)) def process_arg(expr, arg): if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback(): if arg.type.nullable() or arg.type.isSequence() or arg.optional: expr += ".r()" else: expr = "&" + expr elif isinstance(arg.type, IDLPromiseType): expr = "&" + expr return expr class GlobalGenRoots(): """ Roots for global codegen. To generate code, call the method associated with the target, and then call the appropriate define/declare method. """ @staticmethod def InterfaceObjectMap(config): mods = [ "crate::dom::bindings::codegen", "js::jsapi::JSContext", "js::rust::HandleObject", "phf", ] imports = CGList([CGGeneric("use %s;" % mod) for mod in mods], "\n") global_descriptors = config.getDescriptors(isGlobal=True) flags = [("EMPTY", 0)] flags.extend( (camel_to_upper_snake(d.name), 2 ** idx) for (idx, d) in enumerate(global_descriptors) ) global_flags = CGWrapper(CGIndenter(CGList([ CGGeneric("const %s = %#x;" % args) for args in flags ], "\n")), pre="pub struct Globals: u8 {\n", post="\n}") globals_ = CGWrapper(CGIndenter(global_flags), pre="bitflags! {\n", post="\n}") phf = CGGeneric("include!(concat!(env!(\"OUT_DIR\"), \"/InterfaceObjectMapPhf.rs\"));") return CGList([ CGGeneric(AUTOGENERATED_WARNING_COMMENT), CGList([imports, globals_, phf], "\n\n") ]) @staticmethod def InterfaceObjectMapData(config): pairs = [] for d in config.getDescriptors(hasInterfaceObject=True, isInline=False): binding = toBindingNamespace(d.name) pairs.append((d.name, binding, binding)) for ctor in d.interface.namedConstructors: pairs.append((ctor.identifier.name, binding, binding)) pairs.sort(key=operator.itemgetter(0)) mappings = [ CGGeneric('"%s": "codegen::Bindings::%s::%s::DefineDOMInterface"' % pair) for pair in pairs ] return CGWrapper( CGList(mappings, ",\n"), pre="{\n", post="\n}\n") @staticmethod def PrototypeList(config): # Prototype ID enum. 
interfaces = config.getDescriptors(isCallback=False, isNamespace=False) protos = [d.name for d in interfaces] constructors = sorted([MakeNativeName(d.name) for d in config.getDescriptors(hasInterfaceObject=True) if d.shouldHaveGetConstructorObjectMethod()]) proxies = [d.name for d in config.getDescriptors(proxy=True)] return CGList([ CGGeneric(AUTOGENERATED_WARNING_COMMENT), CGGeneric("pub const PROTO_OR_IFACE_LENGTH: usize = %d;\n" % (len(protos) + len(constructors))), CGGeneric("pub const MAX_PROTO_CHAIN_LENGTH: usize = %d;\n\n" % config.maxProtoChainLength), CGNonNamespacedEnum('ID', protos, 0, deriving="PartialEq, Copy, Clone", repr="u16"), CGNonNamespacedEnum('Constructor', constructors, len(protos), deriving="PartialEq, Copy, Clone", repr="u16"), CGWrapper(CGIndenter(CGList([CGGeneric('"' + name + '"') for name in protos], ",\n"), indentLevel=4), pre="static INTERFACES: [&'static str; %d] = [\n" % len(protos), post="\n];\n\n"), CGGeneric("pub fn proto_id_to_name(proto_id: u16) -> &'static str {\n" " debug_assert!(proto_id < ID::Last as u16);\n" " INTERFACES[proto_id as usize]\n" "}\n\n"), CGNonNamespacedEnum('Proxies', proxies, 0, deriving="PartialEq, Copy, Clone"), ]) @staticmethod def RegisterBindings(config): # TODO - Generate the methods we want code = CGList([ CGRegisterProxyHandlers(config), ], "\n") return CGImports(code, descriptors=[], callbacks=[], dictionaries=[], enums=[], typedefs=[], imports=[ 'crate::dom::bindings::codegen::Bindings', 'crate::dom::bindings::codegen::PrototypeList::Proxies', 'libc', ], config=config, ignored_warnings=[]) @staticmethod def InterfaceTypes(config): descriptors = sorted([MakeNativeName(d.name) for d in config.getDescriptors(register=True, isCallback=False, isIteratorInterface=False)]) curr = CGList([CGGeneric("pub use crate::dom::%s::%s;\n" % (name.lower(), MakeNativeName(name))) for name in descriptors]) curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT) return curr @staticmethod def Bindings(config): def leafModule(d): return getModuleFromObject(d).split('::')[-1] descriptors = config.getDescriptors(register=True, isIteratorInterface=False) descriptors = (set(toBindingNamespace(d.name) for d in descriptors) | set(leafModule(d) for d in config.callbacks) | set(leafModule(d) for d in config.getDictionaries())) curr = CGList([CGGeneric("pub mod %s;\n" % name) for name in sorted(descriptors)]) curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT) return curr @staticmethod def InheritTypes(config): descriptors = config.getDescriptors(register=True, isCallback=False) imports = [CGGeneric("use crate::dom::types::*;\n"), CGGeneric("use crate::dom::bindings::conversions::{DerivedFrom, get_dom_class};\n"), CGGeneric("use crate::dom::bindings::inheritance::Castable;\n"), CGGeneric("use crate::dom::bindings::root::{Dom, DomRoot, LayoutDom};\n"), CGGeneric("use crate::dom::bindings::trace::JSTraceable;\n"), CGGeneric("use crate::dom::bindings::reflector::DomObject;\n"), CGGeneric("use js::jsapi::JSTracer;\n\n"), CGGeneric("use std::mem;\n\n")] allprotos = [] topTypes = [] hierarchy = defaultdict(list) for descriptor in descriptors: name = descriptor.name chain = descriptor.prototypeChain upcast = descriptor.hasDescendants() downcast = len(chain) != 1 if upcast and not downcast: topTypes.append(name) if not upcast: # No other interface will implement DeriveFrom<Foo> for this Foo, so avoid # implementing it for itself. chain = chain[:-1] # Implement `DerivedFrom<Bar>` for `Foo`, for all `Bar` that `Foo` inherits from. 
if chain: allprotos.append(CGGeneric("impl Castable for %s {}\n" % name)) for baseName in chain: allprotos.append(CGGeneric("impl DerivedFrom<%s> for %s {}\n" % (baseName, name))) if chain: allprotos.append(CGGeneric("\n")) if downcast: hierarchy[descriptor.interface.parent.identifier.name].append(name) typeIdCode = [] topTypeVariants = [ ("ID used by abstract interfaces.", "pub abstract_: ()"), ("ID used by interfaces that are not castable.", "pub alone: ()"), ] topTypeVariants += [ ("ID used by interfaces that derive from %s." % typeName, "pub %s: %sTypeId" % (typeName.lower(), typeName)) for typeName in topTypes ] topTypeVariantsAsStrings = [CGGeneric("/// %s\n%s," % variant) for variant in topTypeVariants] typeIdCode.append(CGWrapper(CGIndenter(CGList(topTypeVariantsAsStrings, "\n"), 4), pre="#[derive(Copy)]\npub union TopTypeId {\n", post="\n}\n\n")) typeIdCode.append(CGGeneric("""\ impl Clone for TopTypeId { fn clone(&self) -> Self { *self } } """)) def type_id_variant(name): # If `name` is present in the hierarchy keys', that means some other interfaces # derive from it and this enum variant should have an argument with its own # TypeId enum. return "%s(%sTypeId)" % (name, name) if name in hierarchy else name for base, derived in hierarchy.iteritems(): variants = [] if config.getDescriptor(base).concrete: variants.append(CGGeneric(base)) variants += [CGGeneric(type_id_variant(derivedName)) for derivedName in derived] derives = "Clone, Copy, Debug, PartialEq" typeIdCode.append(CGWrapper(CGIndenter(CGList(variants, ",\n"), 4), pre="#[derive(%s)]\npub enum %sTypeId {\n" % (derives, base), post="\n}\n\n")) if base in topTypes: typeIdCode.append(CGGeneric("""\ impl %(base)s { pub fn type_id(&self) -> &'static %(base)sTypeId { unsafe { &get_dom_class(self.reflector().get_jsobject().get()) .unwrap() .type_id .%(field)s } } } """ % {'base': base, 'field': base.lower()})) curr = CGList(imports + typeIdCode + allprotos) curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT) return curr @staticmethod def UnionTypes(config): curr = UnionTypes(config.getDescriptors(), config.getDictionaries(), config.getCallbacks(), config.typedefs, config) # Add the auto-generated comment. curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT) # Done. return curr @staticmethod def SupportedDomApis(config): descriptors = config.getDescriptors(isExposedConditionally=False) base_path = os.path.join('dom', 'bindings', 'codegen') with open(os.path.join(base_path, 'apis.html.template')) as f: base_template = f.read() with open(os.path.join(base_path, 'api.html.template')) as f: api_template = f.read() with open(os.path.join(base_path, 'property.html.template')) as f: property_template = f.read() with open(os.path.join(base_path, 'interface.html.template')) as f: interface_template = f.read() apis = [] interfaces = [] for descriptor in descriptors: props = [] for m in descriptor.interface.members: if PropertyDefiner.getStringAttr(m, 'Pref') or \ PropertyDefiner.getStringAttr(m, 'Func') or \ (m.isMethod() and m.isIdentifierLess()): continue display = m.identifier.name + ('()' if m.isMethod() else '') props += [property_template.replace('${name}', display)] name = descriptor.interface.identifier.name apis += [(api_template.replace('${interface}', name) .replace('${properties}', '\n'.join(props)))] interfaces += [interface_template.replace('${interface}', name)] return CGGeneric((base_template.replace('${apis}', '\n'.join(apis)) .replace('${interfaces}', '\n'.join(interfaces))))
mpl-2.0
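The generator classes in the row above assemble Rust source almost entirely through string.Template with named placeholders (see CGDictionary.define and CallbackMember.getImpl). A minimal standalone sketch of that emission pattern follows; the template text and the emit_getter helper are illustrative only, not part of the generator.

import string

# Illustrative only: a tiny emitter in the style of the codegen above,
# building a Rust snippet by substituting named ${...} placeholders.
RUST_GETTER = string.Template(
    "impl ${selfName} {\n"
    "    pub fn ${name}(&self) -> ${ret} {\n"
    "        self.${field}.clone()\n"
    "    }\n"
    "}\n")

def emit_getter(self_name, name, ret, field):
    # substitute() raises KeyError for a missing placeholder, so a typo in
    # the mapping fails loudly instead of silently emitting broken Rust.
    return RUST_GETTER.substitute({"selfName": self_name, "name": name,
                                   "ret": ret, "field": field})

print(emit_getter("TestDict", "flag", "bool", "flag"))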
kg-bot/SupyBot
plugins/I18nPlaceholder/plugin.py
3
2369
### # Copyright (c) 2012, Valentin Lorentz # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions, and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions, and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the author of this software nor the name of # contributors to this software may be used to endorse or promote products # derived from this software without specific prior written consent. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### import sys import supybot.utils as utils from supybot.commands import * import supybot.plugins as plugins import supybot.ircutils as ircutils import supybot.callbacks as callbacks class Placeholder: internationalizeDocstring = lambda self,y:y def PluginInternationalization(self, plugin_name): return lambda x:x class I18nPlaceholder(callbacks.Plugin): """Add the help for "@plugin help I18nPlaceholder" here This should describe *how* to use this plugin.""" def __init__(self, *args, **kwargs): super(I18nPlaceholder, self).__init__(*args, **kwargs) if 'supybot.i18n' not in sys.modules: sys.modules['supybot.i18n'] = Placeholder() Class = I18nPlaceholder # vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
gpl-3.0
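The I18nPlaceholder plugin above works by planting a stub object in sys.modules before supybot.i18n is ever imported, so the later import resolves to the placeholder. A minimal sketch of that technique, using a hypothetical top-level module name ('optional_dep'):

import sys

# 'optional_dep' is a hypothetical name; the point is the sys.modules trick.
class _Stub(object):
    def __getattr__(self, name):
        # Every attribute behaves as an identity function, mirroring how the
        # placeholder above hands back no-op translation helpers.
        return lambda *args, **kwargs: args[0] if args else None

if 'optional_dep' not in sys.modules:
    sys.modules['optional_dep'] = _Stub()

import optional_dep  # found in sys.modules, no file on disk needed
assert optional_dep.translate("hello") == "hello"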
smartjump/backend
src/get_bus_data.py
1
1725
#!/usr/bin/env python3 #****************************************************************************# # Project: Smart Jump # # Author: Alfonso Bilbao Velez <[email protected]> # # Developed: Hackatinho Mobilidade Sostible # # License: AGPL # #****************************************************************************# # CORUNHA BUS MODULE # # Libraries # import os import sys import requests import json # Files # f1 = open("dataset/ubicacion_paradas_bus.json", "w") f2 = open("dataset/info_por_parada.json", "w") # Header # headers = {'x-sofia2-apikey': "14b0fdf8d58541a087c49a97f61903eb", 'content-type': "application/json;charset=utf-8", } # Corunha bus stops with their locations # def ubicacion_paradas(): """Fetch bus stop locations.""" url = "https://smart.coruna.es/sib-api/api/v1/openDataParadaAutobus/getAssets" u1 = requests.get(url, headers=headers) u1_json = u1.json() print(json.dumps(json.loads(u1_json['data'])), file=f1) # json.dump(f1, json.loads(u1_json['data'])) # All the info for a single urban bus stop. def info_parada(): for i in range(1, 50): url = "https://smart.coruna.es/sib-api/api/v1/openDataParadaAutobus/getAssetInfo?$id=%d" % i p1 = requests.get(url, headers=headers) p1_json = p1.json() print(json.dumps(json.loads(p1_json['data'])), file=f2) def main(): # Bus stop locations. ubicacion_paradas() info_parada() if __name__ == '__main__': main()
agpl-3.0
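get_bus_data.py above keeps both output files open for the life of the process and issues one request per stop id. A hedged sketch of the same per-stop loop using context managers and a pooled requests.Session; the endpoint, header, and output path are copied from the script, while STOP_COUNT is an assumed constant standing in for the hard-coded range(1, 50):

import json
import requests

# Values copied from the script above; STOP_COUNT is an assumption.
HEADERS = {'x-sofia2-apikey': '14b0fdf8d58541a087c49a97f61903eb',
           'content-type': 'application/json;charset=utf-8'}
BASE = 'https://smart.coruna.es/sib-api/api/v1/openDataParadaAutobus'
STOP_COUNT = 50

def dump_stop_info(path):
    # One pooled connection for all requests; files close deterministically.
    with requests.Session() as session, open(path, 'w') as out:
        session.headers.update(HEADERS)
        for stop_id in range(1, STOP_COUNT):
            resp = session.get('%s/getAssetInfo?$id=%d' % (BASE, stop_id))
            resp.raise_for_status()
            # The API wraps its payload as a JSON string under 'data'.
            out.write(json.dumps(json.loads(resp.json()['data'])) + '\n')

if __name__ == '__main__':
    dump_stop_info('dataset/info_por_parada.json')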
Huskerboy/startbootstrap-freelancer
freelancer_env/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py
327
26955
# -*- coding: utf-8 -*- # # Copyright (C) 2012 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """Access to Python's configuration information.""" import codecs import os import re import sys from os.path import pardir, realpath try: import configparser except ImportError: import ConfigParser as configparser __all__ = [ 'get_config_h_filename', 'get_config_var', 'get_config_vars', 'get_makefile_filename', 'get_path', 'get_path_names', 'get_paths', 'get_platform', 'get_python_version', 'get_scheme_names', 'parse_config_h', ] def _safe_realpath(path): try: return realpath(path) except OSError: return path if sys.executable: _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) else: # sys.executable can be empty if argv[0] has been changed and Python is # unable to retrieve the real program name _PROJECT_BASE = _safe_realpath(os.getcwd()) if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) # PC/VS7.1 if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower(): _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) # PC/AMD64 if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) def is_python_build(): for fn in ("Setup.dist", "Setup.local"): if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): return True return False _PYTHON_BUILD = is_python_build() _cfg_read = False def _ensure_cfg_read(): global _cfg_read if not _cfg_read: from ..resources import finder backport_package = __name__.rsplit('.', 1)[0] _finder = finder(backport_package) _cfgfile = _finder.find('sysconfig.cfg') assert _cfgfile, 'sysconfig.cfg exists' with _cfgfile.as_stream() as s: _SCHEMES.readfp(s) if _PYTHON_BUILD: for scheme in ('posix_prefix', 'posix_home'): _SCHEMES.set(scheme, 'include', '{srcdir}/Include') _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') _cfg_read = True _SCHEMES = configparser.RawConfigParser() _VAR_REPL = re.compile(r'\{([^{]*?)\}') def _expand_globals(config): _ensure_cfg_read() if config.has_section('globals'): globals = config.items('globals') else: globals = tuple() sections = config.sections() for section in sections: if section == 'globals': continue for option, value in globals: if config.has_option(section, option): continue config.set(section, option, value) config.remove_section('globals') # now expanding local variables defined in the cfg file # for section in config.sections(): variables = dict(config.items(section)) def _replacer(matchobj): name = matchobj.group(1) if name in variables: return variables[name] return matchobj.group(0) for option, value in config.items(section): config.set(section, option, _VAR_REPL.sub(_replacer, value)) #_expand_globals(_SCHEMES) # FIXME don't rely on sys.version here, its format is an implementation detail # of CPython, use sys.version_info or sys.hexversion _PY_VERSION = sys.version.split()[0] _PY_VERSION_SHORT = sys.version[:3] _PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] _PREFIX = os.path.normpath(sys.prefix) _EXEC_PREFIX = os.path.normpath(sys.exec_prefix) _CONFIG_VARS = None _USER_BASE = None def _subst_vars(path, local_vars): """In the string `path`, replace tokens like {some.thing} with the corresponding value from the map `local_vars`. If there is no corresponding value, leave the token unchanged. 
""" def _replacer(matchobj): name = matchobj.group(1) if name in local_vars: return local_vars[name] elif name in os.environ: return os.environ[name] return matchobj.group(0) return _VAR_REPL.sub(_replacer, path) def _extend_dict(target_dict, other_dict): target_keys = target_dict.keys() for key, value in other_dict.items(): if key in target_keys: continue target_dict[key] = value def _expand_vars(scheme, vars): res = {} if vars is None: vars = {} _extend_dict(vars, get_config_vars()) for key, value in _SCHEMES.items(scheme): if os.name in ('posix', 'nt'): value = os.path.expanduser(value) res[key] = os.path.normpath(_subst_vars(value, vars)) return res def format_value(value, vars): def _replacer(matchobj): name = matchobj.group(1) if name in vars: return vars[name] return matchobj.group(0) return _VAR_REPL.sub(_replacer, value) def _get_default_scheme(): if os.name == 'posix': # the default scheme for posix is posix_prefix return 'posix_prefix' return os.name def _getuserbase(): env_base = os.environ.get("PYTHONUSERBASE", None) def joinuser(*args): return os.path.expanduser(os.path.join(*args)) # what about 'os2emx', 'riscos' ? if os.name == "nt": base = os.environ.get("APPDATA") or "~" if env_base: return env_base else: return joinuser(base, "Python") if sys.platform == "darwin": framework = get_config_var("PYTHONFRAMEWORK") if framework: if env_base: return env_base else: return joinuser("~", "Library", framework, "%d.%d" % sys.version_info[:2]) if env_base: return env_base else: return joinuser("~", ".local") def _parse_makefile(filename, vars=None): """Parse a Makefile-style file. A dictionary containing name/value pairs is returned. If an optional dictionary is passed in as the second argument, it is used instead of a new dictionary. """ # Regexes needed for parsing Makefile (and similar syntaxes, # like old-style Setup files). _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") if vars is None: vars = {} done = {} notdone = {} with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: lines = f.readlines() for line in lines: if line.startswith('#') or line.strip() == '': continue m = _variable_rx.match(line) if m: n, v = m.group(1, 2) v = v.strip() # `$$' is a literal `$' in make tmpv = v.replace('$$', '') if "$" in tmpv: notdone[n] = v else: try: v = int(v) except ValueError: # insert literal `$' done[n] = v.replace('$$', '$') else: done[n] = v # do variable interpolation here variables = list(notdone.keys()) # Variables with a 'PY_' prefix in the makefile. These need to # be made available without that prefix through sysconfig. # Special care is needed to ensure that variable expansion works, even # if the expansion uses the name without a prefix. 
renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') while len(variables) > 0: for name in tuple(variables): value = notdone[name] m = _findvar1_rx.search(value) or _findvar2_rx.search(value) if m is not None: n = m.group(1) found = True if n in done: item = str(done[n]) elif n in notdone: # get it on a subsequent round found = False elif n in os.environ: # do it like make: fall back to environment item = os.environ[n] elif n in renamed_variables: if (name.startswith('PY_') and name[3:] in renamed_variables): item = "" elif 'PY_' + n in notdone: found = False else: item = str(done['PY_' + n]) else: done[n] = item = "" if found: after = value[m.end():] value = value[:m.start()] + item + after if "$" in after: notdone[name] = value else: try: value = int(value) except ValueError: done[name] = value.strip() else: done[name] = value variables.remove(name) if (name.startswith('PY_') and name[3:] in renamed_variables): name = name[3:] if name not in done: done[name] = value else: # bogus variable reference (e.g. "prefix=$/opt/python"); # just drop it since we can't deal done[name] = value variables.remove(name) # strip spurious spaces for k, v in done.items(): if isinstance(v, str): done[k] = v.strip() # save the results in the global dictionary vars.update(done) return vars def get_makefile_filename(): """Return the path of the Makefile.""" if _PYTHON_BUILD: return os.path.join(_PROJECT_BASE, "Makefile") if hasattr(sys, 'abiflags'): config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) else: config_dir_name = 'config' return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') def _init_posix(vars): """Initialize the module as appropriate for POSIX systems.""" # load the installed Makefile: makefile = get_makefile_filename() try: _parse_makefile(makefile, vars) except IOError as e: msg = "invalid Python installation: unable to open %s" % makefile if hasattr(e, "strerror"): msg = msg + " (%s)" % e.strerror raise IOError(msg) # load the installed pyconfig.h: config_h = get_config_h_filename() try: with open(config_h) as f: parse_config_h(f, vars) except IOError as e: msg = "invalid Python installation: unable to open %s" % config_h if hasattr(e, "strerror"): msg = msg + " (%s)" % e.strerror raise IOError(msg) # On AIX, there are wrong paths to the linker scripts in the Makefile # -- these paths are relative to the Python source, but when installed # the scripts are in another directory. if _PYTHON_BUILD: vars['LDSHARED'] = vars['BLDSHARED'] def _init_non_posix(vars): """Initialize the module as appropriate for NT""" # set basic install directories vars['LIBDEST'] = get_path('stdlib') vars['BINLIBDEST'] = get_path('platstdlib') vars['INCLUDEPY'] = get_path('include') vars['SO'] = '.pyd' vars['EXE'] = '.exe' vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) # # public APIs # def parse_config_h(fp, vars=None): """Parse a config.h-style file. A dictionary containing name/value pairs is returned. If an optional dictionary is passed in as the second argument, it is used instead of a new dictionary. 
""" if vars is None: vars = {} define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") while True: line = fp.readline() if not line: break m = define_rx.match(line) if m: n, v = m.group(1, 2) try: v = int(v) except ValueError: pass vars[n] = v else: m = undef_rx.match(line) if m: vars[m.group(1)] = 0 return vars def get_config_h_filename(): """Return the path of pyconfig.h.""" if _PYTHON_BUILD: if os.name == "nt": inc_dir = os.path.join(_PROJECT_BASE, "PC") else: inc_dir = _PROJECT_BASE else: inc_dir = get_path('platinclude') return os.path.join(inc_dir, 'pyconfig.h') def get_scheme_names(): """Return a tuple containing the schemes names.""" return tuple(sorted(_SCHEMES.sections())) def get_path_names(): """Return a tuple containing the paths names.""" # xxx see if we want a static list return _SCHEMES.options('posix_prefix') def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): """Return a mapping containing an install scheme. ``scheme`` is the install scheme name. If not provided, it will return the default scheme for the current platform. """ _ensure_cfg_read() if expand: return _expand_vars(scheme, vars) else: return dict(_SCHEMES.items(scheme)) def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): """Return a path corresponding to the scheme. ``scheme`` is the install scheme name. """ return get_paths(scheme, vars, expand)[name] def get_config_vars(*args): """With no arguments, return a dictionary of all configuration variables relevant for the current platform. On Unix, this means every variable defined in Python's installed Makefile; On Windows and Mac OS it's a much smaller set. With arguments, return a list of values that result from looking up each argument in the configuration variable dictionary. """ global _CONFIG_VARS if _CONFIG_VARS is None: _CONFIG_VARS = {} # Normalized versions of prefix and exec_prefix are handy to have; # in fact, these are the standard versions used most places in the # distutils2 module. _CONFIG_VARS['prefix'] = _PREFIX _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX _CONFIG_VARS['py_version'] = _PY_VERSION _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] _CONFIG_VARS['base'] = _PREFIX _CONFIG_VARS['platbase'] = _EXEC_PREFIX _CONFIG_VARS['projectbase'] = _PROJECT_BASE try: _CONFIG_VARS['abiflags'] = sys.abiflags except AttributeError: # sys.abiflags may not be defined on all platforms. _CONFIG_VARS['abiflags'] = '' if os.name in ('nt', 'os2'): _init_non_posix(_CONFIG_VARS) if os.name == 'posix': _init_posix(_CONFIG_VARS) # Setting 'userbase' is done below the call to the # init function to enable using 'get_config_var' in # the init-function. if sys.version >= '2.6': _CONFIG_VARS['userbase'] = _getuserbase() if 'srcdir' not in _CONFIG_VARS: _CONFIG_VARS['srcdir'] = _PROJECT_BASE else: _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) # Convert srcdir into an absolute path if it appears necessary. # Normally it is relative to the build directory. However, during # testing, for example, we might be running a non-installed python # from a different directory. if _PYTHON_BUILD and os.name == "posix": base = _PROJECT_BASE try: cwd = os.getcwd() except OSError: cwd = None if (not os.path.isabs(_CONFIG_VARS['srcdir']) and base != cwd): # srcdir is relative and we are not in the same directory # as the executable. 
Assume executable is in the build # directory and make srcdir absolute. srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) if sys.platform == 'darwin': kernel_version = os.uname()[2] # Kernel version (8.4.3) major_version = int(kernel_version.split('.')[0]) if major_version < 8: # On macOS before 10.4, check if -arch and -isysroot # are in CFLAGS or LDFLAGS and remove them if they are. # This is needed when building extensions on a 10.3 system # using a universal build of python. for key in ('LDFLAGS', 'BASECFLAGS', # a number of derived variables. These need to be # patched up as well. 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): flags = _CONFIG_VARS[key] flags = re.sub('-arch\s+\w+\s', ' ', flags) flags = re.sub('-isysroot [^ \t]*', ' ', flags) _CONFIG_VARS[key] = flags else: # Allow the user to override the architecture flags using # an environment variable. # NOTE: This name was introduced by Apple in OSX 10.5 and # is used by several scripting languages distributed with # that OS release. if 'ARCHFLAGS' in os.environ: arch = os.environ['ARCHFLAGS'] for key in ('LDFLAGS', 'BASECFLAGS', # a number of derived variables. These need to be # patched up as well. 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): flags = _CONFIG_VARS[key] flags = re.sub('-arch\s+\w+\s', ' ', flags) flags = flags + ' ' + arch _CONFIG_VARS[key] = flags # If we're on OSX 10.5 or later and the user tries to # compiles an extension using an SDK that is not present # on the current machine it is better to not use an SDK # than to fail. # # The major usecase for this is users using a Python.org # binary installer on OSX 10.6: that installer uses # the 10.4u SDK, but that SDK is not installed by default # when you install Xcode. # CFLAGS = _CONFIG_VARS.get('CFLAGS', '') m = re.search('-isysroot\s+(\S+)', CFLAGS) if m is not None: sdk = m.group(1) if not os.path.exists(sdk): for key in ('LDFLAGS', 'BASECFLAGS', # a number of derived variables. These need to be # patched up as well. 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): flags = _CONFIG_VARS[key] flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags) _CONFIG_VARS[key] = flags if args: vals = [] for name in args: vals.append(_CONFIG_VARS.get(name)) return vals else: return _CONFIG_VARS def get_config_var(name): """Return the value of a single variable using the dictionary returned by 'get_config_vars()'. Equivalent to get_config_vars().get(name) """ return get_config_vars().get(name) def get_platform(): """Return a string that identifies the current platform. This is used mainly to distinguish platform-specific build directories and platform-specific built distributions. Typically includes the OS name and version and the architecture (as supplied by 'os.uname()'), although the exact information included depends on the OS; eg. for IRIX the architecture isn't particularly important (IRIX only runs on SGI hardware), but for Linux the kernel version isn't particularly important. Examples of returned values: linux-i586 linux-alpha (?) solaris-2.6-sun4u irix-5.3 irix64-6.2 Windows will return one of: win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) win-ia64 (64bit Windows on Itanium) win32 (all others - specifically, sys.platform is returned) For other non-POSIX platforms, currently just returns 'sys.platform'. """ if os.name == 'nt': # sniff sys.version for architecture. 
prefix = " bit (" i = sys.version.find(prefix) if i == -1: return sys.platform j = sys.version.find(")", i) look = sys.version[i+len(prefix):j].lower() if look == 'amd64': return 'win-amd64' if look == 'itanium': return 'win-ia64' return sys.platform if os.name != "posix" or not hasattr(os, 'uname'): # XXX what about the architecture? NT is Intel or Alpha, # Mac OS is M68k or PPC, etc. return sys.platform # Try to distinguish various flavours of Unix osname, host, release, version, machine = os.uname() # Convert the OS name to lowercase, remove '/' characters # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") osname = osname.lower().replace('/', '') machine = machine.replace(' ', '_') machine = machine.replace('/', '-') if osname[:5] == "linux": # At least on Linux/Intel, 'machine' is the processor -- # i386, etc. # XXX what about Alpha, SPARC, etc? return "%s-%s" % (osname, machine) elif osname[:5] == "sunos": if release[0] >= "5": # SunOS 5 == Solaris 2 osname = "solaris" release = "%d.%s" % (int(release[0]) - 3, release[2:]) # fall through to standard osname-release-machine representation elif osname[:4] == "irix": # could be "irix64"! return "%s-%s" % (osname, release) elif osname[:3] == "aix": return "%s-%s.%s" % (osname, version, release) elif osname[:6] == "cygwin": osname = "cygwin" rel_re = re.compile(r'[\d.]+') m = rel_re.match(release) if m: release = m.group() elif osname[:6] == "darwin": # # For our purposes, we'll assume that the system version from # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set # to. This makes the compatibility story a bit more sane because the # machine is going to compile and link as if it were # MACOSX_DEPLOYMENT_TARGET. cfgvars = get_config_vars() macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') if True: # Always calculate the release of the running machine, # needed to determine if we can build fat binaries or not. macrelease = macver # Get the system version. Reading this plist is a documented # way to get the system version (see the documentation for # the Gestalt Manager) try: f = open('/System/Library/CoreServices/SystemVersion.plist') except IOError: # We're on a plain darwin box, fall back to the default # behaviour. pass else: try: m = re.search(r'<key>ProductUserVisibleVersion</key>\s*' r'<string>(.*?)</string>', f.read()) finally: f.close() if m is not None: macrelease = '.'.join(m.group(1).split('.')[:2]) # else: fall back to the default behaviour if not macver: macver = macrelease if macver: release = macver osname = "macosx" if ((macrelease + '.') >= '10.4.' and '-arch' in get_config_vars().get('CFLAGS', '').strip()): # The universal build will build fat binaries, but not on # systems before 10.4 # # Try to detect 4-way universal builds, those have machine-type # 'universal' instead of 'fat'. 
machine = 'fat' cflags = get_config_vars().get('CFLAGS') archs = re.findall('-arch\s+(\S+)', cflags) archs = tuple(sorted(set(archs))) if len(archs) == 1: machine = archs[0] elif archs == ('i386', 'ppc'): machine = 'fat' elif archs == ('i386', 'x86_64'): machine = 'intel' elif archs == ('i386', 'ppc', 'x86_64'): machine = 'fat3' elif archs == ('ppc64', 'x86_64'): machine = 'fat64' elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): machine = 'universal' else: raise ValueError( "Don't know machine value for archs=%r" % (archs,)) elif machine == 'i386': # On OSX the machine type returned by uname is always the # 32-bit variant, even if the executable architecture is # the 64-bit variant if sys.maxsize >= 2**32: machine = 'x86_64' elif machine in ('PowerPC', 'Power_Macintosh'): # Pick a sane name for the PPC architecture. # See 'i386' case if sys.maxsize >= 2**32: machine = 'ppc64' else: machine = 'ppc' return "%s-%s-%s" % (osname, release, machine) def get_python_version(): return _PY_VERSION_SHORT def _print_dict(title, data): for index, (key, value) in enumerate(sorted(data.items())): if index == 0: print('%s: ' % (title)) print('\t%s = "%s"' % (key, value)) def _main(): """Display all information sysconfig detains.""" print('Platform: "%s"' % get_platform()) print('Python version: "%s"' % get_python_version()) print('Current installation scheme: "%s"' % _get_default_scheme()) print() _print_dict('Paths', get_paths()) print() _print_dict('Variables', get_config_vars()) if __name__ == '__main__': _main()
mit
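_subst_vars, format_value, and the per-section loop in _expand_globals above all share one idiom: re.sub with a closure that resolves the matched name against a mapping and falls back to the original token. A minimal self-contained sketch of that idiom, reusing the same token regex as _VAR_REPL:

import re

# Same token syntax as _VAR_REPL in the backport above.
VAR_REPL = re.compile(r'\{([^{]*?)\}')

def subst(template, mapping):
    def _replacer(matchobj):
        name = matchobj.group(1)
        # Unknown names are left untouched, exactly like _subst_vars.
        return mapping.get(name, matchobj.group(0))
    return VAR_REPL.sub(_replacer, template)

print(subst('{base}/lib/python{py_version_short}', {'base': '/usr'}))
# -> /usr/lib/python{py_version_short}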
jonathanslenders/python-deployer
deployer/run/socket_client.py
2
10212
""" Start a deployment shell client. """ from StringIO import StringIO from twisted.internet import fdesc from deployer.utils import esc1 from setproctitle import setproctitle import array import errno import fcntl import getpass import glob import os import pickle import select import signal import socket import subprocess import sys import termcolor import termios import time import tty __all__ = ('start',) def get_size(): # Buffer for the C call buf = array.array('h', [0, 0, 0, 0 ]) # Do TIOCGWINSZ (Get) fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ, buf, True) # Return rows, cols return buf[0], buf[1] def make_stdin_unbuffered(): # Make stdin/stdout unbufferd sys.stdin = os.fdopen(sys.stdin.fileno(), 'r', 0) sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) class DeploymentClient(object): def __init__(self, socket_path): self.socket_path = socket_path self._buffer = [] self.wait_for_closing = False self.exit_status = 0 # Currently running command self.update_process_title() # Connect to unix socket self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self._connect_socket() # Send size to server at startup and when SIGWINCH has been caught. def sigwinch_handler(n, frame): self._send_size() signal.signal(signal.SIGWINCH, sigwinch_handler) def _connect_socket(self): # Can throw socket.error self.socket.connect(self.socket_path) # Wait for server to become ready time.sleep(0.1) def _send_size(self): self.socket.sendall(pickle.dumps(('_resize', get_size()))) def update_process_title(self): """ Set process name """ setproctitle('deploy connect --socket "%s"' % self.socket_path) @property def new_window_command(self): """ When a new window is opened, run this command. """ return "python -c 'from deployer.run.socket_client import start; import sys; start(sys.argv[1])' '%s' " % esc1(self.socket_path) def _open_new_window(self, focus=False): """ Open another client in a new window. """ try: tmux_env = os.environ.get('TMUX', '') xterm_env = os.environ.get('XTERM', '') display_env = os.environ.get('DISPLAY', '') colorterm_env = os.environ.get('COLORTERM', '') if tmux_env: # Construct tmux split command swap = (' && (tmux last-pane || true)' if not focus else '') tiled = ' && (tmux select-layout tiled || true)' # We run the new client in the current PATH variable, this # makes sure that if a virtualenv was activated in a tmux # pane, that we use the same virtualenv for this command. path_env = os.environ.get('PATH', '') subprocess.call(r'TMUX=%s tmux split-window "PATH=\"%s\" %s" %s %s' % (tmux_env, path_env, self.new_window_command, swap, tiled), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # When in a gnome-terminal: elif display_env and colorterm_env == 'gnome-terminal': subprocess.call('gnome-terminal -e "%s" &' % self.new_window_command, shell=True) # Fallback to xterm elif display_env and xterm_env: subprocess.call('xterm -e %s &' % self.new_window_command, shell=True) else: # Failed, print err. sys.stdout.write( 'ERROR: Doesn\'t know how to open new terminal. ' 'TMUX and XTERM environment variables are empty.\r\n') sys.stdout.flush() except Exception as e: # TODO: Somehow, the subprocess.call raised an IOError Invalid argument, # we don't know why, but need to debug when it happens again. 
            import pdb; pdb.set_trace()

    def _receive(self, data):
        """ Process incoming data """
        try:
            io = StringIO(''.join(self._buffer + [data]))
            action, data = pickle.load(io)

            # Unmarshalling succeeded, call callback
            if action == '_print':
                while True:
                    try:
                        sys.stdout.write(data)
                        break
                    except IOError as e:
                        # Sometimes, when we have a lot of output, we get here:
                        # IOError: [Errno 11] Resource temporarily unavailable
                        # Just waiting a little, and retrying seems to work.
                        # See also: deployer.host.__init__ for a similar issue.
                        time.sleep(0.2)
                sys.stdout.flush()

            elif action == 'open-new-window':
                focus = data['focus']
                self._open_new_window(focus)

            elif action == '_info':
                print termcolor.colored(self.socket_path, 'cyan')
                print ' Created: %s' % data['created']
                print ' Root node name: %s' % data['root_node_name']
                print ' Root node module: %s' % data['root_node_module']
                print ' Processes: (%i)' % len(data['processes'])
                for i, process in enumerate(data['processes']):
                    print ' %i' % i
                    print ' - Node name %s' % process['node_name']
                    print ' - Node module %s' % process['node_module']
                    print ' - Running %s' % process['running']

            elif action == 'finish':
                self.exit_status = data['exit_status']

                if data['close_on_keypress']:
                    sys.stdout.write('\r\n\r\n[DONE] Press ENTER to close window.\r\n')
                    sys.stdout.flush()
                    self.wait_for_closing = True

            # Keep the remainder for the next time
            remainder = io.read()
            self._buffer = [remainder]

            if len(remainder):
                self._receive('')
        except (EOFError, ValueError) as e:
            # Not enough data, wait for the next part to arrive
            if data:
                self._buffer.append(data)

    def ask_info(self):
        self.socket.sendall(pickle.dumps(('_get_info', '')))
        self._read_loop()

    def run(self, cd_path=None, action_name=None, parameters=None, open_scp_shell=False):
        """
        Run main event loop.
        """
        if action_name and open_scp_shell:
            raise Exception("Don't provide 'action_name' and 'open_scp_shell' at the same time")

        # Remember the terminal attributes and put stdin in raw mode.
        tcattr = termios.tcgetattr(sys.stdin.fileno())
        tty.setraw(sys.stdin.fileno())

        # Report size
        self._send_size()

        self.socket.sendall(pickle.dumps(('_start-interaction', {
            'cd_path': cd_path,
            'action_name': action_name,
            'parameters': parameters,
            'open_scp_shell': open_scp_shell,
            'term_var': os.environ.get('TERM', 'xterm'),  # xterm, vt100, xterm-256color
        })))

        self._read_loop()

        # Reset terminal state
        termios.tcsetattr(sys.stdin, termios.TCSAFLUSH, tcattr)

        # Put the cursor again at the left margin.
        sys.stdout.write('\r\n')

        # Set exit status
        sys.exit(self.exit_status)

    def _read_loop(self):
        while True:
            try:
                # I/O routing
                r, w, e = select.select([self.socket, sys.stdin], [], [])

                if self.socket in r:
                    data = self.socket.recv(1024)
                    if data:
                        self._receive(data)
                    else:
                        # Nothing received? End of stream.
                        break

                if sys.stdin in r:
                    # Non blocking read. (Write works better in blocking mode.
                    # Especially on OS X.)
                    fdesc.setNonBlocking(sys.stdin)
                    data = sys.stdin.read(1024)

                    # We read larger chunks (more than just one byte) in
                    # one go. This is important for meta and arrow keys
                    # which consist of multiple bytes. Many applications
                    # rely on receiving them together.
                    fdesc.setBlocking(sys.stdin)

                    # If we're finished and 'wait_for_closing' was set, any
                    # key press will terminate the client.
                    if self.wait_for_closing:
                        break

                    if chr(14) in data:  # Ctrl-N
                        # Tell the server to open a new window.
                        self.socket.sendall(pickle.dumps(('open-new-window', '')))
                    else:
                        self.socket.sendall(pickle.dumps(('_input', data)))

            except socket.error:
                print '\nConnection closed...'
                break
            except Exception as e:
                # SIGWINCH will abort the select() call. Just ignore this error.
                if e.args and e.args[0] == errno.EINTR:
                    continue
                else:
                    raise


def list_sessions():
    """ List all the servers that are running. """
    for path in glob.glob('/tmp/deployer.sock.%s.*' % getpass.getuser()):
        try:
            DeploymentClient(path).ask_info()
        except socket.error as e:
            pass


def start(socket_name, cd_path=None, action_name=None, parameters=None, open_scp_shell=False):
    """
    Start a socket client.
    """
    make_stdin_unbuffered()

    DeploymentClient(socket_name).run(cd_path=cd_path, action_name=action_name, parameters=parameters, open_scp_shell=open_scp_shell)
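# --- Editor's sketch (not part of the original module) ---------------------
# Minimal usage, assuming a deployment server is already listening on the
# given unix socket; the socket path below is a made-up example.
#
#   from deployer.run.socket_client import start, list_sessions
#   list_sessions()                       # print info for every running server
#   start('/tmp/deployer.sock.me.1234')   # attach an interactive client to one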
bsd-2-clause
thnkloud9/Tandem
server/scripts/clean_orphaned_objects.py
1
1406
from pymongo import MongoClient
from bson.objectid import ObjectId

client = MongoClient('localhost', 27017)
db = client['tandem']
audios = db['audio']
users = db['users']
files = db['fs.files']
chunks = db['fs.chunks']
practice_sets = db['practice_sets']
practice_sessions = db['practice_sessions']

# remove practice_sets without an associated user
for practice_set in practice_sets.find():
    if not users.find_one({'_id': ObjectId(practice_set['submitted_by'])}):
        print('{0} - is an orphaned practice_set'.format(practice_set['_id']))
        practice_sets.remove(practice_set['_id'])

# remove practice_sessions without an associated user
for practice_session in practice_sessions.find():
    if not users.find_one({'_id': ObjectId(practice_session['submitted_by'])}):
        print('{0} - is an orphaned practice_session'.format(practice_session['_id']))
        practice_sessions.remove(practice_session['_id'])

# remove files that belong to neither an audio document nor a user image
for media_file in files.find():
    if not audios.find_one({'audio': ObjectId(media_file['_id'])}):
        if not users.find_one({'image': ObjectId(media_file['_id'])}):
            print('{0} - is an orphaned file'.format(media_file['_id']))
            files.remove(media_file['_id'])

# now remove chunks without an associated file
for chunk in chunks.find():
    if not files.find_one({'_id': chunk['files_id']}):
        chunks.remove(chunk['_id'])
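# --- Editor's sketch (not part of the original script) ---------------------
# A dry-run variant of the first loop: count the orphans without deleting
# anything, useful before running the destructive version above.
#
#   orphans = [ps['_id'] for ps in practice_sets.find()
#              if not users.find_one({'_id': ObjectId(ps['submitted_by'])})]
#   print('{0} orphaned practice_sets would be removed'.format(len(orphans)))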
gpl-2.0
dwsilk/qgis-geosaurus-plugin
geosaurus/functions/processing/offsetcurve.py
1
3600
# -*- coding: utf-8 -*-

import re

from ...core import database as db
from ..enums import (
    ReturnType,
    SpatialFunctionGroup,
    SpatialType)
from ..template import AbstractSpatialFunction


class OffsetCurve(AbstractSpatialFunction):
    """
    ST_OffsetCurve
    """

    def __init__(self, distance=0, quad_segs=8, join="round", mitre_limit=5.0, parameter_string=""):
        self._distance = distance
        self._quad_segs = quad_segs
        self._join = join
        self._mitre_limit = mitre_limit
        self._parameter_string = parameter_string
        if self._parameter_string:
            # Parse a provided options string via the property setter.
            self.parameter_string = self._parameter_string

    @property
    def distance(self):
        return self._distance

    @distance.setter
    def distance(self, value):
        self._distance = value

    @property
    def quad_segs(self):
        return self._quad_segs

    @quad_segs.setter
    def quad_segs(self, value):
        self._quad_segs = value

    @property
    def join(self):
        return self._join

    @join.setter
    def join(self, value):
        self._join = value

    @property
    def mitre_limit(self):
        return self._mitre_limit

    @mitre_limit.setter
    def mitre_limit(self, value):
        self._mitre_limit = value

    @property
    def parameter_string(self):
        parameter_string = ""
        if self._quad_segs != 8:
            parameter_string += "quad_segs={} ".format(self._quad_segs)
        if self._join != "round":
            parameter_string += "join={} ".format(self._join)
        if self._join == "mitre" and self._mitre_limit != 5:
            parameter_string += "mitre_limit={}".format(self._mitre_limit)
        return parameter_string.rstrip()

    @parameter_string.setter
    def parameter_string(self, value):
        if "quad_segs" in value:
            result = re.search(r"quad_segs=(\S+)\b", value)
            if result and result.group(1):
                self.quad_segs = int(result.group(1))
        if "join" in value:
            result = re.search(r"join=(\S+)\b", value)
            if result and result.group(1):
                self.join = result.group(1)
        if "mitre_limit" in value:
            result = re.search(r"mitre_limit=(\S+)\b", value)
            if result and result.group(1):
                self.mitre_limit = float(result.group(1))

    @classmethod
    def function_name(cls):
        return "ST_OffsetCurve"

    @classmethod
    def alias_name(cls):
        return None

    @classmethod
    def function_group(cls):
        return SpatialFunctionGroup.PROCESSING

    @classmethod
    def function_help(cls):
        return "http://www.postgis.org/docs/ST_OffsetCurve.html"

    @classmethod
    def start_postgis_version(cls):
        return "1.5.0"

    @classmethod
    def spatial_type_support(cls):
        return [
            SpatialType.GEOMETRY,
            SpatialType.SQL_MM,
        ]

    @classmethod
    def return_type(cls):
        return ReturnType.GEOMETRY

    def execute_query(self, wkt):
        if self.parameter_string:
            output = db.execute("""
                SELECT ST_AsText(ST_OffsetCurve(ST_GeomFromText(%s), %s, %s));
                """, (wkt, self.distance, self.parameter_string, ))
        else:
            output = db.execute("""
                SELECT ST_AsText(ST_OffsetCurve(ST_GeomFromText(%s), %s));
                """, (wkt, self.distance, ))
        return output

    def sample_query_as_text(self, wkt):
        return """
            SELECT ST_AsText(ST_OffsetCurve(ST_GeomFromText('{0}'), {1:.5f}, '{2}'))
            """.format(wkt, self.distance, self.parameter_string)
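# --- Editor's sketch (not part of the original plugin) ----------------------
# How parameter_string composes the PostGIS options, assuming the class
# above: only non-default values are included in the string.
#
#   oc = OffsetCurve(distance=10, quad_segs=4, join="mitre", mitre_limit=2.0)
#   print(oc.parameter_string)      # 'quad_segs=4 join=mitre mitre_limit=2.0'
#   print(oc.sample_query_as_text('LINESTRING(0 0, 10 0)'))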
bsd-3-clause