code | repo_name | path | language | license | size |
---|---|---|---|---|---|
stringlengths 3–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int64 3–1.05M |
# -*- coding: utf-8 -*-
from odoo.addons.account.tests.account_test_classes import AccountingTestCase
class TestStockLandedCostsCommon(AccountingTestCase):
def setUp(self):
super(TestStockLandedCostsCommon, self).setUp()
# Objects
self.Product = self.env['product.product']
self.Picking = self.env['stock.picking']
self.Move = self.env['stock.move']
self.LandedCost = self.env['stock.landed.cost']
self.CostLine = self.env['stock.landed.cost.lines']
# References
self.supplier_id = self.ref('base.res_partner_2')
self.customer_id = self.ref('base.res_partner_4')
self.picking_type_in_id = self.ref('stock.picking_type_in')
self.picking_type_out_id = self.ref('stock.picking_type_out')
self.supplier_location_id = self.ref('stock.stock_location_suppliers')
self.stock_location_id = self.ref('stock.stock_location_stock')
self.customer_location_id = self.ref('stock.stock_location_customers')
self.categ_all = self.env.ref('product.product_category_all')
# Create account
self.default_account = self.env['account.account'].create({
'name': "Purchased Stocks",
'code': "X1101",
'user_type_id': self.env['account.account.type'].create({
'name': 'Expenses',
'type': 'other'}).id,
'reconcile': True})
self.expenses_journal = self.env['account.journal'].create({
'name': 'Expenses - Test',
'code': 'TEXJ',
'type': 'purchase',
'default_debit_account_id': self.default_account.id,
'default_credit_account_id': self.default_account.id})
# Create product refrigerator & oven
self.product_refrigerator = self.Product.create({
'name': 'Refrigerator',
'type': 'product',
'cost_method': 'fifo',
'valuation': 'real_time',
'standard_price': 1.0,
'weight': 10,
'volume': 1,
'categ_id': self.categ_all.id})
self.product_oven = self.Product.create({
'name': 'Microwave Oven',
'type': 'product',
'cost_method': 'fifo',
'valuation': 'real_time',
'standard_price': 1.0,
'weight': 20,
'volume': 1.5,
'categ_id': self.categ_all.id})
# Create service type products: 1. Landed Cost 2. Brokerage 3. Transportation 4. Packaging
self.landed_cost = self._create_services('Landed Cost')
self.brokerage_quantity = self._create_services('Brokerage Cost')
self.transportation_weight = self._create_services('Transportation Cost')
self.packaging_volume = self._create_services('Packaging Cost')
# Ensure the account properties exist.
self.ensure_account_property('property_stock_account_input')
self.ensure_account_property('property_stock_account_output')
def _create_services(self, name):
return self.Product.create({
'name': name,
'landed_cost_ok': True,
'type': 'service'})
| Aravinthu/odoo | addons/stock_landed_costs/tests/common.py | Python | agpl-3.0 | 3,168 |
from django.conf import settings
from django.db import models
class PermittedManager(models.Manager):
def get_query_set(self):
# Get base queryset and exclude based on state.
queryset = super(PermittedManager, self).get_query_set().exclude(
state='unpublished'
)
# Exclude objects in staging state if not in
# staging mode (settings.STAGING = False).
if not getattr(settings, 'STAGING', False):
queryset = queryset.exclude(state='staging')
# Filter objects for current site.
queryset = queryset.filter(sites__id__exact=settings.SITE_ID)
return queryset
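# --- Illustrative sketch (not part of the original module) -----------------
# The comments above describe how PermittedManager filters by state and site.
# A model would typically expose it next to the default manager; the model
# and field names below are hypothetical and only show the wiring:
#
#     class Article(models.Model):
#         state = models.CharField(max_length=32, default='published')
#         sites = models.ManyToManyField('sites.Site')
#
#         objects = models.Manager()        # unfiltered access
#         permitted = PermittedManager()    # published rows on the current site
#
# Article.permitted.all() then never returns 'unpublished' rows, and skips
# 'staging' rows unless settings.STAGING is True.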
| unomena/jmbo-unomena-old | jmbo/managers.py | Python | bsd-3-clause | 657 |
# -*- coding: utf-8 -*-
# MLC (Machine Learning Control): A genetic algorithm library to solve chaotic problems
# Copyright (C) 2015-2017, Thomas Duriez ([email protected])
# Copyright (C) 2015, Adrian Durán ([email protected])
# Copyright (C) 2015-2017, Ezequiel Torres Feyuk ([email protected])
# Copyright (C) 2016-2017, Marco Germano Zbrun ([email protected])
# Copyright (C) 2016-2017, Raúl Lopez Skuba ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
def stmt_create_table_board():
return ''' CREATE TABLE board(id INTEGER PRIMARY KEY AUTOINCREMENT,
board_type TEXT,
connection_type INTEGER,
read_count INTEGER,
read_delay INTEGER,
report_mode INTEGER,
analog_resolution INTEGER)'''
def stmt_create_table_serial_connection():
return ''' CREATE TABLE serial_connection(id INTEGER PRIMARY KEY AUTOINCREMENT,
board_id INTEGER,
port TEXT,
baudrate INTEGER,
parity TEXT,
stopbits INTEGER,
bytesize INTEGER,
FOREIGN KEY(board_id) REFERENCES board(id))'''
def stmt_create_table_digital_pin():
return ''' CREATE TABLE digital_pin(pin_id INTEGER,
board_id INTEGER,
pin_type INTEGER,
PRIMARY KEY (pin_id, board_id),
FOREIGN KEY(board_id) REFERENCES board(id))'''
def stmt_create_table_analog_pin():
return ''' CREATE TABLE analog_pin(pin_id INTEGER,
board_id INTEGER,
pin_type INTEGER,
PRIMARY KEY (pin_id, board_id),
FOREIGN KEY(board_id) REFERENCES board(id))'''
def stmt_create_table_pwm_pin():
return ''' CREATE TABLE pwm_pin(pin_id INTEGER,
board_id INTEGER,
PRIMARY KEY (pin_id, board_id),
FOREIGN KEY(board_id) REFERENCES board(id))'''
def stmt_insert_board(board_type, connection_type, read_count, read_delay, report_mode, analog_resolution):
return '''INSERT INTO board (board_type, connection_type, read_count, read_delay, report_mode, analog_resolution)
VALUES ("%s", %s, %s, %s, %s, %s)''' % (board_type,
connection_type,
read_count,
read_delay,
report_mode,
analog_resolution)
def stmt_update_board(board_id, board_type, connection_type, read_count, read_delay, report_mode, analog_resolution):
return '''UPDATE board SET
board_type = "%s",
connection_type = %s,
read_count = %s,
read_delay = %s,
report_mode = %s,
analog_resolution = %s
WHERE id = %s''' % (board_type,
connection_type,
read_count,
read_delay,
report_mode,
analog_resolution,
board_id)
def stmt_get_board(board_id):
return '''SELECT board_type, report_mode, read_count, read_delay, analog_resolution
FROM board WHERE id = %s''' % board_id
def stmt_delete_digital_pin(board_id):
return __stmt_delete_pin("digital_pin", board_id)
def stmt_delete_analog_pin(board_id):
return __stmt_delete_pin("analog_pin", board_id)
def stmt_delete_pwm_pin(board_id):
return __stmt_delete_pin("pwm_pin", board_id)
def __stmt_delete_pin(pin_table, board_id):
return '''DELETE FROM %s WHERE board_id = %s''' % (pin_table, board_id)
def stmt_insert_digital_pin(board_id, pin_id, pin_type):
return __stmt_insert_pin("digital_pin", board_id, pin_id, pin_type)
def stmt_insert_analog_pin(board_id, pin_id, pin_type):
return __stmt_insert_pin("analog_pin", board_id, pin_id, pin_type)
def __stmt_insert_pin(pin_table, board_id, pin_id, pin_type):
return '''INSERT INTO %s (pin_id, board_id, pin_type) VALUES (%s, %s, %s)''' % (pin_table, pin_id, board_id, pin_type)
def stmt_insert_pwm_pin(board_id, pin_id):
return '''INSERT INTO pwm_pin (pin_id, board_id) VALUES (%s, %s)''' % (pin_id, board_id)
def stmt_get_analog_pins(board_id):
return "SELECT pin_id, pin_type FROM analog_pin WHERE board_id = %s" % (board_id)
def stmt_get_digital_pins(board_id):
return "SELECT pin_id, pin_type FROM digital_pin WHERE board_id = %s" % (board_id)
def stmt_get_pwm_pins(board_id):
return "SELECT pin_id FROM pwm_pin WHERE board_id = %s" % (board_id)
def stmt_insert_serial_connection(board_id, port, baudrate, parity, stopbits, bytesize):
return '''INSERT INTO serial_connection (board_id, port, baudrate, parity, stopbits, bytesize)
VALUES (%s, "%s", %s, "%s", %s, %s)''' % (board_id,
port,
baudrate,
parity,
stopbits,
bytesize)
def stmt_update_serial_connection(connection_id, board_id, port, baudrate, parity, stopbits, bytesize):
return '''UPDATE serial_connection SET
board_id = "%s",
port = "%s",
baudrate = %s,
parity = "%s",
stopbits = %s,
bytesize = %s
WHERE id = %s''' % (board_id,
port,
baudrate,
parity,
stopbits,
bytesize,
connection_id)
def stmt_get_serial_connection(board_id):
return '''SELECT port, baudrate, parity, stopbits, bytesize
FROM serial_connection WHERE board_id = %s''' % board_id
def stmt_get_board_configuration_ids():
return '''SELECT id FROM board'''
| MachineLearningControl/OpenMLC-Python | MLC/db/sqlite/sql_statements_board_configuration.py | Python | gpl-3.0 | 7,456 |
"""Config flow for RFXCOM RFXtrx integration."""
import copy
import os
import RFXtrx as rfxtrxmod
import serial
import serial.tools.list_ports
import voluptuous as vol
from homeassistant import config_entries, exceptions
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_COMMAND_OFF,
CONF_COMMAND_ON,
CONF_DEVICE,
CONF_DEVICE_ID,
CONF_DEVICES,
CONF_HOST,
CONF_PORT,
CONF_TYPE,
)
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import (
async_entries_for_config_entry,
async_get_registry as async_get_device_registry,
)
from homeassistant.helpers.entity_registry import (
async_entries_for_device,
async_get_registry as async_get_entity_registry,
)
from . import DOMAIN, get_device_id, get_rfx_object
from .binary_sensor import supported as binary_supported
from .const import (
CONF_AUTOMATIC_ADD,
CONF_DATA_BITS,
CONF_FIRE_EVENT,
CONF_OFF_DELAY,
CONF_REMOVE_DEVICE,
CONF_REPLACE_DEVICE,
CONF_SIGNAL_REPETITIONS,
DEVICE_PACKET_TYPE_LIGHTING4,
)
from .cover import supported as cover_supported
from .light import supported as light_supported
from .switch import supported as switch_supported
CONF_EVENT_CODE = "event_code"
CONF_MANUAL_PATH = "Enter Manually"
def none_or_int(value, base):
"""Check if strin is one otherwise convert to int."""
if value is None:
return None
return int(value, base)
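# --- Illustrative sketch (not part of the original module) -----------------
# none_or_int() leaves empty optional form fields as None and parses filled
# ones in the requested base (hexadecimal for the PT2262 command codes below):
def _example_none_or_int():
    assert none_or_int(None, 16) is None
    assert none_or_int("ff", 16) == 255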
class OptionsFlow(config_entries.OptionsFlow):
"""Handle Rfxtrx options."""
def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize rfxtrx options flow."""
self._config_entry = config_entry
self._global_options = None
self._selected_device = None
self._selected_device_entry_id = None
self._selected_device_event_code = None
self._selected_device_object = None
self._device_entries = None
self._device_registry = None
async def async_step_init(self, user_input=None):
"""Manage the options."""
return await self.async_step_prompt_options()
async def async_step_prompt_options(self, user_input=None):
"""Prompt for options."""
errors = {}
if user_input is not None:
self._global_options = {
CONF_AUTOMATIC_ADD: user_input[CONF_AUTOMATIC_ADD],
}
if CONF_DEVICE in user_input:
entry_id = user_input[CONF_DEVICE]
device_data = self._get_device_data(entry_id)
self._selected_device_entry_id = entry_id
event_code = device_data[CONF_EVENT_CODE]
self._selected_device_event_code = event_code
self._selected_device = self._config_entry.data[CONF_DEVICES][
event_code
]
self._selected_device_object = get_rfx_object(event_code)
return await self.async_step_set_device_options()
if CONF_REMOVE_DEVICE in user_input:
remove_devices = user_input[CONF_REMOVE_DEVICE]
devices = {}
for entry_id in remove_devices:
device_data = self._get_device_data(entry_id)
event_code = device_data[CONF_EVENT_CODE]
device_id = device_data[CONF_DEVICE_ID]
self.hass.helpers.dispatcher.async_dispatcher_send(
f"{DOMAIN}_{CONF_REMOVE_DEVICE}_{device_id}"
)
self._device_registry.async_remove_device(entry_id)
if event_code is not None:
devices[event_code] = None
self.update_config_data(
global_options=self._global_options, devices=devices
)
return self.async_create_entry(title="", data={})
if CONF_EVENT_CODE in user_input:
self._selected_device_event_code = user_input[CONF_EVENT_CODE]
self._selected_device = {}
selected_device_object = get_rfx_object(
self._selected_device_event_code
)
if selected_device_object is None:
errors[CONF_EVENT_CODE] = "invalid_event_code"
elif not self._can_add_device(selected_device_object):
errors[CONF_EVENT_CODE] = "already_configured_device"
else:
self._selected_device_object = selected_device_object
return await self.async_step_set_device_options()
if not errors:
self.update_config_data(global_options=self._global_options)
return self.async_create_entry(title="", data={})
device_registry = await async_get_device_registry(self.hass)
device_entries = async_entries_for_config_entry(
device_registry, self._config_entry.entry_id
)
self._device_registry = device_registry
self._device_entries = device_entries
remove_devices = {
entry.id: entry.name_by_user if entry.name_by_user else entry.name
for entry in device_entries
}
configure_devices = {
entry.id: entry.name_by_user if entry.name_by_user else entry.name
for entry in device_entries
if self._get_device_event_code(entry.id) is not None
}
options = {
vol.Optional(
CONF_AUTOMATIC_ADD,
default=self._config_entry.data[CONF_AUTOMATIC_ADD],
): bool,
vol.Optional(CONF_EVENT_CODE): str,
vol.Optional(CONF_DEVICE): vol.In(configure_devices),
vol.Optional(CONF_REMOVE_DEVICE): cv.multi_select(remove_devices),
}
return self.async_show_form(
step_id="prompt_options", data_schema=vol.Schema(options), errors=errors
)
async def async_step_set_device_options(self, user_input=None):
"""Manage device options."""
errors = {}
if user_input is not None:
device_id = get_device_id(
self._selected_device_object.device,
data_bits=user_input.get(CONF_DATA_BITS),
)
if CONF_REPLACE_DEVICE in user_input:
await self._async_replace_device(user_input[CONF_REPLACE_DEVICE])
devices = {self._selected_device_event_code: None}
self.update_config_data(
global_options=self._global_options, devices=devices
)
return self.async_create_entry(title="", data={})
try:
command_on = none_or_int(user_input.get(CONF_COMMAND_ON), 16)
except ValueError:
errors[CONF_COMMAND_ON] = "invalid_input_2262_on"
try:
command_off = none_or_int(user_input.get(CONF_COMMAND_OFF), 16)
except ValueError:
errors[CONF_COMMAND_OFF] = "invalid_input_2262_off"
try:
off_delay = none_or_int(user_input.get(CONF_OFF_DELAY), 10)
except ValueError:
errors[CONF_OFF_DELAY] = "invalid_input_off_delay"
if not errors:
devices = {}
device = {
CONF_DEVICE_ID: device_id,
CONF_FIRE_EVENT: user_input.get(CONF_FIRE_EVENT, False),
CONF_SIGNAL_REPETITIONS: user_input.get(CONF_SIGNAL_REPETITIONS, 1),
}
devices[self._selected_device_event_code] = device
if off_delay:
device[CONF_OFF_DELAY] = off_delay
if user_input.get(CONF_DATA_BITS):
device[CONF_DATA_BITS] = user_input[CONF_DATA_BITS]
if command_on:
device[CONF_COMMAND_ON] = command_on
if command_off:
device[CONF_COMMAND_OFF] = command_off
self.update_config_data(
global_options=self._global_options, devices=devices
)
return self.async_create_entry(title="", data={})
device_data = self._selected_device
data_schema = {
vol.Optional(
CONF_FIRE_EVENT, default=device_data.get(CONF_FIRE_EVENT, False)
): bool,
}
if binary_supported(self._selected_device_object):
if device_data.get(CONF_OFF_DELAY):
off_delay_schema = {
vol.Optional(
CONF_OFF_DELAY,
description={"suggested_value": device_data[CONF_OFF_DELAY]},
): str,
}
else:
off_delay_schema = {
vol.Optional(CONF_OFF_DELAY): str,
}
data_schema.update(off_delay_schema)
if (
binary_supported(self._selected_device_object)
or cover_supported(self._selected_device_object)
or light_supported(self._selected_device_object)
or switch_supported(self._selected_device_object)
):
data_schema.update(
{
vol.Optional(
CONF_SIGNAL_REPETITIONS,
default=device_data.get(CONF_SIGNAL_REPETITIONS, 1),
): int,
}
)
if (
self._selected_device_object.device.packettype
== DEVICE_PACKET_TYPE_LIGHTING4
):
data_schema.update(
{
vol.Optional(
CONF_DATA_BITS, default=device_data.get(CONF_DATA_BITS, 0)
): int,
vol.Optional(
CONF_COMMAND_ON,
default=hex(device_data.get(CONF_COMMAND_ON, 0)),
): str,
vol.Optional(
CONF_COMMAND_OFF,
default=hex(device_data.get(CONF_COMMAND_OFF, 0)),
): str,
}
)
devices = {
entry.id: entry.name_by_user if entry.name_by_user else entry.name
for entry in self._device_entries
if self._can_replace_device(entry.id)
}
if devices:
data_schema.update(
{
vol.Optional(CONF_REPLACE_DEVICE): vol.In(devices),
}
)
return self.async_show_form(
step_id="set_device_options",
data_schema=vol.Schema(data_schema),
errors=errors,
)
async def _async_replace_device(self, replace_device):
"""Migrate properties of a device into another."""
device_registry = self._device_registry
old_device = self._selected_device_entry_id
old_entry = device_registry.async_get(old_device)
device_registry.async_update_device(
replace_device,
area_id=old_entry.area_id,
name_by_user=old_entry.name_by_user,
)
old_device_data = self._get_device_data(old_device)
new_device_data = self._get_device_data(replace_device)
old_device_id = "_".join(x for x in old_device_data[CONF_DEVICE_ID])
new_device_id = "_".join(x for x in new_device_data[CONF_DEVICE_ID])
entity_registry = await async_get_entity_registry(self.hass)
entity_entries = async_entries_for_device(entity_registry, old_device)
entity_migration_map = {}
for entry in entity_entries:
unique_id = entry.unique_id
new_unique_id = unique_id.replace(old_device_id, new_device_id)
new_entity_id = entity_registry.async_get_entity_id(
entry.domain, entry.platform, new_unique_id
)
if new_entity_id is not None:
entity_migration_map[new_entity_id] = entry
for entry in entity_migration_map.values():
entity_registry.async_remove(entry.entity_id)
for entity_id, entry in entity_migration_map.items():
entity_registry.async_update_entity(
entity_id,
new_entity_id=entry.entity_id,
name=entry.name,
icon=entry.icon,
)
device_registry.async_remove_device(old_device)
def _can_add_device(self, new_rfx_obj):
"""Check if device does not already exist."""
new_device_id = get_device_id(new_rfx_obj.device)
for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items():
rfx_obj = get_rfx_object(packet_id)
device_id = get_device_id(rfx_obj.device, entity_info.get(CONF_DATA_BITS))
if new_device_id == device_id:
return False
return True
def _can_replace_device(self, entry_id):
"""Check if device can be replaced with selected device."""
device_data = self._get_device_data(entry_id)
event_code = device_data[CONF_EVENT_CODE]
rfx_obj = get_rfx_object(event_code)
if (
rfx_obj.device.packettype == self._selected_device_object.device.packettype
and rfx_obj.device.subtype == self._selected_device_object.device.subtype
and self._selected_device_event_code != event_code
):
return True
return False
def _get_device_event_code(self, entry_id):
data = self._get_device_data(entry_id)
return data[CONF_EVENT_CODE]
def _get_device_data(self, entry_id):
"""Get event code based on device identifier."""
event_code = None
device_id = None
entry = self._device_registry.async_get(entry_id)
device_id = next(iter(entry.identifiers))[1:]
for packet_id, entity_info in self._config_entry.data[CONF_DEVICES].items():
if tuple(entity_info.get(CONF_DEVICE_ID)) == device_id:
event_code = packet_id
break
data = {CONF_EVENT_CODE: event_code, CONF_DEVICE_ID: device_id}
return data
@callback
def update_config_data(self, global_options=None, devices=None):
"""Update data in ConfigEntry."""
entry_data = self._config_entry.data.copy()
entry_data[CONF_DEVICES] = copy.deepcopy(self._config_entry.data[CONF_DEVICES])
if global_options:
entry_data.update(global_options)
if devices:
for event_code, options in devices.items():
if options is None:
entry_data[CONF_DEVICES].pop(event_code)
else:
entry_data[CONF_DEVICES][event_code] = options
self.hass.config_entries.async_update_entry(self._config_entry, data=entry_data)
self.hass.async_create_task(
self.hass.config_entries.async_reload(self._config_entry.entry_id)
)
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for RFXCOM RFXtrx."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH
async def async_step_user(self, user_input=None):
"""Step when user initializes a integration."""
await self.async_set_unique_id(DOMAIN)
self._abort_if_unique_id_configured()
errors = {}
if user_input is not None:
user_selection = user_input[CONF_TYPE]
if user_selection == "Serial":
return await self.async_step_setup_serial()
return await self.async_step_setup_network()
list_of_types = ["Serial", "Network"]
schema = vol.Schema({vol.Required(CONF_TYPE): vol.In(list_of_types)})
return self.async_show_form(step_id="user", data_schema=schema, errors=errors)
async def async_step_setup_network(self, user_input=None):
"""Step when setting up network configuration."""
errors = {}
if user_input is not None:
host = user_input[CONF_HOST]
port = user_input[CONF_PORT]
try:
data = await self.async_validate_rfx(host=host, port=port)
except CannotConnect:
errors["base"] = "cannot_connect"
if not errors:
return self.async_create_entry(title="RFXTRX", data=data)
schema = vol.Schema(
{vol.Required(CONF_HOST): str, vol.Required(CONF_PORT): int}
)
return self.async_show_form(
step_id="setup_network",
data_schema=schema,
errors=errors,
)
async def async_step_setup_serial(self, user_input=None):
"""Step when setting up serial configuration."""
errors = {}
if user_input is not None:
user_selection = user_input[CONF_DEVICE]
if user_selection == CONF_MANUAL_PATH:
return await self.async_step_setup_serial_manual_path()
dev_path = await self.hass.async_add_executor_job(
get_serial_by_id, user_selection
)
try:
data = await self.async_validate_rfx(device=dev_path)
except CannotConnect:
errors["base"] = "cannot_connect"
if not errors:
return self.async_create_entry(title="RFXTRX", data=data)
ports = await self.hass.async_add_executor_job(serial.tools.list_ports.comports)
list_of_ports = {}
for port in ports:
list_of_ports[
port.device
] = f"{port}, s/n: {port.serial_number or 'n/a'}" + (
f" - {port.manufacturer}" if port.manufacturer else ""
)
list_of_ports[CONF_MANUAL_PATH] = CONF_MANUAL_PATH
schema = vol.Schema({vol.Required(CONF_DEVICE): vol.In(list_of_ports)})
return self.async_show_form(
step_id="setup_serial",
data_schema=schema,
errors=errors,
)
async def async_step_setup_serial_manual_path(self, user_input=None):
"""Select path manually."""
errors = {}
if user_input is not None:
device = user_input[CONF_DEVICE]
try:
data = await self.async_validate_rfx(device=device)
except CannotConnect:
errors["base"] = "cannot_connect"
if not errors:
return self.async_create_entry(title="RFXTRX", data=data)
schema = vol.Schema({vol.Required(CONF_DEVICE): str})
return self.async_show_form(
step_id="setup_serial_manual_path",
data_schema=schema,
errors=errors,
)
async def async_step_import(self, import_config=None):
"""Handle the initial step."""
entry = await self.async_set_unique_id(DOMAIN)
if entry:
if CONF_DEVICES not in entry.data:
# In version 0.113, devices key was not written to config entry. Update the entry with import data
self._abort_if_unique_id_configured(import_config)
else:
self._abort_if_unique_id_configured()
host = import_config[CONF_HOST]
port = import_config[CONF_PORT]
device = import_config[CONF_DEVICE]
try:
if host is not None:
await self.async_validate_rfx(host=host, port=port)
else:
await self.async_validate_rfx(device=device)
except CannotConnect:
return self.async_abort(reason="cannot_connect")
return self.async_create_entry(title="RFXTRX", data=import_config)
async def async_validate_rfx(self, host=None, port=None, device=None):
"""Create data for rfxtrx entry."""
success = await self.hass.async_add_executor_job(
_test_transport, host, port, device
)
if not success:
raise CannotConnect
data = {
CONF_HOST: host,
CONF_PORT: port,
CONF_DEVICE: device,
CONF_AUTOMATIC_ADD: False,
CONF_DEVICES: {},
}
return data
@staticmethod
@callback
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
"""Get the options flow for this handler."""
return OptionsFlow(config_entry)
def _test_transport(host, port, device):
"""Construct a rfx object based on config."""
if port is not None:
try:
conn = rfxtrxmod.PyNetworkTransport((host, port))
except OSError:
return False
conn.close()
else:
try:
conn = rfxtrxmod.PySerialTransport(device)
except serial.serialutil.SerialException:
return False
if conn.serial is None:
return False
conn.close()
return True
def get_serial_by_id(dev_path: str) -> str:
"""Return a /dev/serial/by-id match for given device if available."""
by_id = "/dev/serial/by-id"
if not os.path.isdir(by_id):
return dev_path
for path in (entry.path for entry in os.scandir(by_id) if entry.is_symlink()):
if os.path.realpath(path) == dev_path:
return path
return dev_path
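# --- Illustrative sketch (not part of the original module) -----------------
# get_serial_by_id() maps a raw serial device path to its stable
# /dev/serial/by-id alias when one exists; the port path below is
# hypothetical, and the input is returned unchanged when no symlink matches.
def _example_get_serial_by_id():
    return get_serial_by_id("/dev/ttyUSB0")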
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
| GenericStudent/home-assistant | homeassistant/components/rfxtrx/config_flow.py | Python | apache-2.0 | 21,583 |
"""
Code Sources Used:
https://gist.github.com/mythz/5723202
https://github.com/dartist/sudoku_solver/blob/master/benchmarks/sudoku.py
https://github.com/gioGats/sudoku
http://norvig.com/sudoku.html
"""
import random
# API/Adapted Code #####################################################################################################
def generate_puzzles(num_puzzles, num_clues, output_filename):
# TODO Generate num_puzzles with num_clues
# TODO verify this works
output = open(output_filename, 'w')
for i in range(num_puzzles):
output.write(generate_puzzle(num_clues, asTuple=False))
output.write('\n')
output.close()
def generate_puzzle(num_clues, asTuple=False):
# TODO Generate a puzzle, solution tuple where the puzzle has num_clues
# TODO if string, return as a string, example:
# '004300209005009001070060043006002087190007400050083000600000105003508690042910300,
# 864371259325849761971265843436192587198657432257483916689734125713528694542916378'
# TODO elif tuple, return as a tuple, example:
# ('004300209005009001070060043006002087190007400050083000600000105003508690042910300',
# '864371259325849761971265843436192587198657432257483916689734125713528694542916378')
#
# TODO Must be a valid puzzle!
# TODO Use of the below refence code is fine, but make sure it works in Python3 (it's currently in Python2)
# TODO Make sure it works at the theoretical limit (num_clues -> 17)
result = random_puzzle(num_clues)
solution = solve(result)
solution_string, solution_tuple = "", []
for r in rows:
solution_string += (''.join(solution[r + c] for c in cols))
if asTuple:
solution_tuple.extend((result, solution_string))
return solution_tuple
else:
return result + ',\n' + solution_string
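# --- Illustrative usage sketch (not part of the original file) --------------
# The TODO comments above describe the two return shapes of generate_puzzle().
# A minimal driver might look like this; note that random_puzzle() is not
# guaranteed to yield a solvable grid, so this can occasionally fail upstream.
def _example_generate_puzzle_usage():
    puzzle, solution = generate_puzzle(num_clues=25, asTuple=True)
    assert len(puzzle) == 81 and len(solution) == 81
    as_string = generate_puzzle(num_clues=25, asTuple=False)
    # The string form is "<puzzle>,\n<solution>", as in the examples above.
    assert ',' in as_string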
# Norvig's Code ########################################################################################################
def cross(A, B):
"""Cross product of elements in A and elements in B."""
return [a + b for a in A for b in B]
digits = '123456789'
rows = 'ABCDEFGHI'
cols = digits
squares = cross(rows, cols)
unitlist = ([cross(rows, c) for c in cols] +
[cross(r, cols) for r in rows] +
[cross(rs, cs) for rs in ('ABC', 'DEF', 'GHI') for cs in ('123', '456', '789')])
units = dict((s, [u for u in unitlist if s in u])
for s in squares)
peers = dict((s, set(sum(units[s], [])) - {s})
for s in squares)
# Parse a Grid #########################################################################################################
def parse_grid(grid):
"""Convert grid to a dict of possible values, {square: digits}, or
return False if a contradiction is detected."""
# To start, every square can be any digit; then assign values from the grid.
values = dict((s, digits) for s in squares)
for s, d in grid_values(grid).items():
if d in digits and not assign(values, s, d):
return False # (Fail if we can't assign d to square s.)
return values
def grid_values(grid):
"""Convert grid into a dict of {square: char} with '0' or '.' for empties."""
chars = [c for c in grid if c in digits or c in '0.']
assert len(chars) == 81
return dict(zip(squares, chars))
# Constraint Propagation ###############################################################################################
def assign(values, s, d):
"""Eliminate all the other values (except d) from values[s] and propagate.
Return values, except return False if a contradiction is detected."""
other_values = values[s].replace(d, '')
if all(eliminate(values, s, d2) for d2 in other_values):
return values
else:
return False
def eliminate(values, s, d):
"""Eliminate d from values[s]; propagate when values or places <= 2.
Return values, except return False if a contradiction is detected."""
if d not in values[s]:
return values # Already eliminated
values[s] = values[s].replace(d, '')
# (1) If a square s is reduced to one value d2, then eliminate d2 from the peers.
if len(values[s]) == 0:
return False # Contradiction: removed last value
elif len(values[s]) == 1:
d2 = values[s]
if not all(eliminate(values, s2, d2) for s2 in peers[s]):
return False
# (2) If a unit u is reduced to only one place for a value d, then put it there.
for u in units[s]:
dplaces = [s for s in u if d in values[s]]
if len(dplaces) == 0:
return False # Contradiction: no place for this value
elif len(dplaces) == 1:
# d can only be in one place in unit; assign it there
if not assign(values, dplaces[0], d):
return False
return values
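# --- Illustrative sketch (not part of the original file) --------------------
# Shows the constraint propagation described in assign()/eliminate(): start
# from "every square can hold any digit", then a single assignment prunes
# that digit from all of the square's peers.
def _example_propagation():
    values = dict((s, digits) for s in squares)
    assign(values, 'A1', '5')
    assert values['A1'] == '5'
    assert '5' not in values['A2']  # same-row peer loses candidate 5
    assert '5' not in values['B2']  # same-box peer loses candidate 5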
# Display as 2-D grid ##################################################################################################
def display(values):
"""Display these values as a 2-D grid."""
width = 1 + max(len(values[s]) for s in squares)
line = '+'.join(['-' * (width * 3)] * 3)
for r in rows:
print(''.join(values[r + c].center(width) + ('|' if c in '36' else '')
for c in cols))
if r in 'CF':
print(line)
# Search ###############################################################################################################
def solve(grid): return search(parse_grid(grid))
def search(values):
"""Using depth-first search and propagation, try all possible values."""
if values is False:
return False # Failed earlier
if all(len(values[s]) == 1 for s in squares):
return values # Solved!
# Choose the unfilled square s with the fewest possibilities
n, s = min((len(values[s]), s) for s in squares if len(values[s]) > 1)
return some(search(assign(values.copy(), s, d))
for d in values[s])
def some(seq):
"""Return some element of seq that is true."""
for e in seq:
if e: return e
return False
# API Used #############################################################################################################
def random_puzzle(N=17):
"""Make a random puzzle with N or more assignments. Restart on contradictions.
Note the resulting puzzle is not guaranteed to be solvable, but empirically
about 99.8% of them are solvable. Some have multiple solutions."""
values = dict((s, digits) for s in squares)
for s in shuffled(squares):
if not assign(values, s, random.choice(values[s])):
break
ds = [values[s] for s in squares if len(values[s]) == 1]
if len(ds) == N and len(set(ds)) >= 8:
return ''.join(values[s] if len(values[s]) == 1 else '0' for s in squares)
return random_puzzle(N) # Give up and make a new puzzle
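# --- Illustrative sketch (not part of the original file) --------------------
# Ties the pieces together: make a random puzzle, run constraint propagation
# plus depth-first search, and render the result; callers should check
# solve()'s return value because the docstring above notes the puzzle is not
# guaranteed to be solvable.
def _example_solve_random_puzzle():
    grid = random_puzzle(25)
    values = solve(grid)
    if values:
        display(values)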
def shuffled(seq):
"Return a randomly shuffled copy of the input sequence."
seq = list(seq)
random.shuffle(seq)
return seq
| gioGats/sudoku | v0.1/generate_sudoku.py | Python | gpl-3.0 | 7,033 |
#!/usr/bin/python
# -*- coding: UTF-8 -*-
#Copyright (C) 2007 Adam Spencer - Free Veterinary Management Suite
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or (at your option) any later version.
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
##Contact: [email protected]
import wx
import wx.html
import miscmethods
import datetime
import animalmethods
import clientmethods
import db
import dbmethods
import customwidgets
class AppointmentSettings:
def __init__(self, localsettings, animalid, ID):
#(ID, AnimalID, OwnerID, Date, Time, AppointmentReason, Arrived, WithVet, Problem, Notes, Plan, Done, Operation, Vet)
self.localsettings = localsettings
if ID == False:
self.ID = False
self.animalid = animalid
self.animaldata = animalmethods.AnimalSettings(self.localsettings, False, self.animalid)
self.clientdata = clientmethods.ClientSettings(self.localsettings, self.animaldata.ownerid)
self.ownerid = self.clientdata.ID
self.date = datetime.date.today()
self.date = miscmethods.GetSQLDateFromDate(self.date)
self.time = "14:00"
self.reason = "Checkover"
self.arrived = 0
self.withvet = 0
self.problem = ""
self.notes = ""
self.plan = ""
self.done = 0
self.operation = 0
self.vet = "None"
currenttime = datetime.datetime.today().strftime("%x %X")
self.changelog = str(currenttime) + "%%%" + str(self.localsettings.userid)
else:
action = "SELECT * FROM appointment WHERE ID = " + str(ID)
results = db.SendSQL(action, localsettings.dbconnection)
self.ID = ID
self.animalid = results[0][1]
self.ownerid = results[0][2]
self.date = results[0][3]
self.time = results[0][4]
self.reason = results[0][5]
self.arrived = results[0][6]
self.withvet = results[0][7]
self.problem = results[0][8]
self.notes = results[0][9]
self.plan = results[0][10]
self.done = results[0][11]
self.operation = results[0][12]
self.vet = results[0][13]
self.changelog = results[0][14]
self.animaldata = animalmethods.AnimalSettings(self.localsettings, False, self.animalid)
self.clientdata = clientmethods.ClientSettings(self.localsettings, self.animaldata.ownerid)
def Submit(self):
currenttime = datetime.datetime.today().strftime("%x %X")
userid = self.localsettings.userid
if self.changelog == "":
self.changelog = currenttime + "%%%" + str(userid)
else:
self.changelog = currenttime + "%%%" + str(userid) + "$$$" + self.changelog
self.ID = dbmethods.WriteToAppointmentTable(self.localsettings.dbconnection, self)
class AppointmentPanel(wx.Panel):
def GetLabel(self, field):
return self.appointmentdata.localsettings.dictionary[field][self.appointmentdata.localsettings.language]
def GetButtonLabel(self, field, index):
return self.appointmentdata.localsettings.dictionary[field][self.appointmentdata.localsettings.language][index]
def __init__(self, notebook, appointmentdata):
self.appointmentdata = appointmentdata
wx.Panel.__init__(self, notebook)
self.viewappointmentspanel = False
if self.appointmentdata.operation == 0:
pagetitle = self.GetLabel("appointmentappointmentforlabel") + " " + self.appointmentdata.animaldata.name + " " + self.appointmentdata.clientdata.surname
else:
pagetitle = self.GetLabel("appointmentoperationforlabel") + " " + self.appointmentdata.animaldata.name + " " + self.appointmentdata.clientdata.surname
self.pagetitle = miscmethods.GetPageTitle(notebook, pagetitle)
datesizer = wx.BoxSizer(wx.HORIZONTAL)
self.appointmententry = customwidgets.DateCtrl(self, self.appointmentdata)
appointmentdate = miscmethods.GetWXDateFromSQLDate(self.appointmentdata.date)
self.appointmententry.SetValue(appointmentdate)
action = "SELECT Name FROM user WHERE Position = \"Vet\" OR Position = \"Manager\""
results = db.SendSQL(action, self.appointmentdata.localsettings.dbconnection)
vets = []
if len(results) != 0:
for a in results:
vets.append(a[0])
self.vetcombobox = wx.ComboBox(self, -1, "Vet", choices=vets)
if self.appointmentdata.vet != "None":
self.vetcombobox.SetValue(str(self.appointmentdata.vet))
self.vetcombobox.Bind(wx.EVT_CHAR, self.UseVetComboBox)
self.vetcombobox.SetToolTipString(self.GetLabel("appointmententervettooltip"))
refreshbitmap = wx.Bitmap("icons/refresh.png")
refreshappointmentsbutton = wx.BitmapButton(self, -1, refreshbitmap)
refreshappointmentsbutton.Bind(wx.EVT_BUTTON, self.RefreshAppointment)
refreshappointmentsbutton.SetToolTipString(self.GetLabel("appointmentrefreshtooltip"))
datesizer.Add(self.appointmententry, 1, wx.EXPAND)
datesizer.Add(self.vetcombobox, 1, wx.EXPAND)
datesizer.Add(refreshappointmentsbutton, 0, wx.ALIGN_LEFT)
reasonsizer = wx.BoxSizer(wx.VERTICAL)
self.reasonlabel = wx.StaticText(self, -1, self.GetLabel("appointmentreasonlabel"))
reasonsizer.Add(self.reasonlabel, 0, wx.ALIGN_LEFT)
self.reasonentry = wx.TextCtrl(self, -1, self.appointmentdata.reason, style=wx.TE_MULTILINE, size=(-1,100))
self.reasonentry.SetFocus()
reasonsizer.Add(self.reasonentry, 0, wx.EXPAND)
searchsizer = wx.BoxSizer(wx.VERTICAL)
searchsizer.Add(datesizer, 0, wx.EXPAND)
searchsizer.Add(reasonsizer, 0, wx.EXPAND)
searchspacer2 = wx.StaticText(self, -1, "", size=(-1,10))
searchsizer.Add(searchspacer2, 0, wx.EXPAND)
appointmenttimesizer = wx.BoxSizer(wx.HORIZONTAL)
self.appointmenttimelabel = wx.StaticText(self, -1, self.GetLabel("appointmenttimelabel"))
time = str(self.appointmentdata.time)
if len(str(time)) == 7:
time = "0" + time[:4]
else:
time = time[:5]
self.appointmenttimeentry = wx.TextCtrl(self, -1, time)
appointmenttimesizer.Add(self.appointmenttimelabel, 0, wx.ALIGN_CENTER)
appointmenttimesizer.Add(self.appointmenttimeentry, 0, wx.EXPAND)
appointmenttimespacer = wx.StaticText(self, -1, "")
appointmenttimesizer.Add(appointmenttimespacer, 1, wx.EXPAND)
self.opcheckbox = wx.CheckBox(self, -1, self.GetButtonLabel("appointmentisopcheckbox", 0))
self.opcheckbox.Bind(wx.EVT_CHECKBOX, self.SwitchToOps)
self.opcheckbox.SetToolTipString(self.GetButtonLabel("appointmentisopcheckbox", 1))
appointmenttimesizer.Add(self.opcheckbox, 0, wx.ALIGN_CENTER)
appointmenttimespacer1 = wx.StaticText(self, -1, "")
appointmenttimesizer.Add(appointmenttimespacer1, 1, wx.EXPAND)
submitbitmap = wx.Bitmap("icons/submit.png")
appointmentsubmitbutton = wx.BitmapButton(self, -1, submitbitmap)
appointmentsubmitbutton.Bind(wx.EVT_BUTTON, self.Submit)
appointmentsubmitbutton.SetToolTipString(self.GetLabel("appointmentsubmittooltip"))
appointmenttimesizer.Add(appointmentsubmitbutton)
searchsizer.Add(appointmenttimesizer, 0, wx.EXPAND)
searchspacer3 = wx.StaticText(self, -1, "", size=(-1,10))
searchsizer.Add(searchspacer3, 0, wx.EXPAND)
buttonssizer = wx.BoxSizer(wx.HORIZONTAL)
deletebitmap = wx.Bitmap("icons/delete.png")
deletebutton = wx.BitmapButton(self, -1, deletebitmap)
deletebutton.SetToolTipString(self.GetLabel("appointmentdeletetooltip"))
deletebutton.Bind(wx.EVT_BUTTON, self.Delete)
buttonssizer.Add(deletebutton, 0, wx.EXPAND)
if self.appointmentdata.localsettings.deleteappointments == 0:
deletebutton.Disable()
buttonsspacer = wx.StaticText(self, -1, "")
buttonssizer.Add(buttonsspacer, 1, wx.EXPAND)
statuslabel = wx.StaticText(self, -1, self.GetLabel("appointmentstatuslabel"))
buttonssizer.Add(statuslabel, 0, wx.ALIGN_CENTER)
statuschoice = wx.Choice(self, -1, choices=(self.GetLabel("appointmentnotarrivedlabel"), self.GetLabel("appointmentwaitinglabel"), self.GetLabel("appointmentwithvetlabel"), self.GetLabel("appointmentdonelabel")))
if self.appointmentdata.done == 1:
statuschoice.SetSelection(3)
elif self.appointmentdata.withvet == 1:
statuschoice.SetSelection(2)
elif self.appointmentdata.arrived == 1:
statuschoice.SetSelection(1)
else:
statuschoice.SetSelection(0)
buttonssizer.Add(statuschoice, 0, wx.EXPAND)
searchsizer.Add(buttonssizer, 0, wx.EXPAND)
searchspacer = wx.StaticText(self, -1, "", size=(-1,10))
searchsizer.Add(searchspacer, 0, wx.EXPAND)
owneranimalsizer = wx.BoxSizer(wx.HORIZONTAL)
editownerbutton = wx.Button(self, -1, self.GetButtonLabel("appointmenteditownerbutton", 0))
editownerbutton.SetForegroundColour("blue")
editownerbutton.SetToolTipString(self.GetButtonLabel("appointmenteditownerbutton", 1))
editownerbutton.Bind(wx.EVT_BUTTON, self.OpenClientRecord)
owneranimalsizer.Add(editownerbutton, 0, wx.EXPAND)
if self.appointmentdata.localsettings.editclients == 0:
editownerbutton.Disable()
owneranimalspacer = wx.StaticText(self, -1, "")
owneranimalsizer.Add(owneranimalspacer, 1, wx.EXPAND)
editanimalbutton = wx.Button(self, -1, self.GetButtonLabel("appointmenteditanimalbutton", 0))
editanimalbutton.SetForegroundColour("blue")
editanimalbutton.SetToolTipString(self.GetButtonLabel("appointmenteditanimalbutton", 1))
editanimalbutton.Bind(wx.EVT_BUTTON, self.OpenAnimalRecord)
owneranimalsizer.Add(editanimalbutton, 0, wx.EXPAND)
if self.appointmentdata.localsettings.editanimals == 0:
editanimalbutton.Disable()
searchsizer.Add(owneranimalsizer, 0, wx.EXPAND)
searchspacer1 = wx.StaticText(self, -1, "")
searchsizer.Add(searchspacer1, 1, wx.EXPAND)
#Right hand pane
date = self.appointmententry.GetValue()
date = miscmethods.GetDateFromWXDate(date)
date = miscmethods.FormatDate(date, self.appointmentdata.localsettings)
appointmentslistboxlabeltext = self.GetLabel("appointmentappointmentsforlabel") + " " + str(date)
self.appointmentslistboxlabel = wx.StaticText(self, -1, appointmentslistboxlabeltext)
self.appointmentslistbox = customwidgets.DayPlannerListbox(self, appointmentdata.localsettings, date, 10)
self.appointmentslistbox.Bind(wx.EVT_LISTBOX_DCLICK, self.GetTime)
appointmentslistboxsizer = wx.BoxSizer(wx.VERTICAL)
appointmentslistboxsizer.Add(self.appointmentslistboxlabel, 0, wx.EXPAND)
appointmentslistboxsizer.Add(self.appointmentslistbox, 1, wx.EXPAND)
self.appointmentlistboxtotal = wx.StaticText(self, -1, self.GetLabel("totallabel") + ": 0")
appointmentslistboxsizer.Add(self.appointmentlistboxtotal, 0, wx.ALIGN_RIGHT)
mainsizer = wx.BoxSizer(wx.HORIZONTAL)
mainsizer.Add(searchsizer, 1, wx.EXPAND)
spacer = wx.StaticText(self, -1, "", size=(50,-1))
mainsizer.Add(spacer, 0, wx.EXPAND)
mainsizer.Add(appointmentslistboxsizer, 2, wx.EXPAND)
topsizer = wx.BoxSizer(wx.VERTICAL)
closebuttonsizer = wx.BoxSizer(wx.HORIZONTAL)
closebuttonspacer2 = wx.StaticText(self, -1, "")
closebuttonsizer.Add(closebuttonspacer2, 1, wx.EXPAND)
topsizer.Add(closebuttonsizer, 0, wx.EXPAND)
topsizer.Add(mainsizer, 1, wx.EXPAND)
self.SetSizer(topsizer)
self.appointmentslistboxsizer = appointmentslistboxsizer
self.statuschoice = statuschoice
if self.appointmentdata.operation == 1:
self.opcheckbox.SetValue(True)
self.SwitchToOps()
self.RefreshAppointment()
def UseVetComboBox(self, ID=False):
parent = ID.GetEventObject()
if parent.GetValue() == "Vet":
parent.SetValue("")
ID.Skip()
def RefreshTotal(self, ID=False):
date = self.appointmententry.GetValue()
sqldate = miscmethods.GetSQLDateFromWXDate(date)
if self.opcheckbox.GetValue() == True:
operation = 1
else:
operation = 0
action = "SELECT ID FROM appointment WHERE appointment.Date = \"" + sqldate + "\" AND appointment.Operation = " + str(operation)
results = db.SendSQL(action, self.appointmentdata.localsettings.dbconnection)
total = len(results)
self.appointmentlistboxtotal.SetLabel(self.GetLabel("totallabel") + ": " + str(total))
self.appointmentslistboxsizer.Layout()
def SwitchToOps(self, ID=False):
isop = self.opcheckbox.GetValue()
date = self.appointmententry.GetValue()
weekday = date.GetWeekDay()
weekday = miscmethods.GetDayNameFromID(weekday, self.appointmentdata.localsettings)
sqldate = miscmethods.GetSQLDateFromWXDate(date)
datestring = miscmethods.GetDateFromWXDate(date)
datestring = miscmethods.FormatDate(datestring, self.appointmentdata.localsettings)
if isop == True:
self.appointmenttimeentry.SetValue("09:00")
self.appointmenttimeentry.Disable()
else:
self.appointmenttimeentry.Enable()
self.RefreshAppointment()
self.RefreshTotal()
def Submit(self, ID):
if self.opcheckbox.GetValue() == True:
self.SubmitOperation(ID)
else:
self.SubmitAppointment(ID)
def SubmitOperation(self, ID):
self.appointmentdata.date = miscmethods.GetSQLDateFromWXDate(self.appointmententry.GetValue())
self.appointmentdata.time = self.appointmentdata.localsettings.operationtime
self.appointmentdata.vet = self.vetcombobox.GetValue()
self.appointmentdata.reason = self.reasonentry.GetValue()
self.appointmentdata.operation = 1
choice = self.statuschoice.GetSelection()
if choice == 0:
self.appointmentdata.arrived = 0
self.appointmentdata.withvet = 0
self.appointmentdata.done = 0
elif choice == 1:
self.appointmentdata.arrived = 1
self.appointmentdata.withvet = 0
self.appointmentdata.done = 0
elif choice == 2:
self.appointmentdata.arrived = 1
self.appointmentdata.withvet = 1
self.appointmentdata.done = 0
elif choice == 3:
self.appointmentdata.arrived = 1
self.appointmentdata.withvet = 0
self.appointmentdata.done = 1
self.appointmentdata.Submit()
try:
self.parent.RefreshAppointments()
except:
pass
self.Close()
def SubmitAppointment(self, ID):
time = self.appointmenttimeentry.GetValue()
success = False
if miscmethods.ValidateTime(time) == True:
if miscmethods.GetMinutesFromTime(time) < miscmethods.GetMinutesFromTime(self.appointmentdata.localsettings.opento) + 1:
if miscmethods.GetMinutesFromTime(time) > miscmethods.GetMinutesFromTime(self.appointmentdata.localsettings.openfrom) - 1:
time = time[:2] + ":" + time[3:5]
success = True
else:
failurereason = self.GetLabel("appointmenttimetooearlymessage")
else:
failurereason = self.GetLabel("appointmenttimetoolatemessage")
else:
failurereason = self.GetLabel("appointmentinvalidtimemessage")
if success == True:
self.appointmentdata.date = miscmethods.GetSQLDateFromWXDate(self.appointmententry.GetValue())
self.appointmentdata.time = time
self.appointmentdata.reason = self.reasonentry.GetValue()
self.appointmentdata.operation = 0
if self.vetcombobox.GetValue() == "Vet":
self.appointmentdata.vet = "None"
else:
self.appointmentdata.vet = self.vetcombobox.GetValue()
choice = self.statuschoice.GetSelection()
if choice == 0:
self.appointmentdata.arrived = 0
self.appointmentdata.withvet = 0
self.appointmentdata.done = 0
elif choice == 1:
self.appointmentdata.arrived = 1
self.appointmentdata.withvet = 0
self.appointmentdata.done = 0
elif choice == 2:
self.appointmentdata.arrived = 1
self.appointmentdata.withvet = 1
self.appointmentdata.done = 0
elif choice == 3:
self.appointmentdata.arrived = 1
self.appointmentdata.withvet = 0
self.appointmentdata.done = 1
self.appointmentdata.Submit()
try:
self.parent.RefreshAppointments()
except:
pass
self.Close()
else:
miscmethods.ShowMessage(failurereason)
def RefreshAppointment(self, ID=False):
localsettings = self.appointmentdata.localsettings
date = self.appointmententry.GetValue()
weekday = date.GetWeekDay()
weekday = miscmethods.GetDayNameFromID(weekday, self.appointmentdata.localsettings)
sqldate = miscmethods.GetSQLDateFromWXDate(date)
datestring = miscmethods.GetDateFromWXDate(date)
datestring = miscmethods.FormatDate(datestring, self.appointmentdata.localsettings)
isop = self.opcheckbox.GetValue()
if isop == True:
appointmentslistboxlabeltext = self.GetLabel("appointmentoperationsforlabel") + " " + weekday + " " + str(datestring)
else:
appointmentslistboxlabeltext = self.GetLabel("appointmentappointmentsforlabel") + " " + weekday + " " + str(datestring)
self.appointmentslistboxlabel.SetLabel(appointmentslistboxlabeltext)
self.appointmentslistbox.sqldate = sqldate
self.appointmentslistbox.RefreshList()
self.RefreshTotal()
def GetTime(self,ID):
listboxid = self.appointmentslistbox.GetSelection()
action = "SELECT * FROM settings"
results = db.SendSQL(action, self.appointmentdata.localsettings.dbconnection)
openfromraw = results[0][2]
openfromtime = ( int(str(openfromraw)[:2]) * 60 ) + int(str(openfromraw)[3:5])
appointmenttime = openfromtime + (listboxid * 10)
appointmenttime = miscmethods.GetTimeFromMinutes(appointmenttime)[:5]
self.appointmenttimeentry.SetValue(appointmenttime)
def Delete(self, ID):
if miscmethods.ConfirmMessage("Are you sure that you want to delete this appointment?") == True:
action = "DELETE FROM appointment WHERE ID = " + str(self.appointmentdata.ID)
db.SendSQL(action, self.appointmentdata.localsettings.dbconnection)
self.Close(self)
def OpenAnimalRecord(self, ID):
notebook = ID.GetEventObject().GetGrandParent()
animaldata = self.appointmentdata.animaldata
animalpanel = animalmethods.AnimalPanel(notebook, animaldata)
notebook.AddPage(animalpanel)
def OpenClientRecord(self, ID):
notebook = ID.GetEventObject().GetGrandParent()
clientdata = self.appointmentdata.clientdata
clientpanel = clientmethods.ClientPanel(notebook, clientdata)
notebook.AddPage(clientpanel)
def Close(self, ID=False):
if self.viewappointmentspanel != False:
self.viewappointmentspanel.RefreshLists()
miscmethods.ClosePanel(self)
| cyncyncyn/evette | appointmentsearchmethods.py | Python | gpl-2.0 | 18,586 |
"""
Test Options
------------
"""
from virtstrap.options import *
def test_initialize_cli_list():
l = CLIList()
assert isinstance(l, list)
def test_cli_list_acts_like_list():
"""Ensure that CLIList still acts like a list"""
l = CLIList([1,2,3,4])
assert l[0] == 1
assert l[1:] == [2,3,4]
l.append(5)
assert l == [1,2,3,4,5]
assert l[-1] == 5
def test_cli_list_converts_to_cli_string():
l1 = CLIList([1,2,3,4])
assert str(l1) == '1,2,3,4'
assert unicode(l1) == u'1,2,3,4'
l2 = CLIList(['a','b','c','d'])
assert str(l2) == 'a,b,c,d'
assert unicode(l2) == u'a,b,c,d'
def test_convert_base_options_to_args():
base_parser = create_base_parser()
options = base_parser.parse_args(args=[])
converted = base_options_to_args(options)
assert converted == ['--config-file=VEfile', '--log=.virtstrap.log',
'--profiles=development', '--verbosity=2',
'--virtstrap-dir=.vs.env']
def test_convert_base_options_to_args_with_multiple_profiles():
"""Check that multiple profiles show up correctly"""
# This is a regression test.
base_parser = create_base_parser()
options = base_parser.parse_args(
args=['--profiles=development,production'])
converted = base_options_to_args(options)
assert converted == ['--config-file=VEfile', '--log=.virtstrap.log',
'--profiles=development,production', '--verbosity=2',
'--virtstrap-dir=.vs.env']
| ravenac95/virtstrap | virtstrap-core/tests/test_options.py | Python | mit | 1,488 |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Common codegen classes.
from collections import defaultdict
from itertools import groupby
import operator
import os
import re
import string
import textwrap
import functools
from WebIDL import (
BuiltinTypes,
IDLBuiltinType,
IDLNullValue,
IDLNullableType,
IDLType,
IDLInterfaceMember,
IDLUndefinedValue,
IDLWrapperType,
)
from Configuration import (
MakeNativeName,
MemberIsUnforgeable,
getModuleFromObject,
getTypesFromCallback,
getTypesFromDescriptor,
getTypesFromDictionary,
iteratorNativeType
)
AUTOGENERATED_WARNING_COMMENT = \
"/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n"
FINALIZE_HOOK_NAME = '_finalize'
TRACE_HOOK_NAME = '_trace'
CONSTRUCT_HOOK_NAME = '_constructor'
HASINSTANCE_HOOK_NAME = '_hasInstance'
RUST_KEYWORDS = {"abstract", "alignof", "as", "become", "box", "break", "const", "continue",
"else", "enum", "extern", "false", "final", "fn", "for", "if", "impl", "in",
"let", "loop", "macro", "match", "mod", "move", "mut", "offsetof", "override",
"priv", "proc", "pub", "pure", "ref", "return", "static", "self", "sizeof",
"struct", "super", "true", "trait", "type", "typeof", "unsafe", "unsized",
"use", "virtual", "where", "while", "yield"}
def replaceFileIfChanged(filename, newContents):
"""
Read a copy of the old file, so that we don't touch it if it hasn't changed.
Returns True if the file was updated, False otherwise.
"""
# XXXjdm This doesn't play well with make right now.
# Force the file to always be updated, or else changing CodegenRust.py
# will cause many autogenerated bindings to be regenerated perpetually
# until the result is actually different.
# oldFileContents = ""
# try:
# with open(filename, 'rb') as oldFile:
# oldFileContents = ''.join(oldFile.readlines())
# except:
# pass
# if newContents == oldFileContents:
# return False
with open(filename, 'wb') as f:
f.write(newContents)
return True
def toStringBool(arg):
return str(not not arg).lower()
def toBindingNamespace(arg):
return re.sub("((_workers)?$)", "Binding\\1", MakeNativeName(arg))
def stripTrailingWhitespace(text):
tail = '\n' if text.endswith('\n') else ''
lines = text.splitlines()
for i in range(len(lines)):
lines[i] = lines[i].rstrip()
return '\n'.join(lines) + tail
def innerSequenceType(type):
assert type.isSequence()
return type.inner.inner if type.nullable() else type.inner
builtinNames = {
IDLType.Tags.bool: 'bool',
IDLType.Tags.int8: 'i8',
IDLType.Tags.int16: 'i16',
IDLType.Tags.int32: 'i32',
IDLType.Tags.int64: 'i64',
IDLType.Tags.uint8: 'u8',
IDLType.Tags.uint16: 'u16',
IDLType.Tags.uint32: 'u32',
IDLType.Tags.uint64: 'u64',
IDLType.Tags.unrestricted_float: 'f32',
IDLType.Tags.float: 'Finite<f32>',
IDLType.Tags.unrestricted_double: 'f64',
IDLType.Tags.double: 'Finite<f64>'
}
numericTags = [
IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32, IDLType.Tags.uint32,
IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float,
IDLType.Tags.unrestricted_double
]
def unwrapCastableObject(descriptor, source, codeOnFailure, conversionFunction):
"""
A function for unwrapping an object named by the "source" argument
based on the passed-in descriptor. Returns the string of the Rust expression of
the appropriate type.
codeOnFailure is the code to run if unwrapping fails.
"""
args = {
"failureCode": CGIndenter(CGGeneric(codeOnFailure), 8).define(),
"function": conversionFunction,
"source": source,
}
return """\
match %(function)s(%(source)s) {
Ok(val) => val,
Err(()) => {
%(failureCode)s
}
}""" % args
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
lineStartDetector = re.compile("^(?=[^\n#])", re.MULTILINE)
def indent(s, indentLevel=2):
"""
Indent C++ code.
Weird secret feature: this doesn't indent lines that start with # (such as
#include lines or #ifdef/#endif).
"""
if s == "":
return s
return re.sub(lineStartDetector, indentLevel * " ", s)
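# --- Illustrative sketch (not part of the original file) --------------------
# indent() prefixes every non-empty line that does not start with '#', so
# preprocessor-style lines keep column zero:
def _example_indent():
    expected = "    let x;\n#if FOO\n    let y;\n"
    assert indent("let x;\n#if FOO\nlet y;\n", 4) == expected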
# dedent() and fill() are often called on the same string multiple
# times. We want to memoize their return values so we don't keep
# recomputing them all the time.
def memoize(fn):
"""
Decorator to memoize a function of one argument. The cache just
grows without bound.
"""
cache = {}
@functools.wraps(fn)
def wrapper(arg):
retval = cache.get(arg)
if retval is None:
retval = cache[arg] = fn(arg)
return retval
return wrapper
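# --- Illustrative sketch (not part of the original file) --------------------
# memoize() caches by the single positional argument, so repeated calls with
# the same value reuse the first result; the helper below is hypothetical.
@memoize
def _memoize_example(n):
    """Hypothetical helper: only computed once per distinct n."""
    return n * n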
@memoize
def dedent(s):
"""
Remove all leading whitespace from s, and remove a blank line
at the beginning.
"""
if s.startswith('\n'):
s = s[1:]
return textwrap.dedent(s)
# This works by transforming the fill()-template to an equivalent
# string.Template.
fill_multiline_substitution_re = re.compile(r"( *)\$\*{(\w+)}(\n)?")
@memoize
def compile_fill_template(template):
"""
Helper function for fill(). Given the template string passed to fill(),
do the reusable part of template processing and return a pair (t,
argModList) that can be used every time fill() is called with that
template argument.
argsModList is list of tuples that represent modifications to be
made to args. Each modification has, in order: i) the arg name,
ii) the modified name, iii) the indent depth.
"""
t = dedent(template)
assert t.endswith("\n") or "\n" not in t
argModList = []
def replace(match):
"""
Replaces a line like ' $*{xyz}\n' with '${xyz_n}',
where n is the indent depth, and add a corresponding entry to
argModList.
Note that this needs to close over argModList, so it has to be
defined inside compile_fill_template().
"""
indentation, name, nl = match.groups()
depth = len(indentation)
# Check that $*{xyz} appears by itself on a line.
prev = match.string[:match.start()]
if (prev and not prev.endswith("\n")) or nl is None:
raise ValueError("Invalid fill() template: $*{%s} must appear by itself on a line" % name)
# Now replace this whole line of template with the indented equivalent.
modified_name = name + "_" + str(depth)
argModList.append((name, modified_name, depth))
return "${" + modified_name + "}"
t = re.sub(fill_multiline_substitution_re, replace, t)
return (string.Template(t), argModList)
def fill(template, **args):
"""
Convenience function for filling in a multiline template.
`fill(template, name1=v1, name2=v2)` is a lot like
`string.Template(template).substitute({"name1": v1, "name2": v2})`.
However, it's shorter, and has a few nice features:
* If `template` is indented, fill() automatically dedents it!
This makes code using fill() with Python's multiline strings
much nicer to look at.
* If `template` starts with a blank line, fill() strips it off.
(Again, convenient with multiline strings.)
* fill() recognizes a special kind of substitution
of the form `$*{name}`.
Use this to paste in, and automatically indent, multiple lines.
(Mnemonic: The `*` is for "multiple lines").
A `$*` substitution must appear by itself on a line, with optional
preceding indentation (spaces only). The whole line is replaced by the
corresponding keyword argument, indented appropriately. If the
argument is an empty string, no output is generated, not even a blank
line.
"""
t, argModList = compile_fill_template(template)
# Now apply argModList to args
for (name, modified_name, depth) in argModList:
if not (args[name] == "" or args[name].endswith("\n")):
raise ValueError("Argument %s with value %r is missing a newline" % (name, args[name]))
args[modified_name] = indent(args[name], depth)
return t.substitute(args)
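# A minimal sketch of fill() in action (the template below is made up):
#     fill(
#         """
#         if ${check} {
#             $*{body}
#         }
#         """,
#         check="!v.is_object()",
#         body="throw_type_error(cx, \"not an object\");\nreturn false;\n")
# The template is dedented, ${check} is substituted inline, and the two lines
# passed as `body` are pasted in, each indented to the column where $*{body}
# appeared.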
class CGThing():
"""
Abstract base class for things that spit out code.
"""
def __init__(self):
pass # Nothing for now
def define(self):
"""Produce code for a Rust file."""
raise NotImplementedError # Override me!
class CGMethodCall(CGThing):
"""
A class to generate selection of a method signature from a set of
signatures and generation of a call to that signature.
"""
def __init__(self, argsPre, nativeMethodName, static, descriptor, method):
CGThing.__init__(self)
methodName = '\\"%s.%s\\"' % (descriptor.interface.identifier.name, method.identifier.name)
def requiredArgCount(signature):
arguments = signature[1]
if len(arguments) == 0:
return 0
requiredArgs = len(arguments)
while requiredArgs and arguments[requiredArgs - 1].optional:
requiredArgs -= 1
return requiredArgs
signatures = method.signatures()
def getPerSignatureCall(signature, argConversionStartsAt=0):
signatureIndex = signatures.index(signature)
return CGPerSignatureCall(signature[0], argsPre, signature[1],
nativeMethodName + '_' * signatureIndex,
static, descriptor,
method, argConversionStartsAt)
if len(signatures) == 1:
# Special case: we can just do a per-signature method call
# here for our one signature and not worry about switching
# on anything.
signature = signatures[0]
self.cgRoot = CGList([getPerSignatureCall(signature)])
requiredArgs = requiredArgCount(signature)
if requiredArgs > 0:
code = (
"if argc < %d {\n"
" throw_type_error(cx, \"Not enough arguments to %s.\");\n"
" return false;\n"
"}" % (requiredArgs, methodName))
self.cgRoot.prepend(
CGWrapper(CGGeneric(code), pre="\n", post="\n"))
return
# Need to find the right overload
maxArgCount = method.maxArgCount
allowedArgCounts = method.allowedArgCounts
argCountCases = []
for argCount in allowedArgCounts:
possibleSignatures = method.signaturesForArgCount(argCount)
if len(possibleSignatures) == 1:
# easy case!
signature = possibleSignatures[0]
argCountCases.append(CGCase(str(argCount), getPerSignatureCall(signature)))
continue
distinguishingIndex = method.distinguishingIndexForArgCount(argCount)
# We can't handle unions at the distinguishing index.
for (returnType, args) in possibleSignatures:
if args[distinguishingIndex].type.isUnion():
raise TypeError("No support for unions as distinguishing "
"arguments yet: %s",
args[distinguishingIndex].location)
# Convert all our arguments up to the distinguishing index.
# Doesn't matter which of the possible signatures we use, since
# they all have the same types up to that point; just use
# possibleSignatures[0]
caseBody = [
CGArgumentConverter(possibleSignatures[0][1][i],
i, "args", "argc", descriptor)
for i in range(0, distinguishingIndex)]
# Select the right overload from our set.
distinguishingArg = "args.get(%d)" % distinguishingIndex
def pickFirstSignature(condition, filterLambda):
sigs = filter(filterLambda, possibleSignatures)
assert len(sigs) < 2
if len(sigs) > 0:
call = getPerSignatureCall(sigs[0], distinguishingIndex)
if condition is None:
caseBody.append(call)
else:
caseBody.append(CGGeneric("if " + condition + " {"))
caseBody.append(CGIndenter(call))
caseBody.append(CGGeneric("}"))
return True
return False
# First check for null or undefined
pickFirstSignature("%s.isNullOrUndefined()" % distinguishingArg,
lambda s: (s[1][distinguishingIndex].type.nullable() or
s[1][distinguishingIndex].type.isDictionary()))
# Now check for distinguishingArg being an object that implements a
# non-callback interface. That includes typed arrays and
# arraybuffers.
interfacesSigs = [
s for s in possibleSignatures
if (s[1][distinguishingIndex].type.isObject() or
s[1][distinguishingIndex].type.isNonCallbackInterface())]
# There might be more than one of these; we need to check
# which ones we unwrap to.
if len(interfacesSigs) > 0:
# The spec says that we should check for "platform objects
# implementing an interface", but it's enough to guard on these
# being an object. The code for unwrapping non-callback
# interfaces and typed arrays will just bail out and move on to
# the next overload if the object fails to unwrap correctly. We
# could even not do the isObject() check up front here, but in
# cases where we have multiple object overloads it makes sense
# to do it only once instead of for each overload. That will
# also allow the unwrapping test to skip having to do codegen
# for the null-or-undefined case, which we already handled
# above.
caseBody.append(CGGeneric("if %s.get().is_object() {" %
(distinguishingArg)))
for idx, sig in enumerate(interfacesSigs):
caseBody.append(CGIndenter(CGGeneric("loop {")))
type = sig[1][distinguishingIndex].type
# The argument at index distinguishingIndex can't possibly
# be unset here, because we've already checked that argc is
# large enough that we can examine this argument.
info = getJSToNativeConversionInfo(
type, descriptor, failureCode="break;", isDefinitelyObject=True)
template = info.template
declType = info.declType
testCode = instantiateJSToNativeConversionTemplate(
template,
{"val": distinguishingArg},
declType,
"arg%d" % distinguishingIndex)
# Indent by 4, since we need to indent further than our "do" statement
caseBody.append(CGIndenter(testCode, 4))
# If we got this far, we know we unwrapped to the right
# interface, so just do the call. Start conversion with
# distinguishingIndex + 1, since we already converted
# distinguishingIndex.
caseBody.append(CGIndenter(
getPerSignatureCall(sig, distinguishingIndex + 1), 4))
caseBody.append(CGIndenter(CGGeneric("}")))
caseBody.append(CGGeneric("}"))
# XXXbz Now we're supposed to check for distinguishingArg being
# an array or a platform object that supports indexed
# properties... skip that last for now. It's a bit of a pain.
pickFirstSignature("%s.get().is_object() && is_array_like(cx, %s)" %
(distinguishingArg, distinguishingArg),
lambda s:
(s[1][distinguishingIndex].type.isArray() or
s[1][distinguishingIndex].type.isSequence() or
s[1][distinguishingIndex].type.isObject()))
# Check for Date objects
# XXXbz Do we need to worry about security wrappers around the Date?
pickFirstSignature("%s.get().is_object() && JS_ObjectIsDate(cx, &%s.get().to_object())" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isDate() or
s[1][distinguishingIndex].type.isObject()))
# Check for vanilla JS objects
# XXXbz Do we need to worry about security wrappers?
pickFirstSignature("%s.get().is_object() && !is_platform_object(%s.get().to_object())" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isCallback() or
s[1][distinguishingIndex].type.isCallbackInterface() or
s[1][distinguishingIndex].type.isDictionary() or
s[1][distinguishingIndex].type.isObject()))
# The remaining cases are mutually exclusive. The
# pickFirstSignature calls are what change caseBody
# Check for strings or enums
if pickFirstSignature(None,
lambda s: (s[1][distinguishingIndex].type.isString() or
s[1][distinguishingIndex].type.isEnum())):
pass
# Check for primitives
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isPrimitive()):
pass
# Check for "any"
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isAny()):
pass
else:
# Just throw; we have no idea what we're supposed to
# do with this.
caseBody.append(CGGeneric("return Throw(cx, NS_ERROR_XPC_BAD_CONVERT_JS);"))
argCountCases.append(CGCase(str(argCount),
CGList(caseBody, "\n")))
overloadCGThings = []
overloadCGThings.append(
CGGeneric("let argcount = cmp::min(argc, %d);" %
maxArgCount))
overloadCGThings.append(
CGSwitch("argcount",
argCountCases,
CGGeneric("throw_type_error(cx, \"Not enough arguments to %s.\");\n"
"return false;" % methodName)))
# XXXjdm Avoid unreachable statement warnings
# overloadCGThings.append(
# CGGeneric('panic!("We have an always-returning default case");\n'
# 'return false;'))
self.cgRoot = CGWrapper(CGList(overloadCGThings, "\n"),
pre="\n")
def define(self):
return self.cgRoot.define()
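# Rough shape of the dispatch code CGMethodCall emits for an overloaded method
# (illustrative only; the exact output depends on the signatures involved):
#     let argcount = cmp::min(argc, 2);
#     match argcount {
#         1 => { /* convert arg0 and call the one-argument overload */ },
#         2 => { /* inspect args.get(1) to pick an overload, then call it */ },
#         _ => {
#             throw_type_error(cx, "Not enough arguments to \"Interface.method\".");
#             return false;
#         }
#     }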
def dictionaryHasSequenceMember(dictionary):
return (any(typeIsSequenceOrHasSequenceMember(m.type) for m in
dictionary.members) or
(dictionary.parent and
dictionaryHasSequenceMember(dictionary.parent)))
def typeIsSequenceOrHasSequenceMember(type):
if type.nullable():
type = type.inner
if type.isSequence():
return True
if type.isArray():
elementType = type.inner
return typeIsSequenceOrHasSequenceMember(elementType)
if type.isDictionary():
return dictionaryHasSequenceMember(type.inner)
if type.isUnion():
return any(typeIsSequenceOrHasSequenceMember(m.type) for m in
type.flatMemberTypes)
return False
def typeNeedsRooting(type, descriptorProvider):
return (type.isGeckoInterface() and
descriptorProvider.getDescriptor(type.unroll().inner.identifier.name).needsRooting)
def union_native_type(t):
name = t.unroll().name
return 'UnionTypes::%s' % name
class JSToNativeConversionInfo():
"""
An object representing information about a JS-to-native conversion.
"""
def __init__(self, template, default=None, declType=None,
needsRooting=False):
"""
template: A string representing the conversion code. This will have
template substitution performed on it as follows:
${val} is a handle to the JS::Value in question
default: A string or None representing rust code for default value(if any).
        declType: A CGThing representing the native Rust type we're converting
to. This is allowed to be None if the conversion code is
supposed to be used as-is.
needsRooting: A boolean indicating whether the caller has to root
the result
"""
assert isinstance(template, str)
assert declType is None or isinstance(declType, CGThing)
self.template = template
self.default = default
self.declType = declType
self.needsRooting = needsRooting
def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None,
isDefinitelyObject=False,
isMember=False,
isArgument=False,
invalidEnumValueFatal=True,
defaultValue=None,
treatNullAs="Default",
isEnforceRange=False,
isClamp=False,
exceptionCode=None,
allowTreatNonObjectAsNull=False,
isCallbackReturnValue=False,
sourceDescription="value"):
"""
Get a template for converting a JS value to a native object based on the
given type and descriptor. If failureCode is given, then we're actually
testing whether we can convert the argument to the desired type. That
means that failures to convert due to the JS value being the wrong type of
value need to use failureCode instead of throwing exceptions. Failures to
convert that are due to JS exceptions (from toString or valueOf methods) or
out of memory conditions need to throw exceptions no matter what
failureCode is.
If isDefinitelyObject is True, that means we know the value
isObject() and we have no need to recheck that.
if isMember is True, we're being converted from a property of some
JS object, not from an actual method argument, so we can't rely on
our jsval being rooted or outliving us in any way. Any caller
passing true needs to ensure that it is handled correctly in
typeIsSequenceOrHasSequenceMember.
invalidEnumValueFatal controls whether an invalid enum value conversion
attempt will throw (if true) or simply return without doing anything (if
false).
If defaultValue is not None, it's the IDL default value for this conversion
If isEnforceRange is true, we're converting an integer and throwing if the
value is out of range.
If isClamp is true, we're converting an integer and clamping if the
value is out of range.
If allowTreatNonObjectAsNull is true, then [TreatNonObjectAsNull]
extended attributes on nullable callback functions will be honored.
    The return value from this function is a JSToNativeConversionInfo instance consisting of four things:
1) A string representing the conversion code. This will have template
substitution performed on it as follows:
${val} replaced by an expression for the JS::Value in question
2) A string or None representing Rust code for the default value (if any).
    3) A CGThing representing the native Rust type we're converting to
(declType). This is allowed to be None if the conversion code is
supposed to be used as-is.
4) A boolean indicating whether the caller has to root the result.
"""
# We should not have a defaultValue if we know we're an object
assert not isDefinitelyObject or defaultValue is None
# If exceptionCode is not set, we'll just rethrow the exception we got.
# Note that we can't just set failureCode to exceptionCode, because setting
# failureCode will prevent pending exceptions from being set in cases when
# they really should be!
if exceptionCode is None:
exceptionCode = "return false;"
needsRooting = typeNeedsRooting(type, descriptorProvider)
def handleOptional(template, declType, default):
assert (defaultValue is None) == (default is None)
return JSToNativeConversionInfo(template, default, declType, needsRooting=needsRooting)
# Unfortunately, .capitalize() on a string will lowercase things inside the
# string, which we do not want.
def firstCap(string):
return string[0].upper() + string[1:]
# Helper functions for dealing with failures due to the JS value being the
# wrong type of value.
def onFailureNotAnObject(failureCode):
return CGWrapper(
CGGeneric(
failureCode or
('throw_type_error(cx, "%s is not an object.");\n'
'%s' % (firstCap(sourceDescription), exceptionCode))),
post="\n")
def onFailureInvalidEnumValue(failureCode, passedVarName):
return CGGeneric(
failureCode or
('throw_type_error(cx, &format!("\'{}\' is not a valid enum value for enumeration \'%s\'.", %s)); %s'
% (type.name, passedVarName, exceptionCode)))
def onFailureNotCallable(failureCode):
return CGGeneric(
failureCode or
('throw_type_error(cx, \"%s is not callable.\");\n'
'%s' % (firstCap(sourceDescription), exceptionCode)))
# A helper function for handling null default values. Checks that the
# default value, if it exists, is null.
def handleDefaultNull(nullValue):
if defaultValue is None:
return None
if not isinstance(defaultValue, IDLNullValue):
raise TypeError("Can't handle non-null default value here")
assert type.nullable() or type.isDictionary()
return nullValue
# A helper function for wrapping up the template body for
# possibly-nullable objecty stuff
def wrapObjectTemplate(templateBody, nullValue, isDefinitelyObject, type,
failureCode=None):
if not isDefinitelyObject:
# Handle the non-object cases by wrapping up the whole
# thing in an if cascade.
templateBody = (
"if ${val}.get().is_object() {\n" +
CGIndenter(CGGeneric(templateBody)).define() + "\n")
if type.nullable():
templateBody += (
"} else if ${val}.get().is_null_or_undefined() {\n"
" %s\n") % nullValue
templateBody += (
"} else {\n" +
CGIndenter(onFailureNotAnObject(failureCode)).define() +
"}")
return templateBody
assert not (isEnforceRange and isClamp) # These are mutually exclusive
if type.isArray():
raise TypeError("Can't handle array arguments yet")
if type.isSequence():
innerInfo = getJSToNativeConversionInfo(innerSequenceType(type),
descriptorProvider,
isMember=isMember)
declType = CGWrapper(innerInfo.declType, pre="Vec<", post=">")
config = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=" >")
templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(ConversionResult::Success(value)) => value,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
" throw_type_error(cx, &error);\n"
" %s\n"
" }\n"
" _ => { %s },\n"
"}" % (config, exceptionCode, exceptionCode))
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isUnion():
declType = CGGeneric(union_native_type(type))
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=" >")
templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(ConversionResult::Success(value)) => value,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
" throw_type_error(cx, &error);\n"
" %s\n"
" }\n"
" _ => { %s },\n"
"}" % (exceptionCode, exceptionCode))
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isGeckoInterface():
assert not isEnforceRange and not isClamp
descriptor = descriptorProvider.getDescriptor(
type.unroll().inner.identifier.name)
if descriptor.interface.isCallback():
name = descriptor.nativeType
declType = CGWrapper(CGGeneric(name), pre="Rc<", post=">")
template = "%s::new(${val}.get().to_object())" % name
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
template = wrapObjectTemplate("Some(%s)" % template, "None",
isDefinitelyObject, type,
failureCode)
return handleOptional(template, declType, handleDefaultNull("None"))
conversionFunction = "root_from_handlevalue"
descriptorType = descriptor.returnType
if isMember == "Variadic":
conversionFunction = "native_from_handlevalue"
descriptorType = descriptor.nativeType
elif isArgument:
descriptorType = descriptor.argumentType
templateBody = ""
if descriptor.interface.isConsequential():
raise TypeError("Consequential interface %s being used as an "
"argument" % descriptor.interface.identifier.name)
if failureCode is None:
substitutions = {
"sourceDescription": sourceDescription,
"interface": descriptor.interface.identifier.name,
"exceptionCode": exceptionCode,
}
unwrapFailureCode = string.Template(
'throw_type_error(cx, "${sourceDescription} does not '
'implement interface ${interface}.");\n'
'${exceptionCode}').substitute(substitutions)
else:
unwrapFailureCode = failureCode
templateBody = unwrapCastableObject(
descriptor, "${val}", unwrapFailureCode, conversionFunction)
declType = CGGeneric(descriptorType)
if type.nullable():
templateBody = "Some(%s)" % templateBody
declType = CGWrapper(declType, pre="Option<", post=">")
templateBody = wrapObjectTemplate(templateBody, "None",
isDefinitelyObject, type, failureCode)
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isSpiderMonkeyInterface():
raise TypeError("Can't handle SpiderMonkey interface arguments yet")
if type.isDOMString():
nullBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(ConversionResult::Success(strval)) => strval,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
" throw_type_error(cx, &error);\n"
" %s\n"
" }\n"
" _ => { %s },\n"
"}" % (nullBehavior, exceptionCode, exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() == IDLType.Tags.domstring
default = 'DOMString::from("%s")' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "DOMString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isUSVString():
assert not isEnforceRange and not isClamp
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(ConversionResult::Success(strval)) => strval,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
" throw_type_error(cx, &error);\n"
" %s\n"
" }\n"
" _ => { %s },\n"
"}" % (exceptionCode, exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.usvstring)
default = 'USVString("%s".to_owned())' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "USVString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isByteString():
assert not isEnforceRange and not isClamp
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(ConversionResult::Success(strval)) => strval,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
" throw_type_error(cx, &error);\n"
" %s\n"
" }\n"
" _ => { %s },\n"
"}" % (exceptionCode, exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.bytestring)
default = 'ByteString::new(b"%s".to_vec())' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "ByteString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isEnum():
assert not isEnforceRange and not isClamp
if type.nullable():
raise TypeError("We don't support nullable enumerated arguments "
"yet")
enum = type.inner.identifier.name
if invalidEnumValueFatal:
handleInvalidEnumValueCode = onFailureInvalidEnumValue(failureCode, 'search').define()
else:
handleInvalidEnumValueCode = "return true;"
template = (
"match find_enum_string_index(cx, ${val}, %(values)s) {\n"
" Err(_) => { %(exceptionCode)s },\n"
" Ok((None, search)) => { %(handleInvalidEnumValueCode)s },\n"
" Ok((Some(index), _)) => {\n"
" //XXXjdm need some range checks up in here.\n"
" mem::transmute(index)\n"
" },\n"
"}" % {"values": enum + "Values::strings",
"exceptionCode": exceptionCode,
"handleInvalidEnumValueCode": handleInvalidEnumValueCode})
if defaultValue is not None:
assert defaultValue.type.tag() == IDLType.Tags.domstring
default = "%s::%s" % (enum, getEnumValueName(defaultValue.value))
else:
default = None
return handleOptional(template, CGGeneric(enum), default)
if type.isCallback():
assert not isEnforceRange and not isClamp
assert not type.treatNonCallableAsNull()
assert not type.treatNonObjectAsNull() or type.nullable()
assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull()
callback = type.unroll().callback
declType = CGGeneric(callback.identifier.name)
finalDeclType = CGTemplatedType("Rc", declType)
conversion = CGCallbackTempRoot(declType.define())
if type.nullable():
declType = CGTemplatedType("Option", declType)
finalDeclType = CGTemplatedType("Option", finalDeclType)
conversion = CGWrapper(conversion, pre="Some(", post=")")
if allowTreatNonObjectAsNull and type.treatNonObjectAsNull():
if not isDefinitelyObject:
haveObject = "${val}.get().is_object()"
template = CGIfElseWrapper(haveObject,
conversion,
CGGeneric("None")).define()
else:
template = conversion
else:
template = CGIfElseWrapper("IsCallable(${val}.get().to_object())",
conversion,
onFailureNotCallable(failureCode)).define()
template = wrapObjectTemplate(
template,
"None",
isDefinitelyObject,
type,
failureCode)
if defaultValue is not None:
assert allowTreatNonObjectAsNull
assert type.treatNonObjectAsNull()
assert type.nullable()
assert isinstance(defaultValue, IDLNullValue)
default = "None"
else:
default = None
return JSToNativeConversionInfo(template, default, finalDeclType, needsRooting=needsRooting)
if type.isAny():
assert not isEnforceRange and not isClamp
declType = ""
default = ""
if isMember == "Dictionary":
# TODO: Need to properly root dictionaries
# https://github.com/servo/servo/issues/6381
declType = CGGeneric("JSVal")
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
default = "NullValue()"
elif isinstance(defaultValue, IDLUndefinedValue):
default = "UndefinedValue()"
else:
raise TypeError("Can't handle non-null, non-undefined default value here")
else:
declType = CGGeneric("HandleValue")
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
default = "HandleValue::null()"
elif isinstance(defaultValue, IDLUndefinedValue):
default = "HandleValue::undefined()"
else:
raise TypeError("Can't handle non-null, non-undefined default value here")
return handleOptional("${val}", declType, default)
if type.isObject():
assert not isEnforceRange and not isClamp
# TODO: Need to root somehow
# https://github.com/servo/servo/issues/6382
declType = CGGeneric("*mut JSObject")
templateBody = wrapObjectTemplate("${val}.get().to_object()",
"ptr::null_mut()",
isDefinitelyObject, type, failureCode)
return handleOptional(templateBody, declType,
handleDefaultNull("ptr::null_mut()"))
if type.isDictionary():
if failureCode is not None:
raise TypeError("Can't handle dictionaries when failureCode is not None")
# There are no nullable dictionaries
assert not type.nullable()
typeName = "%s::%s" % (CGDictionary.makeModuleName(type.inner),
CGDictionary.makeDictionaryName(type.inner))
declType = CGGeneric(typeName)
template = ("match %s::new(cx, ${val}) {\n"
" Ok(ConversionResult::Success(dictionary)) => dictionary,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
" throw_type_error(cx, &error);\n"
" %s\n"
" }\n"
" _ => { %s },\n"
"}" % (typeName, exceptionCode, exceptionCode))
return handleOptional(template, declType, handleDefaultNull("%s::empty(cx)" % typeName))
if type.isVoid():
        # This one only happens for return values, and it's easy: Just
# ignore the jsval.
return JSToNativeConversionInfo("", None, None, needsRooting=False)
if not type.isPrimitive():
raise TypeError("Need conversion for argument type '%s'" % str(type))
conversionBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
if failureCode is None:
failureCode = 'return false'
declType = CGGeneric(builtinNames[type.tag()])
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
template = (
"match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(ConversionResult::Success(v)) => v,\n"
" Ok(ConversionResult::Failure(error)) => {\n"
" throw_type_error(cx, &error);\n"
" %s\n"
" }\n"
" _ => { %s }\n"
"}" % (conversionBehavior, exceptionCode, exceptionCode))
if defaultValue is not None:
if isinstance(defaultValue, IDLNullValue):
assert type.nullable()
defaultStr = "None"
else:
tag = defaultValue.type.tag()
if tag in [IDLType.Tags.float, IDLType.Tags.double]:
defaultStr = "Finite::wrap(%s)" % defaultValue.value
elif tag in numericTags:
defaultStr = str(defaultValue.value)
else:
assert tag == IDLType.Tags.bool
defaultStr = toStringBool(defaultValue.value)
if type.nullable():
defaultStr = "Some(%s)" % defaultStr
else:
defaultStr = None
return handleOptional(template, declType, defaultStr)
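# For illustration (hypothetical values): asking for the conversion of an IDL
# `long` argument yields roughly
#     info.template  == 'match FromJSValConvertible::from_jsval(cx, ${val}, ...) { ... }'
#     info.declType.define() == 'i32'
#     info.default   is None unless the IDL declared a default value
# The ${val} placeholder is filled in later by
# instantiateJSToNativeConversionTemplate() below.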
def instantiateJSToNativeConversionTemplate(templateBody, replacements,
declType, declName):
"""
Take the templateBody and declType as returned by
getJSToNativeConversionInfo, a set of replacements as required by the
strings in such a templateBody, and a declName, and generate code to
convert into a stack Rust binding with that name.
"""
result = CGList([], "\n")
conversion = CGGeneric(string.Template(templateBody).substitute(replacements))
if declType is not None:
newDecl = [
CGGeneric("let "),
CGGeneric(declName),
CGGeneric(": "),
declType,
CGGeneric(" = "),
conversion,
CGGeneric(";"),
]
result.append(CGList(newDecl))
else:
result.append(conversion)
# Add an empty CGGeneric to get an extra newline after the argument
# conversion.
result.append(CGGeneric(""))
return result
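# A minimal sketch of the output (all names below are made up): given
#     templateBody = "${val}.get().to_int32()"
#     replacements = {"val": "args.get(0)"}
#     declType = CGGeneric("i32"), declName = "arg0"
# the generated Rust is
#     let arg0: i32 = args.get(0).get().to_int32();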
def convertConstIDLValueToJSVal(value):
if isinstance(value, IDLNullValue):
return "ConstantVal::NullVal"
tag = value.type.tag()
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16,
IDLType.Tags.uint16, IDLType.Tags.int32]:
return "ConstantVal::IntVal(%s)" % (value.value)
if tag == IDLType.Tags.uint32:
return "ConstantVal::UintVal(%s)" % (value.value)
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]:
return "ConstantVal::DoubleVal(%s)" % (value.value)
if tag == IDLType.Tags.bool:
return "ConstantVal::BoolVal(true)" if value.value else "ConstantVal::BoolVal(false)"
if tag in [IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
return "ConstantVal::DoubleVal(%s)" % (value.value)
raise TypeError("Const value of unhandled type: " + value.type)
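# e.g. an IDL `const unsigned long FLAG = 3` is rendered as
# "ConstantVal::UintVal(3)", and a true boolean constant as
# "ConstantVal::BoolVal(true)".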
class CGArgumentConverter(CGThing):
"""
A class that takes an IDL argument object, its index in the
    argument list, and the args and argc strings and generates code to
unwrap the argument to the right native type.
"""
def __init__(self, argument, index, args, argc, descriptorProvider,
invalidEnumValueFatal=True):
CGThing.__init__(self)
assert not argument.defaultValue or argument.optional
replacer = {
"index": index,
"argc": argc,
"args": args
}
replacementVariables = {
"val": string.Template("${args}.get(${index})").substitute(replacer),
}
info = getJSToNativeConversionInfo(
argument.type,
descriptorProvider,
invalidEnumValueFatal=invalidEnumValueFatal,
defaultValue=argument.defaultValue,
treatNullAs=argument.treatNullAs,
isEnforceRange=argument.enforceRange,
isClamp=argument.clamp,
isMember="Variadic" if argument.variadic else False,
allowTreatNonObjectAsNull=argument.allowTreatNonCallableAsNull())
template = info.template
default = info.default
declType = info.declType
if not argument.variadic:
if argument.optional:
condition = "{args}.get({index}).is_undefined()".format(**replacer)
if argument.defaultValue:
assert default
template = CGIfElseWrapper(condition,
CGGeneric(default),
CGGeneric(template)).define()
else:
assert not default
declType = CGWrapper(declType, pre="Option<", post=">")
template = CGIfElseWrapper(condition,
CGGeneric("None"),
CGGeneric("Some(%s)" % template)).define()
else:
assert not default
self.converter = instantiateJSToNativeConversionTemplate(
template, replacementVariables, declType, "arg%d" % index)
else:
assert argument.optional
variadicConversion = {
"val": string.Template("${args}.get(variadicArg)").substitute(replacer),
}
innerConverter = [instantiateJSToNativeConversionTemplate(
template, variadicConversion, declType, "slot")]
arg = "arg%d" % index
if argument.type.isGeckoInterface():
init = "rooted_vec!(let mut %s)" % arg
innerConverter.append(CGGeneric("%s.push(JS::from_ref(&*slot));" % arg))
else:
init = "let mut %s = vec![]" % arg
innerConverter.append(CGGeneric("%s.push(slot);" % arg))
inner = CGIndenter(CGList(innerConverter, "\n"), 8).define()
self.converter = CGGeneric("""\
%(init)s;
if %(argc)s > %(index)s {
%(arg)s.reserve(%(argc)s as usize - %(index)s);
for variadicArg in %(index)s..%(argc)s {
%(inner)s
}
}""" % {'arg': arg, 'argc': argc, 'index': index, 'inner': inner, 'init': init})
def define(self):
return self.converter.define()
def wrapForType(jsvalRef, result='result', successCode='return true;', pre=''):
"""
Reflect a Rust value into JS.
* 'jsvalRef': a MutableHandleValue in which to store the result
of the conversion;
* 'result': the name of the variable in which the Rust value is stored;
* 'successCode': the code to run once we have done the conversion.
* 'pre': code to run before the conversion if rooting is necessary
"""
wrap = "%s\n(%s).to_jsval(cx, %s);" % (pre, result, jsvalRef)
if successCode:
wrap += "\n%s" % successCode
return wrap
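# For example, wrapForType("args.rval()") returns
#     "\n(result).to_jsval(cx, args.rval());\nreturn true;"
# which reflects the native `result` into the caller's return value slot.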
def typeNeedsCx(type, retVal=False):
if type is None:
return False
if type.nullable():
type = type.inner
if type.isSequence() or type.isArray():
type = type.inner
if type.isUnion():
return any(typeNeedsCx(t) for t in type.unroll().flatMemberTypes)
if retVal and type.isSpiderMonkeyInterface():
return True
return type.isAny() or type.isObject()
# Returns a conversion behavior suitable for a type
def getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs):
if type.isSequence():
return getConversionConfigForType(type.unroll(), isEnforceRange, isClamp, treatNullAs)
if type.isDOMString():
assert not isEnforceRange and not isClamp
treatAs = {
"Default": "StringificationBehavior::Default",
"EmptyString": "StringificationBehavior::Empty",
}
if treatNullAs not in treatAs:
raise TypeError("We don't support [TreatNullAs=%s]" % treatNullAs)
if type.nullable():
# Note: the actual behavior passed here doesn't matter for nullable
# strings.
return "StringificationBehavior::Default"
else:
return treatAs[treatNullAs]
if type.isPrimitive() and type.isInteger():
if isEnforceRange:
return "ConversionBehavior::EnforceRange"
elif isClamp:
return "ConversionBehavior::Clamp"
else:
return "ConversionBehavior::Default"
assert not isEnforceRange and not isClamp
return "()"
# Returns a CGThing containing the type of the return value.
def getRetvalDeclarationForType(returnType, descriptorProvider):
if returnType is None or returnType.isVoid():
# Nothing to declare
return CGGeneric("()")
if returnType.isPrimitive() and returnType.tag() in builtinNames:
result = CGGeneric(builtinNames[returnType.tag()])
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isDOMString():
result = CGGeneric("DOMString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isUSVString():
result = CGGeneric("USVString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isByteString():
result = CGGeneric("ByteString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isEnum():
result = CGGeneric(returnType.unroll().inner.identifier.name)
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isGeckoInterface():
descriptor = descriptorProvider.getDescriptor(
returnType.unroll().inner.identifier.name)
result = CGGeneric(descriptor.returnType)
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isCallback():
callback = returnType.unroll().callback
result = CGGeneric('Rc<%s::%s>' % (getModuleFromObject(callback), callback.identifier.name))
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isUnion():
result = CGGeneric(union_native_type(returnType))
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
# TODO: Return the value through a MutableHandleValue outparam
# https://github.com/servo/servo/issues/6307
if returnType.isAny():
return CGGeneric("JSVal")
if returnType.isObject() or returnType.isSpiderMonkeyInterface():
result = CGGeneric("NonZero<*mut JSObject>")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isSequence():
result = getRetvalDeclarationForType(innerSequenceType(returnType), descriptorProvider)
result = CGWrapper(result, pre="Vec<", post=">")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isDictionary():
nullable = returnType.nullable()
dictName = returnType.inner.name if nullable else returnType.name
result = CGGeneric(dictName)
if typeNeedsRooting(returnType, descriptorProvider):
            raise TypeError("We don't support rootable dictionary return values")
if nullable:
result = CGWrapper(result, pre="Option<", post=">")
return result
raise TypeError("Don't know how to declare return value for %s" %
returnType)
def MemberCondition(pref, func):
"""
A string representing the condition for a member to actually be exposed.
Any of the arguments can be None. If not None, they should have the
following types:
pref: The name of the preference.
func: The name of the function.
"""
assert pref is None or isinstance(pref, str)
assert func is None or isinstance(func, str)
assert func is None or pref is None
if pref:
return 'Condition::Pref("%s")' % pref
if func:
return 'Condition::Func(%s)' % func
return "Condition::Satisfied"
class PropertyDefiner:
"""
A common superclass for defining things on prototype objects.
Subclasses should implement generateArray to generate the actual arrays of
things we're defining. They should also set self.regular to the list of
things exposed to web pages.
"""
def __init__(self, descriptor, name):
self.descriptor = descriptor
self.name = name
def variableName(self):
return "s" + self.name
def length(self):
return len(self.regular)
def __str__(self):
# We only need to generate id arrays for things that will end
# up used via ResolveProperty or EnumerateProperties.
return self.generateArray(self.regular, self.variableName())
@staticmethod
def getStringAttr(member, name):
attr = member.getExtendedAttribute(name)
if attr is None:
return None
# It's a list of strings
assert len(attr) == 1
assert attr[0] is not None
return attr[0]
@staticmethod
def getControllingCondition(interfaceMember, descriptor):
return MemberCondition(
PropertyDefiner.getStringAttr(interfaceMember,
"Pref"),
PropertyDefiner.getStringAttr(interfaceMember,
"Func"))
def generateGuardedArray(self, array, name, specTemplate, specTerminator,
specType, getCondition, getDataTuple):
"""
This method generates our various arrays.
array is an array of interface members as passed to generateArray
name is the name as passed to generateArray
specTemplate is a template for each entry of the spec array
specTerminator is a terminator for the spec array (inserted at the end
of the array), or None
specType is the actual typename of our spec
        getCondition is a callback function that takes an interface member and
        the descriptor and returns the Condition controlling that member.
        getDataTuple is a callback function that takes an array entry and
        returns a tuple suitable for substitution into specTemplate.
"""
# We generate an all-encompassing list of lists of specs, with each sublist
# representing a group of members that share a common pref name. That will
# make sure the order of the properties as exposed on the interface and
# interface prototype objects does not change when pref control is added to
# members while still allowing us to define all the members in the smallest
# number of JSAPI calls.
assert len(array) != 0
specs = []
prefableSpecs = []
prefableTemplate = ' Guard::new(%s, %s[%d])'
for cond, members in groupby(array, lambda m: getCondition(m, self.descriptor)):
currentSpecs = [specTemplate % getDataTuple(m) for m in members]
if specTerminator:
currentSpecs.append(specTerminator)
specs.append("&[\n" + ",\n".join(currentSpecs) + "]\n")
prefableSpecs.append(
prefableTemplate % (cond, name + "_specs", len(specs) - 1))
specsArray = ("const %s_specs: &'static [&'static[%s]] = &[\n" +
",\n".join(specs) + "\n" +
"];\n") % (name, specType)
prefArray = ("const %s: &'static [Guard<&'static [%s]>] = &[\n" +
",\n".join(prefableSpecs) + "\n" +
"];\n") % (name, specType)
return specsArray + prefArray
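# The two arrays built above end up looking roughly like this (sketch only;
# `sMethods` is a hypothetical variable name):
#     const sMethods_specs: &'static [&'static[JSFunctionSpec]] = &[
#         &[ JSFunctionSpec { ... }, JSFunctionSpec { ... } ]
#     ];
#     const sMethods: &'static [Guard<&'static [JSFunctionSpec]>] = &[
#         Guard::new(Condition::Satisfied, sMethods_specs[0])
#     ];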
# The length of a method is the minimum number of non-optional, non-variadic
# arguments among all its overloads' argument lists.
def methodLength(method):
signatures = method.signatures()
return min(
len([arg for arg in arguments if not arg.optional and not arg.variadic])
for (_, arguments) in signatures)
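# For instance (illustrative IDL), a method with the overloads
#     void f(long a, long b);
#     void f(DOMString s, optional long t);
# has methodLength(f) == 1: the shorter overload requires only one argument.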
class MethodDefiner(PropertyDefiner):
"""
A class for defining methods on a prototype object.
"""
def __init__(self, descriptor, name, static, unforgeable):
assert not (static and unforgeable)
PropertyDefiner.__init__(self, descriptor, name)
# FIXME https://bugzilla.mozilla.org/show_bug.cgi?id=772822
# We should be able to check for special operations without an
# identifier. For now we check if the name starts with __
# Ignore non-static methods for callback interfaces
if not descriptor.interface.isCallback() or static:
methods = [m for m in descriptor.interface.members if
m.isMethod() and m.isStatic() == static and
not m.isIdentifierLess() and
MemberIsUnforgeable(m, descriptor) == unforgeable]
else:
methods = []
self.regular = [{"name": m.identifier.name,
"methodInfo": not m.isStatic(),
"length": methodLength(m),
"condition": PropertyDefiner.getControllingCondition(m, descriptor)}
for m in methods]
# FIXME Check for an existing iterator on the interface first.
if any(m.isGetter() and m.isIndexed() for m in methods):
self.regular.append({"name": '@@iterator',
"methodInfo": False,
"selfHostedName": "ArrayValues",
"length": 0,
"condition": "Condition::Satisfied"})
# Generate the keys/values/entries aliases for value iterables.
maplikeOrSetlikeOrIterable = descriptor.interface.maplikeOrSetlikeOrIterable
if (not static and not unforgeable and
(maplikeOrSetlikeOrIterable and
maplikeOrSetlikeOrIterable.isIterable() and
maplikeOrSetlikeOrIterable.isValueIterator())):
# Add our keys/values/entries/forEach
self.regular.append({
"name": "keys",
"methodInfo": False,
"selfHostedName": "ArrayKeys",
"length": 0,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
self.regular.append({
"name": "values",
"methodInfo": False,
"selfHostedName": "ArrayValues",
"length": 0,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
self.regular.append({
"name": "entries",
"methodInfo": False,
"selfHostedName": "ArrayEntries",
"length": 0,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
self.regular.append({
"name": "forEach",
"methodInfo": False,
"selfHostedName": "ArrayForEach",
"length": 0,
"condition": PropertyDefiner.getControllingCondition(m,
descriptor)
})
isUnforgeableInterface = bool(descriptor.interface.getExtendedAttribute("Unforgeable"))
if not static and unforgeable == isUnforgeableInterface:
stringifier = descriptor.operations['Stringifier']
if stringifier:
self.regular.append({
"name": "toString",
"nativeName": stringifier.identifier.name,
"length": 0,
"condition": PropertyDefiner.getControllingCondition(stringifier, descriptor)
})
self.unforgeable = unforgeable
def generateArray(self, array, name):
if len(array) == 0:
return ""
def condition(m, d):
return m["condition"]
flags = "JSPROP_ENUMERATE"
if self.unforgeable:
flags += " | JSPROP_PERMANENT | JSPROP_READONLY"
def specData(m):
# TODO: Use something like JS_FNSPEC
# https://github.com/servo/servo/issues/6391
if "selfHostedName" in m:
selfHostedName = '%s as *const u8 as *const libc::c_char' % str_to_const_array(m["selfHostedName"])
assert not m.get("methodInfo", True)
accessor = "None"
jitinfo = "0 as *const JSJitInfo"
else:
selfHostedName = "0 as *const libc::c_char"
if m.get("methodInfo", True):
identifier = m.get("nativeName", m["name"])
# Go through an intermediate type here, because it's not
# easy to tell whether the methodinfo is a JSJitInfo or
# a JSTypedMethodJitInfo here. The compiler knows, though,
# so let it do the work.
jitinfo = "&%s_methodinfo as *const _ as *const JSJitInfo" % identifier
accessor = "Some(generic_method)"
else:
jitinfo = "0 as *const JSJitInfo"
accessor = 'Some(%s)' % m.get("nativeName", m["name"])
if m["name"].startswith("@@"):
return ('(SymbolCode::%s as i32 + 1)'
% m["name"][2:], accessor, jitinfo, m["length"], flags, selfHostedName)
return (str_to_const_array(m["name"]), accessor, jitinfo, m["length"], flags, selfHostedName)
return self.generateGuardedArray(
array, name,
' JSFunctionSpec {\n'
' name: %s as *const u8 as *const libc::c_char,\n'
' call: JSNativeWrapper { op: %s, info: %s },\n'
' nargs: %s,\n'
' flags: (%s) as u16,\n'
' selfHostedName: %s\n'
' }',
' JSFunctionSpec {\n'
' name: 0 as *const libc::c_char,\n'
' call: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
' nargs: 0,\n'
' flags: 0,\n'
' selfHostedName: 0 as *const libc::c_char\n'
' }',
'JSFunctionSpec',
condition, specData)
class AttrDefiner(PropertyDefiner):
def __init__(self, descriptor, name, static, unforgeable):
assert not (static and unforgeable)
PropertyDefiner.__init__(self, descriptor, name)
self.name = name
self.descriptor = descriptor
self.regular = [
m
for m in descriptor.interface.members if
m.isAttr() and m.isStatic() == static and
MemberIsUnforgeable(m, descriptor) == unforgeable
]
self.static = static
self.unforgeable = unforgeable
def generateArray(self, array, name):
if len(array) == 0:
return ""
flags = "JSPROP_ENUMERATE | JSPROP_SHARED"
if self.unforgeable:
flags += " | JSPROP_PERMANENT"
def getter(attr):
if self.static:
accessor = 'get_' + self.descriptor.internalNameFor(attr.identifier.name)
jitinfo = "0 as *const JSJitInfo"
else:
if attr.hasLenientThis():
accessor = "generic_lenient_getter"
else:
accessor = "generic_getter"
jitinfo = "&%s_getterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
% {"info": jitinfo,
"native": accessor})
def setter(attr):
if (attr.readonly and not attr.getExtendedAttribute("PutForwards")
and not attr.getExtendedAttribute("Replaceable")):
return "JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }"
if self.static:
accessor = 'set_' + self.descriptor.internalNameFor(attr.identifier.name)
jitinfo = "0 as *const JSJitInfo"
else:
if attr.hasLenientThis():
accessor = "generic_lenient_setter"
else:
accessor = "generic_setter"
jitinfo = "&%s_setterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
% {"info": jitinfo,
"native": accessor})
def specData(attr):
return (str_to_const_array(attr.identifier.name), flags, getter(attr),
setter(attr))
return self.generateGuardedArray(
array, name,
' JSPropertySpec {\n'
' name: %s as *const u8 as *const libc::c_char,\n'
' flags: (%s) as u8,\n'
' getter: %s,\n'
' setter: %s\n'
' }',
' JSPropertySpec {\n'
' name: 0 as *const libc::c_char,\n'
' flags: 0,\n'
' getter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
' setter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }\n'
' }',
'JSPropertySpec',
PropertyDefiner.getControllingCondition, specData)
class ConstDefiner(PropertyDefiner):
"""
    A class for defining constants on the interface object
"""
def __init__(self, descriptor, name):
PropertyDefiner.__init__(self, descriptor, name)
self.name = name
self.regular = [m for m in descriptor.interface.members if m.isConst()]
def generateArray(self, array, name):
if len(array) == 0:
return ""
def specData(const):
return (str_to_const_array(const.identifier.name),
convertConstIDLValueToJSVal(const.value))
return self.generateGuardedArray(
array, name,
' ConstantSpec { name: %s, value: %s }',
None,
'ConstantSpec',
PropertyDefiner.getControllingCondition, specData)
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
lineStartDetector = re.compile("^(?=[^\n])", re.MULTILINE)
class CGIndenter(CGThing):
"""
A class that takes another CGThing and generates code that indents that
    CGThing by some number of spaces. The default indent is four spaces.
"""
def __init__(self, child, indentLevel=4):
CGThing.__init__(self)
self.child = child
self.indent = " " * indentLevel
def define(self):
defn = self.child.define()
if defn != "":
return re.sub(lineStartDetector, self.indent, defn)
else:
return defn
class CGWrapper(CGThing):
"""
Generic CGThing that wraps other CGThings with pre and post text.
"""
def __init__(self, child, pre="", post="", reindent=False):
CGThing.__init__(self)
self.child = child
self.pre = pre
self.post = post
self.reindent = reindent
def define(self):
defn = self.child.define()
if self.reindent:
# We don't use lineStartDetector because we don't want to
# insert whitespace at the beginning of our _first_ line.
defn = stripTrailingWhitespace(
defn.replace("\n", "\n" + (" " * len(self.pre))))
return self.pre + defn + self.post
class CGImports(CGWrapper):
"""
Generates the appropriate import/use statements.
"""
def __init__(self, child, descriptors, callbacks, dictionaries, enums, imports, config, ignored_warnings=None):
"""
Adds a set of imports.
"""
if ignored_warnings is None:
ignored_warnings = [
'non_camel_case_types',
'non_upper_case_globals',
'unused_imports',
'unused_variables',
'unused_assignments',
]
def componentTypes(type):
if type.isType() and type.nullable():
type = type.unroll()
if type.isUnion():
return type.flatMemberTypes
if type.isDictionary():
return [type] + getTypesFromDictionary(type)
return [type]
def isImportable(type):
if not type.isType():
assert (type.isInterface() or type.isDictionary() or
type.isEnum() or type.isNamespace())
return True
return not (type.builtin or type.isSequence() or type.isUnion())
def relatedTypesForSignatures(method):
types = []
for (returnType, arguments) in method.signatures():
types += componentTypes(returnType)
for arg in arguments:
types += componentTypes(arg.type)
return types
def getIdentifier(t):
if t.isType():
if t.nullable():
t = t.inner
if t.isCallback():
return t.callback.identifier
return t.identifier
assert t.isInterface() or t.isDictionary() or t.isEnum() or t.isNamespace()
return t.identifier
def removeWrapperAndNullableTypes(types):
normalized = []
for t in types:
while (t.isType() and t.nullable()) or isinstance(t, IDLWrapperType):
t = t.inner
if isImportable(t):
normalized += [t]
return normalized
types = []
for d in descriptors:
if not d.interface.isCallback():
types += [d.interface]
if d.interface.isIteratorInterface():
types += [d.interface.iterableInterface]
members = d.interface.members + d.interface.namedConstructors
constructor = d.interface.ctor()
if constructor:
members += [constructor]
if d.proxy:
members += [o for o in d.operations.values() if o]
for m in members:
if m.isMethod():
types += relatedTypesForSignatures(m)
elif m.isAttr():
types += componentTypes(m.type)
# Import the type names used in the callbacks that are being defined.
for c in callbacks:
types += relatedTypesForSignatures(c)
# Import the type names used in the dictionaries that are being defined.
for d in dictionaries:
types += componentTypes(d)
# Normalize the types we've collected and remove any ones which can't be imported.
types = removeWrapperAndNullableTypes(types)
descriptorProvider = config.getDescriptorProvider()
extras = []
for t in types:
# Importing these types in the same module that defines them is an error.
if t in dictionaries or t in enums:
continue
if t.isInterface() or t.isNamespace():
descriptor = descriptorProvider.getDescriptor(getIdentifier(t).name)
extras += [descriptor.path]
if descriptor.interface.parent:
parentName = getIdentifier(descriptor.interface.parent).name
descriptor = descriptorProvider.getDescriptor(parentName)
extras += [descriptor.path, descriptor.bindingPath]
else:
if t.isEnum():
extras += [getModuleFromObject(t) + '::' + getIdentifier(t).name + 'Values']
extras += [getModuleFromObject(t) + '::' + getIdentifier(t).name]
statements = []
if len(ignored_warnings) > 0:
statements.append('#![allow(%s)]' % ','.join(ignored_warnings))
statements.extend('use %s;' % i for i in sorted(set(imports + extras)))
CGWrapper.__init__(self, child,
pre='\n'.join(statements) + '\n\n')
class CGIfWrapper(CGWrapper):
def __init__(self, condition, child):
pre = CGWrapper(CGGeneric(condition), pre="if ", post=" {\n",
reindent=True)
CGWrapper.__init__(self, CGIndenter(child), pre=pre.define(),
post="\n}")
class CGTemplatedType(CGWrapper):
def __init__(self, templateName, child):
CGWrapper.__init__(self, child, pre=templateName + "<", post=">")
class CGNamespace(CGWrapper):
def __init__(self, namespace, child, public=False):
pre = "%smod %s {\n" % ("pub " if public else "", namespace)
post = "} // mod %s" % namespace
CGWrapper.__init__(self, child, pre=pre, post=post)
@staticmethod
def build(namespaces, child, public=False):
"""
Static helper method to build multiple wrapped namespaces.
"""
if not namespaces:
return child
inner = CGNamespace.build(namespaces[1:], child, public=public)
return CGNamespace(namespaces[0], inner, public=public)
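# Sketch (illustrative):
#     CGNamespace.build(["outer", "inner"], CGGeneric("fn f() {}"), public=True)
# wraps the child in nested modules, roughly
#     pub mod outer {
#     pub mod inner {
#     fn f() {}
#     } // mod inner
#     } // mod outer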
def DOMClassTypeId(desc):
protochain = desc.prototypeChain
inner = ""
if desc.hasDescendants():
if desc.interface.getExtendedAttribute("Abstract"):
return "::dom::bindings::codegen::InheritTypes::TopTypeId::Abstract"
name = desc.interface.identifier.name
inner = "(::dom::bindings::codegen::InheritTypes::%sTypeId::%s)" % (name, name)
elif len(protochain) == 1:
return "::dom::bindings::codegen::InheritTypes::TopTypeId::Alone"
reversed_protochain = list(reversed(protochain))
for (child, parent) in zip(reversed_protochain, reversed_protochain[1:]):
inner = "(::dom::bindings::codegen::InheritTypes::%sTypeId::%s%s)" % (parent, child, inner)
return "::dom::bindings::codegen::InheritTypes::TopTypeId::%s%s" % (protochain[0], inner)
def DOMClass(descriptor):
protoList = ['PrototypeList::ID::' + proto for proto in descriptor.prototypeChain]
# Pad out the list to the right length with ID::Last so we
# guarantee that all the lists are the same length. ID::Last
# is never the ID of any prototype, so it's safe to use as
# padding.
protoList.extend(['PrototypeList::ID::Last'] * (descriptor.config.maxProtoChainLength - len(protoList)))
prototypeChainString = ', '.join(protoList)
heapSizeOf = 'heap_size_of_raw_self_and_children::<%s>' % descriptor.concreteType
if descriptor.isGlobal():
globals_ = camel_to_upper_snake(descriptor.name)
else:
globals_ = 'EMPTY'
return """\
DOMClass {
interface_chain: [ %s ],
type_id: %s,
heap_size_of: %s as unsafe fn(_) -> _,
global: InterfaceObjectMap::%s,
}""" % (prototypeChainString, DOMClassTypeId(descriptor), heapSizeOf, globals_)
class CGDOMJSClass(CGThing):
"""
Generate a DOMJSClass for a given descriptor
"""
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
args = {
"domClass": DOMClass(self.descriptor),
"enumerateHook": "None",
"finalizeHook": FINALIZE_HOOK_NAME,
"flags": "0",
"name": str_to_const_array(self.descriptor.interface.identifier.name),
"resolveHook": "None",
"slots": "1",
"traceHook": TRACE_HOOK_NAME,
}
if self.descriptor.isGlobal():
assert not self.descriptor.weakReferenceable
args["enumerateHook"] = "Some(enumerate_global)"
args["flags"] = "JSCLASS_IS_GLOBAL | JSCLASS_DOM_GLOBAL"
args["slots"] = "JSCLASS_GLOBAL_SLOT_COUNT + 1"
args["resolveHook"] = "Some(resolve_global)"
args["traceHook"] = "js::jsapi::JS_GlobalObjectTraceHook"
elif self.descriptor.weakReferenceable:
args["slots"] = "2"
return """\
static CLASS_OPS: js::jsapi::JSClassOps = js::jsapi::JSClassOps {
addProperty: None,
delProperty: None,
getProperty: None,
setProperty: None,
enumerate: %(enumerateHook)s,
resolve: %(resolveHook)s,
mayResolve: None,
finalize: Some(%(finalizeHook)s),
call: None,
hasInstance: None,
construct: None,
trace: Some(%(traceHook)s),
};
static Class: DOMJSClass = DOMJSClass {
base: js::jsapi::JSClass {
name: %(name)s as *const u8 as *const libc::c_char,
flags: JSCLASS_IS_DOMJSCLASS | %(flags)s |
(((%(slots)s) & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT)
/* JSCLASS_HAS_RESERVED_SLOTS(%(slots)s) */,
cOps: &CLASS_OPS,
reserved: [0 as *mut _; 3],
},
dom_class: %(domClass)s
};""" % args
def str_to_const_array(s):
return "b\"%s\\0\"" % s
class CGPrototypeJSClass(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
name = str_to_const_array(self.descriptor.interface.identifier.name + "Prototype")
slotCount = 0
if self.descriptor.hasUnforgeableMembers:
slotCount += 1
return """\
static PrototypeClass: JSClass = JSClass {
name: %(name)s as *const u8 as *const libc::c_char,
flags:
// JSCLASS_HAS_RESERVED_SLOTS(%(slotCount)s)
(%(slotCount)s & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT,
cOps: 0 as *const _,
reserved: [0 as *mut os::raw::c_void; 3]
};
""" % {'name': name, 'slotCount': slotCount}
class CGInterfaceObjectJSClass(CGThing):
def __init__(self, descriptor):
assert descriptor.interface.hasInterfaceObject() and not descriptor.interface.isCallback()
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
if self.descriptor.interface.isNamespace():
classString = self.descriptor.interface.getExtendedAttribute("ClassString")
if classString:
classString = classString[0]
else:
classString = "Object"
return """\
static NAMESPACE_OBJECT_CLASS: NamespaceObjectClass = unsafe {
NamespaceObjectClass::new(%s)
};
""" % str_to_const_array(classString)
if self.descriptor.interface.ctor():
constructorBehavior = "InterfaceConstructorBehavior::call(%s)" % CONSTRUCT_HOOK_NAME
else:
constructorBehavior = "InterfaceConstructorBehavior::throw()"
name = self.descriptor.interface.identifier.name
args = {
"constructorBehavior": constructorBehavior,
"id": name,
"representation": 'b"function %s() {\\n [native code]\\n}"' % name,
"depth": self.descriptor.prototypeDepth
}
return """\
static INTERFACE_OBJECT_CLASS: NonCallbackInterfaceObjectClass =
NonCallbackInterfaceObjectClass::new(
&%(constructorBehavior)s,
%(representation)s,
PrototypeList::ID::%(id)s,
%(depth)s);
""" % args
class CGList(CGThing):
"""
    Generate code for a list of CGThings. Just concatenates them together, with
an optional joiner string. "\n" is a common joiner.
"""
def __init__(self, children, joiner=""):
CGThing.__init__(self)
# Make a copy of the kids into a list, because if someone passes in a
# generator we won't be able to both declare and define ourselves, or
# define ourselves more than once!
self.children = list(children)
self.joiner = joiner
def append(self, child):
self.children.append(child)
def prepend(self, child):
self.children.insert(0, child)
def join(self, iterable):
return self.joiner.join(s for s in iterable if len(s) > 0)
def define(self):
return self.join(child.define() for child in self.children if child is not None)
def __len__(self):
return len(self.children)
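# Illustrative usage (assumed, mirroring how CGList is used throughout this
# file):
#   CGList([CGGeneric("a"), CGGeneric("b")], "\n").define() == "a\nb"
# Children that are None, or whose define() is empty, are skipped by join().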
class CGIfElseWrapper(CGList):
def __init__(self, condition, ifTrue, ifFalse):
kids = [CGIfWrapper(condition, ifTrue),
CGWrapper(CGIndenter(ifFalse), pre=" else {\n", post="\n}")]
CGList.__init__(self, kids)
class CGGeneric(CGThing):
"""
A class that spits out a fixed string into the codegen.
"""
def __init__(self, text):
self.text = text
def define(self):
return self.text
class CGCallbackTempRoot(CGGeneric):
def __init__(self, name):
CGGeneric.__init__(self, "%s::new(${val}.get().to_object())" % name)
def getAllTypes(descriptors, dictionaries, callbacks, typedefs):
"""
Generate all the types we're dealing with. For each type, a tuple
containing type, descriptor, dictionary is yielded. The
descriptor and dictionary can be None if the type does not come
from a descriptor or dictionary; they will never both be non-None.
"""
for d in descriptors:
for t in getTypesFromDescriptor(d):
yield (t, d, None)
for dictionary in dictionaries:
for t in getTypesFromDictionary(dictionary):
yield (t, None, dictionary)
for callback in callbacks:
for t in getTypesFromCallback(callback):
yield (t, None, None)
for typedef in typedefs:
yield (typedef.innerType, None, None)
def UnionTypes(descriptors, dictionaries, callbacks, typedefs, config):
"""
Returns a CGList containing CGUnionStructs for every union.
"""
imports = [
'dom::bindings::codegen::PrototypeList',
'dom::bindings::conversions::ConversionResult',
'dom::bindings::conversions::FromJSValConvertible',
'dom::bindings::conversions::ToJSValConvertible',
'dom::bindings::conversions::ConversionBehavior',
'dom::bindings::conversions::StringificationBehavior',
'dom::bindings::conversions::root_from_handlevalue',
'dom::bindings::error::throw_not_in_union',
'dom::bindings::js::Root',
'dom::bindings::str::ByteString',
'dom::bindings::str::DOMString',
'dom::bindings::str::USVString',
'dom::types::*',
'js::error::throw_type_error',
'js::jsapi::HandleValue',
'js::jsapi::JSContext',
'js::jsapi::MutableHandleValue',
'js::jsval::JSVal',
]
# Now find all the things we'll need as arguments and return values because
# we need to wrap or unwrap them.
unionStructs = dict()
for (t, descriptor, dictionary) in getAllTypes(descriptors, dictionaries, callbacks, typedefs):
assert not descriptor or not dictionary
t = t.unroll()
if not t.isUnion():
continue
name = str(t)
if name not in unionStructs:
provider = descriptor or config.getDescriptorProvider()
unionStructs[name] = CGList([
CGUnionStruct(t, provider),
CGUnionConversionStruct(t, provider)
])
# Sort unionStructs by key, retrieve value
unionStructs = (i[1] for i in sorted(unionStructs.items(), key=operator.itemgetter(0)))
return CGImports(CGList(unionStructs, "\n\n"),
descriptors=[],
callbacks=[],
dictionaries=[],
enums=[],
imports=imports,
config=config,
ignored_warnings=[])
class Argument():
"""
A class for outputting the type and name of an argument
"""
def __init__(self, argType, name, default=None, mutable=False):
self.argType = argType
self.name = name
self.default = default
self.mutable = mutable
def declare(self):
string = ('mut ' if self.mutable else '') + self.name + ((': ' + self.argType) if self.argType else '')
# XXXjdm Support default arguments somehow :/
# if self.default is not None:
# string += " = " + self.default
return string
def define(self):
return self.argType + ' ' + self.name
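# Illustrative examples (hypothetical arguments): declare() emits Rust-style
# parameter syntax, e.g.
#   Argument('*mut JSContext', 'cx').declare()       == 'cx: *mut JSContext'
#   Argument('u32', 'depth', mutable=True).declare() == 'mut depth: u32'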
class CGAbstractMethod(CGThing):
"""
An abstract class for generating code for a method. Subclasses
should override definition_body to create the actual code.
descriptor is the descriptor for the interface the method is associated with
name is the name of the method as a string
returnType is the IDLType of the return value
args is a list of Argument objects
inline should be True to generate an inline method, whose body is
part of the declaration.
alwaysInline should be True to generate an inline method annotated with
MOZ_ALWAYS_INLINE.
If templateArgs is not None it should be a list of strings containing
template arguments, and the function will be templatized using those
arguments.
docs is None or documentation for the method in a string.
"""
def __init__(self, descriptor, name, returnType, args, inline=False,
alwaysInline=False, extern=False, unsafe_fn=False, pub=False,
templateArgs=None, unsafe=False, docs=None, doesNotPanic=False):
CGThing.__init__(self)
self.descriptor = descriptor
self.name = name
self.returnType = returnType
self.args = args
self.alwaysInline = alwaysInline
self.extern = extern
self.unsafe_fn = extern or unsafe_fn
self.templateArgs = templateArgs
self.pub = pub
self.unsafe = unsafe
self.docs = docs
self.catchPanic = self.extern and not doesNotPanic
def _argstring(self):
return ', '.join([a.declare() for a in self.args])
def _template(self):
if self.templateArgs is None:
return ''
return '<%s>\n' % ', '.join(self.templateArgs)
def _docs(self):
if self.docs is None:
return ''
lines = self.docs.splitlines()
return ''.join('/// %s\n' % line for line in lines)
def _decorators(self):
decorators = []
if self.alwaysInline:
decorators.append('#[inline]')
if self.pub:
decorators.append('pub')
if self.unsafe_fn:
decorators.append('unsafe')
if self.extern:
decorators.append('extern')
if not decorators:
return ''
return ' '.join(decorators) + ' '
def _returnType(self):
return (" -> %s" % self.returnType) if self.returnType != "void" else ""
def define(self):
body = self.definition_body()
# Method will already be marked `unsafe` if `self.extern == True`
if self.unsafe and not self.extern:
body = CGWrapper(CGIndenter(body), pre="unsafe {\n", post="\n}")
if self.catchPanic:
body = CGWrapper(CGIndenter(body),
pre="let result = panic::catch_unwind(AssertUnwindSafe(|| {\n",
post=("""}));
match result {
Ok(result) => result,
Err(error) => {
store_panic_result(error);
return%s;
}
}
""" % ("" if self.returnType == "void" else " false")))
return CGWrapper(CGIndenter(body),
pre=self.definition_prologue(),
post=self.definition_epilogue()).define()
def definition_prologue(self):
return "%s%sfn %s%s(%s)%s {\n" % (self._docs(), self._decorators(),
self.name, self._template(),
self._argstring(), self._returnType())
def definition_epilogue(self):
return "\n}\n"
def definition_body(self):
raise NotImplementedError # Override me!
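# Illustrative sketch (hypothetical method name and types) of the skeleton
# emitted by definition_prologue/definition_epilogue around definition_body():
#   pub unsafe fn Wrap(cx: *mut JSContext, ...) -> Root<Foo> {
#       <definition_body()>
#   }
# When `extern` is set and panics are not ruled out, the body is additionally
# wrapped in panic::catch_unwind so a Rust panic cannot unwind across the FFI
# boundary.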
class CGConstructorEnabled(CGAbstractMethod):
"""
A method for testing whether we should be exposing this interface object.
This can perform various tests depending on what conditions are specified
on the interface.
"""
def __init__(self, descriptor):
CGAbstractMethod.__init__(self, descriptor,
'ConstructorEnabled', 'bool',
[Argument("*mut JSContext", "aCx"),
Argument("HandleObject", "aObj")],
unsafe_fn=True)
def definition_body(self):
conditions = []
iface = self.descriptor.interface
bits = " | ".join(sorted(
"InterfaceObjectMap::" + camel_to_upper_snake(i) for i in iface.exposureSet
))
conditions.append("is_exposed_in(aObj, %s)" % bits)
pref = iface.getExtendedAttribute("Pref")
if pref:
assert isinstance(pref, list) and len(pref) == 1
conditions.append('PREFS.get("%s").as_boolean().unwrap_or(false)' % pref[0])
func = iface.getExtendedAttribute("Func")
if func:
assert isinstance(func, list) and len(func) == 1
conditions.append("%s(aCx, aObj)" % func[0])
return CGList((CGGeneric(cond) for cond in conditions), " &&\n")
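# Illustrative output (hypothetical interface exposed only to Window and
# gated on a hypothetical preference); the conditions are joined with "&&":
#   is_exposed_in(aObj, InterfaceObjectMap::WINDOW) &&
#   PREFS.get("dom.example.enabled").as_boolean().unwrap_or(false)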
def CreateBindingJSObject(descriptor, parent=None):
assert not descriptor.isGlobal()
create = "let raw = Box::into_raw(object);\nlet _rt = RootedTraceable::new(&*raw);\n"
if descriptor.proxy:
create += """
let handler = RegisterBindings::proxy_handlers[PrototypeList::Proxies::%s as usize];
rooted!(in(cx) let private = PrivateValue(raw as *const libc::c_void));
let obj = NewProxyObject(cx, handler,
private.handle(),
proto.get(), %s.get(),
ptr::null_mut(), ptr::null_mut());
assert!(!obj.is_null());
rooted!(in(cx) let obj = obj);\
""" % (descriptor.name, parent)
else:
create += ("rooted!(in(cx) let obj = JS_NewObjectWithGivenProto(\n"
" cx, &Class.base as *const JSClass, proto.handle()));\n"
"assert!(!obj.is_null());\n"
"\n"
"JS_SetReservedSlot(obj.get(), DOM_OBJECT_SLOT,\n"
" PrivateValue(raw as *const libc::c_void));")
if descriptor.weakReferenceable:
create += """
JS_SetReservedSlot(obj.get(), DOM_WEAK_SLOT, PrivateValue(ptr::null()));"""
return create
def InitUnforgeablePropertiesOnHolder(descriptor, properties):
"""
Define the unforgeable properties on the unforgeable holder for
the interface represented by descriptor.
properties is a PropertyArrays instance.
"""
unforgeables = []
defineUnforgeableAttrs = "define_guarded_properties(cx, unforgeable_holder.handle(), %s);"
defineUnforgeableMethods = "define_guarded_methods(cx, unforgeable_holder.handle(), %s);"
unforgeableMembers = [
(defineUnforgeableAttrs, properties.unforgeable_attrs),
(defineUnforgeableMethods, properties.unforgeable_methods),
]
for template, array in unforgeableMembers:
if array.length() > 0:
unforgeables.append(CGGeneric(template % array.variableName()))
return CGList(unforgeables, "\n")
def CopyUnforgeablePropertiesToInstance(descriptor):
"""
Copy the unforgeable properties from the unforgeable holder for
this interface to the instance object we have.
"""
if not descriptor.hasUnforgeableMembers:
return ""
copyCode = ""
# For proxies, we want to define on the expando object, not directly on the
# reflector, so we can make sure we don't get confused by named getters.
if descriptor.proxy:
copyCode += """\
rooted!(in(cx) let expando = ensure_expando_object(cx, obj.handle()));
"""
obj = "expando"
else:
obj = "obj"
# We can't do the fast copy for globals, because we can't allocate the
# unforgeable holder for those with the right JSClass. Luckily, there
# aren't too many globals being created.
if descriptor.isGlobal():
copyFunc = "JS_CopyPropertiesFrom"
else:
copyFunc = "JS_InitializePropertiesFromCompatibleNativeObject"
copyCode += """\
rooted!(in(cx) let mut unforgeable_holder = ptr::null_mut());
unforgeable_holder.handle_mut().set(
JS_GetReservedSlot(proto.get(), DOM_PROTO_UNFORGEABLE_HOLDER_SLOT).to_object());
assert!(%(copyFunc)s(cx, %(obj)s.handle(), unforgeable_holder.handle()));
""" % {'copyFunc': copyFunc, 'obj': obj}
return copyCode
class CGWrapMethod(CGAbstractMethod):
"""
Class that generates the FooBinding::Wrap function for non-callback
interfaces.
"""
def __init__(self, descriptor):
assert not descriptor.interface.isCallback()
assert not descriptor.isGlobal()
args = [Argument('*mut JSContext', 'cx'), Argument('GlobalRef', 'scope'),
Argument("Box<%s>" % descriptor.concreteType, 'object')]
retval = 'Root<%s>' % descriptor.concreteType
CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args,
pub=True, unsafe=True)
def definition_body(self):
unforgeable = CopyUnforgeablePropertiesToInstance(self.descriptor)
create = CreateBindingJSObject(self.descriptor, "scope")
return CGGeneric("""\
let scope = scope.reflector().get_jsobject();
assert!(!scope.get().is_null());
assert!(((*JS_GetClass(scope.get())).flags & JSCLASS_IS_GLOBAL) != 0);
rooted!(in(cx) let mut proto = ptr::null_mut());
let _ac = JSAutoCompartment::new(cx, scope.get());
GetProtoObject(cx, scope, proto.handle_mut());
assert!(!proto.is_null());
%(createObject)s
%(copyUnforgeable)s
(*raw).init_reflector(obj.get());
Root::from_ref(&*raw)""" % {'copyUnforgeable': unforgeable, 'createObject': create})
class CGWrapGlobalMethod(CGAbstractMethod):
"""
Class that generates the FooBinding::Wrap function for global interfaces.
"""
def __init__(self, descriptor, properties):
assert not descriptor.interface.isCallback()
assert descriptor.isGlobal()
args = [Argument('*mut JSContext', 'cx'),
Argument("Box<%s>" % descriptor.concreteType, 'object')]
retval = 'Root<%s>' % descriptor.concreteType
CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args,
pub=True, unsafe=True)
self.properties = properties
def definition_body(self):
values = {
"unforgeable": CopyUnforgeablePropertiesToInstance(self.descriptor)
}
pairs = [
("define_guarded_properties", self.properties.attrs),
("define_guarded_methods", self.properties.methods),
("define_guarded_constants", self.properties.consts)
]
members = ["%s(cx, obj.handle(), %s);" % (function, array.variableName())
for (function, array) in pairs if array.length() > 0]
values["members"] = "\n".join(members)
return CGGeneric("""\
let raw = Box::into_raw(object);
let _rt = RootedTraceable::new(&*raw);
rooted!(in(cx) let mut obj = ptr::null_mut());
create_global_object(
cx,
&Class.base,
raw as *const libc::c_void,
_trace,
obj.handle_mut());
assert!(!obj.is_null());
(*raw).init_reflector(obj.get());
let _ac = JSAutoCompartment::new(cx, obj.get());
rooted!(in(cx) let mut proto = ptr::null_mut());
GetProtoObject(cx, obj.handle(), proto.handle_mut());
assert!(JS_SplicePrototype(cx, obj.handle(), proto.handle()));
let mut immutable = false;
assert!(JS_SetImmutablePrototype(cx, obj.handle(), &mut immutable));
assert!(immutable);
%(members)s
%(unforgeable)s
Root::from_ref(&*raw)\
""" % values)
class CGIDLInterface(CGThing):
"""
Class for codegen of an implementation of the IDLInterface trait.
"""
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
interface = self.descriptor.interface
name = self.descriptor.concreteType
if (interface.getUserData("hasConcreteDescendant", False) or
interface.getUserData("hasProxyDescendant", False)):
depth = self.descriptor.prototypeDepth
check = "class.interface_chain[%s] == PrototypeList::ID::%s" % (depth, name)
elif self.descriptor.proxy:
check = "class as *const _ == &Class as *const _"
else:
check = "class as *const _ == &Class.dom_class as *const _"
return """\
impl IDLInterface for %(name)s {
#[inline]
fn derives(class: &'static DOMClass) -> bool {
%(check)s
}
}
impl PartialEq for %(name)s {
fn eq(&self, other: &%(name)s) -> bool {
self as *const %(name)s == &*other
}
}
""" % {'check': check, 'name': name}
class CGAbstractExternMethod(CGAbstractMethod):
"""
Abstract base class for codegen of implementation-only (no
declaration) static methods.
"""
def __init__(self, descriptor, name, returnType, args, doesNotPanic=False):
CGAbstractMethod.__init__(self, descriptor, name, returnType, args,
inline=False, extern=True, doesNotPanic=doesNotPanic)
class PropertyArrays():
def __init__(self, descriptor):
self.static_methods = MethodDefiner(descriptor, "StaticMethods",
static=True, unforgeable=False)
self.static_attrs = AttrDefiner(descriptor, "StaticAttributes",
static=True, unforgeable=False)
self.methods = MethodDefiner(descriptor, "Methods", static=False, unforgeable=False)
self.unforgeable_methods = MethodDefiner(descriptor, "UnforgeableMethods",
static=False, unforgeable=True)
self.attrs = AttrDefiner(descriptor, "Attributes", static=False, unforgeable=False)
self.unforgeable_attrs = AttrDefiner(descriptor, "UnforgeableAttributes",
static=False, unforgeable=True)
self.consts = ConstDefiner(descriptor, "Constants")
@staticmethod
def arrayNames():
return [
"static_methods",
"static_attrs",
"methods",
"unforgeable_methods",
"attrs",
"unforgeable_attrs",
"consts",
]
def variableNames(self):
names = {}
for array in self.arrayNames():
names[array] = getattr(self, array).variableName()
return names
def __str__(self):
define = ""
for array in self.arrayNames():
define += str(getattr(self, array))
return define
class CGCreateInterfaceObjectsMethod(CGAbstractMethod):
"""
Generate the CreateInterfaceObjects method for an interface descriptor.
properties should be a PropertyArrays instance.
"""
def __init__(self, descriptor, properties, haveUnscopables):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global'),
Argument('*mut ProtoOrIfaceArray', 'cache')]
CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', 'void', args,
unsafe=True)
self.properties = properties
self.haveUnscopables = haveUnscopables
def definition_body(self):
name = self.descriptor.interface.identifier.name
if self.descriptor.interface.isNamespace():
if self.descriptor.interface.getExtendedAttribute("ProtoObjectHack"):
proto = "JS_GetObjectPrototype(cx, global)"
else:
proto = "JS_NewPlainObject(cx)"
if self.properties.static_methods.length():
methods = self.properties.static_methods.variableName()
else:
methods = "&[]"
return CGGeneric("""\
rooted!(in(cx) let proto = %(proto)s);
assert!(!proto.is_null());
rooted!(in(cx) let mut namespace = ptr::null_mut());
create_namespace_object(cx, global, proto.handle(), &NAMESPACE_OBJECT_CLASS,
%(methods)s, %(name)s, namespace.handle_mut());
assert!(!namespace.is_null());
assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null());
(*cache)[PrototypeList::Constructor::%(id)s as usize] = namespace.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize),
ptr::null_mut(),
namespace.get());
""" % {"id": MakeNativeName(name), "methods": methods, "name": str_to_const_array(name), "proto": proto})
if self.descriptor.interface.isCallback():
assert not self.descriptor.interface.ctor() and self.descriptor.interface.hasConstants()
return CGGeneric("""\
rooted!(in(cx) let mut interface = ptr::null_mut());
create_callback_interface_object(cx, global, sConstants, %(name)s, interface.handle_mut());
assert!(!interface.is_null());
assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null());
(*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize),
ptr::null_mut(),
interface.get());
""" % {"id": name, "name": str_to_const_array(name)})
if len(self.descriptor.prototypeChain) == 1:
if self.descriptor.interface.getExtendedAttribute("ExceptionClass"):
getPrototypeProto = "prototype_proto.set(JS_GetErrorPrototype(cx))"
elif self.descriptor.interface.isIteratorInterface():
getPrototypeProto = "prototype_proto.set(JS_GetIteratorPrototype(cx))"
else:
getPrototypeProto = "prototype_proto.set(JS_GetObjectPrototype(cx, global))"
else:
getPrototypeProto = ("%s::GetProtoObject(cx, global, prototype_proto.handle_mut())" %
toBindingNamespace(self.descriptor.prototypeChain[-2]))
code = [CGGeneric("""\
rooted!(in(cx) let mut prototype_proto = ptr::null_mut());
%s;
assert!(!prototype_proto.is_null());""" % getPrototypeProto)]
properties = {
"id": name,
"unscopables": "unscopable_names" if self.haveUnscopables else "&[]"
}
for arrayName in self.properties.arrayNames():
array = getattr(self.properties, arrayName)
if array.length():
properties[arrayName] = array.variableName()
else:
properties[arrayName] = "&[]"
if self.descriptor.isGlobal():
assert not self.haveUnscopables
proto_properties = {
"attrs": "&[]",
"consts": "&[]",
"id": name,
"methods": "&[]",
"unscopables": "&[]",
}
else:
proto_properties = properties
code.append(CGGeneric("""
rooted!(in(cx) let mut prototype = ptr::null_mut());
create_interface_prototype_object(cx,
prototype_proto.handle(),
&PrototypeClass,
%(methods)s,
%(attrs)s,
%(consts)s,
%(unscopables)s,
prototype.handle_mut());
assert!(!prototype.is_null());
assert!((*cache)[PrototypeList::ID::%(id)s as usize].is_null());
(*cache)[PrototypeList::ID::%(id)s as usize] = prototype.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::ID::%(id)s as isize),
ptr::null_mut(),
prototype.get());
""" % proto_properties))
if self.descriptor.interface.hasInterfaceObject():
properties["name"] = str_to_const_array(name)
if self.descriptor.interface.ctor():
properties["length"] = methodLength(self.descriptor.interface.ctor())
else:
properties["length"] = 0
if self.descriptor.interface.parent:
parentName = toBindingNamespace(self.descriptor.getParentName())
code.append(CGGeneric("""
rooted!(in(cx) let mut interface_proto = ptr::null_mut());
%s::GetConstructorObject(cx, global, interface_proto.handle_mut());""" % parentName))
else:
code.append(CGGeneric("""
rooted!(in(cx) let interface_proto = JS_GetFunctionPrototype(cx, global));"""))
code.append(CGGeneric("""\
assert!(!interface_proto.is_null());
rooted!(in(cx) let mut interface = ptr::null_mut());
create_noncallback_interface_object(cx,
global,
interface_proto.handle(),
&INTERFACE_OBJECT_CLASS,
%(static_methods)s,
%(static_attrs)s,
%(consts)s,
prototype.handle(),
%(name)s,
%(length)s,
interface.handle_mut());
assert!(!interface.is_null());""" % properties))
if self.descriptor.hasDescendants():
code.append(CGGeneric("""\
assert!((*cache)[PrototypeList::Constructor::%(id)s as usize].is_null());
(*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.get();
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize),
ptr::null_mut(),
interface.get());
""" % properties))
aliasedMembers = [m for m in self.descriptor.interface.members if m.isMethod() and m.aliases]
if aliasedMembers:
def defineAlias(alias):
if alias == "@@iterator":
symbolJSID = "RUST_SYMBOL_TO_JSID(GetWellKnownSymbol(cx, SymbolCode::iterator))"
getSymbolJSID = CGGeneric(fill("rooted!(in(cx) let iteratorId = ${symbolJSID});",
symbolJSID=symbolJSID))
defineFn = "JS_DefinePropertyById2"
prop = "iteratorId.handle()"
elif alias.startswith("@@"):
raise TypeError("Can't handle any well-known Symbol other than @@iterator")
else:
getSymbolJSID = None
defineFn = "JS_DefineProperty"
prop = '"%s"' % alias
return CGList([
getSymbolJSID,
# XXX If we ever create non-enumerable properties that can
# be aliased, we should consider making the aliases
# match the enumerability of the property being aliased.
CGGeneric(fill(
"""
assert!(${defineFn}(cx, prototype.handle(), ${prop}, aliasedVal.handle(),
JSPROP_ENUMERATE, None, None));
""",
defineFn=defineFn,
prop=prop))
], "\n")
def defineAliasesFor(m):
return CGList([
CGGeneric(fill(
"""
assert!(JS_GetProperty(cx, prototype.handle(),
b\"${prop}\0\" as *const u8 as *const _,
aliasedVal.handle_mut()));
""",
prop=m.identifier.name))
] + [defineAlias(alias) for alias in sorted(m.aliases)])
defineAliases = CGList([
CGGeneric(fill("""
// Set up aliases on the interface prototype object we just created.
""")),
CGGeneric("rooted!(in(cx) let mut aliasedVal = UndefinedValue());\n\n")
] + [defineAliasesFor(m) for m in sorted(aliasedMembers)])
code.append(defineAliases)
constructors = self.descriptor.interface.namedConstructors
if constructors:
decl = "let named_constructors: [(ConstructorClassHook, &'static [u8], u32); %d]" % len(constructors)
specs = []
for constructor in constructors:
hook = CONSTRUCT_HOOK_NAME + "_" + constructor.identifier.name
name = str_to_const_array(constructor.identifier.name)
length = methodLength(constructor)
specs.append(CGGeneric("(%s as ConstructorClassHook, %s, %d)" % (hook, name, length)))
values = CGIndenter(CGList(specs, "\n"), 4)
code.append(CGWrapper(values, pre="%s = [\n" % decl, post="\n];"))
code.append(CGGeneric("create_named_constructors(cx, global, &named_constructors, prototype.handle());"))
if self.descriptor.hasUnforgeableMembers:
# We want to use the same JSClass and prototype as the object we'll
# end up defining the unforgeable properties on in the end, so that
# we can use JS_InitializePropertiesFromCompatibleNativeObject to do
# a fast copy. In the case of proxies that's null, because the
# expando object is a vanilla object, but in the case of other DOM
# objects it's whatever our class is.
#
# Also, for a global we can't use the global's class; just use
# nullptr and when we do the copy off the holder we'll take a slower
# path. This also means that we don't need to worry about matching
# the prototype.
if self.descriptor.proxy or self.descriptor.isGlobal():
holderClass = "ptr::null()"
holderProto = "HandleObject::null()"
else:
holderClass = "&Class.base as *const JSClass"
holderProto = "prototype.handle()"
code.append(CGGeneric("""
rooted!(in(cx) let mut unforgeable_holder = ptr::null_mut());
unforgeable_holder.handle_mut().set(
JS_NewObjectWithoutMetadata(cx, %(holderClass)s, %(holderProto)s));
assert!(!unforgeable_holder.is_null());
""" % {'holderClass': holderClass, 'holderProto': holderProto}))
code.append(InitUnforgeablePropertiesOnHolder(self.descriptor, self.properties))
code.append(CGGeneric("""\
JS_SetReservedSlot(prototype.get(), DOM_PROTO_UNFORGEABLE_HOLDER_SLOT,
ObjectValue(&*unforgeable_holder.get()))"""))
return CGList(code, "\n")
class CGGetPerInterfaceObject(CGAbstractMethod):
"""
A method for getting a per-interface object (a prototype object or interface
constructor object).
"""
def __init__(self, descriptor, name, idPrefix="", pub=False):
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'global'),
Argument('MutableHandleObject', 'rval')]
CGAbstractMethod.__init__(self, descriptor, name,
'void', args, pub=pub, unsafe=True)
self.id = idPrefix + "::" + MakeNativeName(self.descriptor.name)
def definition_body(self):
return CGGeneric("""
assert!(((*JS_GetClass(global.get())).flags & JSCLASS_DOM_GLOBAL) != 0);
/* Check to see whether the interface objects are already installed */
let proto_or_iface_array = get_proto_or_iface_array(global.get());
rval.set((*proto_or_iface_array)[%(id)s as usize]);
if !rval.get().is_null() {
return;
}
CreateInterfaceObjects(cx, global, proto_or_iface_array);
rval.set((*proto_or_iface_array)[%(id)s as usize]);
assert!(!rval.get().is_null());
""" % {"id": self.id})
class CGGetProtoObjectMethod(CGGetPerInterfaceObject):
"""
A method for getting the interface prototype object.
"""
def __init__(self, descriptor):
CGGetPerInterfaceObject.__init__(self, descriptor, "GetProtoObject",
"PrototypeList::ID", pub=True)
def definition_body(self):
return CGList([
CGGeneric("""\
/* Get the interface prototype object for this class. This will create the
object as needed. */"""),
CGGetPerInterfaceObject.definition_body(self),
])
class CGGetConstructorObjectMethod(CGGetPerInterfaceObject):
"""
A method for getting the interface constructor object.
"""
def __init__(self, descriptor):
CGGetPerInterfaceObject.__init__(self, descriptor, "GetConstructorObject",
"PrototypeList::Constructor",
pub=True)
def definition_body(self):
return CGList([
CGGeneric("""\
/* Get the interface object for this class. This will create the object as
needed. */"""),
CGGetPerInterfaceObject.definition_body(self),
])
class CGDefineProxyHandler(CGAbstractMethod):
"""
A method to create and cache the proxy trap for a given interface.
"""
def __init__(self, descriptor):
assert descriptor.proxy
CGAbstractMethod.__init__(self, descriptor, 'DefineProxyHandler',
'*const libc::c_void', [],
pub=True, unsafe=True)
def define(self):
return CGAbstractMethod.define(self)
def definition_body(self):
customDefineProperty = 'proxyhandler::define_property'
if self.descriptor.operations['IndexedSetter'] or self.descriptor.operations['NamedSetter']:
customDefineProperty = 'defineProperty'
customDelete = 'proxyhandler::delete'
if self.descriptor.operations['NamedDeleter']:
customDelete = 'delete'
getOwnEnumerablePropertyKeys = "own_property_keys"
if self.descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
getOwnEnumerablePropertyKeys = "getOwnEnumerablePropertyKeys"
args = {
"defineProperty": customDefineProperty,
"delete": customDelete,
"getOwnEnumerablePropertyKeys": getOwnEnumerablePropertyKeys,
"trace": TRACE_HOOK_NAME,
"finalize": FINALIZE_HOOK_NAME,
}
return CGGeneric("""\
let traps = ProxyTraps {
enter: None,
getOwnPropertyDescriptor: Some(getOwnPropertyDescriptor),
defineProperty: Some(%(defineProperty)s),
ownPropertyKeys: Some(own_property_keys),
delete_: Some(%(delete)s),
enumerate: None,
getPrototypeIfOrdinary: Some(proxyhandler::get_prototype_if_ordinary),
preventExtensions: Some(proxyhandler::prevent_extensions),
isExtensible: Some(proxyhandler::is_extensible),
has: None,
get: Some(get),
set: None,
call: None,
construct: None,
getPropertyDescriptor: Some(get_property_descriptor),
hasOwn: Some(hasOwn),
getOwnEnumerablePropertyKeys: Some(%(getOwnEnumerablePropertyKeys)s),
nativeCall: None,
hasInstance: None,
objectClassIs: None,
className: Some(className),
fun_toString: None,
boxedValue_unbox: None,
defaultValue: None,
trace: Some(%(trace)s),
finalize: Some(%(finalize)s),
objectMoved: None,
isCallable: None,
isConstructor: None,
};
CreateProxyHandler(&traps, &Class as *const _ as *const _)\
""" % args)
class CGDefineDOMInterfaceMethod(CGAbstractMethod):
"""
A method for resolve hooks to try to lazily define the interface object for
a given interface.
"""
def __init__(self, descriptor):
assert descriptor.interface.hasInterfaceObject()
args = [
Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'global'),
]
CGAbstractMethod.__init__(self, descriptor, 'DefineDOMInterface',
'void', args, pub=True, unsafe_fn=True)
def define(self):
return CGAbstractMethod.define(self)
def definition_body(self):
if self.descriptor.interface.isCallback() or self.descriptor.interface.isNamespace():
function = "GetConstructorObject"
else:
function = "GetProtoObject"
return CGGeneric("""\
assert!(!global.get().is_null());
if !ConstructorEnabled(cx, global) {
return;
}
rooted!(in(cx) let mut proto = ptr::null_mut());
%s(cx, global, proto.handle_mut());
assert!(!proto.is_null());""" % (function,))
def needCx(returnType, arguments, considerTypes):
return (considerTypes and
(typeNeedsCx(returnType, True) or
any(typeNeedsCx(a.type) for a in arguments)))
class CGCallGenerator(CGThing):
"""
A class to generate an actual call to a native Rust object. Assumes that the
object is stored in a variable whose name is given by the |object| argument.
errorResult should be a string for the value to return in case of an
exception from the native code, or None if no error reporting is needed.
"""
def __init__(self, errorResult, arguments, argsPre, returnType,
extendedAttributes, descriptor, nativeMethodName,
static, object="this"):
CGThing.__init__(self)
assert errorResult is None or isinstance(errorResult, str)
isFallible = errorResult is not None
result = getRetvalDeclarationForType(returnType, descriptor)
if isFallible:
result = CGWrapper(result, pre="Result<", post=", Error>")
args = CGList([CGGeneric(arg) for arg in argsPre], ", ")
for (a, name) in arguments:
# XXXjdm Perhaps we should pass all nontrivial types by borrowed pointer
if a.type.isDictionary():
name = "&" + name
args.append(CGGeneric(name))
needsCx = needCx(returnType, (a for (a, _) in arguments), True)
if "cx" not in argsPre and needsCx:
args.prepend(CGGeneric("cx"))
# Build up our actual call
self.cgRoot = CGList([], "\n")
call = CGGeneric(nativeMethodName)
if static:
call = CGWrapper(call, pre="%s::" % MakeNativeName(descriptor.interface.identifier.name))
else:
call = CGWrapper(call, pre="%s." % object)
call = CGList([call, CGWrapper(args, pre="(", post=")")])
self.cgRoot.append(CGList([
CGGeneric("let result: "),
result,
CGGeneric(" = "),
call,
CGGeneric(";"),
]))
if isFallible:
if static:
glob = ""
else:
glob = " let global = global_root_from_reflector(this);\n"
self.cgRoot.append(CGGeneric(
"let result = match result {\n"
" Ok(result) => result,\n"
" Err(e) => {\n"
"%s"
" throw_dom_exception(cx, global.r(), e);\n"
" return%s;\n"
" },\n"
"};" % (glob, errorResult)))
def define(self):
return self.cgRoot.define()
class CGPerSignatureCall(CGThing):
"""
This class handles the guts of generating code for a particular
call signature. A call signature consists of four things:
1) A return type, which can be None to indicate that there is no
actual return value (e.g. this is an attribute setter) or an
IDLType if there's an IDL type involved (including |void|).
2) An argument list, which is allowed to be empty.
3) A name of a native method to call.
4) Whether or not this method is static.
We also need to know whether this is a method or a getter/setter
to do error reporting correctly.
The idlNode parameter can be either a method or an attr. We can query
|idlNode.identifier| in both cases, so we can be agnostic between the two.
"""
# XXXbz For now each entry in the argument list is either an
# IDLArgument or a FakeArgument, but longer-term we may want to
# have ways of flagging things like JSContext* or optional_argc in
# there.
def __init__(self, returnType, argsPre, arguments, nativeMethodName, static,
descriptor, idlNode, argConversionStartsAt=0,
getter=False, setter=False):
CGThing.__init__(self)
self.returnType = returnType
self.descriptor = descriptor
self.idlNode = idlNode
self.extendedAttributes = descriptor.getExtendedAttributes(idlNode,
getter=getter,
setter=setter)
self.argsPre = argsPre
self.arguments = arguments
self.argCount = len(arguments)
cgThings = []
cgThings.extend([CGArgumentConverter(arguments[i], i, self.getArgs(),
self.getArgc(), self.descriptor,
invalidEnumValueFatal=not setter) for
i in range(argConversionStartsAt, self.argCount)])
errorResult = None
if self.isFallible():
errorResult = " false"
if idlNode.isMethod() and idlNode.isMaplikeOrSetlikeOrIterableMethod():
if idlNode.maplikeOrSetlikeOrIterable.isMaplike() or \
idlNode.maplikeOrSetlikeOrIterable.isSetlike():
raise TypeError('Maplike/Setlike methods are not supported yet')
else:
cgThings.append(CGIterableMethodGenerator(descriptor,
idlNode.maplikeOrSetlikeOrIterable,
idlNode.identifier.name))
else:
cgThings.append(CGCallGenerator(
errorResult,
self.getArguments(), self.argsPre, returnType,
self.extendedAttributes, descriptor, nativeMethodName,
static))
self.cgRoot = CGList(cgThings, "\n")
def getArgs(self):
return "args" if self.argCount > 0 else ""
def getArgc(self):
return "argc"
def getArguments(self):
def process(arg, i):
argVal = "arg" + str(i)
if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
argVal += ".r()"
return argVal
return [(a, process(a, i)) for (i, a) in enumerate(self.arguments)]
def isFallible(self):
return 'infallible' not in self.extendedAttributes
def wrap_return_value(self):
return wrapForType('args.rval()')
def define(self):
return (self.cgRoot.define() + "\n" + self.wrap_return_value())
class CGSwitch(CGList):
"""
A class to generate code for a switch statement.
Takes three constructor arguments: an expression, a list of cases,
and an optional default.
Each case is a CGCase. The default is a CGThing for the body of
the default case, if any.
"""
def __init__(self, expression, cases, default=None):
CGList.__init__(self, [CGIndenter(c) for c in cases], "\n")
self.prepend(CGWrapper(CGGeneric(expression),
pre="match ", post=" {"))
if default is not None:
self.append(
CGIndenter(
CGWrapper(
CGIndenter(default),
pre="_ => {\n",
post="\n}"
)
)
)
self.append(CGGeneric("}"))
class CGCase(CGList):
"""
A class to generate code for a case statement.
Takes three constructor arguments: an expression, a CGThing for
the body (allowed to be None if there is no body), and an optional
argument (defaulting to False) for whether to fall through.
"""
def __init__(self, expression, body, fallThrough=False):
CGList.__init__(self, [], "\n")
self.append(CGWrapper(CGGeneric(expression), post=" => {"))
bodyList = CGList([body], "\n")
if fallThrough:
raise TypeError("fall through required but unsupported")
# bodyList.append(CGGeneric('panic!("fall through unsupported"); /* Fall through */'))
self.append(CGIndenter(bodyList))
self.append(CGGeneric("}"))
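# Illustrative sketch of what CGSwitch/CGCase emit (hypothetical expression
# and arms):
#   match expression {
#       Case1 => {
#           <body>
#       }
#       _ => {
#           <default body>
#       }
#   }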
class CGGetterCall(CGPerSignatureCall):
"""
A class to generate a native object getter call for a particular IDL
getter.
"""
def __init__(self, argsPre, returnType, nativeMethodName, descriptor, attr):
CGPerSignatureCall.__init__(self, returnType, argsPre, [],
nativeMethodName, attr.isStatic(), descriptor,
attr, getter=True)
class FakeArgument():
"""
A class that quacks like an IDLArgument. This is used to make
setters look like method calls or for special operations.
"""
def __init__(self, type, interfaceMember, allowTreatNonObjectAsNull=False):
self.type = type
self.optional = False
self.variadic = False
self.defaultValue = None
self._allowTreatNonObjectAsNull = allowTreatNonObjectAsNull
self.treatNullAs = interfaceMember.treatNullAs
self.enforceRange = False
self.clamp = False
def allowTreatNonCallableAsNull(self):
return self._allowTreatNonObjectAsNull
class CGSetterCall(CGPerSignatureCall):
"""
A class to generate a native object setter call for a particular IDL
setter.
"""
def __init__(self, argsPre, argType, nativeMethodName, descriptor, attr):
CGPerSignatureCall.__init__(self, None, argsPre,
[FakeArgument(argType, attr, allowTreatNonObjectAsNull=True)],
nativeMethodName, attr.isStatic(), descriptor, attr,
setter=True)
def wrap_return_value(self):
# We have no return value
return "\nreturn true;"
def getArgc(self):
return "1"
class CGAbstractStaticBindingMethod(CGAbstractMethod):
"""
Common class to generate the JSNatives for all our static methods, getters
and setters. This will generate the function declaration and unwrap the
global object. Subclasses are expected to override the generate_code
function to do the rest of the work. This function should return a
CGThing which is already properly indented.
"""
def __init__(self, descriptor, name):
args = [
Argument('*mut JSContext', 'cx'),
Argument('libc::c_uint', 'argc'),
Argument('*mut JSVal', 'vp'),
]
CGAbstractMethod.__init__(self, descriptor, name, "bool", args, extern=True)
def definition_body(self):
preamble = CGGeneric("""\
let global = global_root_from_object(JS_CALLEE(cx, vp).to_object());
""")
return CGList([preamble, self.generate_code()])
def generate_code(self):
raise NotImplementedError # Override me!
class CGSpecializedMethod(CGAbstractExternMethod):
"""
A class for generating the Rust code for a specialized method that the JIT
can call with lower overhead.
"""
def __init__(self, descriptor, method):
self.method = method
name = method.identifier.name
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('*const JSJitMethodCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)
def definition_body(self):
nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
self.method)
return CGWrapper(CGMethodCall([], nativeName, self.method.isStatic(),
self.descriptor, self.method),
pre="let this = &*this;\n"
"let args = &*args;\n"
"let argc = args._base.argc_;\n")
@staticmethod
def makeNativeName(descriptor, method):
name = method.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
return MakeNativeName(nativeName)
class CGStaticMethod(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static method.
"""
def __init__(self, descriptor, method):
self.method = method
name = method.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
self.method)
setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
call = CGMethodCall(["global.r()"], nativeName, True, self.descriptor, self.method)
return CGList([setupArgs, call])
class CGSpecializedGetter(CGAbstractExternMethod):
"""
A class for generating the code for a specialized attribute getter
that the JIT can call with lower overhead.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'get_' + descriptor.internalNameFor(attr.identifier.name)
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', '_obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('JSJitGetterCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)
def definition_body(self):
nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
self.attr)
return CGWrapper(CGGetterCall([], self.attr.type, nativeName,
self.descriptor, self.attr),
pre="let this = &*this;\n")
@staticmethod
def makeNativeName(descriptor, attr):
name = attr.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
nativeName = MakeNativeName(nativeName)
infallible = ('infallible' in
descriptor.getExtendedAttributes(attr, getter=True))
if attr.type.nullable() or not infallible:
return "Get" + nativeName
return nativeName
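# For example (hypothetical attribute `value`): an infallible, non-nullable
# getter maps to the native name `Value`, while a nullable or fallible one
# maps to `GetValue`, which conventionally returns an Option or Fallible
# value on the Rust side.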
class CGStaticGetter(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static attribute getter.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'get_' + attr.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
self.attr)
setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
call = CGGetterCall(["global.r()"], self.attr.type, nativeName, self.descriptor,
self.attr)
return CGList([setupArgs, call])
class CGSpecializedSetter(CGAbstractExternMethod):
"""
A class for generating the code for a specialized attribute setter
that the JIT can call with lower overhead.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'set_' + descriptor.internalNameFor(attr.identifier.name)
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('JSJitSetterCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)
def definition_body(self):
nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
self.attr)
return CGWrapper(CGSetterCall([], self.attr.type, nativeName,
self.descriptor, self.attr),
pre="let this = &*this;\n")
@staticmethod
def makeNativeName(descriptor, attr):
name = attr.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
return "Set" + MakeNativeName(nativeName)
class CGStaticSetter(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static attribute setter.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'set_' + attr.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
self.attr)
checkForArg = CGGeneric(
"let args = CallArgs::from_vp(vp, argc);\n"
"if argc == 0 {\n"
" throw_type_error(cx, \"Not enough arguments to %s setter.\");\n"
" return false;\n"
"}" % self.attr.identifier.name)
call = CGSetterCall(["global.r()"], self.attr.type, nativeName, self.descriptor,
self.attr)
return CGList([checkForArg, call])
class CGSpecializedForwardingSetter(CGSpecializedSetter):
"""
A class for generating the code for an IDL attribute forwarding setter.
"""
def __init__(self, descriptor, attr):
CGSpecializedSetter.__init__(self, descriptor, attr)
def definition_body(self):
attrName = self.attr.identifier.name
forwardToAttrName = self.attr.getExtendedAttribute("PutForwards")[0]
# JS_GetProperty and JS_SetProperty can only deal with ASCII
assert all(ord(c) < 128 for c in attrName)
assert all(ord(c) < 128 for c in forwardToAttrName)
return CGGeneric("""\
rooted!(in(cx) let mut v = UndefinedValue());
if !JS_GetProperty(cx, obj, %s as *const u8 as *const libc::c_char, v.handle_mut()) {
return false;
}
if !v.is_object() {
throw_type_error(cx, "Value.%s is not an object.");
return false;
}
rooted!(in(cx) let target_obj = v.to_object());
JS_SetProperty(cx, target_obj.handle(), %s as *const u8 as *const libc::c_char, args.get(0))
""" % (str_to_const_array(attrName), attrName, str_to_const_array(forwardToAttrName)))
class CGSpecializedReplaceableSetter(CGSpecializedSetter):
"""
A class for generating the code for an IDL replaceable attribute setter.
"""
def __init__(self, descriptor, attr):
CGSpecializedSetter.__init__(self, descriptor, attr)
def definition_body(self):
assert self.attr.readonly
name = str_to_const_array(self.attr.identifier.name)
# JS_DefineProperty can only deal with ASCII.
assert all(ord(c) < 128 for c in name)
return CGGeneric("""\
JS_DefineProperty(cx, obj, %s as *const u8 as *const libc::c_char,
args.get(0), JSPROP_ENUMERATE, None, None)""" % name)
class CGMemberJITInfo(CGThing):
"""
A class for generating the JITInfo for a property that points to
our specialized getter and setter.
"""
def __init__(self, descriptor, member):
self.member = member
self.descriptor = descriptor
def defineJitInfo(self, infoName, opName, opType, infallible, movable,
aliasSet, alwaysInSlot, lazilyInSlot, slotIndex,
returnTypes, args):
"""
aliasSet is a JSJitInfo_AliasSet value, without the "JSJitInfo_AliasSet::" bit.
args is None if we don't want to output argTypes for some
reason (e.g. we have overloads or we're not a method) and
otherwise an iterable of the arguments for this method.
"""
assert not movable or aliasSet != "AliasEverything" # Can't move write-aliasing things
assert not alwaysInSlot or movable # Things always in slots had better be movable
def jitInfoInitializer(isTypedMethod):
initializer = fill(
"""
JSJitInfo {
call: ${opName} as *const os::raw::c_void,
protoID: PrototypeList::ID::${name} as u16,
depth: ${depth},
_bitfield_1:
JSJitInfo::new_bitfield_1(
JSJitInfo_OpType::${opType} as u8,
JSJitInfo_AliasSet::${aliasSet} as u8,
JSValueType::${returnType} as u8,
${isInfallible},
${isMovable},
${isEliminatable},
${isAlwaysInSlot},
${isLazilyCachedInSlot},
${isTypedMethod},
${slotIndex} as u16,
)
}
""",
opName=opName,
name=self.descriptor.name,
depth=self.descriptor.interface.inheritanceDepth(),
opType=opType,
aliasSet=aliasSet,
returnType=reduce(CGMemberJITInfo.getSingleReturnType, returnTypes,
""),
isInfallible=toStringBool(infallible),
isMovable=toStringBool(movable),
# FIXME(nox): https://github.com/servo/servo/issues/10991
isEliminatable=toStringBool(False),
isAlwaysInSlot=toStringBool(alwaysInSlot),
isLazilyCachedInSlot=toStringBool(lazilyInSlot),
isTypedMethod=toStringBool(isTypedMethod),
slotIndex=slotIndex)
return initializer.rstrip()
if args is not None:
argTypes = "%s_argTypes" % infoName
args = [CGMemberJITInfo.getJSArgType(arg.type) for arg in args]
args.append("JSJitInfo_ArgType::ArgTypeListEnd as i32")
argTypesDecl = (
"const %s: [i32; %d] = [ %s ];\n" %
(argTypes, len(args), ", ".join(args)))
return fill(
"""
$*{argTypesDecl}
const ${infoName}: JSTypedMethodJitInfo = JSTypedMethodJitInfo {
base: ${jitInfo},
argTypes: &${argTypes} as *const _ as *const JSJitInfo_ArgType,
};
""",
argTypesDecl=argTypesDecl,
infoName=infoName,
jitInfo=indent(jitInfoInitializer(True)),
argTypes=argTypes)
return ("\n"
"const %s: JSJitInfo = %s;\n"
% (infoName, jitInfoInitializer(False)))
def define(self):
if self.member.isAttr():
internalMemberName = self.descriptor.internalNameFor(self.member.identifier.name)
getterinfo = ("%s_getterinfo" % internalMemberName)
getter = ("get_%s" % internalMemberName)
getterinfal = "infallible" in self.descriptor.getExtendedAttributes(self.member, getter=True)
movable = self.mayBeMovable() and getterinfal
aliasSet = self.aliasSet()
isAlwaysInSlot = self.member.getExtendedAttribute("StoreInSlot")
if self.member.slotIndices is not None:
assert isAlwaysInSlot or self.member.getExtendedAttribute("Cached")
isLazilyCachedInSlot = not isAlwaysInSlot
slotIndex = memberReservedSlot(self.member) # noqa:FIXME: memberReservedSlot is not defined
# We'll statically assert that this is not too big in
# CGUpdateMemberSlotsMethod, in the case when
# isAlwaysInSlot is true.
else:
isLazilyCachedInSlot = False
slotIndex = "0"
result = self.defineJitInfo(getterinfo, getter, "Getter",
getterinfal, movable, aliasSet,
isAlwaysInSlot, isLazilyCachedInSlot,
slotIndex,
[self.member.type], None)
if (not self.member.readonly or self.member.getExtendedAttribute("PutForwards")
or self.member.getExtendedAttribute("Replaceable")):
setterinfo = ("%s_setterinfo" % internalMemberName)
setter = ("set_%s" % internalMemberName)
# Setters are always fallible, since they have to do a typed unwrap.
result += self.defineJitInfo(setterinfo, setter, "Setter",
False, False, "AliasEverything",
False, False, "0",
[BuiltinTypes[IDLBuiltinType.Types.void]],
None)
return result
if self.member.isMethod():
methodinfo = ("%s_methodinfo" % self.member.identifier.name)
method = ("%s" % self.member.identifier.name)
# Methods are marked infallible for the JIT if the native implementation
# is infallible, they have no arguments to unwrap, and their return type
# is infallible to wrap up for return.
sigs = self.member.signatures()
if len(sigs) != 1:
# Don't handle overloading. If there's more than one signature,
# one of them must take arguments.
methodInfal = False
args = None
movable = False
else:
sig = sigs[0]
# For methods that affect nothing, it's OK to set movable to our
# notion of infallible on the native side, without considering
# argument conversions, since argument conversions that can
# reliably throw would be effectful anyway and the jit doesn't
# move effectful things.
hasInfallibleImpl = "infallible" in self.descriptor.getExtendedAttributes(self.member)
movable = self.mayBeMovable() and hasInfallibleImpl
# XXXbz can we move the smarts about fallibility due to arg
# conversions into the JIT, using our new args stuff?
if (len(sig[1]) != 0):
# We have arguments or our return-value boxing can fail
methodInfal = False
else:
methodInfal = hasInfallibleImpl
# For now, only bother to output args if we're side-effect-free.
if self.member.affects == "Nothing":
args = sig[1]
else:
args = None
aliasSet = self.aliasSet()
result = self.defineJitInfo(methodinfo, method, "Method",
methodInfal, movable, aliasSet,
False, False, "0",
[s[0] for s in sigs], args)
return result
raise TypeError("Illegal member type to CGPropertyJITInfo")
def mayBeMovable(self):
"""
Returns whether this attribute or method may be movable, just
based on Affects/DependsOn annotations.
"""
affects = self.member.affects
dependsOn = self.member.dependsOn
assert affects in IDLInterfaceMember.AffectsValues
assert dependsOn in IDLInterfaceMember.DependsOnValues
# Things that are DependsOn=DeviceState are not movable, because we
# don't want them coalesced with each other or loop-hoisted, since
# their return value can change even if nothing is going on from our
# point of view.
return (affects == "Nothing" and
(dependsOn != "Everything" and dependsOn != "DeviceState"))
def aliasSet(self):
"""Returns the alias set to store in the jitinfo. This may not be the
effective alias set the JIT uses, depending on whether we have enough
information about our args to allow the JIT to prove that effectful
argument conversions won't happen.
"""
dependsOn = self.member.dependsOn
assert dependsOn in IDLInterfaceMember.DependsOnValues
if dependsOn == "Nothing" or dependsOn == "DeviceState":
assert self.member.affects == "Nothing"
return "AliasNone"
if dependsOn == "DOMState":
assert self.member.affects == "Nothing"
return "AliasDOMSets"
return "AliasEverything"
@staticmethod
def getJSReturnTypeTag(t):
if t.nullable():
# Sometimes it might return null, sometimes not
return "JSVAL_TYPE_UNKNOWN"
if t.isVoid():
# No return, every time
return "JSVAL_TYPE_UNDEFINED"
if t.isArray():
# No idea yet
assert False
if t.isSequence():
return "JSVAL_TYPE_OBJECT"
if t.isMozMap():
return "JSVAL_TYPE_OBJECT"
if t.isGeckoInterface():
return "JSVAL_TYPE_OBJECT"
if t.isString():
return "JSVAL_TYPE_STRING"
if t.isEnum():
return "JSVAL_TYPE_STRING"
if t.isCallback():
return "JSVAL_TYPE_OBJECT"
if t.isAny():
# The whole point is to return various stuff
return "JSVAL_TYPE_UNKNOWN"
if t.isObject():
return "JSVAL_TYPE_OBJECT"
if t.isSpiderMonkeyInterface():
return "JSVAL_TYPE_OBJECT"
if t.isUnion():
u = t.unroll()
if u.hasNullableType:
# Might be null or not
return "JSVAL_TYPE_UNKNOWN"
return reduce(CGMemberJITInfo.getSingleReturnType,
u.flatMemberTypes, "")
if t.isDictionary():
return "JSVAL_TYPE_OBJECT"
if t.isDate():
return "JSVAL_TYPE_OBJECT"
if not t.isPrimitive():
raise TypeError("No idea what type " + str(t) + " is.")
tag = t.tag()
if tag == IDLType.Tags.bool:
return "JSVAL_TYPE_BOOLEAN"
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32]:
return "JSVAL_TYPE_INT32"
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
# These all use JS_NumberValue, which can return int or double.
# But TI treats "double" as meaning "int or double", so we're
# good to return JSVAL_TYPE_DOUBLE here.
return "JSVAL_TYPE_DOUBLE"
if tag != IDLType.Tags.uint32:
raise TypeError("No idea what type " + str(t) + " is.")
# uint32 is sometimes int and sometimes double.
return "JSVAL_TYPE_DOUBLE"
@staticmethod
def getSingleReturnType(existingType, t):
type = CGMemberJITInfo.getJSReturnTypeTag(t)
if existingType == "":
# First element of the list; just return its type
return type
if type == existingType:
return existingType
if ((type == "JSVAL_TYPE_DOUBLE" and
existingType == "JSVAL_TYPE_INT32") or
(existingType == "JSVAL_TYPE_DOUBLE" and
type == "JSVAL_TYPE_INT32")):
# Promote INT32 to DOUBLE as needed
return "JSVAL_TYPE_DOUBLE"
# Different types
return "JSVAL_TYPE_UNKNOWN"
@staticmethod
def getJSArgType(t):
assert not t.isVoid()
if t.nullable():
# Sometimes it might return null, sometimes not
return "JSJitInfo_ArgType::Null as i32 | %s" % CGMemberJITInfo.getJSArgType(t.inner)
if t.isArray():
# No idea yet
assert False
if t.isSequence():
return "JSJitInfo_ArgType::Object as i32"
if t.isGeckoInterface():
return "JSJitInfo_ArgType::Object as i32"
if t.isString():
return "JSJitInfo_ArgType::String as i32"
if t.isEnum():
return "JSJitInfo_ArgType::String as i32"
if t.isCallback():
return "JSJitInfo_ArgType::Object as i32"
if t.isAny():
# The whole point is to return various stuff
return "JSJitInfo_ArgType::Any as i32"
if t.isObject():
return "JSJitInfo_ArgType::Object as i32"
if t.isSpiderMonkeyInterface():
return "JSJitInfo_ArgType::Object as i32"
if t.isUnion():
u = t.unroll()
type = "JSJitInfo::Null as i32" if u.hasNullableType else ""
return reduce(CGMemberJITInfo.getSingleArgType,
u.flatMemberTypes, type)
if t.isDictionary():
return "JSJitInfo_ArgType::Object as i32"
if t.isDate():
return "JSJitInfo_ArgType::Object as i32"
if not t.isPrimitive():
raise TypeError("No idea what type " + str(t) + " is.")
tag = t.tag()
if tag == IDLType.Tags.bool:
return "JSJitInfo_ArgType::Boolean as i32"
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32]:
return "JSJitInfo_ArgType::Integer as i32"
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
# These all use JS_NumberValue, which can return int or double.
# But TI treats "double" as meaning "int or double", so we're
# good to return JSJitInfo_ArgType::Double here.
return "JSJitInfo_ArgType::Double as i32"
if tag != IDLType.Tags.uint32:
raise TypeError("No idea what type " + str(t) + " is.")
# uint32 is sometimes int and sometimes double.
return "JSJitInfo_ArgType::Double as i32"
@staticmethod
def getSingleArgType(existingType, t):
type = CGMemberJITInfo.getJSArgType(t)
if existingType == "":
# First element of the list; just return its type
return type
if type == existingType:
return existingType
return "%s | %s" % (existingType, type)
def getEnumValueName(value):
# Some enum values can be empty strings. Others might have weird
# characters in them. Deal with the former by returning "_empty",
# deal with possible name collisions from that by throwing if the
# enum value is actually "_empty", and throw on any value
# containing non-ASCII chars for now. Replace all chars other than
# [0-9A-Za-z_] with '_'.
if re.match("[^\x20-\x7E]", value):
raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters')
if re.match("^[0-9]", value):
raise SyntaxError('Enum value "' + value + '" starts with a digit')
value = re.sub(r'[^0-9A-Za-z_]', '_', value)
if re.match("^_[A-Z]|__", value):
raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec')
if value == "_empty":
raise SyntaxError('"_empty" is not an IDL enum value we support yet')
if value == "":
return "_empty"
return MakeNativeName(value)
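# Illustrative mappings (hypothetical enum values, assuming MakeNativeName
# capitalizes the first letter): "" becomes "_empty", and "no-referrer"
# becomes "No_referrer" after '-' is replaced with '_'; any value containing
# non-ASCII characters is rejected with a SyntaxError.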
class CGEnum(CGThing):
def __init__(self, enum):
CGThing.__init__(self)
decl = """\
#[repr(usize)]
#[derive(JSTraceable, PartialEq, Copy, Clone, HeapSizeOf, Debug)]
pub enum %s {
%s
}
""" % (enum.identifier.name, ",\n ".join(map(getEnumValueName, enum.values())))
inner = """\
use dom::bindings::conversions::ToJSValConvertible;
use js::jsapi::{JSContext, MutableHandleValue};
use js::jsval::JSVal;
pub const strings: &'static [&'static str] = &[
%s,
];
impl ToJSValConvertible for super::%s {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
strings[*self as usize].to_jsval(cx, rval);
}
}
""" % (",\n ".join(['"%s"' % val for val in enum.values()]), enum.identifier.name)
self.cgRoot = CGList([
CGGeneric(decl),
CGNamespace.build([enum.identifier.name + "Values"],
CGIndenter(CGGeneric(inner)), public=True),
])
def define(self):
return self.cgRoot.define()
def convertConstIDLValueToRust(value):
tag = value.type.tag()
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32, IDLType.Tags.uint32,
IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
return str(value.value)
if tag == IDLType.Tags.bool:
return toStringBool(value.value)
raise TypeError("Const value of unhandled type: " + value.type)
class CGConstant(CGThing):
def __init__(self, constants):
CGThing.__init__(self)
self.constants = constants
def define(self):
def stringDecl(const):
name = const.identifier.name
value = convertConstIDLValueToRust(const.value)
return CGGeneric("pub const %s: %s = %s;\n" % (name, builtinNames[const.value.type.tag()], value))
return CGIndenter(CGList(stringDecl(m) for m in self.constants)).define()
def getUnionTypeTemplateVars(type, descriptorProvider):
# For dictionaries and sequences we need to pass None as the failureCode
# for getJSToNativeConversionInfo.
# Also, for dictionaries we would need to handle conversion of
# null/undefined to the dictionary correctly.
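    # The returned dict feeds CGUnionStruct/CGUnionConversionStruct; e.g. for a
    # DOMString member it is roughly {"name": "String", "typeName": "DOMString",
    # "jsConversion": <conversion template wrapped in Ok(Some(...))>}
    # (names here are illustrative).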
if type.isDictionary():
raise TypeError("Can't handle dictionaries in unions")
if type.isGeckoInterface():
name = type.inner.identifier.name
typeName = descriptorProvider.getDescriptor(name).returnType
elif type.isEnum():
name = type.inner.identifier.name
typeName = name
elif type.isSequence():
name = type.name
inner = getUnionTypeTemplateVars(innerSequenceType(type), descriptorProvider)
typeName = "Vec<" + inner["typeName"] + ">"
elif type.isArray():
name = str(type)
# XXXjdm dunno about typeName here
typeName = "/*" + type.name + "*/"
elif type.isByteString():
name = type.name
typeName = "ByteString"
elif type.isDOMString():
name = type.name
typeName = "DOMString"
elif type.isUSVString():
name = type.name
typeName = "USVString"
elif type.isPrimitive():
name = type.name
typeName = builtinNames[type.tag()]
else:
name = type.name
typeName = "/*" + type.name + "*/"
info = getJSToNativeConversionInfo(
type, descriptorProvider, failureCode="return Ok(None);",
exceptionCode='return Err(());',
isDefinitelyObject=True)
template = info.template
assert not type.isObject()
jsConversion = string.Template(template).substitute({
"val": "value",
})
jsConversion = CGWrapper(CGGeneric(jsConversion), pre="Ok(Some(", post="))")
return {
"name": name,
"typeName": typeName,
"jsConversion": jsConversion,
}
class CGUnionStruct(CGThing):
def __init__(self, type, descriptorProvider):
assert not type.nullable()
assert not type.hasNullableType
CGThing.__init__(self)
self.type = type
self.descriptorProvider = descriptorProvider
def define(self):
templateVars = map(lambda t: getUnionTypeTemplateVars(t, self.descriptorProvider),
self.type.flatMemberTypes)
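        # Example of the emitted Rust (illustrative), for `(HTMLElement or DOMString)`:
        #     pub enum HTMLElementOrString { HTMLElement(Root<HTMLElement>), String(DOMString) }
        # together with a ToJSValConvertible impl that dispatches on the variant.
        # (The inner types are whatever each member's template vars report.)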
enumValues = [
" %s(%s)," % (v["name"], v["typeName"]) for v in templateVars
]
enumConversions = [
" %s::%s(ref inner) => inner.to_jsval(cx, rval),"
% (self.type, v["name"]) for v in templateVars
]
return ("""\
pub enum %s {
%s
}
impl ToJSValConvertible for %s {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
match *self {
%s
}
}
}
""") % (self.type, "\n".join(enumValues), self.type, "\n".join(enumConversions))
class CGUnionConversionStruct(CGThing):
def __init__(self, type, descriptorProvider):
assert not type.nullable()
assert not type.hasNullableType
CGThing.__init__(self)
self.type = type
self.descriptorProvider = descriptorProvider
def from_jsval(self):
memberTypes = self.type.flatMemberTypes
names = []
conversions = []
def get_name(memberType):
if self.type.isGeckoInterface():
return memberType.inner.identifier.name
return memberType.name
def get_match(name):
return (
"match %s::TryConvertTo%s(cx, value) {\n"
" Err(_) => return Err(()),\n"
" Ok(Some(value)) => return Ok(ConversionResult::Success(%s::%s(value))),\n"
" Ok(None) => (),\n"
"}\n") % (self.type, name, self.type, name)
interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes)
if len(interfaceMemberTypes) > 0:
typeNames = [get_name(memberType) for memberType in interfaceMemberTypes]
interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames)
names.extend(typeNames)
else:
interfaceObject = None
arrayObjectMemberTypes = filter(lambda t: t.isArray() or t.isSequence(), memberTypes)
if len(arrayObjectMemberTypes) > 0:
assert len(arrayObjectMemberTypes) == 1
typeName = arrayObjectMemberTypes[0].name
arrayObject = CGGeneric(get_match(typeName))
names.append(typeName)
else:
arrayObject = None
dateObjectMemberTypes = filter(lambda t: t.isDate(), memberTypes)
if len(dateObjectMemberTypes) > 0:
assert len(dateObjectMemberTypes) == 1
raise TypeError("Can't handle dates in unions.")
else:
dateObject = None
callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes)
if len(callbackMemberTypes) > 0:
assert len(callbackMemberTypes) == 1
raise TypeError("Can't handle callbacks in unions.")
else:
callbackObject = None
dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes)
if len(dictionaryMemberTypes) > 0:
raise TypeError("No support for unwrapping dictionaries as member "
"of a union")
else:
dictionaryObject = None
if callbackObject or dictionaryObject:
assert False, "Not currently supported"
else:
nonPlatformObject = None
objectMemberTypes = filter(lambda t: t.isObject(), memberTypes)
if len(objectMemberTypes) > 0:
raise TypeError("Can't handle objects in unions.")
else:
object = None
hasObjectTypes = interfaceObject or arrayObject or dateObject or nonPlatformObject or object
if hasObjectTypes:
assert interfaceObject or arrayObject
templateBody = CGList([], "\n")
if interfaceObject:
templateBody.append(interfaceObject)
if arrayObject:
templateBody.append(arrayObject)
conversions.append(CGIfWrapper("value.get().is_object()", templateBody))
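        # Non-object values: try at most one boolean, one numeric and one
        # string-like member, checking the JS value's actual type first so that
        # e.g. a boolean is not silently stringified when the union also
        # contains a string type.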
stringTypes = [t for t in memberTypes if t.isString() or t.isEnum()]
numericTypes = [t for t in memberTypes if t.isNumeric()]
booleanTypes = [t for t in memberTypes if t.isBoolean()]
if stringTypes or numericTypes or booleanTypes:
assert len(stringTypes) <= 1
assert len(numericTypes) <= 1
assert len(booleanTypes) <= 1
def getStringOrPrimitiveConversion(memberType):
typename = get_name(memberType)
return CGGeneric(get_match(typename))
other = []
stringConversion = map(getStringOrPrimitiveConversion, stringTypes)
numericConversion = map(getStringOrPrimitiveConversion, numericTypes)
booleanConversion = map(getStringOrPrimitiveConversion, booleanTypes)
if stringConversion:
if booleanConversion:
other.append(CGIfWrapper("value.get().is_boolean()", booleanConversion[0]))
if numericConversion:
other.append(CGIfWrapper("value.get().is_number()", numericConversion[0]))
other.append(stringConversion[0])
elif numericConversion:
if booleanConversion:
other.append(CGIfWrapper("value.get().is_boolean()", booleanConversion[0]))
other.append(numericConversion[0])
else:
assert booleanConversion
other.append(booleanConversion[0])
conversions.append(CGList(other, "\n\n"))
conversions.append(CGGeneric(
"throw_not_in_union(cx, \"%s\");\n"
"Err(())" % ", ".join(names)))
method = CGWrapper(
CGIndenter(CGList(conversions, "\n\n")),
pre="unsafe fn from_jsval(cx: *mut JSContext,\n"
" value: HandleValue,\n"
" _option: ())\n"
" -> Result<ConversionResult<%s>, ()> {\n" % self.type,
post="\n}")
return CGWrapper(
CGIndenter(CGList([
CGGeneric("type Config = ();"),
method,
], "\n")),
pre="impl FromJSValConvertible for %s {\n" % self.type,
post="\n}")
def try_method(self, t):
templateVars = getUnionTypeTemplateVars(t, self.descriptorProvider)
returnType = "Result<Option<%s>, ()>" % templateVars["typeName"]
jsConversion = templateVars["jsConversion"]
return CGWrapper(
CGIndenter(jsConversion, 4),
pre="unsafe fn TryConvertTo%s(cx: *mut JSContext, value: HandleValue) -> %s {\n" % (t.name, returnType),
post="\n}")
def define(self):
from_jsval = self.from_jsval()
methods = CGIndenter(CGList([
self.try_method(t) for t in self.type.flatMemberTypes
], "\n\n"))
return """
%s
impl %s {
%s
}
""" % (from_jsval.define(), self.type, methods.define())
class ClassItem:
""" Use with CGClass """
def __init__(self, name, visibility):
self.name = name
self.visibility = visibility
def declare(self, cgClass):
assert False
def define(self, cgClass):
assert False
class ClassBase(ClassItem):
def __init__(self, name, visibility='pub'):
ClassItem.__init__(self, name, visibility)
def declare(self, cgClass):
return '%s %s' % (self.visibility, self.name)
def define(self, cgClass):
# Only in the header
return ''
class ClassMethod(ClassItem):
def __init__(self, name, returnType, args, inline=False, static=False,
virtual=False, const=False, bodyInHeader=False,
templateArgs=None, visibility='public', body=None,
breakAfterReturnDecl="\n",
breakAfterSelf="\n", override=False):
"""
override indicates whether to flag the method as MOZ_OVERRIDE
"""
assert not override or virtual
assert not (override and static)
self.returnType = returnType
self.args = args
self.inline = False
self.static = static
self.virtual = virtual
self.const = const
self.bodyInHeader = True
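        # Note: the `inline` and `bodyInHeader` parameters are overridden above;
        # the generated Rust has no declaration/definition split, so method
        # bodies are always emitted inline.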
self.templateArgs = templateArgs
self.body = body
self.breakAfterReturnDecl = breakAfterReturnDecl
self.breakAfterSelf = breakAfterSelf
self.override = override
ClassItem.__init__(self, name, visibility)
def getDecorators(self, declaring):
decorators = []
if self.inline:
decorators.append('inline')
if declaring:
if self.static:
decorators.append('static')
if self.virtual:
decorators.append('virtual')
if decorators:
return ' '.join(decorators) + ' '
return ''
def getBody(self):
# Override me or pass a string to constructor
assert self.body is not None
return self.body
def declare(self, cgClass):
templateClause = '<%s>' % ', '.join(self.templateArgs) \
if self.bodyInHeader and self.templateArgs else ''
args = ', '.join([a.declare() for a in self.args])
if self.bodyInHeader:
body = CGIndenter(CGGeneric(self.getBody())).define()
body = ' {\n' + body + '\n}'
else:
body = ';'
return string.Template(
"${decorators}%s"
"${visibility}fn ${name}${templateClause}(${args})${returnType}${const}${override}${body}%s" %
(self.breakAfterReturnDecl, self.breakAfterSelf)
).substitute({
'templateClause': templateClause,
'decorators': self.getDecorators(True),
'returnType': (" -> %s" % self.returnType) if self.returnType else "",
'name': self.name,
'const': ' const' if self.const else '',
'override': ' MOZ_OVERRIDE' if self.override else '',
'args': args,
'body': body,
'visibility': self.visibility + ' ' if self.visibility != 'priv' else ''
})
def define(self, cgClass):
pass
class ClassConstructor(ClassItem):
"""
Used for adding a constructor to a CGClass.
args is a list of Argument objects that are the arguments taken by the
constructor.
inline should be True if the constructor should be marked inline.
bodyInHeader should be True if the body should be placed in the class
declaration in the header.
    visibility determines the visibility of the constructor ('pub' or
    'priv'), defaults to 'priv'.
explicit should be True if the constructor should be marked explicit.
baseConstructors is a list of strings containing calls to base constructors,
defaults to None.
body contains a string with the code for the constructor, defaults to empty.
"""
def __init__(self, args, inline=False, bodyInHeader=False,
visibility="priv", explicit=False, baseConstructors=None,
body=""):
self.args = args
self.inline = False
self.bodyInHeader = bodyInHeader
self.explicit = explicit
self.baseConstructors = baseConstructors or []
self.body = body
ClassItem.__init__(self, None, visibility)
def getDecorators(self, declaring):
decorators = []
if self.explicit:
decorators.append('explicit')
if self.inline and declaring:
decorators.append('inline')
if decorators:
return ' '.join(decorators) + ' '
return ''
def getInitializationList(self, cgClass):
items = [str(c) for c in self.baseConstructors]
for m in cgClass.members:
if not m.static:
initialize = m.body
if initialize:
items.append(m.name + "(" + initialize + ")")
if len(items) > 0:
return '\n : ' + ',\n '.join(items)
return ''
def getBody(self, cgClass):
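        # Builds the construction body used by generated callback wrappers:
        # allocate the struct inside an Rc, then call `init` on the parent
        # callback object while the Rc is still uniquely owned (hence the
        # Rc::get_mut dance below).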
initializers = [" parent: %s" % str(self.baseConstructors[0])]
return (self.body + (
"let mut ret = Rc::new(%s {\n"
"%s\n"
"});\n"
"// Note: callback cannot be moved after calling init.\n"
"match Rc::get_mut(&mut ret) {\n"
" Some(ref mut callback) => callback.parent.init(%s),\n"
" None => unreachable!(),\n"
"};\n"
"ret") % (cgClass.name, '\n'.join(initializers), self.args[0].name))
def declare(self, cgClass):
args = ', '.join([a.declare() for a in self.args])
body = ' ' + self.getBody(cgClass)
body = stripTrailingWhitespace(body.replace('\n', '\n '))
if len(body) > 0:
body += '\n'
body = ' {\n' + body + '}'
return string.Template("""\
pub fn ${decorators}new(${args}) -> Rc<${className}>${body}
""").substitute({'decorators': self.getDecorators(True),
'className': cgClass.getNameString(),
'args': args,
'body': body})
def define(self, cgClass):
if self.bodyInHeader:
return ''
args = ', '.join([a.define() for a in self.args])
body = ' ' + self.getBody()
body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n '))
if len(body) > 0:
body += '\n'
return string.Template("""\
${decorators}
${className}::${className}(${args})${initializationList}
{${body}}
""").substitute({'decorators': self.getDecorators(False),
'className': cgClass.getNameString(),
'args': args,
'initializationList': self.getInitializationList(cgClass),
'body': body})
class ClassMember(ClassItem):
def __init__(self, name, type, visibility="priv", static=False,
body=None):
self.type = type
self.static = static
self.body = body
ClassItem.__init__(self, name, visibility)
def declare(self, cgClass):
return '%s %s: %s,\n' % (self.visibility, self.name, self.type)
def define(self, cgClass):
if not self.static:
return ''
if self.body:
body = " = " + self.body
else:
body = ""
return '%s %s::%s%s;\n' % (self.type, cgClass.getNameString(),
self.name, body)
class CGClass(CGThing):
def __init__(self, name, bases=[], members=[], constructors=[],
destructor=None, methods=[],
typedefs=[], enums=[], unions=[], templateArgs=[],
templateSpecialization=[],
disallowCopyConstruction=False, indent='',
decorators='',
extradeclarations=''):
CGThing.__init__(self)
self.name = name
self.bases = bases
self.members = members
self.constructors = constructors
# We store our single destructor in a list, since all of our
# code wants lists of members.
self.destructors = [destructor] if destructor else []
self.methods = methods
self.typedefs = typedefs
self.enums = enums
self.unions = unions
self.templateArgs = templateArgs
self.templateSpecialization = templateSpecialization
self.disallowCopyConstruction = disallowCopyConstruction
self.indent = indent
self.decorators = decorators
self.extradeclarations = extradeclarations
def getNameString(self):
className = self.name
if self.templateSpecialization:
className = className + \
'<%s>' % ', '.join([str(a) for a
in self.templateSpecialization])
return className
def define(self):
result = ''
if self.templateArgs:
templateArgs = [a.declare() for a in self.templateArgs]
templateArgs = templateArgs[len(self.templateSpecialization):]
result = result + self.indent + 'template <%s>\n' % ','.join([str(a) for a in templateArgs])
if self.templateSpecialization:
specialization = \
'<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
else:
specialization = ''
myself = ''
if self.decorators != '':
myself += self.decorators + '\n'
myself += '%spub struct %s%s' % (self.indent, self.name, specialization)
result += myself
        assert len(self.bases) == 1  # XXXjdm Can we support multiple inheritance?
result += ' {\n'
if self.bases:
self.members = [ClassMember("parent", self.bases[0].name, "pub")] + self.members
result += CGIndenter(CGGeneric(self.extradeclarations),
len(self.indent)).define()
def declareMembers(cgClass, memberList):
result = ''
for member in memberList:
declaration = member.declare(cgClass)
declaration = CGIndenter(CGGeneric(declaration)).define()
result = result + declaration
return result
if self.disallowCopyConstruction:
class DisallowedCopyConstructor(object):
def __init__(self):
self.visibility = "private"
def declare(self, cgClass):
name = cgClass.getNameString()
return ("%s(const %s&) MOZ_DELETE;\n"
"void operator=(const %s) MOZ_DELETE;\n" % (name, name, name))
disallowedCopyConstructors = [DisallowedCopyConstructor()]
else:
disallowedCopyConstructors = []
order = [(self.enums, ''), (self.unions, ''),
(self.typedefs, ''), (self.members, '')]
for (memberList, separator) in order:
memberString = declareMembers(self, memberList)
if self.indent:
memberString = CGIndenter(CGGeneric(memberString),
len(self.indent)).define()
result = result + memberString
result += self.indent + '}\n\n'
result += 'impl %s {\n' % self.name
order = [(self.constructors + disallowedCopyConstructors, '\n'),
                 (self.destructors, '\n'), (self.methods, '\n')]
for (memberList, separator) in order:
memberString = declareMembers(self, memberList)
if self.indent:
memberString = CGIndenter(CGGeneric(memberString),
len(self.indent)).define()
result = result + memberString
result += "}"
return result
class CGProxySpecialOperation(CGPerSignatureCall):
"""
Base class for classes for calling an indexed or named special operation
(don't use this directly, use the derived classes below).
"""
def __init__(self, descriptor, operation):
nativeName = MakeNativeName(descriptor.binaryNameFor(operation))
operation = descriptor.operations[operation]
assert len(operation.signatures()) == 1
signature = operation.signatures()[0]
(returnType, arguments) = signature
if operation.isGetter() and not returnType.nullable():
returnType = IDLNullableType(returnType.location, returnType)
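            # Getters are given a nullable return type so that the generated
            # code can signal "no such indexed/named property" by returning None.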
# We pass len(arguments) as the final argument so that the
# CGPerSignatureCall won't do any argument conversion of its own.
CGPerSignatureCall.__init__(self, returnType, "", arguments, nativeName,
False, descriptor, operation,
len(arguments))
if operation.isSetter() or operation.isCreator():
# arguments[0] is the index or name of the item that we're setting.
argument = arguments[1]
info = getJSToNativeConversionInfo(
argument.type, descriptor, treatNullAs=argument.treatNullAs,
exceptionCode="return false;")
template = info.template
declType = info.declType
templateValues = {
"val": "value.handle()",
}
self.cgRoot.prepend(instantiateJSToNativeConversionTemplate(
template, templateValues, declType, argument.identifier.name))
self.cgRoot.prepend(CGGeneric("rooted!(in(cx) let value = desc.value);"))
def getArguments(self):
def process(arg):
argVal = arg.identifier.name
if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
argVal += ".r()"
return argVal
args = [(a, process(a)) for a in self.arguments]
return args
def wrap_return_value(self):
if not self.idlNode.isGetter() or self.templateValues is None:
return ""
wrap = CGGeneric(wrapForType(**self.templateValues))
wrap = CGIfWrapper("let Some(result) = result", wrap)
return "\n" + wrap.define()
class CGProxyIndexedGetter(CGProxySpecialOperation):
"""
Class to generate a call to an indexed getter. If templateValues is not None
the returned value will be wrapped with wrapForType using templateValues.
"""
def __init__(self, descriptor, templateValues=None):
self.templateValues = templateValues
CGProxySpecialOperation.__init__(self, descriptor, 'IndexedGetter')
class CGProxyIndexedSetter(CGProxySpecialOperation):
"""
Class to generate a call to an indexed setter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'IndexedSetter')
class CGProxyNamedOperation(CGProxySpecialOperation):
"""
Class to generate a call to a named operation.
"""
def __init__(self, descriptor, name):
CGProxySpecialOperation.__init__(self, descriptor, name)
def define(self):
        # Our first argument is the name of the property we're operating on.
argName = self.arguments[0].identifier.name
return ("let %s = jsid_to_str(cx, id);\n"
"let this = UnwrapProxy(proxy);\n"
"let this = &*this;\n" % argName +
CGProxySpecialOperation.define(self))
class CGProxyNamedGetter(CGProxyNamedOperation):
"""
    Class to generate a call to a named getter. If templateValues is not None
the returned value will be wrapped with wrapForType using templateValues.
"""
def __init__(self, descriptor, templateValues=None):
self.templateValues = templateValues
CGProxySpecialOperation.__init__(self, descriptor, 'NamedGetter')
class CGProxyNamedPresenceChecker(CGProxyNamedGetter):
"""
Class to generate a call that checks whether a named property exists.
For now, we just delegate to CGProxyNamedGetter
"""
def __init__(self, descriptor):
CGProxyNamedGetter.__init__(self, descriptor)
class CGProxyNamedSetter(CGProxyNamedOperation):
"""
Class to generate a call to a named setter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'NamedSetter')
class CGProxyNamedDeleter(CGProxyNamedOperation):
"""
Class to generate a call to a named deleter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'NamedDeleter')
class CGProxyUnwrap(CGAbstractMethod):
def __init__(self, descriptor):
args = [Argument('HandleObject', 'obj')]
CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy",
'*const ' + descriptor.concreteType, args,
alwaysInline=True, unsafe=True)
def definition_body(self):
return CGGeneric("""\
/*if (xpc::WrapperFactory::IsXrayWrapper(obj)) {
obj = js::UnwrapObject(obj);
}*/
//MOZ_ASSERT(IsProxy(obj));
let box_ = GetProxyPrivate(obj.get()).to_private() as *const %s;
return box_;""" % self.descriptor.concreteType)
class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('MutableHandle<PropertyDescriptor>', 'desc', mutable=True)]
CGAbstractExternMethod.__init__(self, descriptor, "getOwnPropertyDescriptor",
"bool", args)
self.descriptor = descriptor
def getBody(self):
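        # Property lookup order in the generated getOwnPropertyDescriptor:
        # indexed getter (if any), then the expando object, then the named
        # getter (only when the property is not shadowed by the prototype),
        # and finally "not found" (desc.obj set to null).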
indexedGetter = self.descriptor.operations['IndexedGetter']
indexedSetter = self.descriptor.operations['IndexedSetter']
get = ""
if indexedGetter or indexedSetter:
get = "let index = get_array_index_from_id(cx, id);\n"
if indexedGetter:
attrs = "JSPROP_ENUMERATE"
if self.descriptor.operations['IndexedSetter'] is None:
attrs += " | JSPROP_READONLY"
# FIXME(#11868) Should assign to desc.value, desc.get() is a copy.
fillDescriptor = ("desc.get().value = result_root.get();\n"
"fill_property_descriptor(&mut desc, proxy.get(), %s);\n"
"return true;" % attrs)
templateValues = {
'jsvalRef': 'result_root.handle_mut()',
'successCode': fillDescriptor,
'pre': 'rooted!(in(cx) let mut result_root = UndefinedValue());'
}
get += ("if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define() + "\n" +
"}\n")
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
attrs = []
if not self.descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
attrs.append("JSPROP_ENUMERATE")
if self.descriptor.operations['NamedSetter'] is None:
attrs.append("JSPROP_READONLY")
if attrs:
attrs = " | ".join(attrs)
else:
attrs = "0"
# FIXME(#11868) Should assign to desc.value, desc.get() is a copy.
fillDescriptor = ("desc.get().value = result_root.get();\n"
"fill_property_descriptor(&mut desc, proxy.get(), %s);\n"
"return true;" % attrs)
templateValues = {
'jsvalRef': 'result_root.handle_mut()',
'successCode': fillDescriptor,
'pre': 'rooted!(in(cx) let mut result_root = UndefinedValue());'
}
# Once we start supporting OverrideBuiltins we need to make
# ResolveOwnProperty or EnumerateOwnProperties filter out named
# properties that shadow prototype properties.
namedGet = """
if RUST_JSID_IS_STRING(id) {
let mut has_on_proto = false;
if !has_property_on_prototype(cx, proxy, id, &mut has_on_proto) {
return false;
}
if !has_on_proto {
%s
}
}
""" % CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues), 8).define()
else:
namedGet = ""
# FIXME(#11868) Should assign to desc.obj, desc.get() is a copy.
return get + """\
rooted!(in(cx) let expando = get_expando_object(proxy));
//if (!xpc::WrapperFactory::IsXrayWrapper(proxy) && (expando = GetExpandoObject(proxy))) {
if !expando.is_null() {
if !JS_GetPropertyDescriptorById(cx, expando.handle(), id, desc) {
return false;
}
if !desc.obj.is_null() {
// Pretend the property lives on the wrapper.
desc.get().obj = proxy.get();
return true;
}
}
""" + namedGet + """\
desc.get().obj = ptr::null_mut();
return true;"""
def definition_body(self):
return CGGeneric(self.getBody())
# TODO(Issue 5876)
class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('Handle<PropertyDescriptor>', 'desc'),
Argument('*mut ObjectOpResult', 'opresult')]
CGAbstractExternMethod.__init__(self, descriptor, "defineProperty", "bool", args)
self.descriptor = descriptor
def getBody(self):
set = ""
indexedSetter = self.descriptor.operations['IndexedSetter']
if indexedSetter:
set += ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedSetter(self.descriptor)).define() +
" return true;\n" +
"}\n")
elif self.descriptor.operations['IndexedGetter']:
set += ("if get_array_index_from_id(cx, id).is_some() {\n" +
" return false;\n" +
" //return ThrowErrorMessage(cx, MSG_NO_PROPERTY_SETTER, \"%s\");\n" +
"}\n") % self.descriptor.name
namedSetter = self.descriptor.operations['NamedSetter']
if namedSetter:
if self.descriptor.hasUnforgeableMembers:
raise TypeError("Can't handle a named setter on an interface that has "
"unforgeables. Figure out how that should work!")
set += ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedSetter(self.descriptor)).define() +
" return (*opresult).succeed();\n" +
"} else {\n" +
" return false;\n" +
"}\n")
else:
set += ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor)).define() +
" if (found) {\n"
# TODO(Issue 5876)
" //return js::IsInNonStrictPropertySet(cx)\n" +
" // ? opresult.succeed()\n" +
" // : ThrowErrorMessage(cx, MSG_NO_NAMED_SETTER, \"${name}\");\n" +
" return (*opresult).succeed();\n" +
" }\n" +
" return (*opresult).succeed();\n"
"}\n") % (self.descriptor.name, self.descriptor.name)
set += "return proxyhandler::define_property(%s);" % ", ".join(a.name for a in self.args)
return set
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_delete(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('*mut ObjectOpResult', 'res')]
CGAbstractExternMethod.__init__(self, descriptor, "delete", "bool", args)
self.descriptor = descriptor
def getBody(self):
set = ""
if self.descriptor.operations['NamedDeleter']:
if self.descriptor.hasUnforgeableMembers:
raise TypeError("Can't handle a deleter on an interface that has "
"unforgeables. Figure out how that should work!")
set += CGProxyNamedDeleter(self.descriptor).define()
set += "return proxyhandler::delete(%s);" % ", ".join(a.name for a in self.args)
return set
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_ownPropertyKeys(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'proxy'),
Argument('*mut AutoIdVector', 'props')]
CGAbstractExternMethod.__init__(self, descriptor, "own_property_keys", "bool", args)
self.descriptor = descriptor
def getBody(self):
body = dedent(
"""
let unwrapped_proxy = UnwrapProxy(proxy);
""")
if self.descriptor.operations['IndexedGetter']:
body += dedent(
"""
for i in 0..(*unwrapped_proxy).Length() {
rooted!(in(cx) let rooted_jsid = int_to_jsid(i as i32));
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
if self.descriptor.operations['NamedGetter']:
body += dedent(
"""
for name in (*unwrapped_proxy).SupportedPropertyNames() {
let cstring = CString::new(name).unwrap();
let jsstring = JS_AtomizeAndPinString(cx, cstring.as_ptr());
rooted!(in(cx) let rooted = jsstring);
let jsid = INTERNED_STRING_TO_JSID(cx, rooted.handle().get());
rooted!(in(cx) let rooted_jsid = jsid);
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
body += dedent(
"""
let expando = get_expando_object(proxy);
if !expando.is_null() {
rooted!(in(cx) let rooted_expando = expando);
GetPropertyKeys(cx, rooted_expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
}
return true;
""")
return body
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_getOwnEnumerablePropertyKeys(CGAbstractExternMethod):
def __init__(self, descriptor):
assert (descriptor.operations["IndexedGetter"] and
descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"))
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'proxy'),
Argument('*mut AutoIdVector', 'props')]
CGAbstractExternMethod.__init__(self, descriptor,
"getOwnEnumerablePropertyKeys", "bool", args)
self.descriptor = descriptor
def getBody(self):
body = dedent(
"""
let unwrapped_proxy = UnwrapProxy(proxy);
""")
if self.descriptor.operations['IndexedGetter']:
body += dedent(
"""
for i in 0..(*unwrapped_proxy).Length() {
rooted!(in(cx) let rooted_jsid = int_to_jsid(i as i32));
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
body += dedent(
"""
let expando = get_expando_object(proxy);
if !expando.is_null() {
rooted!(in(cx) let rooted_expando = expando);
GetPropertyKeys(cx, rooted_expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
}
return true;
""")
return body
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'), Argument('*mut bool', 'bp')]
CGAbstractExternMethod.__init__(self, descriptor, "hasOwn", "bool", args)
self.descriptor = descriptor
def getBody(self):
indexedGetter = self.descriptor.operations['IndexedGetter']
if indexedGetter:
indexed = ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor)).define() + "\n" +
" *bp = result.is_some();\n" +
" return true;\n" +
"}\n\n")
else:
indexed = ""
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
named = """\
if RUST_JSID_IS_STRING(id) {
let mut has_on_proto = false;
if !has_property_on_prototype(cx, proxy, id, &mut has_on_proto) {
return false;
}
if !has_on_proto {
%s
*bp = result.is_some();
return true;
}
}
""" % CGIndenter(CGProxyNamedGetter(self.descriptor), 8).define()
else:
named = ""
return indexed + """\
rooted!(in(cx) let expando = get_expando_object(proxy));
if !expando.is_null() {
let ok = JS_HasPropertyById(cx, expando.handle(), id, bp);
if !ok || *bp {
return ok;
}
}
""" + named + """\
*bp = false;
return true;"""
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_get(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleValue', 'receiver'), Argument('HandleId', 'id'),
Argument('MutableHandleValue', 'vp')]
CGAbstractExternMethod.__init__(self, descriptor, "get", "bool", args)
self.descriptor = descriptor
def getBody(self):
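        # Lookup order in the generated get(): indexed getter or expando
        # object, then the prototype chain, then the named getter, and
        # finally undefined if nothing matched.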
getFromExpando = """\
rooted!(in(cx) let expando = get_expando_object(proxy));
if !expando.is_null() {
let mut hasProp = false;
if !JS_HasPropertyById(cx, expando.handle(), id, &mut hasProp) {
return false;
}
if hasProp {
return JS_ForwardGetPropertyTo(cx, expando.handle(), id, receiver, vp);
}
}"""
templateValues = {
'jsvalRef': 'vp',
'successCode': 'return true;',
}
indexedGetter = self.descriptor.operations['IndexedGetter']
if indexedGetter:
getIndexedOrExpando = ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define())
getIndexedOrExpando += """\
// Even if we don't have this index, we don't forward the
// get on to our expando object.
} else {
%s
}
""" % (stripTrailingWhitespace(getFromExpando.replace('\n', '\n ')))
else:
getIndexedOrExpando = getFromExpando + "\n"
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
getNamed = ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() +
"}\n")
else:
getNamed = ""
return """\
//MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),
//"Should not have a XrayWrapper here");
%s
let mut found = false;
if !get_property_on_prototype(cx, proxy, receiver, id, &mut found, vp) {
return false;
}
if found {
return true;
}
%s
vp.set(UndefinedValue());
return true;""" % (getIndexedOrExpando, getNamed)
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_className(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_proxy')]
CGAbstractExternMethod.__init__(self, descriptor, "className", "*const i8", args, doesNotPanic=True)
self.descriptor = descriptor
def getBody(self):
return '%s as *const u8 as *const i8' % str_to_const_array(self.descriptor.name)
def definition_body(self):
return CGGeneric(self.getBody())
class CGAbstractClassHook(CGAbstractExternMethod):
"""
Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw
'this' unwrapping as it assumes that the unwrapped type is always known.
"""
def __init__(self, descriptor, name, returnType, args, doesNotPanic=False):
CGAbstractExternMethod.__init__(self, descriptor, name, returnType,
args)
def definition_body_prologue(self):
return CGGeneric("""
let this = native_from_object::<%s>(obj).unwrap();
""" % self.descriptor.concreteType)
def definition_body(self):
return CGList([
self.definition_body_prologue(),
self.generate_code(),
])
def generate_code(self):
raise NotImplementedError # Override me!
def finalizeHook(descriptor, hookName, context):
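    # Emits the body of the finalize hook: globals get finalize_global,
    # weak-referenceable objects clear their WeakBox first, and every
    # concrete object finally has its Box reclaimed via Box::from_raw.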
release = ""
if descriptor.isGlobal():
release += """\
finalize_global(obj);
"""
elif descriptor.weakReferenceable:
release += """\
let weak_box_ptr = JS_GetReservedSlot(obj, DOM_WEAK_SLOT).to_private() as *mut WeakBox<%s>;
if !weak_box_ptr.is_null() {
let count = {
let weak_box = &*weak_box_ptr;
assert!(weak_box.value.get().is_some());
assert!(weak_box.count.get() > 0);
weak_box.value.set(None);
let count = weak_box.count.get() - 1;
weak_box.count.set(count);
count
};
if count == 0 {
mem::drop(Box::from_raw(weak_box_ptr));
}
}
""" % descriptor.concreteType
release += """\
if !this.is_null() {
// The pointer can be null if the object is the unforgeable holder of that interface.
let _ = Box::from_raw(this as *mut %s);
}
debug!("%s finalize: {:p}", this);\
""" % (descriptor.concreteType, descriptor.concreteType)
return release
class CGClassTraceHook(CGAbstractClassHook):
"""
A hook to trace through our native object; used for GC and CC
"""
def __init__(self, descriptor):
args = [Argument('*mut JSTracer', 'trc'), Argument('*mut JSObject', 'obj')]
CGAbstractClassHook.__init__(self, descriptor, TRACE_HOOK_NAME, 'void',
args, doesNotPanic=True)
self.traceGlobal = descriptor.isGlobal()
def generate_code(self):
body = [CGGeneric("if this.is_null() { return; } // GC during obj creation\n"
"(*this).trace(%s);" % self.args[0].name)]
if self.traceGlobal:
body += [CGGeneric("trace_global(trc, obj);")]
return CGList(body, "\n")
class CGClassConstructHook(CGAbstractExternMethod):
"""
JS-visible constructor for our objects
"""
def __init__(self, descriptor, constructor=None):
args = [Argument('*mut JSContext', 'cx'), Argument('u32', 'argc'), Argument('*mut JSVal', 'vp')]
name = CONSTRUCT_HOOK_NAME
if constructor:
name += "_" + constructor.identifier.name
else:
constructor = descriptor.interface.ctor()
assert constructor
CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)
self.constructor = constructor
def definition_body(self):
preamble = CGGeneric("""\
let global = global_root_from_object(JS_CALLEE(cx, vp).to_object());
let args = CallArgs::from_vp(vp, argc);
""")
name = self.constructor.identifier.name
nativeName = MakeNativeName(self.descriptor.binaryNameFor(name))
callGenerator = CGMethodCall(["global.r()"], nativeName, True,
self.descriptor, self.constructor)
return CGList([preamble, callGenerator])
class CGClassFinalizeHook(CGAbstractClassHook):
"""
A hook for finalize, used to release our native object.
"""
def __init__(self, descriptor):
args = [Argument('*mut JSFreeOp', '_fop'), Argument('*mut JSObject', 'obj')]
CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME,
'void', args)
def generate_code(self):
return CGGeneric(finalizeHook(self.descriptor, self.name, self.args[0].name))
class CGDOMJSProxyHandlerDOMClass(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
return "static Class: DOMClass = " + DOMClass(self.descriptor) + ";\n"
class CGInterfaceTrait(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
def attribute_arguments(needCx, argument=None):
if needCx:
yield "cx", "*mut JSContext"
if argument:
yield "value", argument_type(descriptor, argument)
def members():
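            # Yields (name, arguments, return type) triples for every IDL
            # member that becomes a method on the generated FooMethods trait:
            # regular methods, attribute getters/setters, and the special
            # proxy operations handled further below.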
for m in descriptor.interface.members:
if (m.isMethod() and not m.isStatic() and
not m.isMaplikeOrSetlikeOrIterableMethod() and
(not m.isIdentifierLess() or m.isStringifier())):
name = CGSpecializedMethod.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m)
for idx, (rettype, arguments) in enumerate(m.signatures()):
arguments = method_arguments(descriptor, rettype, arguments)
rettype = return_type(descriptor, rettype, infallible)
yield name + ('_' * idx), arguments, rettype
elif m.isAttr() and not m.isStatic():
name = CGSpecializedGetter.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m, getter=True)
yield (name,
attribute_arguments(typeNeedsCx(m.type, True)),
return_type(descriptor, m.type, infallible))
if not m.readonly:
name = CGSpecializedSetter.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m, setter=True)
if infallible:
rettype = "()"
else:
rettype = "ErrorResult"
yield name, attribute_arguments(typeNeedsCx(m.type, False), m.type), rettype
if descriptor.proxy:
for name, operation in descriptor.operations.iteritems():
if not operation or operation.isStringifier():
continue
assert len(operation.signatures()) == 1
rettype, arguments = operation.signatures()[0]
infallible = 'infallible' in descriptor.getExtendedAttributes(operation)
if operation.isGetter():
if not rettype.nullable():
rettype = IDLNullableType(rettype.location, rettype)
arguments = method_arguments(descriptor, rettype, arguments)
# If this interface 'supports named properties', then we
# should be able to access 'supported property names'
#
# WebIDL, Second Draft, section 3.2.4.5
# https://heycam.github.io/webidl/#idl-named-properties
if operation.isNamed():
yield "SupportedPropertyNames", [], "Vec<DOMString>"
else:
arguments = method_arguments(descriptor, rettype, arguments)
rettype = return_type(descriptor, rettype, infallible)
yield name, arguments, rettype
def fmt(arguments):
return "".join(", %s: %s" % argument for argument in arguments)
methods = [
CGGeneric("fn %s(&self%s) -> %s;\n" % (name, fmt(arguments), rettype))
for name, arguments, rettype in members()
]
if methods:
self.cgRoot = CGWrapper(CGIndenter(CGList(methods, "")),
pre="pub trait %sMethods {\n" % descriptor.interface.identifier.name,
post="}")
else:
self.cgRoot = CGGeneric("")
self.empty = not methods
def define(self):
return self.cgRoot.define()
class CGWeakReferenceableTrait(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
assert descriptor.weakReferenceable
self.code = "impl WeakReferenceable for %s {}" % descriptor.interface.identifier.name
def define(self):
return self.code
def generate_imports(config, cgthings, descriptors, callbacks=None, dictionaries=None, enums=None):
if not callbacks:
callbacks = []
if not dictionaries:
dictionaries = []
if not enums:
enums = []
return CGImports(cgthings, descriptors, callbacks, dictionaries, enums, [
'core::nonzero::NonZero',
'js',
'js::JSCLASS_GLOBAL_SLOT_COUNT',
'js::JSCLASS_IS_DOMJSCLASS',
'js::JSCLASS_IS_GLOBAL',
'js::JSCLASS_RESERVED_SLOTS_MASK',
'js::JS_CALLEE',
'js::error::throw_type_error',
'js::jsapi::AutoIdVector',
'js::jsapi::Call',
'js::jsapi::CallArgs',
'js::jsapi::FreeOp',
'js::jsapi::GetPropertyKeys',
'js::jsapi::GetWellKnownSymbol',
'js::jsapi::Handle',
'js::jsapi::HandleId',
'js::jsapi::HandleObject',
'js::jsapi::HandleValue',
'js::jsapi::HandleValueArray',
'js::jsapi::INTERNED_STRING_TO_JSID',
'js::jsapi::IsCallable',
'js::jsapi::JSAutoCompartment',
'js::jsapi::JSCLASS_RESERVED_SLOTS_SHIFT',
'js::jsapi::JSClass',
'js::jsapi::JSContext',
'js::jsapi::JSFreeOp',
'js::jsapi::JSFunctionSpec',
'js::jsapi::JSITER_HIDDEN',
'js::jsapi::JSITER_OWNONLY',
'js::jsapi::JSITER_SYMBOLS',
'js::jsapi::JSJitGetterCallArgs',
'js::jsapi::JSJitInfo',
'js::jsapi::JSJitInfo_AliasSet',
'js::jsapi::JSJitInfo_ArgType',
'js::jsapi::JSJitInfo_OpType',
'js::jsapi::JSJitMethodCallArgs',
'js::jsapi::JSJitSetterCallArgs',
'js::jsapi::JSNative',
'js::jsapi::JSNativeWrapper',
'js::jsapi::JSObject',
'js::jsapi::JSPROP_ENUMERATE',
'js::jsapi::JSPROP_PERMANENT',
'js::jsapi::JSPROP_READONLY',
'js::jsapi::JSPROP_SHARED',
'js::jsapi::JSPropertySpec',
'js::jsapi::JSString',
'js::jsapi::JSTracer',
'js::jsapi::JSType',
'js::jsapi::JSTypedMethodJitInfo',
'js::jsapi::JSValueType',
'js::jsapi::JS_AtomizeAndPinString',
'js::jsapi::JS_CallFunctionValue',
'js::jsapi::JS_CopyPropertiesFrom',
'js::jsapi::JS_DefineProperty',
'js::jsapi::JS_DefinePropertyById2',
'js::jsapi::JS_ForwardGetPropertyTo',
'js::jsapi::JS_GetClass',
'js::jsapi::JS_GetErrorPrototype',
'js::jsapi::JS_GetFunctionPrototype',
'js::jsapi::JS_GetGlobalForObject',
'js::jsapi::JS_GetIteratorPrototype',
'js::jsapi::JS_GetObjectPrototype',
'js::jsapi::JS_GetProperty',
'js::jsapi::JS_GetPropertyById',
'js::jsapi::JS_GetPropertyDescriptorById',
'js::jsapi::JS_GetReservedSlot',
'js::jsapi::JS_HasProperty',
'js::jsapi::JS_HasPropertyById',
'js::jsapi::JS_InitializePropertiesFromCompatibleNativeObject',
'js::jsapi::JS_NewObject',
'js::jsapi::JS_NewObjectWithGivenProto',
'js::jsapi::JS_NewObjectWithoutMetadata',
'js::jsapi::JS_SetImmutablePrototype',
'js::jsapi::JS_SetProperty',
'js::jsapi::JS_SetReservedSlot',
'js::jsapi::JS_SplicePrototype',
'js::jsapi::MutableHandle',
'js::jsapi::MutableHandleObject',
'js::jsapi::MutableHandleValue',
'js::jsapi::ObjectOpResult',
'js::jsapi::PropertyDescriptor',
'js::jsapi::RootedObject',
'js::jsapi::SymbolCode',
'js::jsapi::jsid',
'js::jsval::JSVal',
'js::jsval::NullValue',
'js::jsval::ObjectValue',
'js::jsval::ObjectOrNullValue',
'js::jsval::PrivateValue',
'js::jsval::UndefinedValue',
'js::glue::AppendToAutoIdVector',
'js::glue::CallJitGetterOp',
'js::glue::CallJitMethodOp',
'js::glue::CallJitSetterOp',
'js::glue::CreateProxyHandler',
'js::glue::GetProxyPrivate',
'js::glue::NewProxyObject',
'js::glue::ProxyTraps',
'js::glue::RUST_JSID_IS_STRING',
'js::glue::RUST_SYMBOL_TO_JSID',
'js::glue::int_to_jsid',
'js::rust::GCMethods',
'js::rust::define_methods',
'js::rust::define_properties',
'dom',
'dom::bindings',
'dom::bindings::codegen::InterfaceObjectMap',
'dom::bindings::constant::ConstantSpec',
'dom::bindings::constant::ConstantVal',
'dom::bindings::global::GlobalRef',
'dom::bindings::global::global_root_from_object',
'dom::bindings::global::global_root_from_reflector',
'dom::bindings::interface::ConstructorClassHook',
'dom::bindings::interface::InterfaceConstructorBehavior',
'dom::bindings::interface::NonCallbackInterfaceObjectClass',
'dom::bindings::interface::create_callback_interface_object',
'dom::bindings::interface::create_global_object',
'dom::bindings::interface::create_interface_prototype_object',
'dom::bindings::interface::create_named_constructors',
'dom::bindings::interface::create_noncallback_interface_object',
'dom::bindings::interface::define_guarded_constants',
'dom::bindings::interface::define_guarded_methods',
'dom::bindings::interface::define_guarded_properties',
'dom::bindings::interface::is_exposed_in',
'dom::bindings::iterable::Iterable',
'dom::bindings::iterable::IteratorType',
'dom::bindings::js::JS',
'dom::bindings::js::OptionalRootedReference',
'dom::bindings::js::Root',
'dom::bindings::js::RootedReference',
'dom::bindings::namespace::NamespaceObjectClass',
'dom::bindings::namespace::create_namespace_object',
'dom::bindings::reflector::MutReflectable',
'dom::bindings::reflector::Reflectable',
'dom::bindings::utils::DOMClass',
'dom::bindings::utils::DOMJSClass',
'dom::bindings::utils::DOM_PROTO_UNFORGEABLE_HOLDER_SLOT',
'dom::bindings::utils::JSCLASS_DOM_GLOBAL',
'dom::bindings::utils::ProtoOrIfaceArray',
'dom::bindings::utils::enumerate_global',
'dom::bindings::utils::finalize_global',
'dom::bindings::utils::find_enum_string_index',
'dom::bindings::utils::generic_getter',
'dom::bindings::utils::generic_lenient_getter',
'dom::bindings::utils::generic_lenient_setter',
'dom::bindings::utils::generic_method',
'dom::bindings::utils::generic_setter',
'dom::bindings::utils::get_array_index_from_id',
'dom::bindings::utils::get_dictionary_property',
'dom::bindings::utils::get_property_on_prototype',
'dom::bindings::utils::get_proto_or_iface_array',
'dom::bindings::utils::has_property_on_prototype',
'dom::bindings::utils::is_platform_object',
'dom::bindings::utils::resolve_global',
'dom::bindings::utils::set_dictionary_property',
'dom::bindings::utils::trace_global',
'dom::bindings::trace::JSTraceable',
'dom::bindings::trace::RootedTraceable',
'dom::bindings::callback::CallSetup',
'dom::bindings::callback::CallbackContainer',
'dom::bindings::callback::CallbackInterface',
'dom::bindings::callback::CallbackFunction',
'dom::bindings::callback::ExceptionHandling',
'dom::bindings::callback::wrap_call_this_object',
'dom::bindings::conversions::ConversionBehavior',
'dom::bindings::conversions::ConversionResult',
'dom::bindings::conversions::DOM_OBJECT_SLOT',
'dom::bindings::conversions::FromJSValConvertible',
'dom::bindings::conversions::IDLInterface',
'dom::bindings::conversions::StringificationBehavior',
'dom::bindings::conversions::ToJSValConvertible',
'dom::bindings::conversions::is_array_like',
'dom::bindings::conversions::jsid_to_str',
'dom::bindings::conversions::native_from_handlevalue',
'dom::bindings::conversions::native_from_object',
'dom::bindings::conversions::private_from_object',
'dom::bindings::conversions::root_from_handleobject',
'dom::bindings::conversions::root_from_handlevalue',
'dom::bindings::conversions::root_from_object',
'dom::bindings::codegen::PrototypeList',
'dom::bindings::codegen::RegisterBindings',
'dom::bindings::codegen::UnionTypes',
'dom::bindings::error::Error',
'dom::bindings::error::ErrorResult',
'dom::bindings::error::Fallible',
'dom::bindings::error::Error::JSFailed',
'dom::bindings::error::throw_dom_exception',
'dom::bindings::guard::Condition',
'dom::bindings::guard::Guard',
'dom::bindings::proxyhandler',
'dom::bindings::proxyhandler::ensure_expando_object',
'dom::bindings::proxyhandler::fill_property_descriptor',
'dom::bindings::proxyhandler::get_expando_object',
'dom::bindings::proxyhandler::get_property_descriptor',
'dom::bindings::num::Finite',
'dom::bindings::str::ByteString',
'dom::bindings::str::DOMString',
'dom::bindings::str::USVString',
'dom::bindings::weakref::DOM_WEAK_SLOT',
'dom::bindings::weakref::WeakBox',
'dom::bindings::weakref::WeakReferenceable',
'dom::browsingcontext::BrowsingContext',
'mem::heap_size_of_raw_self_and_children',
'libc',
'util::prefs::PREFS',
'script_runtime::maybe_take_panic_result',
'script_runtime::store_panic_result',
'std::borrow::ToOwned',
'std::cmp',
'std::mem',
'std::num',
'std::os',
'std::panic',
'std::panic::AssertUnwindSafe',
'std::ptr',
'std::str',
'std::rc',
'std::rc::Rc',
'std::default::Default',
'std::ffi::CString',
], config)
class CGDescriptor(CGThing):
def __init__(self, descriptor, config, soleDescriptor):
CGThing.__init__(self)
assert not descriptor.concrete or not descriptor.interface.isCallback()
reexports = []
def reexportedName(name):
if name.startswith(descriptor.name):
return name
if not soleDescriptor:
return '%s as %s%s' % (name, descriptor.name, name)
return name
cgThings = []
if not descriptor.interface.isCallback() and not descriptor.interface.isNamespace():
cgThings.append(CGGetProtoObjectMethod(descriptor))
reexports.append('GetProtoObject')
if (descriptor.interface.hasInterfaceObject() and
descriptor.shouldHaveGetConstructorObjectMethod()):
cgThings.append(CGGetConstructorObjectMethod(descriptor))
reexports.append('GetConstructorObject')
unscopableNames = []
for m in descriptor.interface.members:
if (m.isMethod() and
(not m.isIdentifierLess() or m == descriptor.operations["Stringifier"])):
if m.getExtendedAttribute("Unscopable"):
assert not m.isStatic()
unscopableNames.append(m.identifier.name)
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticMethod(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedMethod(descriptor, m))
cgThings.append(CGMemberJITInfo(descriptor, m))
elif m.isAttr():
if m.stringifier:
raise TypeError("Stringifier attributes not supported yet. "
"See https://github.com/servo/servo/issues/7590\n"
"%s" % m.location)
if m.getExtendedAttribute("Unscopable"):
assert not m.isStatic()
unscopableNames.append(m.identifier.name)
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticGetter(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedGetter(descriptor, m))
if not m.readonly:
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticSetter(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedSetter(descriptor, m))
elif m.getExtendedAttribute("PutForwards"):
cgThings.append(CGSpecializedForwardingSetter(descriptor, m))
elif m.getExtendedAttribute("Replaceable"):
cgThings.append(CGSpecializedReplaceableSetter(descriptor, m))
if (not m.isStatic() and not descriptor.interface.isCallback()):
cgThings.append(CGMemberJITInfo(descriptor, m))
if descriptor.concrete:
cgThings.append(CGClassFinalizeHook(descriptor))
cgThings.append(CGClassTraceHook(descriptor))
if descriptor.interface.hasInterfaceObject():
if descriptor.interface.ctor():
cgThings.append(CGClassConstructHook(descriptor))
for ctor in descriptor.interface.namedConstructors:
cgThings.append(CGClassConstructHook(descriptor, ctor))
if not descriptor.interface.isCallback():
cgThings.append(CGInterfaceObjectJSClass(descriptor))
if not descriptor.interface.isCallback() and not descriptor.interface.isNamespace():
cgThings.append(CGPrototypeJSClass(descriptor))
# If there are no constant members, don't make a module for constants
constMembers = [m for m in descriptor.interface.members if m.isConst()]
if constMembers:
cgThings.append(CGNamespace.build([descriptor.name + "Constants"],
CGConstant(constMembers),
public=True))
reexports.append(descriptor.name + 'Constants')
if descriptor.interface.hasInterfaceObject() and descriptor.register:
cgThings.append(CGDefineDOMInterfaceMethod(descriptor))
reexports.append('DefineDOMInterface')
cgThings.append(CGConstructorEnabled(descriptor))
if descriptor.proxy:
cgThings.append(CGDefineProxyHandler(descriptor))
properties = PropertyArrays(descriptor)
if descriptor.concrete:
if descriptor.proxy:
# cgThings.append(CGProxyIsProxy(descriptor))
cgThings.append(CGProxyUnwrap(descriptor))
cgThings.append(CGDOMJSProxyHandlerDOMClass(descriptor))
cgThings.append(CGDOMJSProxyHandler_ownPropertyKeys(descriptor))
if descriptor.interface.getExtendedAttribute("LegacyUnenumerableNamedProperties"):
cgThings.append(CGDOMJSProxyHandler_getOwnEnumerablePropertyKeys(descriptor))
cgThings.append(CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor))
cgThings.append(CGDOMJSProxyHandler_className(descriptor))
cgThings.append(CGDOMJSProxyHandler_get(descriptor))
cgThings.append(CGDOMJSProxyHandler_hasOwn(descriptor))
if descriptor.operations['IndexedSetter'] or descriptor.operations['NamedSetter']:
cgThings.append(CGDOMJSProxyHandler_defineProperty(descriptor))
# We want to prevent indexed deleters from compiling at all.
assert not descriptor.operations['IndexedDeleter']
if descriptor.operations['NamedDeleter']:
cgThings.append(CGDOMJSProxyHandler_delete(descriptor))
# cgThings.append(CGDOMJSProxyHandler(descriptor))
# cgThings.append(CGIsMethod(descriptor))
pass
else:
cgThings.append(CGDOMJSClass(descriptor))
pass
if descriptor.isGlobal():
cgThings.append(CGWrapGlobalMethod(descriptor, properties))
else:
cgThings.append(CGWrapMethod(descriptor))
reexports.append('Wrap')
haveUnscopables = False
if not descriptor.interface.isCallback() and not descriptor.interface.isNamespace():
if unscopableNames:
haveUnscopables = True
cgThings.append(
CGList([CGGeneric("const unscopable_names: &'static [&'static [u8]] = &["),
CGIndenter(CGList([CGGeneric(str_to_const_array(name)) for
name in unscopableNames], ",\n")),
CGGeneric("];\n")], "\n"))
if descriptor.concrete or descriptor.hasDescendants():
cgThings.append(CGIDLInterface(descriptor))
interfaceTrait = CGInterfaceTrait(descriptor)
cgThings.append(interfaceTrait)
if not interfaceTrait.empty:
reexports.append('%sMethods' % descriptor.name)
if descriptor.weakReferenceable:
cgThings.append(CGWeakReferenceableTrait(descriptor))
cgThings.append(CGGeneric(str(properties)))
cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties, haveUnscopables))
cgThings = generate_imports(config, CGList(cgThings, '\n'), [descriptor])
cgThings = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name),
cgThings, public=True),
post='\n')
reexports = ', '.join(map(lambda name: reexportedName(name), reexports))
self.cgRoot = CGList([CGGeneric('pub use self::%s::{%s};' % (toBindingNamespace(descriptor.name), reexports)),
cgThings], '\n')
def define(self):
return self.cgRoot.define()
class CGNonNamespacedEnum(CGThing):
def __init__(self, enumName, names, first, comment="", deriving="", repr=""):
# Account for first value
entries = ["%s = %s" % (names[0], first)] + names[1:]
# Append a Last.
entries.append('Last = ' + str(first + len(entries)))
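        # e.g. names=["A", "B"] with first=1 yields "A = 1", "B", "Last = 3".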
# Indent.
entries = [' ' + e for e in entries]
# Build the enum body.
enumstr = comment + 'pub enum %s {\n%s\n}\n' % (enumName, ',\n'.join(entries))
if repr:
enumstr = ('#[repr(%s)]\n' % repr) + enumstr
if deriving:
enumstr = ('#[derive(%s)]\n' % deriving) + enumstr
curr = CGGeneric(enumstr)
# Add some whitespace padding.
curr = CGWrapper(curr, pre='\n', post='\n')
# Add the typedef
# typedef = '\ntypedef %s::%s %s;\n\n' % (namespace, enumName, enumName)
# curr = CGList([curr, CGGeneric(typedef)])
# Save the result.
self.node = curr
def define(self):
return self.node.define()
class CGDictionary(CGThing):
def __init__(self, dictionary, descriptorProvider):
self.dictionary = dictionary
if all(CGDictionary(d, descriptorProvider).generatable for
d in CGDictionary.getDictionaryDependencies(dictionary)):
self.generatable = True
else:
self.generatable = False
# Nothing else to do here
return
self.memberInfo = [
(member,
getJSToNativeConversionInfo(member.type,
descriptorProvider,
isMember="Dictionary",
defaultValue=member.defaultValue,
exceptionCode="return Err(());"))
for member in dictionary.members]
def define(self):
if not self.generatable:
return ""
return self.struct() + "\n" + self.impl()
def struct(self):
d = self.dictionary
if d.parent:
inheritance = " pub parent: %s::%s,\n" % (self.makeModuleName(d.parent),
self.makeClassName(d.parent))
else:
inheritance = ""
memberDecls = [" pub %s: %s," %
(self.makeMemberName(m[0].identifier.name), self.getMemberType(m))
for m in self.memberInfo]
return (string.Template(
"pub struct ${selfName} {\n" +
"${inheritance}" +
"\n".join(memberDecls) + "\n" +
"}").substitute({"selfName": self.makeClassName(d),
"inheritance": inheritance}))
def impl(self):
d = self.dictionary
if d.parent:
initParent = ("parent: match try!(%s::%s::new(cx, val)) {\n"
" ConversionResult::Success(v) => v,\n"
" ConversionResult::Failure(error) => {\n"
" throw_type_error(cx, &error);\n"
" return Err(());\n"
" }\n"
" },\n" % (self.makeModuleName(d.parent),
self.makeClassName(d.parent)))
else:
initParent = ""
def memberInit(memberInfo):
member, _ = memberInfo
name = self.makeMemberName(member.identifier.name)
conversion = self.getMemberConversion(memberInfo, member.type)
return CGGeneric("%s: %s,\n" % (name, conversion.define()))
def varInsert(varName, dictionaryName):
insertion = ("rooted!(in(cx) let mut %s_js = UndefinedValue());\n"
"%s.to_jsval(cx, %s_js.handle_mut());\n"
"set_dictionary_property(cx, obj.handle(), \"%s\", %s_js.handle()).unwrap();"
% (varName, varName, varName, dictionaryName, varName))
return CGGeneric(insertion)
def memberInsert(memberInfo):
member, _ = memberInfo
name = self.makeMemberName(member.identifier.name)
if member.optional and not member.defaultValue:
insertion = CGIfWrapper("let Some(ref %s) = self.%s" % (name, name),
varInsert(name, member.identifier.name))
else:
insertion = CGGeneric("let %s = &self.%s;\n%s" %
(name, name, varInsert(name, member.identifier.name).define()))
return CGGeneric("%s\n" % insertion.define())
memberInits = CGList([memberInit(m) for m in self.memberInfo])
memberInserts = CGList([memberInsert(m) for m in self.memberInfo])
return string.Template(
"impl ${selfName} {\n"
" pub unsafe fn empty(cx: *mut JSContext) -> ${selfName} {\n"
" match ${selfName}::new(cx, HandleValue::null()) {\n"
" Ok(ConversionResult::Success(v)) => v,\n"
" _ => unreachable!(),\n"
" }\n"
" }\n"
" pub unsafe fn new(cx: *mut JSContext, val: HandleValue) \n"
" -> Result<ConversionResult<${selfName}>, ()> {\n"
" let object = if val.get().is_null_or_undefined() {\n"
" ptr::null_mut()\n"
" } else if val.get().is_object() {\n"
" val.get().to_object()\n"
" } else {\n"
" throw_type_error(cx, \"Value not an object.\");\n"
" return Err(());\n"
" };\n"
" rooted!(in(cx) let object = object);\n"
" Ok(ConversionResult::Success(${selfName} {\n"
"${initParent}"
"${initMembers}"
" }))\n"
" }\n"
"}\n"
"\n"
"impl FromJSValConvertible for ${selfName} {\n"
" type Config = ();\n"
" unsafe fn from_jsval(cx: *mut JSContext, value: HandleValue, _option: ())\n"
" -> Result<ConversionResult<${selfName}>, ()> {\n"
" ${selfName}::new(cx, value)\n"
" }\n"
"}\n"
"\n"
"impl ToJSValConvertible for ${selfName} {\n"
" unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {\n"
" rooted!(in(cx) let obj = JS_NewObject(cx, ptr::null()));\n"
"${insertMembers}"
" rval.set(ObjectOrNullValue(obj.get()))\n"
" }\n"
"}\n").substitute({
"selfName": self.makeClassName(d),
"initParent": CGIndenter(CGGeneric(initParent), indentLevel=12).define(),
"initMembers": CGIndenter(memberInits, indentLevel=12).define(),
"insertMembers": CGIndenter(memberInserts, indentLevel=8).define(),
})
@staticmethod
def makeDictionaryName(dictionary):
return dictionary.identifier.name
def makeClassName(self, dictionary):
return self.makeDictionaryName(dictionary)
@staticmethod
def makeModuleName(dictionary):
return getModuleFromObject(dictionary)
def getMemberType(self, memberInfo):
member, info = memberInfo
declType = info.declType
if member.optional and not member.defaultValue:
declType = CGWrapper(info.declType, pre="Option<", post=">")
return declType.define()
def getMemberConversion(self, memberInfo, memberType):
def indent(s):
return CGIndenter(CGGeneric(s), 8).define()
member, info = memberInfo
templateBody = info.template
default = info.default
replacements = {"val": "rval.handle()"}
conversion = string.Template(templateBody).substitute(replacements)
if memberType.isAny():
conversion = "%s.get()" % conversion
assert (member.defaultValue is None) == (default is None)
if not member.optional:
assert default is None
default = ("throw_type_error(cx, \"Missing required member \\\"%s\\\".\");\n"
"return Err(());") % member.identifier.name
elif not default:
default = "None"
conversion = "Some(%s)" % conversion
conversion = (
"{\n"
"rooted!(in(cx) let mut rval = UndefinedValue());\n"
"match try!(get_dictionary_property(cx, object.handle(), \"%s\", rval.handle_mut())) {\n"
" true => {\n"
"%s\n"
" },\n"
" false => {\n"
"%s\n"
" },\n"
"}\n}") % (member.identifier.name, indent(conversion), indent(default))
return CGGeneric(conversion)
@staticmethod
def makeMemberName(name):
# Can't use Rust keywords as member names.
if name in RUST_KEYWORDS:
return name + "_"
return name
@staticmethod
def getDictionaryDependencies(dictionary):
deps = set()
if dictionary.parent:
deps.add(dictionary.parent)
for member in dictionary.members:
if member.type.isDictionary():
deps.add(member.type.unroll().inner)
return deps
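# Rough sketch of the generated shape (hypothetical WebIDL, types assumed from the
# usual conversion tables): for `dictionary TestDict { required long bar; DOMString name; };`
# struct() would emit something like
#     pub struct TestDict {
#         pub bar: i32,
#         pub name: Option<DOMString>,
#     }
# while impl() adds the `empty`/`new` constructors and the From/ToJSValConvertible
# impls shown in the templates above.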
class CGRegisterProxyHandlersMethod(CGAbstractMethod):
def __init__(self, descriptors):
docs = "Create the global vtables used by the generated DOM bindings to implement JS proxies."
CGAbstractMethod.__init__(self, None, 'RegisterProxyHandlers', 'void', [],
unsafe=True, pub=True, docs=docs)
self.descriptors = descriptors
def definition_body(self):
return CGList([
CGGeneric("proxy_handlers[Proxies::%s as usize] = Bindings::%s::DefineProxyHandler();"
% (desc.name, '::'.join([desc.name + 'Binding'] * 2)))
for desc in self.descriptors
], "\n")
class CGRegisterProxyHandlers(CGThing):
def __init__(self, config):
descriptors = config.getDescriptors(proxy=True)
length = len(descriptors)
self.root = CGList([
CGGeneric("pub static mut proxy_handlers: [*const libc::c_void; %d] = [0 as *const libc::c_void; %d];"
% (length, length)),
CGRegisterProxyHandlersMethod(descriptors),
], "\n")
def define(self):
return self.root.define()
class CGBindingRoot(CGThing):
"""
    Root codegen class for binding generation. Instantiate the class and call
    define() to generate the Rust binding code.
"""
def __init__(self, config, prefix, webIDLFile):
descriptors = config.getDescriptors(webIDLFile=webIDLFile,
hasInterfaceObject=True)
# We also want descriptors that have an interface prototype object
# (isCallback=False), but we don't want to include a second copy
# of descriptors that we also matched in the previous line
# (hence hasInterfaceObject=False).
descriptors.extend(config.getDescriptors(webIDLFile=webIDLFile,
hasInterfaceObject=False,
isCallback=False,
register=True))
dictionaries = config.getDictionaries(webIDLFile=webIDLFile)
mainCallbacks = config.getCallbacks(webIDLFile=webIDLFile)
callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile,
isCallback=True)
enums = config.getEnums(webIDLFile)
typedefs = config.getTypedefs(webIDLFile)
if not (descriptors or dictionaries or mainCallbacks or callbackDescriptors or enums):
self.root = None
return
# Do codegen for all the enums.
cgthings = [CGEnum(e) for e in enums]
        # Do codegen for all the typedefs
for t in typedefs:
if t.innerType.isUnion():
cgthings.extend([CGGeneric("\npub use dom::bindings::codegen::UnionTypes::%s as %s;\n\n" %
(t.innerType, t.identifier.name))])
else:
assert not typeNeedsRooting(t.innerType, config.getDescriptorProvider)
cgthings.extend([CGGeneric("\npub type %s = " % (t.identifier.name)),
getRetvalDeclarationForType(t.innerType, config.getDescriptorProvider()),
CGGeneric(";\n\n")])
# Do codegen for all the dictionaries.
cgthings.extend([CGDictionary(d, config.getDescriptorProvider())
for d in dictionaries])
# Do codegen for all the callbacks.
cgthings.extend(CGList([CGCallbackFunction(c, config.getDescriptorProvider()),
CGCallbackFunctionImpl(c)], "\n")
for c in mainCallbacks)
# Do codegen for all the descriptors
cgthings.extend([CGDescriptor(x, config, len(descriptors) == 1) for x in descriptors])
# Do codegen for all the callback interfaces.
cgthings.extend(CGList([CGCallbackInterface(x),
CGCallbackFunctionImpl(x.interface)], "\n")
for x in callbackDescriptors)
# And make sure we have the right number of newlines at the end
curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n")
# Add imports
curr = generate_imports(config, curr, callbackDescriptors, mainCallbacks,
dictionaries, enums)
# Add the auto-generated comment.
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
# Store the final result.
self.root = curr
def define(self):
if not self.root:
return None
return stripTrailingWhitespace(self.root.define())
def argument_type(descriptorProvider, ty, optional=False, defaultValue=None, variadic=False):
info = getJSToNativeConversionInfo(
ty, descriptorProvider, isArgument=True)
declType = info.declType
if variadic:
if ty.isGeckoInterface():
declType = CGWrapper(declType, pre="&[", post="]")
else:
declType = CGWrapper(declType, pre="Vec<", post=">")
elif optional and not defaultValue:
declType = CGWrapper(declType, pre="Option<", post=">")
if ty.isDictionary():
declType = CGWrapper(declType, pre="&")
return declType.define()
def method_arguments(descriptorProvider, returnType, arguments, passJSBits=True, trailing=None):
if needCx(returnType, arguments, passJSBits):
yield "cx", "*mut JSContext"
for argument in arguments:
ty = argument_type(descriptorProvider, argument.type, argument.optional,
argument.defaultValue, argument.variadic)
yield CGDictionary.makeMemberName(argument.identifier.name), ty
if trailing:
yield trailing
def return_type(descriptorProvider, rettype, infallible):
result = getRetvalDeclarationForType(rettype, descriptorProvider)
if not infallible:
result = CGWrapper(result, pre="Fallible<", post=">")
return result.define()
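# Hedged examples of how these helpers map WebIDL signatures to Rust (types are
# illustrative; the exact declarations come from getJSToNativeConversionInfo):
#     optional long x               -> Option<i32>
#     DOMString... rest (variadic)  -> Vec<DOMString>
#     an infallible `long` return   -> i32, while a fallible one -> Fallible<i32>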
class CGNativeMember(ClassMethod):
def __init__(self, descriptorProvider, member, name, signature, extendedAttrs,
breakAfter=True, passJSBitsAsNeeded=True, visibility="public"):
"""
If passJSBitsAsNeeded is false, we don't automatically pass in a
JSContext* or a JSObject* based on the return and argument types.
"""
self.descriptorProvider = descriptorProvider
self.member = member
self.extendedAttrs = extendedAttrs
self.passJSBitsAsNeeded = passJSBitsAsNeeded
breakAfterSelf = "\n" if breakAfter else ""
ClassMethod.__init__(self, name,
self.getReturnType(signature[0]),
self.getArgs(signature[0], signature[1]),
static=member.isStatic(),
# Mark our getters, which are attrs that
# have a non-void return type, as const.
const=(not member.isStatic() and member.isAttr() and
not signature[0].isVoid()),
breakAfterSelf=breakAfterSelf,
visibility=visibility)
def getReturnType(self, type):
infallible = 'infallible' in self.extendedAttrs
typeDecl = return_type(self.descriptorProvider, type, infallible)
return typeDecl
def getArgs(self, returnType, argList):
return [Argument(arg[1], arg[0]) for arg in method_arguments(self.descriptorProvider,
returnType,
argList,
self.passJSBitsAsNeeded)]
class CGCallback(CGClass):
def __init__(self, idlObject, descriptorProvider, baseName, methods,
getters=[], setters=[]):
self.baseName = baseName
self._deps = idlObject.getDeps()
name = idlObject.identifier.name
# For our public methods that needThisHandling we want most of the
# same args and the same return type as what CallbackMember
# generates. So we want to take advantage of all its
# CGNativeMember infrastructure, but that infrastructure can't deal
# with templates and most especially template arguments. So just
# cheat and have CallbackMember compute all those things for us.
realMethods = []
for method in methods:
if not method.needThisHandling:
realMethods.append(method)
else:
realMethods.extend(self.getMethodImpls(method))
CGClass.__init__(self, name,
bases=[ClassBase(baseName)],
constructors=self.getConstructors(),
methods=realMethods + getters + setters,
decorators="#[derive(JSTraceable, PartialEq)]")
def getConstructors(self):
return [ClassConstructor(
[Argument("*mut JSObject", "aCallback")],
bodyInHeader=True,
visibility="pub",
explicit=False,
baseConstructors=[
"%s::new()" % self.baseName
])]
def getMethodImpls(self, method):
assert method.needThisHandling
args = list(method.args)
# Strip out the JSContext*/JSObject* args
# that got added.
assert args[0].name == "cx" and args[0].argType == "*mut JSContext"
assert args[1].name == "aThisObj" and args[1].argType == "HandleObject"
args = args[2:]
# Record the names of all the arguments, so we can use them when we call
# the private method.
argnames = [arg.name for arg in args]
argnamesWithThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
argnamesWithoutThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
# Now that we've recorded the argnames for our call to our private
# method, insert our optional argument for deciding whether the
# CallSetup should re-throw exceptions on aRv.
args.append(Argument("ExceptionHandling", "aExceptionHandling",
"ReportExceptions"))
# And now insert our template argument.
argsWithoutThis = list(args)
args.insert(0, Argument("&T", "thisObj"))
# And the self argument
method.args.insert(0, Argument(None, "&self"))
args.insert(0, Argument(None, "&self"))
argsWithoutThis.insert(0, Argument(None, "&self"))
setupCall = ("let mut s_ec = RootedObject::new_unrooted(ptr::null_mut());\n"
"let s = CallSetup::new(&mut s_ec, self, aExceptionHandling);\n"
"if s.get_context().is_null() {\n"
" return Err(JSFailed);\n"
"}\n")
bodyWithThis = string.Template(
setupCall +
"rooted!(in(s.get_context()) let mut thisObjJS = ptr::null_mut());\n"
"wrap_call_this_object(s.get_context(), thisObj, thisObjJS.handle_mut());\n"
"if thisObjJS.is_null() {\n"
" return Err(JSFailed);\n"
"}\n"
"return ${methodName}(${callArgs});").substitute({
"callArgs": ", ".join(argnamesWithThis),
"methodName": 'self.' + method.name,
})
bodyWithoutThis = string.Template(
setupCall +
"rooted!(in(s.get_context()) let thisObjJS = ptr::null_mut());"
"return ${methodName}(${callArgs});").substitute({
"callArgs": ", ".join(argnamesWithoutThis),
"methodName": 'self.' + method.name,
})
return [ClassMethod(method.name + '_', method.returnType, args,
bodyInHeader=True,
templateArgs=["T: Reflectable"],
body=bodyWithThis,
visibility='pub'),
ClassMethod(method.name + '__', method.returnType, argsWithoutThis,
bodyInHeader=True,
body=bodyWithoutThis,
visibility='pub'),
method]
def deps(self):
return self._deps
# We're always fallible
def callbackGetterName(attr, descriptor):
return "Get" + MakeNativeName(
descriptor.binaryNameFor(attr.identifier.name))
def callbackSetterName(attr, descriptor):
return "Set" + MakeNativeName(
descriptor.binaryNameFor(attr.identifier.name))
class CGCallbackFunction(CGCallback):
def __init__(self, callback, descriptorProvider):
CGCallback.__init__(self, callback, descriptorProvider,
"CallbackFunction",
methods=[CallCallback(callback, descriptorProvider)])
def getConstructors(self):
return CGCallback.getConstructors(self)
class CGCallbackFunctionImpl(CGGeneric):
def __init__(self, callback):
impl = string.Template("""\
impl CallbackContainer for ${type} {
fn new(callback: *mut JSObject) -> Rc<${type}> {
${type}::new(callback)
}
fn callback(&self) -> *mut JSObject {
self.parent.callback()
}
}
impl ToJSValConvertible for ${type} {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
self.callback().to_jsval(cx, rval);
}
}\
""").substitute({"type": callback.identifier.name})
CGGeneric.__init__(self, impl)
class CGCallbackInterface(CGCallback):
def __init__(self, descriptor):
iface = descriptor.interface
attrs = [m for m in iface.members if m.isAttr() and not m.isStatic()]
getters = [CallbackGetter(a, descriptor) for a in attrs]
setters = [CallbackSetter(a, descriptor) for a in attrs
if not a.readonly]
methods = [m for m in iface.members
if m.isMethod() and not m.isStatic() and not m.isIdentifierLess()]
methods = [CallbackOperation(m, sig, descriptor) for m in methods
for sig in m.signatures()]
assert not iface.isJSImplemented() or not iface.ctor()
CGCallback.__init__(self, iface, descriptor, "CallbackInterface",
methods, getters=getters, setters=setters)
class FakeMember():
def __init__(self):
self.treatNullAs = "Default"
def isStatic(self):
return False
def isAttr(self):
return False
def isMethod(self):
return False
def getExtendedAttribute(self, name):
return None
class CallbackMember(CGNativeMember):
def __init__(self, sig, name, descriptorProvider, needThisHandling):
"""
needThisHandling is True if we need to be able to accept a specified
thisObj, False otherwise.
"""
self.retvalType = sig[0]
self.originalSig = sig
args = sig[1]
self.argCount = len(args)
if self.argCount > 0:
# Check for variadic arguments
lastArg = args[self.argCount - 1]
if lastArg.variadic:
self.argCountStr = (
"(%d - 1) + %s.len()" % (self.argCount,
lastArg.identifier.name))
else:
self.argCountStr = "%d" % self.argCount
self.needThisHandling = needThisHandling
# If needThisHandling, we generate ourselves as private and the caller
# will handle generating public versions that handle the "this" stuff.
visibility = "priv" if needThisHandling else "pub"
# We don't care, for callback codegen, whether our original member was
# a method or attribute or whatnot. Just always pass FakeMember()
# here.
CGNativeMember.__init__(self, descriptorProvider, FakeMember(),
name, (self.retvalType, args),
extendedAttrs={},
passJSBitsAsNeeded=False,
visibility=visibility)
# We have to do all the generation of our body now, because
# the caller relies on us throwing if we can't manage it.
self.exceptionCode = "return Err(JSFailed);"
self.body = self.getImpl()
def getImpl(self):
replacements = {
"declRval": self.getRvalDecl(),
"returnResult": self.getResultConversion(),
"convertArgs": self.getArgConversions(),
"doCall": self.getCall(),
"setupCall": self.getCallSetup(),
}
if self.argCount > 0:
replacements["argCount"] = self.argCountStr
replacements["argvDecl"] = string.Template(
"let mut argv = vec![UndefinedValue(); ${argCount}];\n"
).substitute(replacements)
else:
# Avoid weird 0-sized arrays
replacements["argvDecl"] = ""
# Newlines and semicolons are in the values
pre = string.Template(
"${setupCall}"
"${declRval}"
"${argvDecl}").substitute(replacements)
body = string.Template(
"${convertArgs}"
"${doCall}"
"${returnResult}").substitute(replacements)
return CGWrapper(CGIndenter(CGList([
CGGeneric(pre),
CGGeneric(body),
], "\n"), 4), pre="unsafe {\n", post="\n}").define()
def getResultConversion(self):
replacements = {
"val": "rval.handle()",
}
info = getJSToNativeConversionInfo(
self.retvalType,
self.descriptorProvider,
exceptionCode=self.exceptionCode,
isCallbackReturnValue="Callback",
# XXXbz we should try to do better here
sourceDescription="return value")
template = info.template
declType = info.declType
convertType = instantiateJSToNativeConversionTemplate(
template, replacements, declType, "rvalDecl")
if self.retvalType is None or self.retvalType.isVoid():
retval = "()"
elif self.retvalType.isAny():
retval = "rvalDecl.get()"
else:
retval = "rvalDecl"
return "%s\nOk(%s)\n" % (convertType.define(), retval)
def getArgConversions(self):
# Just reget the arglist from self.originalSig, because our superclasses
        # just have way too many members they like to clobber, so I can't find a
# safe member name to store it in.
argConversions = [self.getArgConversion(i, arg) for (i, arg)
in enumerate(self.originalSig[1])]
# Do them back to front, so our argc modifications will work
# correctly, because we examine trailing arguments first.
argConversions.reverse()
argConversions = [CGGeneric(c) for c in argConversions]
if self.argCount > 0:
argConversions.insert(0, self.getArgcDecl())
# And slap them together.
return CGList(argConversions, "\n\n").define() + "\n\n"
def getArgConversion(self, i, arg):
argval = arg.identifier.name
if arg.variadic:
argval = argval + "[idx].get()"
jsvalIndex = "%d + idx" % i
else:
jsvalIndex = "%d" % i
if arg.optional and not arg.defaultValue:
argval += ".clone().unwrap()"
conversion = wrapForType(
"argv_root.handle_mut()", result=argval,
successCode="argv[%s] = argv_root.get();" % jsvalIndex,
pre="rooted!(in(cx) let mut argv_root = UndefinedValue());")
if arg.variadic:
conversion = string.Template(
"for idx in 0..${arg}.len() {\n" +
CGIndenter(CGGeneric(conversion)).define() + "\n"
"}"
).substitute({"arg": arg.identifier.name})
elif arg.optional and not arg.defaultValue:
conversion = (
CGIfWrapper("%s.is_some()" % arg.identifier.name,
CGGeneric(conversion)).define() +
" else if argc == %d {\n"
" // This is our current trailing argument; reduce argc\n"
" argc -= 1;\n"
"} else {\n"
" argv[%d] = UndefinedValue();\n"
"}" % (i + 1, i))
return conversion
def getArgs(self, returnType, argList):
args = CGNativeMember.getArgs(self, returnType, argList)
if not self.needThisHandling:
# Since we don't need this handling, we're the actual method that
# will be called, so we need an aRethrowExceptions argument.
args.append(Argument("ExceptionHandling", "aExceptionHandling",
"ReportExceptions"))
return args
# We want to allow the caller to pass in a "this" object, as
# well as a JSContext.
return [Argument("*mut JSContext", "cx"),
Argument("HandleObject", "aThisObj")] + args
def getCallSetup(self):
if self.needThisHandling:
# It's been done for us already
return ""
return (
"CallSetup s(CallbackPreserveColor(), aRv, aExceptionHandling);\n"
"JSContext* cx = s.get_context();\n"
"if (!cx) {\n"
" return Err(JSFailed);\n"
"}\n")
def getArgcDecl(self):
if self.argCount <= 1:
return CGGeneric("let argc = %s;" % self.argCountStr)
return CGGeneric("let mut argc = %s;" % self.argCountStr)
@staticmethod
def ensureASCIIName(idlObject):
type = "attribute" if idlObject.isAttr() else "operation"
if re.match("[^\x20-\x7E]", idlObject.identifier.name):
raise SyntaxError('Callback %s name "%s" contains non-ASCII '
"characters. We can't handle that. %s" %
(type, idlObject.identifier.name,
idlObject.location))
if re.match('"', idlObject.identifier.name):
raise SyntaxError("Callback %s name '%s' contains "
"double-quote character. We can't handle "
"that. %s" %
(type, idlObject.identifier.name,
idlObject.location))
class CallbackMethod(CallbackMember):
def __init__(self, sig, name, descriptorProvider, needThisHandling):
CallbackMember.__init__(self, sig, name, descriptorProvider,
needThisHandling)
def getRvalDecl(self):
return "rooted!(in(cx) let mut rval = UndefinedValue());\n"
def getCall(self):
replacements = {
"thisObj": self.getThisObj(),
"getCallable": self.getCallableDecl(),
"callGuard": self.getCallGuard(),
}
if self.argCount > 0:
replacements["argv"] = "argv.as_ptr()"
replacements["argc"] = "argc"
else:
replacements["argv"] = "ptr::null_mut()"
replacements["argc"] = "0"
return string.Template(
"${getCallable}"
"rooted!(in(cx) let rootedThis = ${thisObj});\n"
"let ok = ${callGuard}JS_CallFunctionValue(\n"
" cx, rootedThis.handle(), callable.handle(),\n"
" &HandleValueArray {\n"
" length_: ${argc} as ::libc::size_t,\n"
" elements_: ${argv}\n"
" }, rval.handle_mut());\n"
"if let Some(error) = maybe_take_panic_result() {\n"
" panic::resume_unwind(error);\n"
"}\n"
"if !ok {\n"
" return Err(JSFailed);\n"
"}\n").substitute(replacements)
class CallCallback(CallbackMethod):
def __init__(self, callback, descriptorProvider):
self.callback = callback
CallbackMethod.__init__(self, callback.signatures()[0], "Call",
descriptorProvider, needThisHandling=True)
def getThisObj(self):
return "aThisObj.get()"
def getCallableDecl(self):
return "rooted!(in(cx) let callable = ObjectValue(&*self.parent.callback()));\n"
def getCallGuard(self):
if self.callback._treatNonObjectAsNull:
return "!IsCallable(self.parent.callback()) || "
return ""
class CallbackOperationBase(CallbackMethod):
"""
Common class for implementing various callback operations.
"""
def __init__(self, signature, jsName, nativeName, descriptor, singleOperation):
self.singleOperation = singleOperation
self.methodName = jsName
CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation)
def getThisObj(self):
if not self.singleOperation:
return "self.parent.callback()"
# This relies on getCallableDecl declaring a boolean
# isCallable in the case when we're a single-operation
# interface.
return "if isCallable { aThisObj.get() } else { self.parent.callback() }"
def getCallableDecl(self):
replacements = {
"methodName": self.methodName
}
getCallableFromProp = string.Template(
'try!(self.parent.get_callable_property(cx, "${methodName}"))'
).substitute(replacements)
if not self.singleOperation:
return 'rooted!(in(cx) let callable =\n' + getCallableFromProp + ');\n'
return (
'let isCallable = IsCallable(self.parent.callback());\n'
'rooted!(in(cx) let callable =\n' +
CGIndenter(
CGIfElseWrapper('isCallable',
CGGeneric('ObjectValue(&*self.parent.callback())'),
CGGeneric(getCallableFromProp))).define() + ');\n')
def getCallGuard(self):
return ""
class CallbackOperation(CallbackOperationBase):
"""
Codegen actual WebIDL operations on callback interfaces.
"""
def __init__(self, method, signature, descriptor):
self.ensureASCIIName(method)
jsName = method.identifier.name
CallbackOperationBase.__init__(self, signature,
jsName,
MakeNativeName(descriptor.binaryNameFor(jsName)),
descriptor, descriptor.interface.isSingleOperationInterface())
class CallbackGetter(CallbackMember):
def __init__(self, attr, descriptor):
self.ensureASCIIName(attr)
self.attrName = attr.identifier.name
CallbackMember.__init__(self,
(attr.type, []),
callbackGetterName(attr),
descriptor,
needThisHandling=False)
def getRvalDecl(self):
return "JS::Rooted<JS::Value> rval(cx, JS::UndefinedValue());\n"
def getCall(self):
replacements = {
"attrName": self.attrName
}
return string.Template(
'if (!JS_GetProperty(cx, mCallback, "${attrName}", &rval)) {\n'
' return Err(JSFailed);\n'
'}\n').substitute(replacements)
class CallbackSetter(CallbackMember):
def __init__(self, attr, descriptor):
self.ensureASCIIName(attr)
self.attrName = attr.identifier.name
CallbackMember.__init__(self,
(BuiltinTypes[IDLBuiltinType.Types.void],
[FakeArgument(attr.type, attr)]),
callbackSetterName(attr),
descriptor,
needThisHandling=False)
def getRvalDecl(self):
# We don't need an rval
return ""
def getCall(self):
replacements = {
"attrName": self.attrName,
"argv": "argv.handleAt(0)",
}
return string.Template(
'MOZ_ASSERT(argv.length() == 1);\n'
'if (!JS_SetProperty(cx, mCallback, "${attrName}", ${argv})) {\n'
' return Err(JSFailed);\n'
'}\n').substitute(replacements)
def getArgcDecl(self):
return None
class CGIterableMethodGenerator(CGGeneric):
"""
Creates methods for iterable interfaces. Unwrapping/wrapping
will be taken care of by the usual method generation machinery in
CGMethodCall/CGPerSignatureCall. Functionality is filled in here instead of
using CGCallGenerator.
"""
def __init__(self, descriptor, iterable, methodName):
if methodName == "forEach":
CGGeneric.__init__(self, fill(
"""
if !IsCallable(arg0) {
throw_type_error(cx, "Argument 1 of ${ifaceName}.forEach is not callable.");
return false;
}
rooted!(in(cx) let arg0 = ObjectValue(&*arg0));
rooted!(in(cx) let mut call_arg1 = UndefinedValue());
rooted!(in(cx) let mut call_arg2 = UndefinedValue());
let mut call_args = vec![UndefinedValue(), UndefinedValue(), ObjectValue(&**_obj)];
rooted!(in(cx) let mut ignoredReturnVal = UndefinedValue());
for i in 0..(*this).get_iterable_length() {
(*this).get_value_at_index(i).to_jsval(cx, call_arg1.handle_mut());
(*this).get_key_at_index(i).to_jsval(cx, call_arg2.handle_mut());
call_args[0] = call_arg1.handle().get();
call_args[1] = call_arg2.handle().get();
let call_args = HandleValueArray { length_: 3, elements_: call_args.as_ptr() };
if !Call(cx, arg1, arg0.handle(), &call_args,
ignoredReturnVal.handle_mut()) {
return false;
}
}
let result = ();
""",
ifaceName=descriptor.interface.identifier.name))
return
CGGeneric.__init__(self, fill(
"""
let result = ${iterClass}::new(&*this,
IteratorType::${itrMethod},
super::${ifaceName}IteratorBinding::Wrap);
""",
iterClass=iteratorNativeType(descriptor, True),
ifaceName=descriptor.interface.identifier.name,
itrMethod=methodName.title()))
def camel_to_upper_snake(s):
return "_".join(m.group(0).upper() for m in re.finditer("[A-Z][a-z]*", s))
class GlobalGenRoots():
"""
Roots for global codegen.
To generate code, call the method associated with the target, and then
call the appropriate define/declare method.
"""
@staticmethod
def InterfaceObjectMap(config):
mods = [
"dom::bindings::codegen",
"js::jsapi::{HandleObject, JSContext}",
"phf",
]
imports = CGList([CGGeneric("use %s;" % mod) for mod in mods], "\n")
global_descriptors = config.getDescriptors(isGlobal=True)
flags = [("EMPTY", 0)]
flags.extend(
(camel_to_upper_snake(d.name), 2 ** idx)
for (idx, d) in enumerate(global_descriptors)
)
global_flags = CGWrapper(CGIndenter(CGList([
CGGeneric("const %s = %#x," % args)
for args in flags
], "\n")), pre="pub flags Globals: u8 {\n", post="\n}")
globals_ = CGWrapper(CGIndenter(global_flags), pre="bitflags! {\n", post="\n}")
pairs = []
for d in config.getDescriptors(hasInterfaceObject=True):
binding = toBindingNamespace(d.name)
pairs.append((d.name, binding, binding))
for ctor in d.interface.namedConstructors:
pairs.append((ctor.identifier.name, binding, binding))
pairs.sort(key=operator.itemgetter(0))
mappings = [
CGGeneric('b"%s" => codegen::Bindings::%s::%s::DefineDOMInterface as unsafe fn(_, _),' % pair)
for pair in pairs
]
mapType = "phf::Map<&'static [u8], unsafe fn(*mut JSContext, HandleObject)>"
phf = CGWrapper(
CGIndenter(CGList(mappings, "\n")),
pre="pub static MAP: %s = phf_map! {\n" % mapType,
post="\n};\n")
return CGList([
CGGeneric(AUTOGENERATED_WARNING_COMMENT),
CGList([imports, globals_, phf], "\n\n")
])
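    # Sketch of the output for two hypothetical global interfaces, Window and
    # Worker (flag values are powers of two so globals can be combined):
    #     bitflags! {
    #         pub flags Globals: u8 {
    #             const EMPTY = 0x0,
    #             const WINDOW = 0x1,
    #             const WORKER = 0x2,
    #         }
    #     }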
@staticmethod
def PrototypeList(config):
# Prototype ID enum.
interfaces = config.getDescriptors(isCallback=False, isNamespace=False)
protos = [d.name for d in interfaces]
constructors = sorted([MakeNativeName(d.name)
for d in config.getDescriptors(hasInterfaceObject=True)
if d.shouldHaveGetConstructorObjectMethod()])
proxies = [d.name for d in config.getDescriptors(proxy=True)]
return CGList([
CGGeneric(AUTOGENERATED_WARNING_COMMENT),
CGGeneric("pub const PROTO_OR_IFACE_LENGTH: usize = %d;\n" % (len(protos) + len(constructors))),
CGGeneric("pub const MAX_PROTO_CHAIN_LENGTH: usize = %d;\n\n" % config.maxProtoChainLength),
CGNonNamespacedEnum('ID', protos, 0, deriving="PartialEq, Copy, Clone", repr="u16"),
CGNonNamespacedEnum('Constructor', constructors, len(protos),
deriving="PartialEq, Copy, Clone", repr="u16"),
CGWrapper(CGIndenter(CGList([CGGeneric('"' + name + '"') for name in protos],
",\n"),
indentLevel=4),
pre="static INTERFACES: [&'static str; %d] = [\n" % len(protos),
post="\n];\n\n"),
CGGeneric("pub fn proto_id_to_name(proto_id: u16) -> &'static str {\n"
" debug_assert!(proto_id < ID::Last as u16);\n"
" INTERFACES[proto_id as usize]\n"
"}\n\n"),
CGNonNamespacedEnum('Proxies', proxies, 0, deriving="PartialEq, Copy, Clone"),
])
@staticmethod
def RegisterBindings(config):
# TODO - Generate the methods we want
code = CGList([
CGRegisterProxyHandlers(config),
], "\n")
return CGImports(code, descriptors=[], callbacks=[], dictionaries=[], enums=[], imports=[
'dom::bindings::codegen::Bindings',
'dom::bindings::codegen::PrototypeList::Proxies',
'libc',
], config=config, ignored_warnings=[])
@staticmethod
def InterfaceTypes(config):
descriptors = sorted([MakeNativeName(d.name)
for d in config.getDescriptors(register=True,
isCallback=False,
isIteratorInterface=False)])
curr = CGList([CGGeneric("pub use dom::%s::%s;\n" % (name.lower(),
MakeNativeName(name)))
for name in descriptors])
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def Bindings(config):
def leafModule(d):
return getModuleFromObject(d).split('::')[-1]
descriptors = config.getDescriptors(register=True, isIteratorInterface=False)
descriptors = (set(toBindingNamespace(d.name) for d in descriptors) |
set(leafModule(d) for d in config.callbacks) |
set(leafModule(d) for d in config.getDictionaries()))
curr = CGList([CGGeneric("pub mod %s;\n" % name) for name in sorted(descriptors)])
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def InheritTypes(config):
descriptors = config.getDescriptors(register=True, isCallback=False)
imports = [CGGeneric("use dom::types::*;\n"),
CGGeneric("use dom::bindings::conversions::{DerivedFrom, get_dom_class};\n"),
CGGeneric("use dom::bindings::inheritance::Castable;\n"),
CGGeneric("use dom::bindings::js::{JS, LayoutJS, Root};\n"),
CGGeneric("use dom::bindings::trace::JSTraceable;\n"),
CGGeneric("use dom::bindings::reflector::Reflectable;\n"),
CGGeneric("use js::jsapi::JSTracer;\n\n"),
CGGeneric("use std::mem;\n\n")]
allprotos = []
topTypes = []
hierarchy = defaultdict(list)
for descriptor in descriptors:
name = descriptor.name
chain = descriptor.prototypeChain
upcast = descriptor.hasDescendants()
downcast = len(chain) != 1
if upcast and not downcast:
topTypes.append(name)
if not upcast:
                # No other interface will implement DerivedFrom<Foo> for this Foo, so avoid
# implementing it for itself.
chain = chain[:-1]
# Implement `DerivedFrom<Bar>` for `Foo`, for all `Bar` that `Foo` inherits from.
if chain:
allprotos.append(CGGeneric("impl Castable for %s {}\n" % name))
for baseName in chain:
allprotos.append(CGGeneric("impl DerivedFrom<%s> for %s {}\n" % (baseName, name)))
if chain:
allprotos.append(CGGeneric("\n"))
if downcast:
hierarchy[descriptor.getParentName()].append(name)
typeIdCode = []
topTypeVariants = [
("ID used by abstract interfaces.", "Abstract"),
("ID used by interfaces that are not castable.", "Alone"),
]
topTypeVariants += [
("ID used by interfaces that derive from %s." % typeName, "%s(%sTypeId)" % (typeName, typeName))
for typeName in topTypes
]
topTypeVariantsAsStrings = [CGGeneric("/// %s\n%s," % variant) for variant in topTypeVariants]
typeIdCode.append(CGWrapper(CGIndenter(CGList(topTypeVariantsAsStrings, "\n"), 4),
pre="#[derive(Clone, Copy, Debug)]\npub enum TopTypeId {\n",
post="\n}\n\n"))
def type_id_variant(name):
            # If `name` is present in the hierarchy's keys, that means some other interfaces
# derive from it and this enum variant should have an argument with its own
# TypeId enum.
return "%s(%sTypeId)" % (name, name) if name in hierarchy else name
for base, derived in hierarchy.iteritems():
variants = []
if not config.getInterface(base).getExtendedAttribute("Abstract"):
variants.append(CGGeneric(base))
variants += [CGGeneric(type_id_variant(derivedName)) for derivedName in derived]
derives = "Clone, Copy, Debug, PartialEq"
typeIdCode.append(CGWrapper(CGIndenter(CGList(variants, ",\n"), 4),
pre="#[derive(%s)]\npub enum %sTypeId {\n" % (derives, base),
post="\n}\n\n"))
if base in topTypes:
typeIdCode.append(CGGeneric("""\
impl %(base)s {
pub fn type_id(&self) -> &'static %(base)sTypeId {
let domclass = unsafe {
get_dom_class(self.reflector().get_jsobject().get()).unwrap()
};
match domclass.type_id {
TopTypeId::%(base)s(ref type_id) => type_id,
_ => unreachable!(),
}
}
}
""" % {'base': base}))
curr = CGList(imports + typeIdCode + allprotos)
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def UnionTypes(config):
curr = UnionTypes(config.getDescriptors(),
config.getDictionaries(),
config.getCallbacks(),
config.typedefs,
config)
# Add the auto-generated comment.
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
# Done.
return curr
@staticmethod
def SupportedDomApis(config):
descriptors = config.getDescriptors(isExposedConditionally=False)
base_path = os.path.join('dom', 'bindings', 'codegen')
with open(os.path.join(base_path, 'apis.html.template')) as f:
base_template = f.read()
with open(os.path.join(base_path, 'api.html.template')) as f:
api_template = f.read()
with open(os.path.join(base_path, 'property.html.template')) as f:
property_template = f.read()
with open(os.path.join(base_path, 'interface.html.template')) as f:
interface_template = f.read()
apis = []
interfaces = []
for descriptor in descriptors:
props = []
for m in descriptor.interface.members:
if PropertyDefiner.getStringAttr(m, 'Pref') or \
PropertyDefiner.getStringAttr(m, 'Func') or \
(m.isMethod() and m.isIdentifierLess()):
continue
display = m.identifier.name + ('()' if m.isMethod() else '')
props += [property_template.replace('${name}', display)]
name = descriptor.interface.identifier.name
apis += [(api_template.replace('${interface}', name)
.replace('${properties}', '\n'.join(props)))]
interfaces += [interface_template.replace('${interface}', name)]
return CGGeneric((base_template.replace('${apis}', '\n'.join(apis))
.replace('${interfaces}', '\n'.join(interfaces))))
| jaysonsantos/servo | components/script/dom/bindings/codegen/CodegenRust.py | Python | mpl-2.0 | 279,760 |
"""Unit tests for the Bugzilla hosting service."""
from reviewboard.hostingsvcs.testing import HostingServiceTestCase
class BugzillaTests(HostingServiceTestCase):
"""Unit tests for the Bugzilla hosting service."""
service_name = 'bugzilla'
fixtures = ['test_scmtools']
def test_service_support(self):
"""Testing Bugzilla service support capabilities"""
self.assertTrue(self.service_class.supports_bug_trackers)
self.assertFalse(self.service_class.supports_repositories)
def test_bug_tracker_field(self):
"""Testing Bugzilla.get_bug_tracker_field"""
self.assertFalse(
self.service_class.get_bug_tracker_requires_username())
self.assertEqual(
self.service_class.get_bug_tracker_field(None, {
'bugzilla_url': 'http://bugzilla.example.com',
}),
'http://bugzilla.example.com/show_bug.cgi?id=%s')
| reviewboard/reviewboard | reviewboard/hostingsvcs/tests/test_bugzilla.py | Python | mit | 929 |
#!/usr/bin/env python
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import ipaddr
from maas_common import metric
from maas_common import metric_bool
from maas_common import print_output
from maas_common import status_err
from maas_common import status_ok
import requests
from requests import exceptions as exc
def check(args):
metadata_endpoint = ('{protocol}://{ip}:{port}'.format(
ip=args.ip,
protocol=args.protocol,
port=args.port
))
is_up = True
s = requests.Session()
try:
# looks like we can only get / (ec2 versions) without specifying
# an instance ID and other headers
versions = s.get('%s/' % metadata_endpoint,
verify=False,
timeout=5)
milliseconds = versions.elapsed.total_seconds() * 1000
if not versions.ok or '1.0' not in versions.content.splitlines():
is_up = False
except (exc.ConnectionError, exc.HTTPError, exc.Timeout) as e:
is_up = False
metric_bool('client_success', False, m_name='maas_nova')
except Exception as e:
metric_bool('client_success', False, m_name='maas_nova')
status_err(str(e), m_name='maas_nova')
else:
metric_bool('client_success', True, m_name='maas_nova')
status_ok(m_name='maas_nova')
metric_bool('nova_api_metadata_local_status', is_up, m_name='maas_nova')
# only want to send other metrics if api is up
if is_up:
metric('nova_api_metadata_local_response_time',
'double',
'%.3f' % milliseconds,
'ms')
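# Example invocation (address and options are illustrative):
#     python nova_api_metadata_local_check.py 169.254.169.254 --port 8775 --telegraf-output
# When the endpoint responds, this emits nova_api_metadata_local_status plus the
# response time in milliseconds; otherwise only the status/client_success booleans
# are sent.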
def main(args):
check(args)
if __name__ == "__main__":
parser = argparse.ArgumentParser(
        description='Check nova-api-metadata API')
parser.add_argument('ip',
type=ipaddr.IPv4Address,
help='nova-api-metadata IP address')
parser.add_argument('--telegraf-output',
action='store_true',
default=False,
help='Set the output format to telegraf')
parser.add_argument('--port',
default='8775',
help='Port for the nova metadata service')
parser.add_argument('--protocol',
default='http',
help='Protocol for the nova metadata service')
args = parser.parse_args()
with print_output(print_telegraf=args.telegraf_output):
main(args)
| briancurtin/rpc-maas | playbooks/files/rax-maas/plugins/nova_api_metadata_local_check.py | Python | apache-2.0 | 3,057 |
#pylint: disable=C0111
#pylint: disable=W0621
from lettuce import world, step
from selenium.webdriver.common.keys import Keys
from common import type_in_codemirror
from nose.tools import assert_in # pylint: disable=E0611
@step(u'I go to the course updates page')
def go_to_updates(_step):
menu_css = 'li.nav-course-courseware'
updates_css = 'li.nav-course-courseware-updates a'
world.css_click(menu_css)
world.css_click(updates_css)
@step(u'I add a new update with the text "([^"]*)"$')
def add_update(_step, text):
update_css = 'a.new-update-button'
world.css_click(update_css)
change_text(text)
@step(u'I should see the update "([^"]*)"$')
def check_update(_step, text):
update_css = 'div.update-contents'
update_html = world.css_find(update_css).html
assert_in(text, update_html)
@step(u'I should not see the update "([^"]*)"$')
def check_no_update(_step, text):
update_css = 'div.update-contents'
assert world.is_css_not_present(update_css)
@step(u'I modify the text to "([^"]*)"$')
def modify_update(_step, text):
button_css = 'div.post-preview a.edit-button'
world.css_click(button_css)
change_text(text)
@step(u'I change the update from "([^"]*)" to "([^"]*)"$')
def change_existing_update(_step, before, after):
verify_text_in_editor_and_update('div.post-preview a.edit-button', before, after)
@step(u'I change the handout from "([^"]*)" to "([^"]*)"$')
def change_existing_handout(_step, before, after):
verify_text_in_editor_and_update('div.course-handouts a.edit-button', before, after)
@step(u'I delete the update$')
def click_button(_step):
button_css = 'div.post-preview a.delete-button'
world.css_click(button_css)
@step(u'I edit the date to "([^"]*)"$')
def change_date(_step, new_date):
button_css = 'div.post-preview a.edit-button'
world.css_click(button_css)
date_css = 'input.date'
date = world.css_find(date_css)
for i in range(len(date.value)):
date._element.send_keys(Keys.END, Keys.BACK_SPACE)
date._element.send_keys(new_date)
save_css = 'a.save-button'
world.css_click(save_css)
@step(u'I should see the date "([^"]*)"$')
def check_date(_step, date):
date_css = 'span.date-display'
assert date == world.css_html(date_css)
@step(u'I modify the handout to "([^"]*)"$')
def edit_handouts(_step, text):
edit_css = 'div.course-handouts > a.edit-button'
world.css_click(edit_css)
change_text(text)
@step(u'I see the handout "([^"]*)"$')
def check_handout(_step, handout):
handout_css = 'div.handouts-content'
assert handout in world.css_html(handout_css)
@step(u'I see the handout error text')
def check_handout_error(_step):
handout_error_css = 'div#handout_error'
assert world.css_has_class(handout_error_css, 'is-shown')
@step(u'I see handout save button disabled')
def check_handout_save_button_disabled(_step):
handout_save_button = 'form.edit-handouts-form a.save-button'
assert world.css_has_class(handout_save_button, 'is-disabled')
@step(u'I edit the handout to "([^"]*)"$')
def type_handout_text(_step, text):
type_in_codemirror(0, text)
@step(u'I see handout save button re-enabled')
def check_handout_save_button_enabled(_step):
handout_save_button = 'form.edit-handouts-form a.save-button'
assert not world.css_has_class(handout_save_button, 'is-disabled')
@step(u'I save handout edit')
def save_handout_edit(_step):
save_css = 'a.save-button'
world.css_click(save_css)
def change_text(text):
type_in_codemirror(0, text)
save_css = 'a.save-button'
world.css_click(save_css)
def verify_text_in_editor_and_update(button_css, before, after):
world.css_click(button_css)
text = world.css_find(".cm-string").html
assert before in text
change_text(after)
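# Illustrative scenario (hypothetical .feature file) showing how these steps
# compose; it is not part of the test suite:
#     Scenario: Add and edit a course update
#         Given I go to the course updates page
#         When I add a new update with the text "Hello"
#         Then I should see the update "Hello"
#         When I change the update from "Hello" to "Goodbye"
#         Then I should see the update "Goodbye"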
| XiaodunServerGroup/xiaodun-platform | cms/djangoapps/contentstore/features/course-updates.py | Python | agpl-3.0 | 3,799 |
# coding=utf-8
# This file is part of SickRage.
#
# Git: https://github.com/PyMedusa/SickRage.git
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
"""
Test NZBProvider
"""
from __future__ import print_function
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')))
from six import iteritems
import sickbeard
from generic_provider_tests import GenericProviderTests
from sickrage.providers.GenericProvider import GenericProvider
from sickrage.providers.nzb.NZBProvider import NZBProvider
class NZBProviderTests(GenericProviderTests):
"""
Test NZBProvider
"""
def test___init__(self):
"""
Test __init__
"""
self.assertEqual(NZBProvider('Test Provider').provider_type, GenericProvider.NZB)
def test_is_active(self):
"""
Test is_active
"""
test_cases = {
(False, False): False,
(False, None): False,
(False, True): False,
(None, False): False,
(None, None): False,
(None, True): False,
(True, False): False,
(True, None): False,
(True, True): True,
}
for ((use_nzb, enabled), result) in iteritems(test_cases):
sickbeard.USE_NZBS = use_nzb
provider = NZBProvider('Test Provider')
provider.enabled = enabled
self.assertEqual(provider.is_active(), result)
def test__get_size(self):
"""
Test _get_size
"""
items_list = [
None, {}, {'links': None}, {'links': []}, {'links': [{}]},
{'links': [{'length': 1}, {'length': None}, {'length': 3}]},
{'links': [{'length': 1}, {'length': ''}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '0'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '123'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '12.3'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '-123'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': '-12.3'}, {'length': 3}]},
{'links': [{'length': 1}, {'length': 0}, {'length': 3}]},
{'links': [{'length': 1}, {'length': 123}, {'length': 3}]},
{'links': [{'length': 1}, {'length': 12.3}, {'length': 3}]},
{'links': [{'length': 1}, {'length': -123}, {'length': 3}]},
{'links': [{'length': 1}, {'length': -12.3}, {'length': 3}]},
]
results_list = [
-1, -1, -1, -1, -1, -1, -1, 0, 123, -1, -123, -1, 0, 123, 12, -123, -12
]
unicode_items_list = [
{u'links': None}, {u'links': []}, {u'links': [{}]},
{u'links': [{u'length': 1}, {u'length': None}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u''}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'0'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'123'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'12.3'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'-123'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': u'-12.3'}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': 0}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': 123}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': 12.3}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': -123}, {u'length': 3}]},
{u'links': [{u'length': 1}, {u'length': -12.3}, {u'length': 3}]},
]
unicode_results_list = [
-1, -1, -1, -1, -1, 0, 123, -1, -123, -1, 0, 123, 12, -123, -12
]
self.assertEqual(
len(items_list), len(results_list),
'Number of parameters (%d) and results (%d) does not match' % (len(items_list), len(results_list))
)
self.assertEqual(
len(unicode_items_list), len(unicode_results_list),
'Number of parameters (%d) and results (%d) does not match' % (
len(unicode_items_list), len(unicode_results_list))
)
for (index, item) in enumerate(items_list):
self.assertEqual(NZBProvider('Test Provider')._get_size(item), results_list[index])
for (index, item) in enumerate(unicode_items_list):
self.assertEqual(NZBProvider('Test Provider')._get_size(item), unicode_results_list[index])
def test__get_storage_dir(self):
"""
Test _get_storage_dir
"""
test_cases = [
None, 123, 12.3, '', os.path.join('some', 'path', 'to', 'folder')
]
for nzb_dir in test_cases:
sickbeard.NZB_DIR = nzb_dir
self.assertEqual(NZBProvider('Test Provider')._get_storage_dir(), nzb_dir)
if __name__ == '__main__':
print('=====> Testing %s' % __file__)
SUITE = unittest.TestLoader().loadTestsFromTestCase(NZBProviderTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)
| Thraxis/pymedusa | tests/sickrage_tests/providers/nzb_provider_tests.py | Python | gpl-3.0 | 5,810 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import base64
import random
import re
from odoo import api, fields, models, modules
class ImLivechatChannel(models.Model):
""" Livechat Channel
        Define a communication channel, which can be accessed with 'script_external' (a script tag to put on an
        external website), 'script_internal' (code to be integrated with the Odoo website) or via the 'web_page' link.
        It provides rating tools and access rules for anonymous visitors.
"""
_name = 'im_livechat.channel'
_inherit = ['rating.parent.mixin']
_description = 'Livechat Channel'
_rating_satisfaction_days = 7 # include only last 7 days to compute satisfaction
def _default_image(self):
image_path = modules.get_module_resource('im_livechat', 'static/src/img', 'default.png')
return base64.b64encode(open(image_path, 'rb').read())
def _default_user_ids(self):
return [(6, 0, [self._uid])]
# attribute fields
name = fields.Char('Name', required=True, help="The name of the channel")
button_text = fields.Char('Text of the Button', default='Have a Question? Chat with us.',
help="Default text displayed on the Livechat Support Button")
default_message = fields.Char('Welcome Message', default='How may I help you?',
help="This is an automated 'welcome' message that your visitor will see when they initiate a new conversation.")
input_placeholder = fields.Char('Chat Input Placeholder', help='Text that prompts the user to initiate the chat.')
header_background_color = fields.Char(default="#875A7B", help="Default background color of the channel header once open")
title_color = fields.Char(default="#FFFFFF", help="Default title color of the channel once open")
button_background_color = fields.Char(default="#878787", help="Default background color of the Livechat button")
button_text_color = fields.Char(default="#FFFFFF", help="Default text color of the Livechat button")
# computed fields
web_page = fields.Char('Web Page', compute='_compute_web_page_link', store=False, readonly=True,
help="URL to a static page where you client can discuss with the operator of the channel.")
are_you_inside = fields.Boolean(string='Are you inside the matrix?',
compute='_are_you_inside', store=False, readonly=True)
script_external = fields.Text('Script (external)', compute='_compute_script_external', store=False, readonly=True)
    nbr_channel = fields.Integer('Number of conversations', compute='_compute_nbr_channel', store=False, readonly=True)
image_128 = fields.Image("Image", max_width=128, max_height=128, default=_default_image)
    # relational fields
user_ids = fields.Many2many('res.users', 'im_livechat_channel_im_user', 'channel_id', 'user_id', string='Operators', default=_default_user_ids)
channel_ids = fields.One2many('mail.channel', 'livechat_channel_id', 'Sessions')
rule_ids = fields.One2many('im_livechat.channel.rule', 'channel_id', 'Rules')
def _are_you_inside(self):
for channel in self:
channel.are_you_inside = bool(self.env.uid in [u.id for u in channel.user_ids])
def _compute_script_external(self):
view = self.env['ir.model.data'].get_object('im_livechat', 'external_loader')
values = {
"url": self.env['ir.config_parameter'].sudo().get_param('web.base.url'),
"dbname": self._cr.dbname,
}
for record in self:
values["channel_id"] = record.id
record.script_external = view._render(values)
def _compute_web_page_link(self):
base_url = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
for record in self:
record.web_page = "%s/im_livechat/support/%i" % (base_url, record.id)
@api.depends('channel_ids')
def _compute_nbr_channel(self):
channels = self.env['mail.channel'].search([('livechat_channel_id', 'in', self.ids)])
channel_count = dict.fromkeys(self.ids, 0)
for channel in channels.filtered(lambda c: c.channel_message_ids):
channel_count[channel.livechat_channel_id.id] += 1
for record in self:
record.nbr_channel = channel_count.get(record.id, 0)
# --------------------------
# Action Methods
# --------------------------
def action_join(self):
self.ensure_one()
return self.write({'user_ids': [(4, self._uid)]})
def action_quit(self):
self.ensure_one()
return self.write({'user_ids': [(3, self._uid)]})
def action_view_rating(self):
""" Action to display the rating relative to the channel, so all rating of the
sessions of the current channel
:returns : the ir.action 'action_view_rating' with the correct domain
"""
self.ensure_one()
action = self.env['ir.actions.act_window'].for_xml_id('im_livechat', 'rating_rating_action_view_livechat_rating')
action['domain'] = [('parent_res_id', '=', self.id), ('parent_res_model', '=', 'im_livechat.channel')]
return action
# --------------------------
# Channel Methods
# --------------------------
def _get_available_users(self):
""" get available user of a given channel
:retuns : return the res.users having their im_status online
"""
self.ensure_one()
return self.user_ids.filtered(lambda user: user.im_status == 'online')
def _get_livechat_mail_channel_vals(self, anonymous_name, operator, user_id=None, country_id=None):
# partner to add to the mail.channel
operator_partner_id = operator.partner_id.id
channel_partner_to_add = [(4, operator_partner_id)]
visitor_user = False
if user_id:
visitor_user = self.env['res.users'].browse(user_id)
if visitor_user and visitor_user.active: # valid session user (not public)
channel_partner_to_add.append((4, visitor_user.partner_id.id))
return {
'channel_partner_ids': channel_partner_to_add,
'livechat_active': True,
'livechat_operator_id': operator_partner_id,
'livechat_channel_id': self.id,
'anonymous_name': False if user_id else anonymous_name,
'country_id': country_id,
'channel_type': 'livechat',
'name': ' '.join([visitor_user.display_name if visitor_user else anonymous_name, operator.livechat_username if operator.livechat_username else operator.name]),
'public': 'private',
'email_send': False,
}
def _open_livechat_mail_channel(self, anonymous_name, previous_operator_id=None, user_id=None, country_id=None):
""" Return a mail.channel given a livechat channel. It creates one with a connected operator, or return false otherwise
:param anonymous_name : the name of the anonymous person of the channel
:param previous_operator_id : partner_id.id of the previous operator that this visitor had in the past
:param user_id : the id of the logged in visitor, if any
:param country_code : the country of the anonymous person of the channel
:type anonymous_name : str
:return : channel header
:rtype : dict
If this visitor already had an operator within the last 7 days (information stored with the 'im_livechat_previous_operator_pid' cookie),
the system will first try to assign that operator if he's available (to improve user experience).
"""
self.ensure_one()
operator = False
if previous_operator_id:
available_users = self._get_available_users()
# previous_operator_id is the partner_id of the previous operator, need to convert to user
if previous_operator_id in available_users.mapped('partner_id').ids:
operator = next(available_user for available_user in available_users if available_user.partner_id.id == previous_operator_id)
if not operator:
operator = self._get_random_operator()
if not operator:
# no one available
return False
# create the session, and add the link with the given channel
mail_channel_vals = self._get_livechat_mail_channel_vals(anonymous_name, operator, user_id=user_id, country_id=country_id)
mail_channel = self.env["mail.channel"].with_context(mail_create_nosubscribe=False).sudo().create(mail_channel_vals)
mail_channel._broadcast([operator.partner_id.id])
return mail_channel.sudo().channel_info()[0]
def _get_random_operator(self):
""" Return a random operator from the available users of the channel that have the lowest number of active livechats.
        A livechat is considered 'active' if it has at least one message within the last 30 minutes.
(Some annoying conversions have to be made on the fly because this model holds 'res.users' as available operators
and the mail_channel model stores the partner_id of the randomly selected operator)
:return : user
:rtype : res.users
"""
operators = self._get_available_users()
if len(operators) == 0:
return False
self.env.cr.execute("""SELECT COUNT(DISTINCT c.id), c.livechat_operator_id
FROM mail_channel c
LEFT OUTER JOIN mail_message_mail_channel_rel r ON c.id = r.mail_channel_id
LEFT OUTER JOIN mail_message m ON r.mail_message_id = m.id
WHERE c.channel_type = 'livechat'
AND c.livechat_operator_id in %s
AND m.create_date > ((now() at time zone 'UTC') - interval '30 minutes')
GROUP BY c.livechat_operator_id
ORDER BY COUNT(DISTINCT c.id) asc""", (tuple(operators.mapped('partner_id').ids),))
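        # Each row from dictfetchall() has the shape
        # {'count': <number of recently active sessions>, 'livechat_operator_id': <partner id>},
        # ordered from the least to the most busy operator.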
active_channels = self.env.cr.dictfetchall()
# If inactive operator(s), return one of them
active_channel_operator_ids = [active_channel['livechat_operator_id'] for active_channel in active_channels]
inactive_operators = [operator for operator in operators if operator.partner_id.id not in active_channel_operator_ids]
if inactive_operators:
return random.choice(inactive_operators)
# If no inactive operator, active_channels is not empty as len(operators) > 0 (see above).
# Get the less active operator using the active_channels first element's count (since they are sorted 'ascending')
lowest_number_of_conversations = active_channels[0]['count']
less_active_operator = random.choice([
active_channel['livechat_operator_id'] for active_channel in active_channels
if active_channel['count'] == lowest_number_of_conversations])
# convert the selected 'partner_id' to its corresponding res.users
return next(operator for operator in operators if operator.partner_id.id == less_active_operator)
def _get_channel_infos(self):
self.ensure_one()
return {
'header_background_color': self.header_background_color,
'button_background_color': self.button_background_color,
'title_color': self.title_color,
'button_text_color': self.button_text_color,
'button_text': self.button_text,
'input_placeholder': self.input_placeholder,
'default_message': self.default_message,
"channel_name": self.name,
"channel_id": self.id,
}
def get_livechat_info(self, username='Visitor'):
self.ensure_one()
info = {}
info['available'] = len(self._get_available_users()) > 0
info['server_url'] = self.env['ir.config_parameter'].sudo().get_param('web.base.url')
if info['available']:
info['options'] = self._get_channel_infos()
info['options']["default_username"] = username
return info
class ImLivechatChannelRule(models.Model):
""" Channel Rules
        Rules defining access to the channel (countries and url matching). It also provides the 'auto popup'
        option to automatically open the conversation.
"""
_name = 'im_livechat.channel.rule'
_description = 'Livechat Channel Rules'
_order = 'sequence asc'
regex_url = fields.Char('URL Regex',
help="Regular expression specifying the web pages this rule will be applied on.")
action = fields.Selection([('display_button', 'Display the button'), ('auto_popup', 'Auto popup'), ('hide_button', 'Hide the button')],
string='Action', required=True, default='display_button',
help="* 'Display the button' displays the chat button on the pages.\n"\
"* 'Auto popup' displays the button and automatically open the conversation pane.\n"\
"* 'Hide the button' hides the chat button on the pages.")
auto_popup_timer = fields.Integer('Auto popup timer', default=0,
help="Delay (in seconds) to automatically open the conversation window. Note: the selected action must be 'Auto popup' otherwise this parameter will not be taken into account.")
channel_id = fields.Many2one('im_livechat.channel', 'Channel',
help="The channel of the rule")
country_ids = fields.Many2many('res.country', 'im_livechat_channel_country_rel', 'channel_id', 'country_id', 'Country',
help="The rule will only be applied for these countries. Example: if you select 'Belgium' and 'United States' and that you set the action to 'Hide Button', the chat button will be hidden on the specified URL from the visitors located in these 2 countries. This feature requires GeoIP installed on your server.")
sequence = fields.Integer('Matching order', default=10,
help="Given the order to find a matching rule. If 2 rules are matching for the given url/country, the one with the lowest sequence will be chosen.")
def match_rule(self, channel_id, url, country_id=False):
""" determine if a rule of the given channel matches with the given url
:param channel_id : the identifier of the channel_id
:param url : the url to match with a rule
:param country_id : the identifier of the country
:returns the rule that matches the given condition. False otherwise.
:rtype : im_livechat.channel.rule
"""
def _match(rules):
for rule in rules:
# url might not be set because it comes from referer, in that
# case match the first rule with no regex_url
if re.search(rule.regex_url or '', url or ''):
return rule
return False
# first, search the country specific rules (the first match is returned)
        if country_id: # don't include the country in the search if geoIP is not installed
domain = [('country_ids', 'in', [country_id]), ('channel_id', '=', channel_id)]
rule = _match(self.search(domain))
if rule:
return rule
# second, fallback on the rules without country
domain = [('country_ids', '=', False), ('channel_id', '=', channel_id)]
return _match(self.search(domain))
| ddico/odoo | addons/im_livechat/models/im_livechat_channel.py | Python | agpl-3.0 | 15,347 |
from idbase.middleware import (LoginUrlMiddleware, SessionTimeoutMiddleware,
get_authenticated_uwnetid)
from pytest import fixture, raises, mark
from django.core.exceptions import ImproperlyConfigured
from idbase.exceptions import InvalidSessionError, LoginNotPerson, ServiceError
@fixture(autouse=True)
def settings(settings):
settings.LOGIN_URL = '/foo/login'
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = True
return settings
@fixture
def req_func(rf, session):
def func(url='/'):
request = rf.get(url)
request.session = session
return request
return func
@fixture
def login_req(req_func):
req = req_func(url='/foo/login')
req.META.update({
'REMOTE_USER': '[email protected]',
'Shib-Identity-Provider': 'urn:mace:incommon:washington.edu'})
req.session['_uw_postlogin'] = '/home'
return req
@fixture
def req(req_func):
"""A mock Django request get with a mock session."""
return req_func()
def test_login_url_middleware_is_login_url(login_req):
LoginUrlMiddleware().process_request(login_req)
assert login_req.uwnetid == 'foo'
assert login_req.session._session == {
'_login_url_uwnetid': 'foo',
'_uw_postlogin': '/home'
}
def test_login_url_middleware_bad_idp(login_req):
login_req.META['Shib-Identity-Provider'] = 'google.com'
LoginUrlMiddleware().process_request(login_req)
assert not login_req.uwnetid
assert isinstance(login_req.login_url_error, InvalidSessionError)
def test_login_url_middleware_existing_user(req):
req.session['_login_url_uwnetid'] = 'javerage'
LoginUrlMiddleware().process_request(req)
assert req.uwnetid == 'javerage'
def test_login_url_middleware_no_user(req):
LoginUrlMiddleware().process_request(req)
assert not req.uwnetid
def test_login_url_middleware_login_page_unprotected(login_req):
"""A case where someone set a login page that's not SSO-protected."""
login_req.META = {}
login_req.session.flush()
LoginUrlMiddleware().process_request(login_req)
assert not login_req.uwnetid
assert login_req.session._session == {}
def test_login_url_middleware_broken_irws(login_req, monkeypatch):
def blowup(self, netid=None):
raise Exception()
monkeypatch.setattr('idbase.mock.IRWS.get_regid', blowup)
LoginUrlMiddleware().process_request(login_req)
assert not login_req.uwnetid
assert isinstance(login_req.login_url_error, Exception)
def test_get_authenticated_uwnetid_basic(monkeypatch):
monkeypatch.setattr('idbase.middleware.is_personal_netid',
lambda **args: True)
netid = get_authenticated_uwnetid(
remote_user='[email protected]',
saml_idp='urn:mace:incommon:washington.edu')
assert netid == 'joe'
@mark.parametrize('remote_user,saml_idp,is_person,expected_error', [
('', '', True, ServiceError),
('[email protected]', 'google.com', True, InvalidSessionError),
('joe', 'urn:mace:incommon:washington.edu', True, InvalidSessionError),
('[email protected]', 'urn:mace:incommon:washington.edu', False,
LoginNotPerson)])
def test_get_authenticated_uwnetid_error(
remote_user, saml_idp, is_person, expected_error, monkeypatch):
monkeypatch.setattr('idbase.middleware.is_personal_netid',
lambda **args: is_person)
with raises(expected_error):
get_authenticated_uwnetid(remote_user=remote_user, saml_idp=saml_idp)
@fixture(autouse=True)
def mock_localized_datetime(monkeypatch):
monkeypatch.setattr('idbase.middleware.localized_datetime_string_now',
lambda: 'just now')
@fixture(autouse=True)
def mock_date_diff(monkeypatch):
monkeypatch.setattr('idbase.middleware.datetime_diff_seconds',
lambda x: (10
if x in ('just now', 'just then')
else (20*60) + 1))
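# The two fixtures above pin the middleware's notion of time: "now" is always the string
# 'just now', and diffs against 'just now'/'just then' are 10 seconds (a recent update),
# while anything else diffs to 20*60 + 1 seconds, i.e. just past what is presumably the
# middleware's default 20-minute timeout.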
def test_session_timeout_middleware_init(settings):
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = False
with raises(ImproperlyConfigured):
SessionTimeoutMiddleware()
def test_session_timeout_middleware_process_request_active(req):
SessionTimeoutMiddleware().process_request(req)
assert req.session['active'] is True
def test_session_timeout_process_request_expired(req):
req.session['_session_timeout_last_update'] = 'a while back'
SessionTimeoutMiddleware().process_request(req)
assert 'active' not in req.session
def test_session_timeout_active_session(req):
req.session['_session_timeout_last_update'] = 'just then'
SessionTimeoutMiddleware().process_request(req)
assert req.session['active'] is True
def test_session_timeout_default(req, settings):
req.session['_session_timeout_last_update'] = 'just then'
settings.SESSION_TIMEOUT_DEFAULT_SECONDS = 10
SessionTimeoutMiddleware().process_request(req)
assert req.session['active'] is True
settings.SESSION_TIMEOUT_DEFAULT_SECONDS = 9
SessionTimeoutMiddleware().process_request(req)
assert 'active' not in req.session
def test_session_process_response_modified(req):
req.session['updated'] = True
assert SessionTimeoutMiddleware().process_response(req, 'blah') == 'blah'
assert req.session['_session_timeout_last_update'] == 'just now'
def test_sesssion_process_response_unmodified(req):
assert SessionTimeoutMiddleware().process_response(req, 'blah') == 'blah'
assert '_session_timeout_last_update' not in req.session
| jeffFranklin/iam-idbase | tests/test_middleware.py | Python | apache-2.0 | 5,568 |
#!/usr/bin/env python3
from collections import namedtuple
from selfdrive.car.chrysler.values import CAR as CHRYSLER
from selfdrive.car.gm.values import CAR as GM
from selfdrive.car.honda.values import CAR as HONDA
from selfdrive.car.hyundai.values import CAR as HYUNDAI
from selfdrive.car.nissan.values import CAR as NISSAN
from selfdrive.car.mazda.values import CAR as MAZDA
from selfdrive.car.subaru.values import CAR as SUBARU
from selfdrive.car.toyota.values import CAR as TOYOTA
from selfdrive.car.volkswagen.values import CAR as VOLKSWAGEN
from selfdrive.car.tesla.values import CAR as TESLA
# TODO: add routes for these cars
non_tested_cars = [
GM.CADILLAC_ATS,
GM.HOLDEN_ASTRA,
GM.MALIBU,
HYUNDAI.ELANTRA_GT_I30,
HYUNDAI.GENESIS_G90,
HYUNDAI.KIA_OPTIMA_H,
]
TestRoute = namedtuple('TestRoute', ['route', 'car_fingerprint'])
routes = [
TestRoute("0c94aa1e1296d7c6|2021-05-05--19-48-37", CHRYSLER.JEEP_CHEROKEE),
TestRoute("91dfedae61d7bd75|2021-05-22--20-07-52", CHRYSLER.JEEP_CHEROKEE_2019),
TestRoute("420a8e183f1aed48|2020-03-05--07-15-29", CHRYSLER.PACIFICA_2017_HYBRID),
TestRoute("43a685a66291579b|2021-05-27--19-47-29", CHRYSLER.PACIFICA_2018),
TestRoute("378472f830ee7395|2021-05-28--07-38-43", CHRYSLER.PACIFICA_2018_HYBRID),
TestRoute("8190c7275a24557b|2020-01-29--08-33-58", CHRYSLER.PACIFICA_2019_HYBRID),
TestRoute("3d84727705fecd04|2021-05-25--08-38-56", CHRYSLER.PACIFICA_2020),
#TestRoute("f1b4c567731f4a1b|2018-04-30--10-15-35", FORD.FUSION),
TestRoute("7cc2a8365b4dd8a9|2018-12-02--12-10-44", GM.ACADIA),
TestRoute("aa20e335f61ba898|2019-02-05--16-59-04", GM.BUICK_REGAL),
TestRoute("46460f0da08e621e|2021-10-26--07-21-46", GM.ESCALADE_ESV),
TestRoute("c950e28c26b5b168|2018-05-30--22-03-41", GM.VOLT),
TestRoute("0e7a2ba168465df5|2020-10-18--14-14-22", HONDA.ACURA_RDX_3G),
TestRoute("a74b011b32b51b56|2020-07-26--17-09-36", HONDA.CIVIC),
TestRoute("a859a044a447c2b0|2020-03-03--18-42-45", HONDA.CRV_EU),
TestRoute("68aac44ad69f838e|2021-05-18--20-40-52", HONDA.CRV),
TestRoute("14fed2e5fa0aa1a5|2021-05-25--14-59-42", HONDA.CRV_HYBRID),
TestRoute("52f3e9ae60c0d886|2021-05-23--15-59-43", HONDA.FIT),
TestRoute("2c4292a5cd10536c|2021-08-19--21-32-15", HONDA.FREED),
TestRoute("03be5f2fd5c508d1|2020-04-19--18-44-15", HONDA.HRV),
TestRoute("917b074700869333|2021-05-24--20-40-20", HONDA.ACURA_ILX),
TestRoute("81722949a62ea724|2019-04-06--15-19-25", HONDA.ODYSSEY_CHN),
TestRoute("08a3deb07573f157|2020-03-06--16-11-19", HONDA.ACCORD), # 1.5T
TestRoute("1da5847ac2488106|2021-05-24--19-31-50", HONDA.ACCORD), # 2.0T
TestRoute("085ac1d942c35910|2021-03-25--20-11-15", HONDA.ACCORD), # 2021 with new style HUD msgs
TestRoute("07585b0da3c88459|2021-05-26--18-52-04", HONDA.ACCORDH),
TestRoute("f29e2b57a55e7ad5|2021-03-24--20-52-38", HONDA.ACCORDH), # 2021 with new style HUD msgs
TestRoute("1ad763dd22ef1a0e|2020-02-29--18-37-03", HONDA.CRV_5G),
TestRoute("0a96f86fcfe35964|2020-02-05--07-25-51", HONDA.ODYSSEY),
TestRoute("d83f36766f8012a5|2020-02-05--18-42-21", HONDA.CIVIC_BOSCH_DIESEL),
TestRoute("f0890d16a07a236b|2021-05-25--17-27-22", HONDA.INSIGHT),
TestRoute("07d37d27996096b6|2020-03-04--21-57-27", HONDA.PILOT),
TestRoute("684e8f96bd491a0e|2021-11-03--11-08-42", HONDA.PASSPORT),
TestRoute("0a78dfbacc8504ef|2020-03-04--13-29-55", HONDA.CIVIC_BOSCH),
TestRoute("f34a60d68d83b1e5|2020-10-06--14-35-55", HONDA.ACURA_RDX),
TestRoute("54fd8451b3974762|2021-04-01--14-50-10", HONDA.RIDGELINE),
TestRoute("2d5808fae0b38ac6|2021-09-01--17-14-11", HONDA.HONDA_E),
TestRoute("6fe86b4e410e4c37|2020-07-22--16-27-13", HYUNDAI.HYUNDAI_GENESIS),
TestRoute("70c5bec28ec8e345|2020-08-08--12-22-23", HYUNDAI.GENESIS_G70),
TestRoute("6b301bf83f10aa90|2020-11-22--16-45-07", HYUNDAI.GENESIS_G80),
TestRoute("4dbd55df87507948|2022-03-01--09-45-38", HYUNDAI.SANTA_FE),
TestRoute("bf43d9df2b660eb0|2021-09-23--14-16-37", HYUNDAI.SANTA_FE_2022),
TestRoute("37398f32561a23ad|2021-11-18--00-11-35", HYUNDAI.SANTA_FE_HEV_2022),
TestRoute("656ac0d830792fcc|2021-12-28--14-45-56", HYUNDAI.SANTA_FE_PHEV_2022),
TestRoute("e0e98335f3ebc58f|2021-03-07--16-38-29", HYUNDAI.KIA_CEED),
TestRoute("7653b2bce7bcfdaa|2020-03-04--15-34-32", HYUNDAI.KIA_OPTIMA),
TestRoute("c75a59efa0ecd502|2021-03-11--20-52-55", HYUNDAI.KIA_SELTOS),
TestRoute("5b7c365c50084530|2020-04-15--16-13-24", HYUNDAI.SONATA),
TestRoute("b2a38c712dcf90bd|2020-05-18--18-12-48", HYUNDAI.SONATA_LF),
TestRoute("5875672fc1d4bf57|2020-07-23--21-33-28", HYUNDAI.KIA_SORENTO),
TestRoute("9c917ba0d42ffe78|2020-04-17--12-43-19", HYUNDAI.PALISADE),
TestRoute("fa8db5869167f821|2021-06-10--22-50-10", HYUNDAI.IONIQ_PHEV),
TestRoute("2c5cf2dd6102e5da|2020-12-17--16-06-44", HYUNDAI.IONIQ_EV_2020),
TestRoute("610ebb9faaad6b43|2020-06-13--15-28-36", HYUNDAI.IONIQ_EV_LTD),
TestRoute("2c5cf2dd6102e5da|2020-06-26--16-00-08", HYUNDAI.IONIQ),
TestRoute("ab59fe909f626921|2021-10-18--18-34-28", HYUNDAI.IONIQ_HEV_2022),
TestRoute("22d955b2cd499c22|2020-08-10--19-58-21", HYUNDAI.KONA),
TestRoute("efc48acf44b1e64d|2021-05-28--21-05-04", HYUNDAI.KONA_EV),
TestRoute("49f3c13141b6bc87|2021-07-28--08-05-13", HYUNDAI.KONA_HEV),
TestRoute("5dddcbca6eb66c62|2020-07-26--13-24-19", HYUNDAI.KIA_STINGER),
TestRoute("d624b3d19adce635|2020-08-01--14-59-12", HYUNDAI.VELOSTER),
TestRoute("007d5e4ad9f86d13|2021-09-30--15-09-23", HYUNDAI.KIA_K5_2021),
TestRoute("50c6c9b85fd1ff03|2020-10-26--17-56-06", HYUNDAI.KIA_NIRO_EV),
TestRoute("173219cf50acdd7b|2021-07-05--10-27-41", HYUNDAI.KIA_NIRO_HEV),
TestRoute("34a875f29f69841a|2021-07-29--13-02-09", HYUNDAI.KIA_NIRO_HEV_2021),
TestRoute("50a2212c41f65c7b|2021-05-24--16-22-06", HYUNDAI.KIA_FORTE),
TestRoute("c5ac319aa9583f83|2021-06-01--18-18-31", HYUNDAI.ELANTRA),
TestRoute("82e9cdd3f43bf83e|2021-05-15--02-42-51", HYUNDAI.ELANTRA_2021),
TestRoute("715ac05b594e9c59|2021-06-20--16-21-07", HYUNDAI.ELANTRA_HEV_2021),
TestRoute("7120aa90bbc3add7|2021-08-02--07-12-31", HYUNDAI.SONATA_HYBRID),
TestRoute("715ac05b594e9c59|2021-10-27--23-24-56", HYUNDAI.GENESIS_G70_2020),
TestRoute("00c829b1b7613dea|2021-06-24--09-10-10", TOYOTA.ALPHARD_TSS2),
TestRoute("000cf3730200c71c|2021-05-24--10-42-05", TOYOTA.AVALON),
TestRoute("0bb588106852abb7|2021-05-26--12-22-01", TOYOTA.AVALON_2019),
TestRoute("87bef2930af86592|2021-05-30--09-40-54", TOYOTA.AVALONH_2019),
TestRoute("e9966711cfb04ce3|2022-01-11--07-59-43", TOYOTA.AVALON_TSS2),
TestRoute("6cdecc4728d4af37|2020-02-23--15-44-18", TOYOTA.CAMRY),
TestRoute("3456ad0cd7281b24|2020-12-13--17-45-56", TOYOTA.CAMRY_TSS2),
TestRoute("ffccc77938ddbc44|2021-01-04--16-55-41", TOYOTA.CAMRYH_TSS2),
TestRoute("54034823d30962f5|2021-05-24--06-37-34", TOYOTA.CAMRYH),
TestRoute("4e45c89c38e8ec4d|2021-05-02--02-49-28", TOYOTA.COROLLA),
TestRoute("5f5afb36036506e4|2019-05-14--02-09-54", TOYOTA.COROLLA_TSS2),
TestRoute("5ceff72287a5c86c|2019-10-19--10-59-02", TOYOTA.COROLLAH_TSS2),
TestRoute("d2525c22173da58b|2021-04-25--16-47-04", TOYOTA.PRIUS),
TestRoute("b14c5b4742e6fc85|2020-07-28--19-50-11", TOYOTA.RAV4),
TestRoute("32a7df20486b0f70|2020-02-06--16-06-50", TOYOTA.RAV4H),
TestRoute("cdf2f7de565d40ae|2019-04-25--03-53-41", TOYOTA.RAV4_TSS2),
TestRoute("7e34a988419b5307|2019-12-18--19-13-30", TOYOTA.RAV4H_TSS2),
TestRoute("e6a24be49a6cd46e|2019-10-29--10-52-42", TOYOTA.LEXUS_ES_TSS2),
TestRoute("25057fa6a5a63dfb|2020-03-04--08-44-23", TOYOTA.LEXUS_CTH),
TestRoute("f49e8041283f2939|2019-05-30--11-51-51", TOYOTA.LEXUS_ESH_TSS2),
TestRoute("37041c500fd30100|2020-12-30--12-17-24", TOYOTA.LEXUS_ESH),
TestRoute("32696cea52831b02|2021-11-19--18-13-30", TOYOTA.LEXUS_RC),
TestRoute("886fcd8408d570e9|2020-01-29--05-11-22", TOYOTA.LEXUS_RX),
TestRoute("886fcd8408d570e9|2020-01-29--02-18-55", TOYOTA.LEXUS_RX),
TestRoute("d27ad752e9b08d4f|2021-05-26--19-39-51", TOYOTA.LEXUS_RXH),
TestRoute("01b22eb2ed121565|2020-02-02--11-25-51", TOYOTA.LEXUS_RX_TSS2),
TestRoute("b74758c690a49668|2020-05-20--15-58-57", TOYOTA.LEXUS_RXH_TSS2),
TestRoute("ec429c0f37564e3c|2020-02-01--17-28-12", TOYOTA.LEXUS_NXH),
TestRoute("964c09eb11ca8089|2020-11-03--22-04-00", TOYOTA.LEXUS_NX),
TestRoute("3fd5305f8b6ca765|2021-04-28--19-26-49", TOYOTA.LEXUS_NX_TSS2),
TestRoute("0a302ffddbb3e3d3|2020-02-08--16-19-08", TOYOTA.HIGHLANDER_TSS2),
TestRoute("437e4d2402abf524|2021-05-25--07-58-50", TOYOTA.HIGHLANDERH_TSS2),
TestRoute("3183cd9b021e89ce|2021-05-25--10-34-44", TOYOTA.HIGHLANDER),
TestRoute("80d16a262e33d57f|2021-05-23--20-01-43", TOYOTA.HIGHLANDERH),
TestRoute("eb6acd681135480d|2019-06-20--20-00-00", TOYOTA.SIENNA),
TestRoute("2e07163a1ba9a780|2019-08-25--13-15-13", TOYOTA.LEXUS_IS),
TestRoute("0a0de17a1e6a2d15|2020-09-21--21-24-41", TOYOTA.PRIUS_TSS2),
TestRoute("9b36accae406390e|2021-03-30--10-41-38", TOYOTA.MIRAI),
TestRoute("cd9cff4b0b26c435|2021-05-13--15-12-39", TOYOTA.CHR),
TestRoute("57858ede0369a261|2021-05-18--20-34-20", TOYOTA.CHRH),
TestRoute("14623aae37e549f3|2021-10-24--01-20-49", TOYOTA.PRIUS_V),
TestRoute("202c40641158a6e5|2021-09-21--09-43-24", VOLKSWAGEN.ARTEON_MK1),
TestRoute("2c68dda277d887ac|2021-05-11--15-22-20", VOLKSWAGEN.ATLAS_MK1),
TestRoute("cae14e88932eb364|2021-03-26--14-43-28", VOLKSWAGEN.GOLF_MK7),
TestRoute("58a7d3b707987d65|2021-03-25--17-26-37", VOLKSWAGEN.JETTA_MK7),
TestRoute("4d134e099430fba2|2021-03-26--00-26-06", VOLKSWAGEN.PASSAT_MK8),
TestRoute("0cd0b7f7e31a3853|2021-11-03--19-30-22", VOLKSWAGEN.POLO_MK6),
TestRoute("7d82b2f3a9115f1f|2021-10-21--15-39-42", VOLKSWAGEN.TAOS_MK1),
TestRoute("2744c89a8dda9a51|2021-07-24--21-28-06", VOLKSWAGEN.TCROSS_MK1),
TestRoute("2cef8a0b898f331a|2021-03-25--20-13-57", VOLKSWAGEN.TIGUAN_MK2),
TestRoute("a589dcc642fdb10a|2021-06-14--20-54-26", VOLKSWAGEN.TOURAN_MK2),
TestRoute("a459f4556782eba1|2021-09-19--09-48-00", VOLKSWAGEN.TRANSPORTER_T61),
TestRoute("0cd0b7f7e31a3853|2021-11-18--00-38-32", VOLKSWAGEN.TROC_MK1),
TestRoute("07667b885add75fd|2021-01-23--19-48-42", VOLKSWAGEN.AUDI_A3_MK3),
TestRoute("6c6b466346192818|2021-06-06--14-17-47", VOLKSWAGEN.AUDI_Q2_MK1),
TestRoute("0cd0b7f7e31a3853|2021-12-03--03-12-05", VOLKSWAGEN.AUDI_Q3_MK2),
TestRoute("8f205bdd11bcbb65|2021-03-26--01-00-17", VOLKSWAGEN.SEAT_ATECA_MK1),
TestRoute("fc6b6c9a3471c846|2021-05-27--13-39-56", VOLKSWAGEN.SEAT_LEON_MK3),
TestRoute("12d6ae3057c04b0d|2021-09-15--00-04-07", VOLKSWAGEN.SKODA_KAMIQ_MK1),
TestRoute("12d6ae3057c04b0d|2021-09-04--21-21-21", VOLKSWAGEN.SKODA_KAROQ_MK1),
TestRoute("90434ff5d7c8d603|2021-03-15--12-07-31", VOLKSWAGEN.SKODA_KODIAQ_MK1),
TestRoute("66e5edc3a16459c5|2021-05-25--19-00-29", VOLKSWAGEN.SKODA_OCTAVIA_MK3),
TestRoute("026b6d18fba6417f|2021-03-26--09-17-04", VOLKSWAGEN.SKODA_SCALA_MK1),
TestRoute("b2e9858e29db492b|2021-03-26--16-58-42", VOLKSWAGEN.SKODA_SUPERB_MK3),
TestRoute("3c8f0c502e119c1c|2020-06-30--12-58-02", SUBARU.ASCENT),
TestRoute("c321c6b697c5a5ff|2020-06-23--11-04-33", SUBARU.FORESTER),
TestRoute("791340bc01ed993d|2019-03-10--16-28-08", SUBARU.IMPREZA),
TestRoute("8bf7e79a3ce64055|2021-05-24--09-36-27", SUBARU.IMPREZA_2020),
# Dashcam
TestRoute("95441c38ae8c130e|2020-06-08--12-10-17", SUBARU.FORESTER_PREGLOBAL),
# Dashcam
TestRoute("df5ca7660000fba8|2020-06-16--17-37-19", SUBARU.LEGACY_PREGLOBAL),
# Dashcam
TestRoute("5ab784f361e19b78|2020-06-08--16-30-41", SUBARU.OUTBACK_PREGLOBAL),
# Dashcam
TestRoute("e19eb5d5353b1ac1|2020-08-09--14-37-56", SUBARU.OUTBACK_PREGLOBAL_2018),
TestRoute("fbbfa6af821552b9|2020-03-03--08-09-43", NISSAN.XTRAIL),
TestRoute("5b7c365c50084530|2020-03-25--22-10-13", NISSAN.LEAF),
TestRoute("22c3dcce2dd627eb|2020-12-30--16-38-48", NISSAN.LEAF_IC),
TestRoute("059ab9162e23198e|2020-05-30--09-41-01", NISSAN.ROGUE),
TestRoute("b72d3ec617c0a90f|2020-12-11--15-38-17", NISSAN.ALTIMA),
TestRoute("32a319f057902bb3|2020-04-27--15-18-58", MAZDA.CX5),
TestRoute("10b5a4b380434151|2020-08-26--17-11-45", MAZDA.CX9),
TestRoute("74f1038827005090|2020-08-26--20-05-50", MAZDA.MAZDA3),
TestRoute("fb53c640f499b73d|2021-06-01--04-17-56", MAZDA.MAZDA6),
TestRoute("f6d5b1a9d7a1c92e|2021-07-08--06-56-59", MAZDA.CX9_2021),
TestRoute("a4af1602d8e668ac|2022-02-03--12-17-07", MAZDA.CX5_2022),
TestRoute("6c14ee12b74823ce|2021-06-30--11-49-02", TESLA.AP1_MODELS),
TestRoute("bb50caf5f0945ab1|2021-06-19--17-20-18", TESLA.AP2_MODELS),
]
| commaai/openpilot | selfdrive/test/test_routes.py | Python | mit | 12,465 |
""".. Ignore pydocstyle D400.
================================
Knowledge Base App Configuration
================================
"""
from django.apps import AppConfig
class KnowledgeBaseConfig(AppConfig):
"""App configuration."""
name = "resolwe_bio.kb"
label = "resolwe_bio_kb"
verbose_name = "Resolwe Bioinformatics Knowledge Base"
def ready(self):
"""Perform application initialization."""
from resolwe_bio.kb.listener_plugin import ( # noqa: F401
ExposeFeature,
ExposeMapping,
)
| genialis/resolwe-bio | resolwe_bio/kb/apps.py | Python | apache-2.0 | 558 |
"""
A simple command-line interface for Mininet.
The Mininet CLI provides a simple control console which
makes it easy to talk to nodes. For example, the command
mininet> h27 ifconfig
runs 'ifconfig' on host h27.
Having a single console rather than, for example, an xterm for each
node is particularly convenient for networks of any reasonable
size.
The CLI automatically substitutes IP addresses for node names,
so commands like
mininet> h2 ping h3
should work correctly and allow host h2 to ping host h3
Several useful commands are provided, including the ability to
list all nodes ('nodes'), to print out the network topology
('net') and to check connectivity ('pingall', 'pingpair')
and bandwidth ('iperf'.)
"""
from subprocess import call
from cmd import Cmd
from os import isatty
from select import poll, POLLIN
import sys
import time
import os
import atexit
from mininet.log import info, output, error
from mininet.term import makeTerms, runX11
from mininet.util import ( quietRun, dumpNodeConnections,
dumpPorts )
class CLI( Cmd ):
"Simple command-line interface to talk to nodes."
prompt = 'mininet> '
def __init__( self, mininet, stdin=sys.stdin, script=None ):
"""Start and run interactive or batch mode CLI
mininet: Mininet network object
stdin: standard input for CLI
script: script to run in batch mode"""
self.mn = mininet
# Local variable bindings for py command
self.locals = { 'net': mininet }
# Attempt to handle input
self.stdin = stdin
self.inPoller = poll()
self.inPoller.register( stdin )
self.inputFile = script
Cmd.__init__( self )
info( '*** Starting CLI:\n' )
if self.inputFile:
self.do_source( self.inputFile )
return
self.initReadline()
self.run()
readlineInited = False
@classmethod
def initReadline( cls ):
"Set up history if readline is available"
# Only set up readline once to prevent multiplying the history file
if cls.readlineInited:
return
cls.readlineInited = True
try:
from readline import read_history_file, write_history_file
except ImportError:
pass
else:
history_path = os.path.expanduser( '~/.mininet_history' )
if os.path.isfile( history_path ):
read_history_file( history_path )
atexit.register( lambda: write_history_file( history_path ) )
def run( self ):
"Run our cmdloop(), catching KeyboardInterrupt"
while True:
try:
# Make sure no nodes are still waiting
for node in self.mn.values():
while node.waiting:
info( 'stopping', node, '\n' )
node.sendInt()
node.waitOutput()
if self.isatty():
quietRun( 'stty echo sane intr ^C' )
self.cmdloop()
break
except KeyboardInterrupt:
# Output a message - unless it's also interrupted
# pylint: disable=broad-except
try:
output( '\nInterrupt\n' )
except Exception:
pass
# pylint: enable=broad-except
def emptyline( self ):
"Don't repeat last command when you hit return."
pass
def getLocals( self ):
"Local variable bindings for py command"
self.locals.update( self.mn )
return self.locals
helpStr = (
'You may also send a command to a node using:\n'
' <node> command {args}\n'
'For example:\n'
' mininet> h1 ifconfig\n'
'\n'
'The interpreter automatically substitutes IP addresses\n'
'for node names when a node is the first arg, so commands\n'
'like\n'
' mininet> h2 ping h3\n'
'should work.\n'
'\n'
'Some character-oriented interactive commands require\n'
'noecho:\n'
' mininet> noecho h2 vi foo.py\n'
'However, starting up an xterm/gterm is generally better:\n'
' mininet> xterm h2\n\n'
)
def do_help( self, line ):
"Describe available CLI commands."
Cmd.do_help( self, line )
        if line == '':
output( self.helpStr )
def do_nodes( self, _line ):
"List all nodes."
nodes = ' '.join( sorted( self.mn ) )
output( 'available nodes are: \n%s\n' % nodes )
def do_ports( self, _line ):
"display ports and interfaces for each switch"
dumpPorts( self.mn.switches )
def do_net( self, _line ):
"List network connections."
dumpNodeConnections( self.mn.values() )
def do_sh( self, line ):
"""Run an external shell command
Usage: sh [cmd args]"""
assert self # satisfy pylint and allow override
call( line, shell=True )
# do_py() and do_px() need to catch any exception during eval()/exec()
# pylint: disable=broad-except
def do_py( self, line ):
"""Evaluate a Python expression.
Node names may be used, e.g.: py h1.cmd('ls')"""
try:
result = eval( line, globals(), self.getLocals() )
if not result:
return
elif isinstance( result, str ):
output( result + '\n' )
else:
output( repr( result ) + '\n' )
except Exception, e:
output( str( e ) + '\n' )
# We are in fact using the exec() pseudo-function
# pylint: disable=exec-used
def do_px( self, line ):
"""Execute a Python statement.
Node names may be used, e.g.: px print h1.cmd('ls')"""
try:
exec( line, globals(), self.getLocals() )
except Exception, e:
output( str( e ) + '\n' )
# pylint: enable=broad-except,exec-used
def do_pingall( self, line ):
"Ping between all hosts."
self.mn.pingAll( line )
def do_pingpair( self, _line ):
"Ping between first two hosts, useful for testing."
self.mn.pingPair()
def do_pingallfull( self, _line ):
"Ping between all hosts, returns all ping results."
self.mn.pingAllFull()
def do_pingpairfull( self, _line ):
"Ping between first two hosts, returns all ping results."
self.mn.pingPairFull()
def do_iperf( self, line ):
"""Simple iperf TCP test between two (optionally specified) hosts.
Usage: iperf node1 node2"""
args = line.split()
if not args:
self.mn.iperf()
elif len(args) == 2:
hosts = []
err = False
for arg in args:
if arg not in self.mn:
err = True
error( "node '%s' not in network\n" % arg )
else:
hosts.append( self.mn[ arg ] )
if not err:
self.mn.iperf( hosts )
else:
error( 'invalid number of args: iperf src dst\n' )
def do_iperfudp( self, line ):
"""Simple iperf UDP test between two (optionally specified) hosts.
Usage: iperfudp bw node1 node2"""
args = line.split()
if not args:
self.mn.iperf( l4Type='UDP' )
elif len(args) == 3:
udpBw = args[ 0 ]
hosts = []
err = False
for arg in args[ 1:3 ]:
if arg not in self.mn:
err = True
error( "node '%s' not in network\n" % arg )
else:
hosts.append( self.mn[ arg ] )
if not err:
self.mn.iperf( hosts, l4Type='UDP', udpBw=udpBw )
else:
error( 'invalid number of args: iperfudp bw src dst\n' +
'bw examples: 10M\n' )
def do_intfs( self, _line ):
"List interfaces."
for node in self.mn.values():
output( '%s: %s\n' %
( node.name, ','.join( node.intfNames() ) ) )
def do_dump( self, _line ):
"Dump node info."
for node in self.mn.values():
output( '%s\n' % repr( node ) )
def do_link( self, line ):
"""Bring link(s) between two nodes up or down.
Usage: link node1 node2 [up/down]"""
args = line.split()
if len(args) != 3:
error( 'invalid number of args: link end1 end2 [up down]\n' )
elif args[ 2 ] not in [ 'up', 'down' ]:
error( 'invalid type: link end1 end2 [up down]\n' )
else:
self.mn.configLinkStatus( *args )
def do_xterm( self, line, term='xterm' ):
"""Spawn xterm(s) for the given node(s).
Usage: xterm node1 node2 ..."""
args = line.split()
if not args:
error( 'usage: %s node1 node2 ...\n' % term )
else:
for arg in args:
if arg not in self.mn:
error( "node '%s' not in network\n" % arg )
else:
node = self.mn[ arg ]
self.mn.terms += makeTerms( [ node ], term = term )
def do_x( self, line ):
"""Create an X11 tunnel to the given node,
optionally starting a client.
Usage: x node [cmd args]"""
args = line.split()
if not args:
error( 'usage: x node [cmd args]...\n' )
else:
node = self.mn[ args[ 0 ] ]
cmd = args[ 1: ]
self.mn.terms += runX11( node, cmd )
def do_gterm( self, line ):
"""Spawn gnome-terminal(s) for the given node(s).
Usage: gterm node1 node2 ..."""
self.do_xterm( line, term='gterm' )
def do_exit( self, _line ):
"Exit"
assert self # satisfy pylint and allow override
return 'exited by user command'
def do_quit( self, line ):
"Exit"
return self.do_exit( line )
def do_EOF( self, line ):
"Exit"
output( '\n' )
return self.do_exit( line )
def isatty( self ):
"Is our standard input a tty?"
return isatty( self.stdin.fileno() )
def do_noecho( self, line ):
"""Run an interactive command with echoing turned off.
Usage: noecho [cmd args]"""
if self.isatty():
quietRun( 'stty -echo' )
self.default( line )
if self.isatty():
quietRun( 'stty echo' )
def do_source( self, line ):
"""Read commands from an input file.
Usage: source <file>"""
args = line.split()
if len(args) != 1:
error( 'usage: source <file>\n' )
return
try:
self.inputFile = open( args[ 0 ] )
while True:
line = self.inputFile.readline()
if len( line ) > 0:
self.onecmd( line )
else:
break
except IOError:
error( 'error reading file %s\n' % args[ 0 ] )
self.inputFile.close()
self.inputFile = None
def do_dpctl( self, line ):
"""Run dpctl (or ovs-ofctl) command on all switches.
Usage: dpctl command [arg1] [arg2] ..."""
args = line.split()
if len(args) < 1:
error( 'usage: dpctl command [arg1] [arg2] ...\n' )
return
for sw in self.mn.switches:
output( '*** ' + sw.name + ' ' + ('-' * 72) + '\n' )
output( sw.dpctl( *args ) )
def do_time( self, line ):
"Measure time taken for any command in Mininet."
start = time.time()
self.onecmd(line)
elapsed = time.time() - start
self.stdout.write("*** Elapsed time: %0.6f secs\n" % elapsed)
def do_links( self, _line ):
"Report on links"
for link in self.mn.links:
print link, link.status()
def do_switch( self, line ):
"Starts or stops a switch"
args = line.split()
if len(args) != 2:
error( 'invalid number of args: switch <switch name>'
'{start, stop}\n' )
return
sw = args[ 0 ]
command = args[ 1 ]
if sw not in self.mn or self.mn.get( sw ) not in self.mn.switches:
error( 'invalid switch: %s\n' % args[ 1 ] )
else:
sw = args[ 0 ]
command = args[ 1 ]
if command == 'start':
self.mn.get( sw ).start( self.mn.controllers )
elif command == 'stop':
self.mn.get( sw ).stop( deleteIntfs=False )
else:
error( 'invalid command: '
'switch <switch name> {start, stop}\n' )
def default( self, line ):
"""Called on an input line when the command prefix is not recognized.
Overridden to run shell commands when a node is the first CLI argument.
Past the first CLI argument, node names are automatically replaced with
corresponding IP addrs."""
first, args, line = self.parseline( line )
if first in self.mn:
if not args:
print "*** Enter a command for node: %s <cmd>" % first
return
node = self.mn[ first ]
rest = args.split( ' ' )
# Substitute IP addresses for node names in command
# If updateIP() returns None, then use node name
rest = [ self.mn[ arg ].defaultIntf().updateIP() or arg
if arg in self.mn else arg
for arg in rest ]
rest = ' '.join( rest )
# Run cmd on node:
node.sendCmd( rest )
self.waitForNode( node )
else:
error( '*** Unknown command: %s\n' % line )
def waitForNode( self, node ):
"Wait for a node to finish, and print its output."
# Pollers
nodePoller = poll()
nodePoller.register( node.stdout )
bothPoller = poll()
bothPoller.register( self.stdin, POLLIN )
bothPoller.register( node.stdout, POLLIN )
if self.isatty():
# Buffer by character, so that interactive
# commands sort of work
quietRun( 'stty -icanon min 1' )
while True:
try:
bothPoller.poll()
# XXX BL: this doesn't quite do what we want.
if False and self.inputFile:
key = self.inputFile.read( 1 )
                    if key != '':
node.write( key )
else:
self.inputFile = None
if isReadable( self.inPoller ):
key = self.stdin.read( 1 )
node.write( key )
if isReadable( nodePoller ):
data = node.monitor()
output( data )
if not node.waiting:
break
except KeyboardInterrupt:
# There is an at least one race condition here, since
# it's possible to interrupt ourselves after we've
# read data but before it has been printed.
node.sendInt()
def precmd( self, line ):
"allow for comments in the cli"
if '#' in line:
line = line.split( '#' )[ 0 ]
return line
# Helper functions
def isReadable( poller ):
"Check whether a Poll object has a readable fd."
for fdmask in poller.poll( 0 ):
mask = fdmask[ 1 ]
if mask & POLLIN:
return True
| ool2016-seclab/quarantineSystem | mininet/cli.py | Python | mit | 15,798 |
"""
FacetGrid with custom projection
================================
_thumb: .33, .5
"""
import numpy as np
import pandas as pd
import seaborn as sns
sns.set()
# Generate an example radial dataset
r = np.linspace(0, 10, num=100)
df = pd.DataFrame({'r': r, 'slow': r, 'medium': 2 * r, 'fast': 4 * r})
# Convert the dataframe to long-form or "tidy" format
df = pd.melt(df, id_vars=['r'], var_name='speed', value_name='theta')
# Set up a grid of axes with a polar projection
g = sns.FacetGrid(df, col="speed", hue="speed",
subplot_kws=dict(projection='polar'), height=4.5,
sharex=False, sharey=False, despine=False)
# Draw a scatterplot onto each axes in the grid
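# (on the polar projection, the x values "theta" are angles in radians and the y values "r" are radii)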
g.map(sns.scatterplot, "theta", "r")
| phobson/seaborn | examples/facet_projections.py | Python | bsd-3-clause | 741 |
# Egill Sveinbjornsson
# Reykjavik University 2015-1
def parse_code(c):
tuples = {}
times = 1
idx = 0
for b in c.split('>'): # Split by nesting level
idx += 1
tuples[idx] = []
for t in b.split('+'):
if '*' not in t:
times = 1
tuples[idx] += [[t, times]]
else:
lis = t.split('*')
tuples[idx] += [[lis[0], int(lis[1])]]
times = int(lis[1])
return [(x, tuples[x]) for x in tuples]
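# For illustration: parse_code("table>tr*2>td*2") returns
# [(1, [['table', 1]]), (2, [['tr', 2]]), (3, [['td', 2]])]
# -- one entry per nesting level, each holding [tag, repeat-count] pairs.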
def zen_expand(c):
s = ''
blocks = parse_code(c)
# print(blocks)
for nesting_level in blocks[::-1]: # Start innermost and work outwards
tmp = ''
for i, t in enumerate(nesting_level[1]):
start_tag = '<' + t[0] + '>'
end_tag = '</' + t[0] + '>'
amount = t[1]
# Check here if its the last tag on this nesting level
if i == len(nesting_level[1])-1:
tmp += (start_tag + s + end_tag) * amount
else:
tmp += (start_tag + end_tag) * amount
s = tmp
# print('Nesting level: ' + str(nesting_level[0]), nesting_level[1])
# s = s + (tag * amount) + s
return s
# zen_expand("a+div+p*3>a")
# "<a></a><div></div><p></p><p></p><p></p>"
# zen_expand("dd")
# "<dd></dd>"
# zen_expand("table>tr*3>td*2")
# "<table><tr><td></td><td></td></tr><tr><td></td><td></td></tr><tr><td></td><td></td></tr></table>"
# zen_expand("div*3+p*4+span*2")
# "<div></div><div></div><div></div><p></p><p></p><p></p><p></p><span></span><span></span>"
# zen_expand("table>tr*2>td*2")
# zen_expand('html>head+body>div+div+p>ul>li*3>a')
# "<html><head></head><body><div></div><div></div><p><ul><li><a></a></li><li><a></a></li><li><a></a></li></ul></p></body></html>" | Kallehz/Python | Verkefni 2/ZenCoding.py | Python | apache-2.0 | 1,814 |
# Fantasy Game Inventory #1 practice problem from Automate the Boring Stuff - Chapter 5
# https://automatetheboringstuff.com/chapter5/
def displayInventory(inventory):
totalItems = 0
print('Inventory:')
for k in inventory.keys():
totalItems = totalItems + inventory[k]
print(str(inventory[k]) + ' ' + k)
print('Total number of items: ' + str(totalItems))
inventory = {'rope': 1, 'torch': 6, 'gold coin': 42, 'dagger': 1, 'arrow': 12}
displayInventory(inventory)
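# Expected output (item order may vary with the dictionary's key order):
# Inventory:
# 1 rope
# 6 torch
# 42 gold coin
# 1 dagger
# 12 arrow
# Total number of items: 62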
| anirudhvarma12/learning-python | chapter5/fantasyGameInventory.py | Python | mit | 470 |
import socket
import subprocess
from subprocess import call
import shlex
file_type_dict = {
'Cxx': ['.c', '.cpp', '.cc', '.h', '.hh', '.hpp'],
'Java': ['.java'] }
class YavideUtils():
@staticmethod
def file_type_to_programming_language(file_type):
for lang, file_types in file_type_dict.iteritems():
if file_type in file_types:
return lang
return ''
@staticmethod
def programming_language_to_extension(programming_language):
return file_type_dict.get(programming_language, '')
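    # Illustrative usage: file_type_to_programming_language('.cpp') returns 'Cxx', while
    # programming_language_to_extension('Java') returns the full extension list ['.java'].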
@staticmethod
def send_vim_remote_command(vim_instance, command):
cmd = 'gvim --servername ' + vim_instance + ' --remote-send "<ESC>' + command + '<CR>"'
return call(shlex.split(cmd))
@staticmethod
def call_vim_remote_function(vim_instance, function):
cmd = 'gvim --servername ' + vim_instance + ' --remote-expr "' + function + '"'
return call(shlex.split(cmd))
@staticmethod
def is_port_available(port):
s = socket.socket()
try:
s.bind(('localhost', port))
s.close()
return True
except socket.error, msg:
s.close()
return False
@staticmethod
def get_available_port(port_begin, port_end):
for port in range(port_begin, port_end):
if YavideUtils.is_port_available(port) == True:
return port
return -1
| JBakamovic/yavide | core/common/yavide_utils.py | Python | gpl-2.0 | 1,452 |
import unittest
import random
import time
from dreamland import LRUCache
class TestLRUCache(unittest.TestCase):
def setUp(self):
print '\n'
print '='*64
def print_node(self, node):
return "Node: %s \n\tkey: %s, value: %s, \n\tprev: %s, \n\tnext: %s" \
% (node, node.key, node.value, node.prev, node.next)
def print_link_list(self, head):
node = head
while node is not None:
print self.print_node(node)
node = node.next
def test_a_small_land(self):
stack = LRUCache(3)
data = [(1,1),(2,2),(3,3),(4,4)]
for d in data:
stack.touch(d[0],d[1])
self.print_link_list(stack.head)
def test_b_big_land(self):
start = time.time()
stack = LRUCache(100000)
for i in range(0,150000):
(a,b) = (random.randrange(0,100000), random.randrange(0,100000))
stack.touch((a,b), "Value")
print "Insert complete, time used: %s checking correstness:" % str(time.time() - start)
print "Mapping size: %s" % len(stack.mapping)
node = stack.head
count = 0
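        # Walk the list from the head: every node must still be present in the mapping and
        # each node's next.prev must point back to the node itself.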
while True:
count += 1
if stack.mapping.get(node.key) is None:
print "Inconsistant: %s" % key
if node.next is not None:
if node != node.next.prev:
print "Node pointer mismatch:\n\t%s\n\t%s" \
% (self.print_node(node), self.print_node(node.next))
node = node.next
else: break
print "Data check complete, count: %s" % count
def tearDown(self):
print '='*64 | ksang/dreamland | tests/lrucache.py | Python | mit | 1,686 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import fields,osv
from lxml import etree
from tools import graph
from tools.safe_eval import safe_eval as eval
import tools
import os
import logging
_logger = logging.getLogger(__name__)
class view_custom(osv.osv):
_name = 'ir.ui.view.custom'
_order = 'create_date desc' # search(limit=1) should return the last customization
_columns = {
'ref_id': fields.many2one('ir.ui.view', 'Original View', select=True, required=True, ondelete='cascade'),
'user_id': fields.many2one('res.users', 'User', select=True, required=True, ondelete='cascade'),
'arch': fields.text('View Architecture', required=True),
}
def _auto_init(self, cr, context=None):
super(view_custom, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_custom_user_id_ref_id\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_ui_view_custom_user_id_ref_id ON ir_ui_view_custom (user_id, ref_id)')
view_custom()
class view(osv.osv):
_name = 'ir.ui.view'
_columns = {
'name': fields.char('View Name',size=64, required=True),
'model': fields.char('Object', size=64, required=True, select=True),
'priority': fields.integer('Sequence', required=True),
'type': fields.selection((
('tree','Tree'),
('form','Form'),
('mdx','mdx'),
('graph', 'Graph'),
('calendar', 'Calendar'),
('diagram','Diagram'),
('gantt', 'Gantt'),
('kanban', 'Kanban'),
('search','Search')), 'View Type', required=True, select=True),
'arch': fields.text('View Architecture', required=True),
'inherit_id': fields.many2one('ir.ui.view', 'Inherited View', ondelete='cascade', select=True),
'field_parent': fields.char('Child Field',size=64),
'xml_id': fields.function(osv.osv.get_xml_id, type='char', size=128, string="External ID",
help="ID of the view defined in xml file"),
}
_defaults = {
'arch': '<?xml version="1.0"?>\n<tree string="My view">\n\t<field name="name"/>\n</tree>',
'priority': 16
}
_order = "priority,name"
def _check_xml(self, cr, uid, ids, context=None):
for view in self.browse(cr, uid, ids, context):
eview = etree.fromstring(view.arch.encode('utf8'))
frng = tools.file_open(os.path.join('base','rng','view.rng'))
try:
relaxng_doc = etree.parse(frng)
relaxng = etree.RelaxNG(relaxng_doc)
if not relaxng.validate(eview):
for error in relaxng.error_log:
_logger.error(tools.ustr(error))
return False
finally:
frng.close()
return True
_constraints = [
(_check_xml, 'Invalid XML for View Architecture!', ['arch'])
]
def _auto_init(self, cr, context=None):
super(view, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_model_type_inherit_id\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_ui_view_model_type_inherit_id ON ir_ui_view (model, type, inherit_id)')
def get_inheriting_views_arch(self, cr, uid, view_id, model, context=None):
"""Retrieves the architecture of views that inherit from the given view.
:param int view_id: id of the view whose inheriting views should be retrieved
:param str model: model identifier of the view's related model (for double-checking)
:rtype: list of tuples
:return: [(view_arch,view_id), ...]
"""
cr.execute("""SELECT arch, id FROM ir_ui_view WHERE inherit_id=%s AND model=%s
ORDER BY priority""",
(view_id, model))
return cr.fetchall()
def write(self, cr, uid, ids, vals, context=None):
if not isinstance(ids, (list, tuple)):
ids = [ids]
result = super(view, self).write(cr, uid, ids, vals, context)
# drop the corresponding view customizations (used for dashboards for example), otherwise
# not all users would see the updated views
custom_view_ids = self.pool.get('ir.ui.view.custom').search(cr, uid, [('ref_id','in',ids)])
if custom_view_ids:
self.pool.get('ir.ui.view.custom').unlink(cr, uid, custom_view_ids)
return result
def graph_get(self, cr, uid, id, model, node_obj, conn_obj, src_node, des_node, label, scale, context=None):
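        # Builds the data used by diagram views: node layout computed by tools.graph,
        # the transitions between nodes, their labels, and the nodes without any connection.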
nodes=[]
nodes_name=[]
transitions=[]
start=[]
tres={}
labels={}
no_ancester=[]
blank_nodes = []
_Model_Obj=self.pool.get(model)
_Node_Obj=self.pool.get(node_obj)
_Arrow_Obj=self.pool.get(conn_obj)
for model_key,model_value in _Model_Obj._columns.items():
if model_value._type=='one2many':
if model_value._obj==node_obj:
_Node_Field=model_key
_Model_Field=model_value._fields_id
flag=False
for node_key,node_value in _Node_Obj._columns.items():
if node_value._type=='one2many':
if node_value._obj==conn_obj:
if src_node in _Arrow_Obj._columns and flag:
_Source_Field=node_key
if des_node in _Arrow_Obj._columns and not flag:
_Destination_Field=node_key
flag = True
datas = _Model_Obj.read(cr, uid, id, [],context)
for a in _Node_Obj.read(cr,uid,datas[_Node_Field],[]):
if a[_Source_Field] or a[_Destination_Field]:
nodes_name.append((a['id'],a['name']))
nodes.append(a['id'])
else:
blank_nodes.append({'id': a['id'],'name':a['name']})
if a.has_key('flow_start') and a['flow_start']:
start.append(a['id'])
else:
if not a[_Source_Field]:
no_ancester.append(a['id'])
for t in _Arrow_Obj.read(cr,uid, a[_Destination_Field],[]):
if not t[des_node]:
continue
transitions.append((a['id'], t[des_node][0]))
tres[str(t['id'])] = (a['id'],t[des_node][0])
label_string = ""
if label:
for lbl in eval(label):
if t.has_key(tools.ustr(lbl)) and tools.ustr(t[lbl])=='False':
label_string = label_string + ' '
else:
label_string = label_string + " " + tools.ustr(t[lbl])
labels[str(t['id'])] = (a['id'],label_string)
g = graph(nodes, transitions, no_ancester)
g.process(start)
g.scale(*scale)
result = g.result_get()
results = {}
for node in nodes_name:
results[str(node[0])] = result[node[0]]
results[str(node[0])]['name'] = node[1]
return {'nodes': results,
'transitions': tres,
'label' : labels,
'blank_nodes': blank_nodes,
'node_parent_field': _Model_Field,}
view()
class view_sc(osv.osv):
_name = 'ir.ui.view_sc'
_columns = {
'name': fields.char('Shortcut Name', size=64), # Kept for backwards compatibility only - resource name used instead (translatable)
'res_id': fields.integer('Resource Ref.', help="Reference of the target resource, whose model/table depends on the 'Resource Name' field."),
'sequence': fields.integer('Sequence'),
'user_id': fields.many2one('res.users', 'User Ref.', required=True, ondelete='cascade', select=True),
'resource': fields.char('Resource Name', size=64, required=True, select=True)
}
def _auto_init(self, cr, context=None):
super(view_sc, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_ui_view_sc_user_id_resource\'')
if not cr.fetchone():
cr.execute('CREATE INDEX ir_ui_view_sc_user_id_resource ON ir_ui_view_sc (user_id, resource)')
def get_sc(self, cr, uid, user_id, model='ir.ui.menu', context=None):
ids = self.search(cr, uid, [('user_id','=',user_id),('resource','=',model)], context=context)
results = self.read(cr, uid, ids, ['res_id'], context=context)
name_map = dict(self.pool.get(model).name_get(cr, uid, [x['res_id'] for x in results], context=context))
        # Make sure to return only shortcuts pointing to existing menu items.
filtered_results = filter(lambda result: result['res_id'] in name_map, results)
for result in filtered_results:
result.update(name=name_map[result['res_id']])
return filtered_results
_order = 'sequence,name'
_defaults = {
'resource': lambda *a: 'ir.ui.menu',
'user_id': lambda obj, cr, uid, context: uid,
}
_sql_constraints = [
('shortcut_unique', 'unique(res_id, resource, user_id)', 'Shortcut for this menu already exists!'),
]
view_sc()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| enapps/enapps-openerp-server | openerp/addons/base/ir/ir_ui_view.py | Python | agpl-3.0 | 10,473 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the version string."""
VERSION = '1.8.0'
| ryfeus/lambda-packs | Keras_tensorflow_nightly/source2.7/tensorboard/version.py | Python | mit | 744 |
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
PYTHON_VERSION_COMPATIBILITY = "PY3"
DEPS = [
'recipe_engine/context',
'recipe_engine/step',
'run',
]
| youtube/cobalt | third_party/skia_next/third_party/skia/infra/bots/recipe_modules/doxygen/__init__.py | Python | bsd-3-clause | 272 |
from .models import ssm_backends
from ..core.models import base_decorator
mock_ssm = base_decorator(ssm_backends)
| spulec/moto | moto/ssm/__init__.py | Python | apache-2.0 | 115 |
from redis import Redis
from redis.lock import Lock
import logging
import base64
import os
from datetime import datetime
from cryptography.fernet import Fernet
from uuid import uuid4
TASK_LIST_KEY = 'dataplatform.service.tasks'
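# Storage layout used below: each task is a Redis hash keyed by its UUID and holding the task's
# properties; TASK_LIST_KEY is a hash mapping task ids to their creation timestamps; encrypted
# credentials are stored as plain string keys under a separate UUID (see task_authenticate).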
def task_list(redis):
return redis.hkeys(TASK_LIST_KEY)
def task_authenticate(redis,key,username,password):
id = uuid4()
auth = username+':'+password
bauth = auth.encode('utf-8')
if type(key)==str:
key = key.encode('utf-8')
f = Fernet(key)
redis.set(id,f.encrypt(bauth))
return id
def task_authentication(redis,key,id):
if type(key)==str:
key = key.encode('utf-8')
f = Fernet(key)
bauth = redis.get(id)
if type(bauth)==str:
bauth = bauth.encode('utf-8')
auth = f.decrypt(bauth).decode('utf-8')
return auth.split(':')
def task_create(redis,**kwargs):
task_id = uuid4()
for name in kwargs:
redis.hset(task_id,name,kwargs[name])
redis.hset(TASK_LIST_KEY,task_id,datetime.now().isoformat())
return task_id
def task_lock(redis,id,timeout=60):
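    # Best-effort, non-blocking lock: SETNX on '<id>.lock' with the current timestamp; if the
    # existing lock is older than `timeout` seconds it is deleted and the lock is retaken.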
lock_name = id+'.lock'
if redis.setnx(lock_name,datetime.now().isoformat()):
return True
else:
tstamp = redis.get(lock_name)
locked_on = datetime.strptime(tstamp,"%Y-%m-%dT%H:%M:%S.%f")
delta = datetime.now() - locked_on
if delta.seconds > timeout:
redis.delete(lock_name)
return task_lock(redis,id,timeout=timeout)
else:
return False
def task_unlock(redis,id):
lock_name = id+'.lock'
redis.delete(lock_name)
def task_get(redis,id):
task = {'id':id}
for name in redis.hkeys(id):
task[name] = redis.hget(id,name)
return task
def task_set_properties(redis,id,**kwargs):
for name in kwargs:
redis.hset(id,name,kwargs[name])
def task_set_property(redis,id,name,value):
redis.hset(id,name,value)
def task_delete_properties(redis,id,*args):
for name in args:
redis.hdel(id,name)
def task_delete_property(redis,id,name):
redis.hdel(id,name)
def task_get_properties(redis,id,*args):
    return list(map(lambda name: redis.hget(id, name), args))
def task_get_property(redis,id,name):
    return redis.hget(id,name)
def task_delete(redis,id):
for name in redis.hkeys(id):
redis.hdel(id,name)
redis.hdel(TASK_LIST_KEY,id)
| alexmilowski/python-hadoop-rest-api | pyox/apps/tracker/tasks.py | Python | apache-2.0 | 2,289 |
#!/usr/bin/env python3
#-*- coding: utf-8 -*-
import hashlib
import unittest
from harvester import harvester
class CubeuploadTest(unittest.TestCase):
def setUp(self):
self.nick = "test"
self.chan = '#brotherBot'
self.mask = "brotherBox!~brotherBo@unaffiliated/brotherbox"
self.h = harvester.HarvesterBot
def test_fetch_cubeupload_share(self):
msg = "http://cubeupload.com/im/YhUxlj.jpg"
test_hash = "3c1a8ef650f3c3c3c2f4dd115931c0ca"
c = self.h._retrieve_content(self.h, self.mask, msg, self.chan)
md5 = hashlib.md5()
md5.update(c[0]['content'])
self.assertEqual(md5.hexdigest(), test_hash)
def test_fetch_cubeupload_raw(self):
msg = "http://i.cubeupload.com/YhUxlj.jpg"
test_hash = "3c1a8ef650f3c3c3c2f4dd115931c0ca"
c = self.h._retrieve_content(self.h, self.mask, msg, self.chan)
md5 = hashlib.md5()
md5.update(c[0]['content'])
self.assertEqual(md5.hexdigest(), test_hash)
if __name__ == '__main__':
unittest.main()
| luceatnobis/harvester | harvester/tests/test_cubeupload.py | Python | unlicense | 1,072 |
# -*- coding: utf-8 -*-
"""
End-to-end tests for Student's Profile Page.
"""
from contextlib import contextmanager
from datetime import datetime
from bok_choy.web_app_test import WebAppTest
from nose.plugins.attrib import attr
from ...pages.common.logout import LogoutPage
from ...pages.lms.account_settings import AccountSettingsPage
from ...pages.lms.auto_auth import AutoAuthPage
from ...pages.lms.learner_profile import LearnerProfilePage
from ...pages.lms.dashboard import DashboardPage
from ..helpers import EventsTestMixin
class LearnerProfileTestMixin(EventsTestMixin):
"""
Mixin with helper methods for testing learner profile pages.
"""
PRIVACY_PUBLIC = u'all_users'
PRIVACY_PRIVATE = u'private'
PUBLIC_PROFILE_FIELDS = ['username', 'country', 'language_proficiencies', 'bio']
PRIVATE_PROFILE_FIELDS = ['username']
PUBLIC_PROFILE_EDITABLE_FIELDS = ['country', 'language_proficiencies', 'bio']
USER_SETTINGS_CHANGED_EVENT_NAME = u"edx.user.settings.changed"
def log_in_as_unique_user(self):
"""
Create a unique user and return the account's username and id.
"""
username = "test_{uuid}".format(uuid=self.unique_id[0:6])
auto_auth_page = AutoAuthPage(self.browser, username=username).visit()
user_id = auto_auth_page.get_user_id()
return username, user_id
def set_public_profile_fields_data(self, profile_page):
"""
Fill in the public profile fields of a user.
"""
profile_page.value_for_dropdown_field('language_proficiencies', 'English')
profile_page.value_for_dropdown_field('country', 'United Arab Emirates')
profile_page.set_value_for_textarea_field('bio', 'Nothing Special')
def visit_profile_page(self, username, privacy=None):
"""
Visit a user's profile page and if a privacy is specified and
is different from the displayed value, then set the privacy to that value.
"""
profile_page = LearnerProfilePage(self.browser, username)
# Change the privacy if requested by loading the page and
# changing the drop down
if privacy is not None:
profile_page.visit()
# Change the privacy setting if it is not the desired one already
profile_page.privacy = privacy
# Verify the current setting is as expected
if privacy == self.PRIVACY_PUBLIC:
self.assertEqual(profile_page.privacy, 'all_users')
else:
self.assertEqual(profile_page.privacy, 'private')
if privacy == self.PRIVACY_PUBLIC:
self.set_public_profile_fields_data(profile_page)
# Reset event tracking so that the tests only see events from
# loading the profile page.
self.start_time = datetime.now() # pylint: disable=attribute-defined-outside-init
# Load the page
profile_page.visit()
return profile_page
def set_birth_year(self, birth_year):
"""
Set birth year for the current user to the specified value.
"""
account_settings_page = AccountSettingsPage(self.browser)
account_settings_page.visit()
account_settings_page.wait_for_page()
self.assertEqual(
account_settings_page.value_for_dropdown_field('year_of_birth', str(birth_year)),
str(birth_year)
)
def verify_profile_page_is_public(self, profile_page, is_editable=True):
"""
Verify that the profile page is currently public.
"""
self.assertEqual(profile_page.visible_fields, self.PUBLIC_PROFILE_FIELDS)
if is_editable:
self.assertTrue(profile_page.privacy_field_visible)
self.assertEqual(profile_page.editable_fields, self.PUBLIC_PROFILE_EDITABLE_FIELDS)
else:
self.assertEqual(profile_page.editable_fields, [])
def verify_profile_page_is_private(self, profile_page, is_editable=True):
"""
Verify that the profile page is currently private.
"""
if is_editable:
self.assertTrue(profile_page.privacy_field_visible)
self.assertEqual(profile_page.visible_fields, self.PRIVATE_PROFILE_FIELDS)
def verify_profile_page_view_event(self, requesting_username, profile_user_id, visibility=None):
"""
Verifies that the correct view event was captured for the profile page.
"""
actual_events = self.wait_for_events(
start_time=self.start_time,
event_filter={'event_type': 'edx.user.settings.viewed', 'username': requesting_username},
number_of_matches=1)
self.assert_events_match(
[
{
'username': requesting_username,
'event': {
'user_id': int(profile_user_id),
'page': 'profile',
'visibility': unicode(visibility)
}
}
],
actual_events
)
@contextmanager
def verify_pref_change_event_during(self, username, user_id, setting, **kwargs):
"""Assert that a single setting changed event is emitted for the user_api_userpreference table."""
expected_event = {
'username': username,
'event': {
'setting': setting,
'user_id': int(user_id),
'table': 'user_api_userpreference',
'truncated': []
}
}
expected_event['event'].update(kwargs)
event_filter = {
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'username': username,
}
with self.assert_events_match_during(event_filter=event_filter, expected_events=[expected_event]):
yield
def initialize_different_user(self, privacy=None, birth_year=None):
"""
Initialize the profile page for a different test user
"""
username, user_id = self.log_in_as_unique_user()
# Set the privacy for the new user
if privacy is None:
privacy = self.PRIVACY_PUBLIC
self.visit_profile_page(username, privacy=privacy)
# Set the user's year of birth
if birth_year:
self.set_birth_year(birth_year)
# Log the user out
LogoutPage(self.browser).visit()
return username, user_id
@attr('shard_4')
class OwnLearnerProfilePageTest(LearnerProfileTestMixin, WebAppTest):
"""
Tests that verify a student's own profile page.
"""
def verify_profile_forced_private_message(self, username, birth_year, message=None):
"""
Verify age limit messages for a user.
"""
if birth_year is None:
birth_year = ""
self.set_birth_year(birth_year=birth_year)
profile_page = self.visit_profile_page(username)
self.assertTrue(profile_page.privacy_field_visible)
if message:
self.assertTrue(profile_page.age_limit_message_present)
else:
self.assertFalse(profile_page.age_limit_message_present)
self.assertIn(message, profile_page.profile_forced_private_message)
def test_profile_defaults_to_public(self):
"""
Scenario: Verify that a new user's profile defaults to public.
Given that I am a new user.
When I go to my profile page.
Then I see that the profile visibility is set to public.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username)
self.verify_profile_page_is_public(profile_page)
def assert_default_image_has_public_access(self, profile_page):
"""
Assert that profile image has public access.
"""
self.assertTrue(profile_page.profile_has_default_image)
self.assertTrue(profile_page.profile_has_image_with_public_access())
def test_make_profile_public(self):
"""
Scenario: Verify that the user can change their privacy.
Given that I am a registered user
And I visit my private profile page
And I set the profile visibility to public
Then a user preference changed event should be recorded
When I reload the page
Then the profile visibility should be shown as public
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
with self.verify_pref_change_event_during(
username, user_id, 'account_privacy', old=self.PRIVACY_PRIVATE, new=self.PRIVACY_PUBLIC
):
profile_page.privacy = self.PRIVACY_PUBLIC
# Reload the page and verify that the profile is now public
self.browser.refresh()
profile_page.wait_for_page()
self.verify_profile_page_is_public(profile_page)
def test_make_profile_private(self):
"""
Scenario: Verify that the user can change their privacy.
Given that I am a registered user
And I visit my public profile page
And I set the profile visibility to private
Then a user preference changed event should be recorded
When I reload the page
Then the profile visibility should be shown as private
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
with self.verify_pref_change_event_during(
username, user_id, 'account_privacy', old=None, new=self.PRIVACY_PRIVATE
):
profile_page.privacy = self.PRIVACY_PRIVATE
# Reload the page and verify that the profile is now private
self.browser.refresh()
profile_page.wait_for_page()
self.verify_profile_page_is_private(profile_page)
def test_dashboard_learner_profile_link(self):
"""
        Scenario: Verify that my profile link is present on the dashboard page and that we can navigate to the correct page.
Given that I am a registered user.
When I go to Dashboard page.
And I click on username dropdown.
Then I see Profile link in the dropdown menu.
When I click on Profile link.
Then I will be navigated to Profile page.
"""
username, user_id = self.log_in_as_unique_user()
dashboard_page = DashboardPage(self.browser)
dashboard_page.visit()
dashboard_page.click_username_dropdown()
self.assertIn('Profile', dashboard_page.username_dropdown_link_text)
dashboard_page.click_my_profile_link()
my_profile_page = LearnerProfilePage(self.browser, username)
my_profile_page.wait_for_page()
def test_fields_on_my_private_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at her own private profile.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to private.
And I reload the page.
Then I should see the profile visibility selector dropdown.
Then I see some of the profile fields are shown.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
self.verify_profile_page_is_private(profile_page)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_fields_on_my_public_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at her own public profile.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public.
And I reload the page.
Then I should see the profile visibility selector dropdown.
Then I see all the profile fields are shown.
And `location`, `language` and `about me` fields are editable.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.verify_profile_page_is_public(profile_page)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PUBLIC)
def _test_dropdown_field(self, profile_page, field_id, new_value, displayed_value, mode):
"""
Test behaviour of a dropdown field.
"""
profile_page.value_for_dropdown_field(field_id, new_value)
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
self.browser.refresh()
profile_page.wait_for_page()
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
def _test_textarea_field(self, profile_page, field_id, new_value, displayed_value, mode):
"""
Test behaviour of a textarea field.
"""
profile_page.set_value_for_textarea_field(field_id, new_value)
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
self.browser.refresh()
profile_page.wait_for_page()
self.assertEqual(profile_page.get_non_editable_mode_value(field_id), displayed_value)
        self.assertEqual(profile_page.mode_for_field(field_id), mode)
def test_country_field(self):
"""
Test behaviour of `Country` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set country value to `Pakistan`.
Then displayed country should be `Pakistan` and country field mode should be `display`
And I reload the page.
Then displayed country should be `Pakistan` and country field mode should be `display`
And I make `country` field editable
Then `country` field mode should be `edit`
And `country` field icon should be visible.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_dropdown_field(profile_page, 'country', 'Pakistan', 'Pakistan', 'display')
profile_page.make_field_editable('country')
self.assertEqual(profile_page.mode_for_field('country'), 'edit')
self.assertTrue(profile_page.field_icon_present('country'))
def test_language_field(self):
"""
Test behaviour of `Language` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set language value to `Urdu`.
Then displayed language should be `Urdu` and language field mode should be `display`
And I reload the page.
Then displayed language should be `Urdu` and language field mode should be `display`
Then I set empty value for language.
Then displayed language should be `Add language` and language field mode should be `placeholder`
And I reload the page.
Then displayed language should be `Add language` and language field mode should be `placeholder`
And I make `language` field editable
Then `language` field mode should be `edit`
And `language` field icon should be visible.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_dropdown_field(profile_page, 'language_proficiencies', 'Urdu', 'Urdu', 'display')
self._test_dropdown_field(profile_page, 'language_proficiencies', '', 'Add language', 'placeholder')
profile_page.make_field_editable('language_proficiencies')
        self.assertEqual(profile_page.mode_for_field('language_proficiencies'), 'edit')
self.assertTrue(profile_page.field_icon_present('language_proficiencies'))
def test_about_me_field(self):
"""
Test behaviour of `About Me` field.
Given that I am a registered user.
And I visit my Profile page.
And I set the profile visibility to public and set default values for public fields.
Then I set about me value to `ThisIsIt`.
Then displayed about me should be `ThisIsIt` and about me field mode should be `display`
And I reload the page.
Then displayed about me should be `ThisIsIt` and about me field mode should be `display`
Then I set empty value for about me.
Then displayed about me should be `Tell other edX learners a little about yourself: where you live,
what your interests are, why you're taking courses on edX, or what you hope to learn.` and about me
field mode should be `placeholder`
And I reload the page.
Then displayed about me should be `Tell other edX learners a little about yourself: where you live,
what your interests are, why you're taking courses on edX, or what you hope to learn.` and about me
field mode should be `placeholder`
And I make `about me` field editable
Then `about me` field mode should be `edit`
"""
placeholder_value = (
"Tell other learners a little about yourself: where you live, what your interests are, "
"why you're taking courses, or what you hope to learn."
)
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self._test_textarea_field(profile_page, 'bio', 'ThisIsIt', 'ThisIsIt', 'display')
self._test_textarea_field(profile_page, 'bio', '', placeholder_value, 'placeholder')
profile_page.make_field_editable('bio')
        self.assertEqual(profile_page.mode_for_field('bio'), 'edit')
def test_birth_year_not_set(self):
"""
Verify message if birth year is not set.
Given that I am a registered user.
And birth year is not set for the user.
And I visit my profile page.
Then I should see a message that the profile is private until the year of birth is set.
"""
username, user_id = self.log_in_as_unique_user()
message = "You must specify your birth year before you can share your full profile."
self.verify_profile_forced_private_message(username, birth_year=None, message=message)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_user_is_under_age(self):
"""
Verify message if user is under age.
Given that I am a registered user.
And birth year is set so that age is less than 13.
And I visit my profile page.
Then I should see a message that the profile is private as I am under thirteen.
"""
username, user_id = self.log_in_as_unique_user()
under_age_birth_year = datetime.now().year - 10
self.verify_profile_forced_private_message(
username,
birth_year=under_age_birth_year,
message='You must be over 13 to share a full profile.'
)
self.verify_profile_page_view_event(username, user_id, visibility=self.PRIVACY_PRIVATE)
def test_user_can_only_see_default_image_for_private_profile(self):
"""
Scenario: Default profile image behaves correctly for under age user.
Given that I am on my profile page with private access
And I can see default image
When I move my cursor to the image
        Then I cannot see the upload/remove image text
        And I cannot upload/remove the image.
"""
year_of_birth = datetime.now().year - 5
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PRIVATE)
self.verify_profile_forced_private_message(
username,
year_of_birth,
message='You must be over 13 to share a full profile.'
)
self.assertTrue(profile_page.profile_has_default_image)
self.assertFalse(profile_page.profile_has_image_with_private_access())
def test_user_can_see_default_image_for_public_profile(self):
"""
Scenario: Default profile image behaves correctly for public profile.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        And I am able to upload a new image
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
def test_user_can_upload_the_profile_image_with_success(self):
"""
Scenario: Upload profile image works correctly.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new image via the file uploader
        Then I can see the changed image
        And I can also see the latest image after a reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
profile_page.visit()
self.assertTrue(profile_page.image_upload_success)
def test_user_can_see_error_for_exceeding_max_file_size_limit(self):
"""
Scenario: Upload profile image does not work for > 1MB image file.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new > 1MB image via the file uploader
        Then I can see the error message for the file size limit
        And I can still see the default image after a page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='larger_image.jpg')
self.assertEqual(profile_page.profile_image_message, "The file must be smaller than 1 MB in size.")
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_see_error_for_file_size_below_the_min_limit(self):
"""
Scenario: Upload profile image does not work for < 100 Bytes image file.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new < 100 Bytes image via the file uploader
        Then I can see the error message for the minimum file size limit
        And I can still see the default image after a page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='list-icon-visited.png')
self.assertEqual(profile_page.profile_image_message, "The file must be at least 100 bytes in size.")
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_see_error_for_wrong_file_type(self):
"""
Scenario: Upload profile image does not work for wrong file types.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I upload a new csv file via the file uploader
        Then I can see the error message for a wrong/unsupported file type
        And I can still see the default image after a page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
profile_page.upload_file(filename='generic_csv.csv')
self.assertEqual(
profile_page.profile_image_message,
"The file must be one of the following types: .gif, .png, .jpeg, .jpg."
)
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
self.assert_no_matching_events_were_emitted({
'event_type': self.USER_SETTINGS_CHANGED_EVENT_NAME,
'event': {
'setting': 'profile_image_uploaded_at',
'user_id': int(user_id),
}
})
def test_user_can_remove_profile_image(self):
"""
Scenario: Remove profile image works correctly.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see the upload/remove image text
        When I click on the remove image link
        Then I can see the default image
        And I can still see the default image after a page reload.
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
self.assertTrue(profile_page.remove_profile_image())
self.assertTrue(profile_page.profile_has_default_image)
profile_page.visit()
self.assertTrue(profile_page.profile_has_default_image)
def test_user_cannot_remove_default_image(self):
"""
        Scenario: Remove profile image does not work for default images.
Given that I am on my profile page with public access
And I can see default image
When I move my cursor to the image
        Then I can see only the upload image text
        And I cannot see the remove image text
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
self.assertFalse(profile_page.remove_link_present)
def test_eventing_after_multiple_uploads(self):
"""
Scenario: An event is fired when a user with a profile image uploads another image
Given that I am on my profile page with public access
And I upload a new image via file uploader
When I upload another image via the file uploader
Then two upload events have been emitted
"""
username, user_id = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username, privacy=self.PRIVACY_PUBLIC)
self.assert_default_image_has_public_access(profile_page)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg')
self.assertTrue(profile_page.image_upload_success)
with self.verify_pref_change_event_during(
username, user_id, 'profile_image_uploaded_at', table='auth_userprofile'
):
profile_page.upload_file(filename='image.jpg', wait_for_upload_button=False)
@attr('shard_4')
class DifferentUserLearnerProfilePageTest(LearnerProfileTestMixin, WebAppTest):
"""
Tests that verify viewing the profile page of a different user.
"""
def test_different_user_private_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at a different user's private profile.
Given that I am a registered user.
And I visit a different user's private profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then I see some of the profile fields are shown.
"""
different_username, different_user_id = self.initialize_different_user(privacy=self.PRIVACY_PRIVATE)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
self.verify_profile_page_is_private(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PRIVATE)
def test_different_user_under_age(self):
"""
Scenario: Verify that an under age user's profile is private to others.
Given that I am a registered user.
And I visit an under age user's profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then I see that only the private fields are shown.
"""
under_age_birth_year = datetime.now().year - 10
different_username, different_user_id = self.initialize_different_user(
privacy=self.PRIVACY_PUBLIC,
birth_year=under_age_birth_year
)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
self.verify_profile_page_is_private(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PRIVATE)
def test_different_user_public_profile(self):
"""
Scenario: Verify that desired fields are shown when looking at a different user's public profile.
Given that I am a registered user.
And I visit a different user's public profile page.
Then I shouldn't see the profile visibility selector dropdown.
Then all the profile fields are shown.
Also `location`, `language` and `about me` fields are not editable.
"""
different_username, different_user_id = self.initialize_different_user(privacy=self.PRIVACY_PUBLIC)
username, __ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
profile_page.wait_for_public_fields()
self.verify_profile_page_is_public(profile_page, is_editable=False)
self.verify_profile_page_view_event(username, different_user_id, visibility=self.PRIVACY_PUBLIC)
@attr('a11y')
class LearnerProfileA11yTest(LearnerProfileTestMixin, WebAppTest):
"""
Class to test learner profile accessibility.
"""
def test_editable_learner_profile_a11y(self):
"""
Test the accessibility of the editable version of the profile page
(user viewing her own public profile).
"""
username, _ = self.log_in_as_unique_user()
profile_page = self.visit_profile_page(username)
profile_page.a11y_audit.config.set_rules({
"ignore": [
'skip-link', # TODO: AC-179
'link-href', # TODO: AC-231
],
})
profile_page.a11y_audit.check_for_accessibility_errors()
profile_page.make_field_editable('language_proficiencies')
profile_page.a11y_audit.check_for_accessibility_errors()
profile_page.make_field_editable('bio')
profile_page.a11y_audit.check_for_accessibility_errors()
def test_read_only_learner_profile_a11y(self):
"""
Test the accessibility of the read-only version of a public profile page
(user viewing someone else's profile page).
"""
# initialize_different_user should cause country, language, and bio to be filled out (since
# privacy is public). It doesn't appear that this is happening, although the method
# works in regular bokchoy tests. Perhaps a problem with phantomjs? So this test is currently
# only looking at a read-only profile page with a username.
different_username, _ = self.initialize_different_user(privacy=self.PRIVACY_PUBLIC)
self.log_in_as_unique_user()
profile_page = self.visit_profile_page(different_username)
profile_page.a11y_audit.config.set_rules({
"ignore": [
'skip-link', # TODO: AC-179
'link-href', # TODO: AC-231
],
})
profile_page.a11y_audit.check_for_accessibility_errors()
| alu042/edx-platform | common/test/acceptance/tests/lms/test_learner_profile.py | Python | agpl-3.0 | 34,219 |
#!/usr/bin/env python
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Written by Henry 'Pi' James, Loring Holden and Matt Chisholm
app_name = "BitTorrent"
from BTL.translation import _
import time
from sys import *
from os.path import *
from sha import *
from BTL.bencode import *
from BitTorrent import version
NAME, EXT = splitext(basename(argv[0]))
print _("%s %s - decode %s metainfo files") % (NAME, version, app_name)
print
if len(argv) == 1:
print _("Usage: %s [TORRENTFILE [TORRENTFILE ... ] ]") % basename(argv[0])
print
exit(2) # common exit code for syntax error
labels = {'metafile' : _("metainfo file: %s" ),
'infohash' : _("info hash: %s" ),
'filename' : _("file name: %s" ),
'filesize' : _("file size:" ),
'files' : _("files:" ),
'title' : _("title: %s" ),
'dirname' : _("directory name: %s" ),
'creation date' : _("creation date: %s" ),
'archive' : _("archive size:" ),
'announce' : _("tracker announce url: %s"),
'announce-list' : _("tracker announce list: %s"),
'nodes' : _("trackerless nodes:" ),
'comment' : _("comment:" ),
'content_type' : _("content_type: %s" ),
'url-list' : _("url sources: %s" ),
}
maxlength = max( [len(v[:v.find(':')]) for v in labels.values()] )
# run through l10n-ed labels and make them all the same length
for k,v in labels.items():
if ':' in v:
index = v.index(':')
newlabel = v.replace(':', '.'*(maxlength-index) + ':')
labels[k] = newlabel
for metainfo_name in argv[1:]:
metainfo_file = open(metainfo_name, 'rb')
metainfo = bdecode(metainfo_file.read())
metainfo_file.close()
info = metainfo['info']
info_hash = sha(bencode(info))
if metainfo.has_key('title'):
print labels['title'] % metainfo['title']
print labels['metafile'] % basename(metainfo_name)
print labels['infohash'] % info_hash.hexdigest()
piece_length = info['piece length']
if info.has_key('length'):
# let's assume we just have a file
print labels['filename'] % info['name']
file_length = info['length']
name = labels['filesize']
if info.has_key('content_type'):
print labels['content_type'] % info['content_type']
else:
# let's assume we have a directory structure
print labels['dirname'] % info['name']
print labels['files']
        file_length = 0
for file in info['files']:
path = ''
for item in file['path']:
if (path != ''):
path = path + "/"
path = path + item
if file.has_key('content_type'):
print ' %s (%d,%s)' % (path, file['length'],
file['content_type'])
else:
print ' %s (%d)' % (path, file['length'])
file_length += file['length']
name = labels['archive']
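    # Number of full pieces, plus the size of the trailing partial piece.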
piece_number, last_piece_length = divmod(file_length, piece_length)
print '%s %i (%i * %i + %i)' \
% (name,file_length, piece_number, piece_length, last_piece_length)
if metainfo.has_key('announce'):
print labels['announce'] % metainfo['announce']
if 'announce-list' in metainfo:
print labels['announce-list'] % metainfo['announce-list']
if metainfo.has_key('nodes'):
print labels['nodes']
for n in metainfo['nodes']:
print '\t%s\t:%d' % (n[0], n[1])
if metainfo.has_key('comment'):
print labels['comment'], metainfo['comment']
else:
print labels['comment']
if metainfo.has_key('url-list'):
print labels['url-list'] % '\n'.join(metainfo['url-list'])
if metainfo.has_key('creation date'):
fmt = "%a, %d %b %Y %H:%M:%S"
gm = time.gmtime(metainfo['creation date'])
s = time.strftime(fmt, gm)
print labels['creation date'] % s
# DANGER: modifies torrent file
if False:
metainfo_file = open(metainfo_name, 'wb')
metainfo_file.write(bencode(metainfo))
metainfo_file.close() | kenorb/BitTorrent | torrentinfo-console.py | Python | gpl-3.0 | 4,948 |
"""The tests for the MQTT subscription component."""
from unittest import mock
from homeassistant.components.mqtt.subscription import (
async_subscribe_topics,
async_unsubscribe_topics,
)
from homeassistant.core import callback
from tests.common import async_fire_mqtt_message, async_mock_mqtt_component
async def test_subscribe_topics(hass, mqtt_mock, caplog):
"""Test subscription to topics."""
calls1 = []
@callback
def record_calls1(*args):
"""Record calls."""
calls1.append(args)
calls2 = []
@callback
def record_calls2(*args):
"""Record calls."""
calls2.append(args)
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{
"test_topic1": {"topic": "test-topic1", "msg_callback": record_calls1},
"test_topic2": {"topic": "test-topic2", "msg_callback": record_calls2},
},
)
async_fire_mqtt_message(hass, "test-topic1", "test-payload1")
assert len(calls1) == 1
assert calls1[0][0].topic == "test-topic1"
assert calls1[0][0].payload == "test-payload1"
assert len(calls2) == 0
async_fire_mqtt_message(hass, "test-topic2", "test-payload2")
assert len(calls1) == 1
assert len(calls2) == 1
assert calls2[0][0].topic == "test-topic2"
assert calls2[0][0].payload == "test-payload2"
await async_unsubscribe_topics(hass, sub_state)
async_fire_mqtt_message(hass, "test-topic1", "test-payload")
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
async def test_modify_topics(hass, mqtt_mock, caplog):
"""Test modification of topics."""
calls1 = []
@callback
def record_calls1(*args):
"""Record calls."""
calls1.append(args)
calls2 = []
@callback
def record_calls2(*args):
"""Record calls."""
calls2.append(args)
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{
"test_topic1": {"topic": "test-topic1", "msg_callback": record_calls1},
"test_topic2": {"topic": "test-topic2", "msg_callback": record_calls2},
},
)
async_fire_mqtt_message(hass, "test-topic1", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 0
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
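    # Passing a new topic dict re-subscribes changed topics and unsubscribes
    # any topics that are no longer listed.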
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1_1", "msg_callback": record_calls1}},
)
async_fire_mqtt_message(hass, "test-topic1", "test-payload")
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
async_fire_mqtt_message(hass, "test-topic1_1", "test-payload")
assert len(calls1) == 2
assert calls1[1][0].topic == "test-topic1_1"
assert calls1[1][0].payload == "test-payload"
assert len(calls2) == 1
await async_unsubscribe_topics(hass, sub_state)
async_fire_mqtt_message(hass, "test-topic1_1", "test-payload")
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 2
assert len(calls2) == 1
async def test_qos_encoding_default(hass, mqtt_mock, caplog):
"""Test default qos and encoding."""
mock_mqtt = await async_mock_mqtt_component(hass)
@callback
def msg_callback(*args):
"""Do nothing."""
pass
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}},
)
mock_mqtt.async_subscribe.assert_called_once_with(
"test-topic1", mock.ANY, 0, "utf-8"
)
async def test_qos_encoding_custom(hass, mqtt_mock, caplog):
"""Test custom qos and encoding."""
mock_mqtt = await async_mock_mqtt_component(hass)
@callback
def msg_callback(*args):
"""Do nothing."""
pass
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{
"test_topic1": {
"topic": "test-topic1",
"msg_callback": msg_callback,
"qos": 1,
"encoding": "utf-16",
}
},
)
mock_mqtt.async_subscribe.assert_called_once_with(
"test-topic1", mock.ANY, 1, "utf-16"
)
async def test_no_change(hass, mqtt_mock, caplog):
"""Test subscription to topics without change."""
mock_mqtt = await async_mock_mqtt_component(hass)
@callback
def msg_callback(*args):
"""Do nothing."""
pass
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}},
)
call_count = mock_mqtt.async_subscribe.call_count
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}},
)
assert call_count == mock_mqtt.async_subscribe.call_count
| fbradyirl/home-assistant | tests/components/mqtt/test_subscription.py | Python | apache-2.0 | 5,237 |
#!/usr/bin/env python
# This file is copied from Printator.
#
# Printator is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Printator is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Printator. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import argparse
import time
import traceback
import math
import numpy
import threading
from Queue import Queue
import serial
import subprocess
import tempfile
import socket
import wx
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "Printrun"))
from printrun import gui # NOQA
from printrun import gcoder
from printrun import gcview
from printrun.gl.libtatlin import actors
com_timeout = 0.25
serial_baudrate = 115200
socket_host = "127.0.0.1"
socket_port = 12421
build_dimensions = [200, 200, 100, 0, 0, 0, 0, 0, 0]
parser = argparse.ArgumentParser(description = "3D printer simulator", add_help = False)
parser.add_argument('--help', help = "Show this help message and quit", action = "store_true")
parser.add_argument('-f', '--fast', help = "Process commands without reallistically waiting", action = "store_true")
parser.add_argument('--speed', type = float, help = "Speed factor (> 1.0 is faster)", default = 1.0)
parser.add_argument('-d', '--debug', help = "Display debug messages", action = "store_true")
parser.add_argument('-s', '--serial', help = "Simulator serial port")
parser.add_argument('-n', '--network', help = "Use networking instead of serial communications", action = "store_true")
parser.add_argument('-h', '--host', help = "Host to bind", default = str(socket_host))
parser.add_argument('-p', '--port', help = "Port to bind", default = str(socket_port))
args = parser.parse_args()
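# Example invocation (assuming this script is saved as printator.py):
#   python printator.py --network --debug --port 12421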
if args.help:
parser.print_help()
raise SystemExit
use_serial = not args.network
debug_mode = args.debug
fast_mode = args.fast
serial_port = args.serial
speed_factor = max(args.speed, 0.001)
socket_host = args.host
socket_port = int(args.port)
class MoveUpdater(threading.Thread):
def __init__(self, parent, gline, totalduration, orig, vec):
super(MoveUpdater, self).__init__()
self.parent = parent
self.gline = gline
self.totalduration = totalduration
self.orig = numpy.array(orig)
self.vec = numpy.array(vec)
def run(self):
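        # Interpolate the head position along the move every 50 ms so the GL
        # view animates smoothly, then snap to the final position.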
starttime = time.time()
timestep = 0.05
orig = self.orig
vec = self.vec
wx.CallAfter(self.parent.add_glmove, self.gline, *(list(orig) + list(orig)))
while True:
prop = (time.time() - starttime) / self.totalduration
if prop > 0.99: break
wx.CallAfter(self.parent.update_glmove, self.gline, *list(orig + prop * vec))
time.sleep(timestep)
wx.CallAfter(self.parent.update_glmove, self.gline, *list(orig + vec))
class PrinterSimulator(object):
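    # Virtual print-head state (current position, extrusion and feedrate) plus
    # motion constants used to estimate how long each move takes.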
cur_x = 0
cur_y = 0
cur_z = 0
cur_e = 0
cur_f = 100
acceleration = 1500.0 # mm/s/s ASSUMING THE DEFAULT FROM SPRINTER !!!!
xy_homing_feedrate = 50.
z_homing_feedrate = 4.
def __init__(self, path, port, gline_cb = None, debug = False, server = None):
self.path = path
self.port = port
self.stop_threads = False
self.read_thread = None
self.process_thread = None
self.gcoder = None
        self.gline_cb = gline_cb
self.debug = debug
self.server = server
self.command_buffer = Queue(20 if not fast_mode else 400)
self.glframe = None
self.sd_upload = False
def log(self, message):
if self.debug: print "???", message
def start(self, frame):
self.gcoder = gcoder.GCode([])
self.glframe = frame
self.init_glmodel()
self.init_glhead()
self.stop_threads = False
self.read_thread = threading.Thread(target = self.reader)
self.read_thread.start()
self.process_thread = threading.Thread(target = self.processor)
self.process_thread.start()
def stop(self):
self.command_buffer.put(None)
self.stop_threads = True
if self.read_thread:
self.read_thread.join()
self.read_thread = None
if self.process_thread:
self.process_thread.join()
self.process_thread = None
def compute_duration(self, x, y, z, f):
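        # Rough duration estimate: accelerate and decelerate over the XY travel
        # using the configured acceleration, add Z travel at the target
        # feedrate, then scale the result by the global speed_factor.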
currenttravel = math.hypot(x - self.cur_x, y - self.cur_y)
if currenttravel > 0:
distance = 2 * abs(((self.cur_f + f) * (f - self.cur_f) * 0.5) / self.acceleration) # multiply by 2 because we have to accelerate and decelerate
if distance <= currenttravel and self.cur_f + f != 0 and f != 0:
# Unsure about this formula -- iXce reviewing this code
moveduration = 2 * distance / (self.cur_f + f)
currenttravel -= distance
moveduration += currenttravel / f
else:
moveduration = math.sqrt(2 * distance / self.acceleration) # probably buggy : not taking actual travel into account
else:
moveduration = 0
if z != self.cur_z:
distance = abs(self.cur_z - z)
moveduration += distance / f
return moveduration / speed_factor
def process_gline_nong(self, gline):
# These unbuffered commands must be acked manually
if gline.command == "M114":
self.write("ok X:%.02fY:%.02fZ:%.02fE:%.02f Count:" % (self.cur_x, self.cur_y, self.cur_z, self.cur_e))
elif gline.command == "M105":
self.write("ok T:100.0/225.0 B:98.0 /110.0 T0:228.0/220.0 T1:150.0/185")
elif gline.command == "M115":
self.write("ok PROTOCOL_VERSION:0.1 FIRMWARE_NAME:FiveD FIRMWARE_URL:http%3A//reprap.org MACHINE_TYPE:Mendel EXTRUDER_COUNT:3")
elif gline.command == "M190":
time.sleep(10)
self.write("ok")
elif gline.command == "M28":
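            # M28 starts an SD-card upload; until M29 is seen, incoming G-code
            # is acknowledged but not simulated.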
self.sd_upload = True
self.write("ok Writing to file")
elif gline.command == "M29":
self.sd_upload = False
self.write("ok")
else:
self.write("ok")
if self.gline_cb:
self.gline_cb(gline)
def process_gline(self, gline):
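        # Buffered (G) commands update the simulated tool position and, unless
        # fast mode is enabled, sleep for the estimated move duration while the
        # GL view is updated.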
if not gline.command.startswith("G"): # unbuffered
return self.process_gline_nong(gline)
if self.sd_upload:
return
line_duration = 0
timer = None
if gline.is_move:
new_x = self.cur_x
new_y = self.cur_y
new_z = self.cur_z
new_e = self.cur_e
new_f = self.cur_f
if gline.relative:
if gline.x is not None: new_x += gline.x
if gline.y is not None: new_y += gline.y
if gline.z is not None: new_z += gline.z
else:
if gline.x is not None: new_x = gline.x
if gline.y is not None: new_y = gline.y
if gline.z is not None: new_z = gline.z
if gline.e is not None:
if gline.relative_e:
new_e += gline.e
else:
new_e = gline.e
if gline.f is not None: new_f = gline.f / 60.0
line_duration = self.compute_duration(new_x, new_y, new_z, new_f)
if not fast_mode and line_duration > 0.5:
vec = (new_x - self.cur_x, new_y - self.cur_y, new_z - self.cur_z)
timer = MoveUpdater(self, gline, line_duration, (self.cur_x, self.cur_y, self.cur_z), vec)
else:
wx.CallAfter(self.add_glmove, gline, self.cur_x, self.cur_y, self.cur_z, new_x, new_y, new_z)
self.cur_x = new_x
self.cur_y = new_y
self.cur_z = new_z
self.cur_e = new_e
self.cur_f = new_f
elif gline.command == "G4":
line_duration = gcoder.P(gline)
elif gline.command == "G28":
new_x = 0 if "X" in gline.raw else self.cur_x
new_y = 0 if "Y" in gline.raw else self.cur_y
new_z = 0 if "Z" in gline.raw else self.cur_z
line_duration = self.compute_duration(new_x, new_y, self.cur_z, self.xy_homing_feedrate)
line_duration += self.compute_duration(self.cur_x, self.cur_y, new_z, self.z_homing_feedrate)
self.cur_x = new_x
self.cur_y = new_y
self.cur_z = new_z
wx.CallAfter(self.move_head, gline, self.cur_x, self.cur_y, self.cur_z)
elif gline.command == "G92":
if gline.x is not None: self.cur_x = gline.x
if gline.y is not None: self.cur_y = gline.y
if gline.z is not None: self.cur_z = gline.z
if gline.e is not None: self.cur_e = gline.e
wx.CallAfter(self.move_head, gline, self.cur_x, self.cur_y, self.cur_z)
if not fast_mode and line_duration and line_duration > 0:
self.log("sleeping for %ss" % line_duration)
if timer: timer.start()
time.sleep(line_duration)
if timer: timer.join()
if self.gline_cb:
self.gline_cb(gline)
def processor(self):
while not self.stop_threads or not self.command_buffer.empty():
gline = self.command_buffer.get()
if gline is None:
self.command_buffer.task_done()
return
try:
self.process_gline(gline)
except:
print "Exception caught while processing command %s" % gline.raw
traceback.print_exc()
self.command_buffer.task_done()
def write(self, data):
if self.debug: print ">>>", data
try:
self.port.write(data + "\n")
self.port.flush()
except socket.error:
pass # Don't do anything : reader thread will pick it up
def reader(self):
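        # Read newline-terminated G-code from the serial port or TCP client,
        # parse it with gcoder and hand it to the processor thread; buffered
        # commands are acked immediately, unbuffered ones once the queue drains.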
print "Simulator listening on %s" % self.path
while not self.stop_threads:
if not self.port and self.server:
try:
self.conn, self.remote_addr = self.server.accept()
print "TCP connection from %s:%s" % self.remote_addr
self.conn.settimeout(com_timeout)
self.port = self.conn.makefile()
except socket.timeout:
continue
try:
line = self.port.readline()
except socket.timeout:
continue
except socket.error:
line = ""
if self.server and not line: # empty line returned from the socket: this is EOF
print "Lost connection from %s:%s" % self.remote_addr
self.port = None
continue
line = line.strip()
if not line:
continue
if self.debug: print "<<<", line
try:
gline = self.gcoder.append(line)
                if not gline.command.startswith("G"): # unbuffered (treating every non-G command as unbuffered is a slight simplification of reality)
while not self.command_buffer.empty():
time.sleep(0.05)
self.command_buffer.put(gline)
while not self.command_buffer.empty():
time.sleep(0.05)
else: # buffered
self.command_buffer.put(gline)
self.write("ok")
except ValueError:
# Failed to parse the G-Code, probably a custom command
self.write("ok")
def init_glmodel(self):
self.glmodel = actors.GcodeModelLight()
generator = self.glmodel.load_data(self.gcoder)
generator_output = generator.next()
while generator_output is not None:
generator_output = generator.next()
self.glmodel.nvertices = 0
self.glmodel.layer_stops[-1] = self.glmodel.nvertices
self.glmodel.num_layers_to_draw = self.glmodel.max_layers + 1
self.glmodel.use_vbos = False
self.glmodel.loaded = True
self.glmodel.initialized = False
self.glframe.objects[-1].model = self.glmodel
self.refresh_timer = wx.CallLater(100, self.glframe.Refresh)
def init_glhead(self):
self.printhead = gcview.GCObject(actors.PrintHead())
self.glframe.objects.insert(1, self.printhead)
def move_head(self, gline, cur_x, cur_y, cur_z):
self.printhead.offsets[0] = cur_x
self.printhead.offsets[1] = cur_y
self.printhead.offsets[2] = cur_z
if not self.refresh_timer.IsRunning():
self.refresh_timer.Start()
def add_glmove(self, gline, prev_x, prev_y, prev_z, cur_x, cur_y, cur_z):
if self.glmodel.nvertices + 2 > len(self.glmodel.vertices):
self.glmodel.colors.resize((2 * len(self.glmodel.vertices) + 2, 4))
self.glmodel.vertices.resize((2 * len(self.glmodel.vertices) + 2, 3))
self.glmodel.vertices[self.glmodel.nvertices] = (prev_x, prev_y, prev_z)
self.glmodel.vertices[self.glmodel.nvertices + 1] = (cur_x, cur_y, cur_z)
color = self.glmodel.movement_color(gline)
self.glmodel.colors[self.glmodel.nvertices] = color
self.glmodel.colors[self.glmodel.nvertices + 1] = color
self.glmodel.nvertices += 2
self.glmodel.layer_stops[-1] = self.glmodel.nvertices
self.glmodel.initialized = False
self.move_head(gline, cur_x, cur_y, cur_z)
def update_glmove(self, gline, cur_x, cur_y, cur_z):
self.glmodel.vertices[self.glmodel.nvertices - 1] = (cur_x, cur_y, cur_z)
self.glmodel.initialized = False
self.move_head(gline, cur_x, cur_y, cur_z)
if use_serial:
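    # socat creates a linked pseudo-terminal pair: the simulator reads the
    # private end while the host software connects to the public end.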
privend = tempfile.mktemp(prefix = "simpriv_", dir = os.getcwd())
if serial_port:
pubend = serial_port
else:
pubend = tempfile.mktemp(prefix = "printer_", dir = os.getcwd())
socat_p = subprocess.Popen(["socat", "PTY,link=%s" % privend, "PTY,link=%s" % pubend])
while not os.path.exists(privend) or not os.path.exists(pubend):
time.sleep(0.1)
sport = serial.Serial(privend, baudrate = serial_baudrate, timeout = com_timeout)
simulator = PrinterSimulator(pubend, sport, debug = debug_mode)
else:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((socket_host, socket_port))
sock.settimeout(com_timeout)
sock.listen(1)
simulator = PrinterSimulator("%s:%s" % (socket_host, socket_port), None, server = sock, debug = debug_mode)
app = wx.App(redirect = False)
frame = gcview.GcodeViewFrame(None, wx.ID_ANY, '3D printer simulator', size = (400, 400), build_dimensions = build_dimensions)
frame.Bind(wx.EVT_CLOSE, lambda event: frame.Destroy())
frame.Show(True)
simulator.start(frame)
app.MainLoop()
simulator.stop()
app.Destroy()
if use_serial:
socat_p.terminate()
| iXce/Printator | printator.py | Python | gpl-3.0 | 15,417 |
# -*- coding: utf8 -*-
from django.db import models
from holonet_django.models import MailMapping
class TestRecipientModel(models.Model):
username = models.CharField(max_length=100)
email = models.EmailField()
class Mapping1(MailMapping):
recipients = models.ManyToManyField(TestRecipientModel)
invalid_test_field = 'Just a test value'
def get_recipients(self):
return self.recipients.all()
class Mapping2(MailMapping):
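    # Presumably exercises a mapping without recipients (get_recipients
    # deliberately returns None).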
def get_recipients(self):
return None
| webkom/django-holonet | tests/models.py | Python | mit | 510 |
import os
from ehive.runnable.IGFBaseProcess import IGFBaseProcess
from igf_data.utils.config_genome_browser import Config_genome_browser
class BuildGenomeBrowserConfigForProject(IGFBaseProcess):
def param_defaults(self):
params_dict=super(BuildGenomeBrowserConfigForProject,self).param_defaults()
params_dict.update({
'ref_genome_type':'GENOME_TWOBIT_URI',
'collection_table':'experiment',
'use_ephemeral_space':0,
})
return params_dict
def run(self):
    '''
    Build a Biodalliance genome browser config file for the project and pass
    its path downstream via dataflow_params
    '''
try:
project_igf_id = self.param_required('project_igf_id')
experiment_igf_id = self.param_required('experiment_igf_id')
sample_igf_id = self.param_required('sample_igf_id')
igf_session_class = self.param_required('igf_session_class')
collection_type_list = self.param_required('collection_type_list')
ref_genome_type = self.param('ref_genome_type')
collection_table = self.param('collection_table')
pipeline_name = self.param_required('pipeline_name')
species_name = self.param_required('species_name')
base_work_dir = self.param_required('base_work_dir')
template_file = self.param_required('template_file')
use_ephemeral_space = self.param('use_ephemeral_space')
work_dir_prefix = \
os.path.join(
base_work_dir,
project_igf_id,
sample_igf_id,
experiment_igf_id)
work_dir = \
self.get_job_work_dir(work_dir=work_dir_prefix) # get a run work dir
output_file = \
os.path.join(
work_dir,
os.path.basename(template_file)) # get output file name
cg = \
Config_genome_browser(
dbsession_class=igf_session_class,
project_igf_id=project_igf_id,
collection_type_list=collection_type_list,
pipeline_name=pipeline_name,
collection_table=collection_table,
species_name=species_name,
use_ephemeral_space=use_ephemeral_space,
ref_genome_type=ref_genome_type)
cg.build_biodalliance_config(
template_file=template_file,
output_file=output_file)
if os.path.exists(output_file):
self.param('dataflow_params',{'genome_browser_config':output_file}) # populate dataflow if the output file found
else:
self.param('dataflow_params',{'genome_browser_config':''}) # send empty string to dataflow
message = \
'Generated genome browser config for {0}: {1}'.\
format(
project_igf_id,
sample_igf_id)
self.post_message_to_slack(message,reaction='pass') # send log to slack
self.post_message_to_ms_team(
message=message,
reaction='pass')
except Exception as e:
message = \
'project: {2}, sample:{3}, Error in {0}: {1}'.format(
self.__class__.__name__,
e,
project_igf_id,
sample_igf_id)
self.warning(message)
self.post_message_to_slack(message,reaction='fail') # post msg to slack for failed jobs
self.post_message_to_ms_team(
message=message,
reaction='fail')
raise | imperial-genomics-facility/data-management-python | ehive/runnable/process/alignment/BuildGenomeBrowserConfigForProject.py | Python | apache-2.0 | 3,272 |
#!/bin/sh
"""": # -*-python-*-
bup_python="$(dirname "$0")/bup-python" || exit $?
exec "$bup_python" "$0" ${1+"$@"}
"""
# end of bup preamble
import sys, os, select, getopt, socket, subprocess, fcntl
from bup import options, path
from bup.helpers import *
optspec = """
bup daemon [options...] -- [bup-server options...]
--
l,listen ip address to listen on, defaults to *
p,port port to listen on, defaults to 1982
"""
o = options.Options(optspec, optfunc=getopt.getopt)
(opt, flags, extra) = o.parse(sys.argv[1:])
host = opt.listen
port = opt.port and int(opt.port) or 1982
socks = []
e = None
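# Try to create, bind and listen on a socket for every address family that
# getaddrinfo returns (typically IPv4 and IPv6); keep the ones that succeed.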
for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
af, socktype, proto, canonname, sa = res
try:
s = socket.socket(af, socktype, proto)
except socket.error as e:
continue
try:
if af == socket.AF_INET6:
log("bup daemon: listening on [%s]:%s\n" % sa[:2])
else:
log("bup daemon: listening on %s:%s\n" % sa[:2])
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(sa)
s.listen(1)
fcntl.fcntl(s.fileno(), fcntl.F_SETFD, fcntl.FD_CLOEXEC)
except socket.error as e:
s.close()
continue
socks.append(s)
if not socks:
log('bup daemon: listen socket: %s\n' % e.args[1])
sys.exit(1)
try:
while True:
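        # Wait (with a 60 s select timeout) for a client, then hand the
        # accepted socket to a 'bup mux -- bup server' subprocess via
        # duplicated file descriptors.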
[rl,wl,xl] = select.select(socks, [], [], 60)
for l in rl:
s, src = l.accept()
try:
log("Socket accepted connection from %s\n" % (src,))
fd1 = os.dup(s.fileno())
fd2 = os.dup(s.fileno())
s.close()
sp = subprocess.Popen([path.exe(), 'mux', '--',
path.exe(), 'server']
+ extra, stdin=fd1, stdout=fd2)
finally:
os.close(fd1)
os.close(fd2)
finally:
for l in socks:
l.shutdown(socket.SHUT_RDWR)
l.close()
debug1("bup daemon: done")
| tjanez/bup | cmd/daemon-cmd.py | Python | lgpl-2.1 | 2,128 |
from go.vumitools.tests.helpers import djangotest_imports
with djangotest_imports(globals()):
from django.core.urlresolvers import reverse
from go.apps.tests.view_helpers import AppViewsHelper
from go.base.tests.helpers import GoDjangoTestCase
class TestHttpApiNoStreamViews(GoDjangoTestCase):
def setUp(self):
self.app_helper = self.add_helper(AppViewsHelper(u'http_api_nostream'))
self.client = self.app_helper.get_client()
def test_show_stopped(self):
"""
Test showing the conversation
"""
conv_helper = self.app_helper.create_conversation_helper(
name=u"myconv")
response = self.client.get(conv_helper.get_view_url('show'))
self.assertContains(response, u"<h1>myconv</h1>")
def test_show_running(self):
"""
Test showing the conversation
"""
conv_helper = self.app_helper.create_conversation_helper(
name=u"myconv", started=True)
response = self.client.get(conv_helper.get_view_url('show'))
self.assertContains(response, u"<h1>myconv</h1>")
def test_edit_view(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api_nostream-api_tokens': 'token',
'http_api_nostream-push_message_url': 'http://messages/',
'http_api_nostream-push_event_url': 'http://events/',
'http_api_nostream-metric_store': 'foo_metric_store',
})
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api_nostream': {
'push_event_url': 'http://events/',
'push_message_url': 'http://messages/',
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': False,
'ignore_messages': False,
'content_length_limit': None,
}
})
self.assertEqual(conversation.config, {})
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertContains(response, 'http://events/')
self.assertContains(response, 'http://messages/')
self.assertContains(response, 'foo_metric_store')
self.assertEqual(response.status_code, 200)
def test_edit_view_no_push_urls(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api_nostream-api_tokens': 'token',
'http_api_nostream-push_message_url': '',
'http_api_nostream-push_event_url': '',
'http_api_nostream-metric_store': 'foo_metric_store',
})
self.assertEqual(
response.context['edit_forms'][0].errors, {
'push_message_url': [
u'This field is required unless messages are ignored.'],
'push_event_url': [
u'This field is required unless events are ignored.'],
})
def test_edit_view_ignore_messages(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api_nostream-api_tokens': 'token',
'http_api_nostream-push_message_url': '',
'http_api_nostream-push_event_url': 'http://events/',
'http_api_nostream-metric_store': 'foo_metric_store',
'http_api_nostream-ignore_messages': 'on',
})
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api_nostream': {
'push_event_url': 'http://events/',
'push_message_url': None,
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': False,
'ignore_messages': True,
'content_length_limit': None,
}
})
self.assertEqual(conversation.config, {})
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertContains(response, 'foo_metric_store')
self.assertEqual(response.status_code, 200)
def test_edit_view_ignore_events(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api_nostream-api_tokens': 'token',
'http_api_nostream-push_message_url': 'http://messages/',
'http_api_nostream-push_event_url': '',
'http_api_nostream-metric_store': 'foo_metric_store',
'http_api_nostream-ignore_events': 'on',
})
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api_nostream': {
'push_event_url': None,
'push_message_url': 'http://messages/',
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': True,
'ignore_messages': False,
'content_length_limit': None,
}
})
self.assertEqual(conversation.config, {})
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertContains(response, 'foo_metric_store')
self.assertEqual(response.status_code, 200)
def test_edit_view_content_length_limit(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api_nostream-api_tokens': 'token',
'http_api_nostream-push_message_url': 'http://messages/',
'http_api_nostream-push_event_url': 'http://events/',
'http_api_nostream-metric_store': 'foo_metric_store',
'http_api_nostream-content_length_limit': '160',
})
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api_nostream': {
'push_event_url': 'http://events/',
'push_message_url': 'http://messages/',
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': False,
'ignore_messages': False,
'content_length_limit': 160,
}
})
# Now unset the limit
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertContains(response, '160')
self.assertEqual(response.status_code, 200)
response = self.client.post(conv_helper.get_view_url('edit'), {
'http_api_nostream-api_tokens': 'token',
'http_api_nostream-push_message_url': 'http://messages/',
'http_api_nostream-push_event_url': 'http://events/',
'http_api_nostream-metric_store': 'foo_metric_store',
'http_api_nostream-content_length_limit': '',
})
self.assertRedirects(response, conv_helper.get_view_url('show'))
reloaded_conv = conv_helper.get_conversation()
self.assertEqual(reloaded_conv.config, {
'http_api_nostream': {
'push_event_url': 'http://events/',
'push_message_url': 'http://messages/',
'api_tokens': ['token'],
'metric_store': 'foo_metric_store',
'ignore_events': False,
'ignore_messages': False,
'content_length_limit': None,
}
})
def test_get_edit_view_no_config(self):
conv_helper = self.app_helper.create_conversation_helper()
conversation = conv_helper.get_conversation()
self.assertEqual(conversation.config, {})
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertEqual(response.status_code, 200)
def test_get_edit_view_help(self):
conv_helper = self.app_helper.create_conversation_helper(config={
'http_api_nostream': {
'api_tokens': ['token1234'],
},
})
conversation = conv_helper.get_conversation()
response = self.client.get(conv_helper.get_view_url('edit'))
self.assertEqual(response.status_code, 200)
self.assertContains(response, "\n".join([
" $ curl -X PUT \\",
" --user 'test-0-user:token1234' \\",
" --data '{\"in_reply_to\": "
"\"59b37288d8d94e42ab804158bdbf53e5\", \\",
" \"to_addr\": \"27761234567\", \\",
" \"to_addr_type\": \"msisdn\", \\",
" \"content\": \"This is an outgoing SMS!\"}' \\",
" https://go.vumi.org/api/v1/go/http_api_nostream/%s/"
"messages.json \\",
" -vvv",
]) % conversation.key)
self.assertContains(response, "\n".join([
" $ curl -X PUT \\",
" --user 'test-0-user:token1234' \\",
" --data '[[\"total_pings\", 1200, \"MAX\"]]' \\",
" https://go.vumi.org/api/v1/go/http_api_nostream/%s/"
"metrics.json \\",
" -vvv",
]) % conversation.key)
self.assertContains(response, "\n".join([
" <li>The <em>username</em> is your Vumi Go account key",
" (<code>test-0-user</code>). It can be found at the",
" bottom of your <a href=\"%s\">Account",
" Details</a> page.",
" </li>",
]) % reverse('account:details'))
self.assertContains(response, "\n".join([
" <li>The <em>password</em> is the API token you specified for"
" this",
" conversation.",
" </li>",
]))
self.assertContains(response, "\n".join([
"<p>",
" The URLs for sending messages and firing metrics contain",
" the <em>conversation id</em> (the string of letters and digits"
" near",
" the end of the URL). This conversation's id is",
" <code>%s</code>.",
"</p>",
]) % conversation.key)
| praekelt/vumi-go | go/apps/http_api_nostream/tests/test_views.py | Python | bsd-3-clause | 11,177 |
"""Our app specific CEF loggers."""
from django.conf import settings
from django.http import HttpRequest
from cef import log_cef as _log_cef
heka = settings.HEKA
class CEFLogger:
"""Abstract base CEF logger.
Class attributes to set in a concrete class:
**sig_prefix**
Prefix to the CEF signature. Example: RECEIPT
**cs2label**
cs2label parameter. Example: ReceiptTransaction
**msg_prefix**
Prefix to all CEF log messages. Example: Receipt
**default_severity**
If set, this should be a 0-10 int.
"""
sig_prefix = ''
cs2label = None
msg_prefix = ''
default_severity = None
def log(self, environ, app, msg, longer, severity=None,
extra_kwargs=None):
"""Log something important using the CEF library.
Parameters:
**environ**
Typically a Django request object. It can also be
a plain dict.
**app**
A webapp object.
**msg**
A short message about the incident.
**longer**
            A more descriptive message about the incident.
**severity=None**
A 0-10 int to override the default severity.
**extra_kwargs**
A dict to override anything sent to the CEF library.
"""
c = {'cef.product': getattr(settings, 'CEF_PRODUCT', 'AMO'),
'cef.vendor': getattr(settings, 'CEF_VENDOR', 'Mozilla'),
'cef.version': getattr(settings, 'CEF_VERSION', '0'),
'cef.device_version': getattr(settings,
'CEF_DEVICE_VERSION',
'0'),
'cef.file': getattr(settings, 'CEF_FILE', 'syslog'), }
user = getattr(environ, 'user', None)
# Sometimes app is a string, eg: "unknown". Boo!
try:
app_str = app.pk
except AttributeError:
app_str = app
kwargs = {'username': getattr(user, 'name', ''),
'suid': str(getattr(user, 'pk', '')),
'signature': '%s%s' % (self.sig_prefix, msg.upper()),
'msg': longer, 'config': c,
# Until the CEF log can cope with unicode app names, just
# use primary keys.
'cs2': app_str, 'cs2Label': self.cs2label}
if extra_kwargs:
kwargs.update(extra_kwargs)
if not severity:
severity = self.default_severity
if not severity:
raise ValueError('CEF severity was not defined')
if isinstance(environ, HttpRequest):
environ = environ.META.copy()
if settings.USE_HEKA_FOR_CEF:
return heka.cef('%s %s' % (self.msg_prefix, msg), severity,
environ, **kwargs)
else:
return _log_cef('%s %s' % (self.msg_prefix, msg),
severity, environ, **kwargs)
class ReceiptCEFLogger(CEFLogger):
sig_prefix = 'RECEIPT'
cs2label = 'ReceiptTransaction'
msg_prefix = 'Receipt'
default_severity = 5
receipt_cef = ReceiptCEFLogger()
class AppPayCEFLogger(CEFLogger):
"""
Anything to do with app payments.
"""
sig_prefix = 'APP_PAY'
cs2label = 'AppPayment'
msg_prefix = 'AppPayment'
default_severity = 5
app_pay_cef = AppPayCEFLogger()
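# A minimal usage sketch (not part of the original module): the real call sites
# live elsewhere in the codebase, so `request` and `webapp` are hypothetical here.
#
#     receipt_cef.log(request, webapp, 'sale', 'Receipt issued for app purchase')
#     app_pay_cef.log(request, webapp, 'refund', 'Refund processed', severity=7)
#
# `request` may be a Django HttpRequest or a plain dict of WSGI environ values,
# and `webapp` any object with a `pk` attribute (or a plain string).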
| shahbaz17/zamboni | lib/cef_loggers.py | Python | bsd-3-clause | 3,376 |
import sys
from ga144 import GA144
# Output a square wave on node 708's pin 1.
# The frequency is about 20 MHz.
prg708 = """
@p a! @p @p \ a points to the io port
io
2 \ stack literal 2 and 3
3
over over over \ replicate 2,3 all down the stack
over over over
over over
: again
! jump again \ write top-of-stack to io port
"""
if __name__ == '__main__':
g = GA144()
g.node['708'].load(prg708)
g.download(sys.argv[1])
| jamesbowman/ga144tools | src/square1.py | Python | mit | 493 |
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import getopt, glob, os, sys
def main(argv):
f1 = ""
f2 = ""
    # Parse the --f1/--f2 command line options
    try:
        opts, args = getopt.getopt(argv, "h", ["f1=", "f2="])
    except getopt.GetoptError:
        print 'The file options for diff_xml_files.py were not correctly specified.'
        print 'To see a full list of options try:'
        print '  $ python diff_xml_files.py -h'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'Options:'
            print ' --f1, --f2  The two list files to compare.'
sys.exit()
elif opt in ('--f1'):
# check if file exists.
if os.path.exists(arg):
f1 = arg
else:
print 'Error: Argument must be a file name for --f1.'
sys.exit()
elif opt in ('--f2'):
# check if file exists.
if os.path.exists(arg):
f2 = arg
else:
print 'Error: Argument must be a file name for --f2.'
sys.exit()
# Required fields to run the script.
if f1 == "" or not os.path.exists(f1):
print 'Error: The file path option must be supplied: --f1.'
sys.exit()
if f2 == "" or not os.path.exists(f2):
print 'Error: The file path option must be supplied: --f2.'
sys.exit()
missing_in_f1 = []
missing_in_f2 = []
found_in_both = []
with open(f1) as f:
content_f1 = f.readlines()
set_f1 = set(content_f1)
with open(f2) as f:
content_f2 = f.readlines()
set_f2 = set(content_f2)
missing_in_f1 = set_f2.difference(set_f1)
missing_in_f2 = set_f1.difference(set_f2)
found_in_both = set_f1.intersection(set_f2)
print ""
print "Missing files in " + f1
for f1_name in missing_in_f1:
print " + " + f1_name.strip()
print ""
print "Missing files in " + f2
for f2_name in missing_in_f2:
print " + " + f2_name.strip()
offset = 40
print ""
print "XML Summary"
print (" - Found in both:").ljust(offset) + str(len(found_in_both))
print (" - " + f1 + " diff set vs list:").ljust(offset) + str(len(content_f1) - len(set_f1))
print (" - " + f2 + " diff set vs list:").ljust(offset) + str(len(content_f2) - len(set_f2))
print (" - " + f1 + " missing:").ljust(offset) + str(len(missing_in_f1))
print (" - " + f2 + " missing:").ljust(offset) + str(len(missing_in_f2))
if __name__ == "__main__":
main(sys.argv[1:])
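# Example invocation (hypothetical file names; both arguments must be existing files):
#
#     $ python diff_xml_files.py --f1 expected_files.txt --f2 actual_files.txt
#
# Each input is read as one file name per line; the script reports names missing
# from either list and prints a short summary of the set differences.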
| innovimax/vxquery | vxquery-benchmark/src/main/resources/util/diff_xml_files.py | Python | apache-2.0 | 3,401 |
import wx
import threading
import time
import json
import sys
from loginDialog import LoginDialog
from udpsocket import Udpsocket
reload(sys)
sys.setdefaultencoding('utf8')
def call_after(func):
def _wrapper(*args, **kwargs):
return wx.CallAfter(func, *args, **kwargs)
return _wrapper
class ClientFrame(wx.Frame):
    CLIENT_LOGIN = 100
    CLIENT_LOGOUT = 101
    BUTTON_SEND = 200
    BUTTON_LOGIN = 201
    BUTTON_REFRESH = 202
    LIST_NAME = 500
curr_addr = ""
curr_port = 0
myname = ""
def __init__(self, parent, title, pos, size):
wx.Frame.__init__(self, parent, title=title, size=size, pos=pos)
#status bar
statusBar = self.CreateStatusBar()
#menu
self.clientMenu = wx.Menu()
self.clientMenu.Append(self.CLIENT_LOGIN, "&Login", "Login chatroom")
self.clientMenu.Append(self.CLIENT_LOGOUT, "L&ogout", "Logout chatroom")
menuBar = wx.MenuBar()
menuBar.Append(self.clientMenu, "&Server")
self.SetMenuBar(menuBar)
self.change_status_bar(False)
self.Bind(wx.EVT_MENU, self.menu_login_action, id=self.CLIENT_LOGIN)
#text box
self.txt_recv = wx.TextCtrl(self, pos=(2, 0), size=(400, 200), style=wx.TE_MULTILINE + wx.TE_READONLY)
self.txt_send = wx.TextCtrl(self, pos=(2, 210), size=(400, 130), style=wx.TE_MULTILINE)
#button
self.btn_send = wx.Button(self, self.BUTTON_SEND, "send", (330, 345), (60, 30))
#btn_send = wx.Button(self, self.BUTTON_REFRESH, "send", (420, 345), (60, 30))
self.btn_send.Bind(wx.EVT_BUTTON, self.btn_send_action, id=self.BUTTON_SEND)
#listbox
        self.choices = []
        self.listName = wx.ListBox(self, self.LIST_NAME, (410, 0), (130, 341), [])
self.listName.Bind(wx.EVT_LISTBOX, self.listName_action)
#login dialog
self.mdialog = LoginDialog(self, wx.ID_ANY, "Login")
#udp
self.udpsocket = Udpsocket()
#thread
threading.Thread(target = self.thread_recv_worker, args=(self.udpsocket, )).start()
threading.Thread(target = self.thread_send_worker, args=(self.udpsocket, )).start()
def change_status_bar(self, islogin):
self.clientMenu.Enable(self.CLIENT_LOGIN, not islogin)
self.clientMenu.Enable(self.CLIENT_LOGOUT, islogin)
        if not islogin:
self.SetStatusText("No Login")
def menu_login_action(self, event):
self.mdialog.ShowModal()
def btn_send_action(self, event):
msg = self.txt_send.GetValue()
if self.curr_addr.strip():
self.txt_recv.AppendText("me: " + msg)
self.udpsocket.sendtoOther(self.curr_addr, self.curr_port, self.myname + ": " + msg)
self.txt_send.Clear()
else:
print("no send")
def listName_action(self, event):
select_index = event.GetSelection()
self.curr_addr = self.choices[select_index]["addr"]
self.curr_port = self.choices[select_index]["port"]
def thread_recv_worker(self, udpsocket):
while(True):
recv = udpsocket.recvfrom()
msg = recv[0]
if msg.find("ser") != -1:
me = msg[3:]
self.deal_ser(me)
else:
self.deal_cli(msg)
def thread_send_worker(self, udpsocket):
while (True):
time.sleep(5)
udpsocket.sendtoServer("get ")
@call_after
def deal_ser(self, data):
print(data)
self.listName.Clear()
namelist = []
myjdata = json.loads(data)
if myjdata["type"] == "hello":
self.SetStatusText("Login " + myjdata["name"])
self.change_status_bar(True)
self.myname = myjdata["name"]
elif myjdata["type"] == "bye":
            self.change_status_bar(False)
self.myname = ""
else:
for d in myjdata["data"]:
self.choices.append(d)
namelist.append(d["name"])
self.listName.InsertItems(namelist, 0)
@call_after
def deal_cli(self, data):
        self.txt_recv.AppendText(data + "\n")
#main
app = wx.App()
frame = ClientFrame(None,"udp chatroom (client)", (100,100), (564, 465))
frame.Show(True)
app.MainLoop()
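# Rough shape of the server payload this client expects (inferred from deal_ser
# above, not from a protocol spec): server messages carry a "ser" prefix followed
# by JSON, for example
#
#     ser{"type": "hello", "name": "alice"}
#     ser{"type": "bye"}
#     ser{"type": "list", "data": [{"name": "bob", "addr": "10.0.0.2", "port": 9000}]}
#
# where any "type" other than "hello"/"bye" is treated as a user list; messages
# without the "ser" prefix are shown directly in the chat window.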
| LiSheep/udpchat | src/client-python/client.py | Python | gpl-3.0 | 3,906 |
#!/usr/bin/env python3
import glob
import io
import os
import subprocess
import sys
import time
import xml.etree.ElementTree as ET
suffix = sys.argv[1]
cmd = sys.argv[2:]
# get the list of input files to process
infiles = sorted(glob.glob('inputs/*.' + suffix))
testsuites = ET.Element('testsuites')
suite = ET.SubElement(testsuites, 'testsuite')
(tests, failures, disabled, skipped, errors) = (0, 0, 0, 0, 0)
totaltime = 0.0
prevpassed = True
for infile in infiles:
if not prevpassed:
print()
outfile = infile[:-len('.' + suffix)] + '.expected'
actualfile = infile[:-len('.' + suffix)] + '.actual'
# get the input
fp = open(infile, 'rb')
input = fp.read()
fp.close()
# get the expected output
fp = open(outfile, 'rb')
expected = fp.read()
fp.close()
# report the result in XML
case = ET.SubElement(suite, 'testcase')
case.set('name', infile)
# run the program to get the actual output
body = ' '.join(cmd) + ' < ' + infile
print(body)
body += '\n'
start = time.time()
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(actual, stderr) = proc.communicate(input)
seconds = time.time() - start
fp = open(actualfile, 'wb')
fp.write(actual)
fp.close()
# check the output
passed = True
if proc.returncode != 0:
msg = '\n!!! returned non-zero status code {}'.format(proc.returncode)
print(msg)
body += msg + '\n'
passed = False
if stderr != b'':
msg = '\n!!! stderr should have been empty, but instead the program printed:'
lines = stderr.split(b'\n')
if len(lines) > 0 and lines[-1] == b'':
lines = lines[:-1]
for line in lines:
msg += '\n> ' + str(line, 'utf-8')
print(msg)
body += msg + '\n'
passed = False
if actual != expected:
msg = '\n!!! output is incorrect:\n'
diff = ['diff', actualfile, outfile]
msg += ' '.join(diff)
proc = subprocess.Popen(diff, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
(output, errout) = proc.communicate(b'')
if len(output) > 0 and output[-1] == '\n':
output = output[:-1]
msg += str(output, 'utf-8')
print(msg)
body += msg + '\n'
passed = False
os.remove(actualfile)
tests += 1
totaltime += seconds
case.set('time', str(time.time() - start))
if not passed:
failures += 1
case.set('status', 'failed')
failure = ET.SubElement(case, 'failure')
failure.set('type', 'failure')
failure.text = body
prevpassed = passed
suite.set('tests', str(tests))
suite.set('failures', str(failures))
suite.set('disabled', str(disabled))
suite.set('skipped', str(skipped))
suite.set('errors', str(errors))
suite.set('time', str(totaltime))
testsuites.set('tests', str(tests))
testsuites.set('failures', str(failures))
testsuites.set('disabled', str(disabled))
testsuites.set('skipped', str(skipped))
testsuites.set('errors', str(errors))
testsuites.set('time', str(totaltime))
tree = ET.ElementTree(element=testsuites)
tree.write('test_detail.xml', encoding='utf-8', xml_declaration=True)
print('\nPassed {}/{} tests in {:.2} seconds'.format(tests-failures, tests, totaltime))
| russross/codegrinder | files/riscvinout/lib/inout-runner.py | Python | agpl-3.0 | 3,381 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from unittest import TestCase
from libcloud.compute.types import (
Provider,
NodeState,
StorageVolumeState,
VolumeSnapshotState,
Type,
)
class TestType(Type):
INUSE = "inuse"
NOTINUSE = "NOTINUSE"
class TestTestType(TestCase):
model = TestType
def test_provider_tostring(self):
self.assertEqual(Provider.tostring(TestType.INUSE), "INUSE")
self.assertEqual(Provider.tostring(TestType.NOTINUSE), "NOTINUSE")
def test_provider_fromstring(self):
self.assertEqual(TestType.fromstring("inuse"), TestType.INUSE)
self.assertEqual(TestType.fromstring("NOTINUSE"), TestType.NOTINUSE)
def test_provider_fromstring_caseinsensitive(self):
self.assertEqual(TestType.fromstring("INUSE"), TestType.INUSE)
self.assertEqual(TestType.fromstring("notinuse"), TestType.NOTINUSE)
def test_compare_as_string(self):
self.assertTrue(TestType.INUSE == "inuse")
self.assertFalse(TestType.INUSE == "bar")
class TestProvider(TestCase):
def test_provider_tostring(self):
self.assertEqual(Provider.tostring(Provider.RACKSPACE), "RACKSPACE")
def test_provider_fromstring(self):
self.assertEqual(Provider.fromstring("rackspace"), Provider.RACKSPACE)
class TestNodeState(TestCase):
def test_nodestate_tostring(self):
self.assertEqual(NodeState.tostring(NodeState.RUNNING), "RUNNING")
def test_nodestate_fromstring(self):
self.assertEqual(NodeState.fromstring("running"), NodeState.RUNNING)
class TestStorageVolumeState(TestCase):
def test_storagevolumestate_tostring(self):
self.assertEqual(
StorageVolumeState.tostring(StorageVolumeState.AVAILABLE), "AVAILABLE"
)
def test_storagevolumestate_fromstring(self):
self.assertEqual(
StorageVolumeState.fromstring("available"), StorageVolumeState.AVAILABLE
)
class TestVolumeSnapshotState(TestCase):
def test_volumesnapshotstate_tostring(self):
self.assertEqual(
VolumeSnapshotState.tostring(VolumeSnapshotState.AVAILABLE), "AVAILABLE"
)
def test_volumesnapshotstate_fromstring(self):
self.assertEqual(
VolumeSnapshotState.fromstring("available"), VolumeSnapshotState.AVAILABLE
)
if __name__ == "__main__":
sys.exit(unittest.main())
| apache/libcloud | libcloud/test/compute/test_types.py | Python | apache-2.0 | 3,169 |
import logging
import shlex
import warnings
from flask import request
from jinja2 import Markup
from flask_admin.base import BaseView, expose
from flask_admin.babel import gettext
from flask_admin._compat import VER
# Set up logger
log = logging.getLogger("flask-admin.redis")
class CommandError(Exception):
"""
RedisCli error exception.
"""
pass
class TextWrapper(str):
"""
Small text wrapper for result formatter to distinguish between
different string types.
"""
pass
class RedisCli(BaseView):
"""
Simple redis console.
To use it, simply pass `Redis` connection object to the constructor.
"""
shlex_check = True
"""
shlex from stdlib does not work with unicode on 2.7.2 and lower.
If you want to suppress warning, set this attribute to False.
"""
remapped_commands = {
'del': 'delete'
}
"""
List of redis remapped commands.
"""
excluded_commands = set(('pubsub', 'set_response_callback', 'from_url'))
"""
List of excluded commands.
"""
def __init__(self, redis,
name=None, category=None, endpoint=None, url=None):
"""
Constructor.
:param redis:
Redis connection
:param name:
View name. If not provided, will use the model class name
:param category:
View category
:param endpoint:
Base endpoint. If not provided, will use the model name + 'view'.
For example if model name was 'User', endpoint will be
'userview'
:param url:
Base URL. If not provided, will use endpoint as a URL.
"""
super(RedisCli, self).__init__(name, category, endpoint, url)
self.redis = redis
self.commands = {}
self._inspect_commands()
self._contribute_commands()
if self.shlex_check and VER < (2, 7, 3):
warnings.warn('Warning: rediscli uses shlex library and it does '
'not work with unicode until Python 2.7.3. To '
'remove this warning, upgrade to Python 2.7.3 or '
'suppress it by setting shlex_check attribute '
'to False.')
def _inspect_commands(self):
"""
Inspect connection object and extract command names.
"""
for name in dir(self.redis):
if not name.startswith('_'):
attr = getattr(self.redis, name)
if callable(attr) and name not in self.excluded_commands:
doc = (getattr(attr, '__doc__', '') or '').strip()
self.commands[name] = (attr, doc)
for new, old in self.remapped_commands.items():
self.commands[new] = self.commands[old]
def _contribute_commands(self):
"""
Contribute custom commands.
"""
self.commands['help'] = (self._cmd_help, 'Help!')
def _execute_command(self, name, args):
"""
Execute single command.
:param name:
Command name
:param args:
Command arguments
"""
# Do some remapping
new_cmd = self.remapped_commands.get(name)
if new_cmd:
name = new_cmd
# Execute command
if name not in self.commands:
return self._error(gettext('Cli: Invalid command.'))
handler, _ = self.commands[name]
return self._result(handler(*args))
def _parse_cmd(self, cmd):
"""
Parse command by using shlex module.
:param cmd:
Command to parse
"""
if VER < (2, 7, 3):
# shlex can't work with unicode until 2.7.3
return tuple(x.decode('utf-8') for x in shlex.split(cmd.encode('utf-8')))
return tuple(shlex.split(cmd))
def _error(self, msg):
"""
Format error message as HTTP response.
:param msg:
Message to format
"""
return Markup('<div class="error">%s</div>' % msg)
def _result(self, result):
"""
Format result message as HTTP response.
:param msg:
Result to format.
"""
return self.render('admin/rediscli/response.html',
type_name=lambda d: type(d).__name__,
result=result)
# Commands
def _cmd_help(self, *args):
"""
Help command implementation.
"""
if not args:
help = 'Usage: help <command>.\nList of supported commands: '
help += ', '.join(n for n in sorted(self.commands))
return TextWrapper(help)
cmd = args[0]
if cmd not in self.commands:
raise CommandError('Invalid command.')
help = self.commands[cmd][1]
if not help:
return TextWrapper('Command does not have any help.')
return TextWrapper(help)
# Views
@expose('/')
def console_view(self):
"""
Console view.
"""
return self.render('admin/rediscli/console.html')
@expose('/run/', methods=('POST',))
def execute_view(self):
"""
AJAX API.
"""
try:
cmd = request.form.get('cmd').lower()
if not cmd:
return self._error('Cli: Empty command.')
parts = self._parse_cmd(cmd)
if not parts:
return self._error('Cli: Failed to parse command.')
return self._execute_command(parts[0], parts[1:])
except CommandError as err:
return self._error('Cli: %s' % err)
except Exception as ex:
log.exception(ex)
return self._error('Cli: %s' % ex)
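# A minimal registration sketch (assumes the usual Flask-Admin setup; `app` and the
# Redis connection details are hypothetical and not part of this module):
#
#     from flask_admin import Admin
#     from redis import Redis
#
#     admin = Admin(app, name='admin')
#     admin.add_view(RedisCli(Redis(), name='Redis CLI', category='Tools'))
#
# The view then serves the console at its endpoint and executes commands through
# the connection object inspected in _inspect_commands().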
| Widiot/simpleblog | venv/lib/python3.5/site-packages/flask_admin/contrib/rediscli.py | Python | mit | 5,966 |
from PyGMO.problem import base as base_problem
from PyKEP.core import epoch, fb_con, EARTH_VELOCITY, AU, MU_SUN
from PyKEP.planet import jpl_lp
from PyKEP.sims_flanagan import leg, spacecraft, sc_state
class mga_lt_nep(base_problem):
"""
This class is a PyGMO (http://esa.github.io/pygmo/) problem representing a low-thrust
interplanetary trajectory modelled as a Multiple Gravity Assist trajectory with sims_flanagan legs
- Yam, C.H., di Lorenzo, D., and Izzo, D., Low-Thrust Trajectory Design as a Constrained Global Optimization Problem, Proceedings of the Institution of Mechanical Engineers, Part G: Journal of Aerospace Engineering, 225(11), pp.1243-1251, 2011.
The decision vector (chromosome) is::
[t0] + [T1, mf1, Vxi1, Vyi1, Vzi1, Vxf1, Vyf1, Vzf1] + [T2, mf2, Vxi2, Vyi2, Vzi2, Vxf2, Vyf2, Vzf2] + ... + [throttles1] + [throttles2] + ...
.. note::
The resulting problem is non linearly constrained. The resulting trajectory is not time-bounded.
"""
def __init__(self,
seq=[jpl_lp('earth'), jpl_lp('venus'), jpl_lp('earth')],
n_seg=[10] * 2,
t0=[epoch(0), epoch(1000)],
tof=[[200, 500], [200, 500]],
vinf_dep=2.5,
vinf_arr=2.0,
mass=4000.0,
Tmax=1.0,
Isp=2000.0,
fb_rel_vel=6,
multi_objective=False,
high_fidelity=False):
"""
prob = mga_lt_nep(seq = [jpl_lp('earth'),jpl_lp('venus'),jpl_lp('earth')], n_seg = [10]*2,
t0 = [epoch(0),epoch(1000)], tof = [[200,500],[200,500]], Vinf_dep=2.5, Vinf_arr=2.0, mass=4000.0, Tmax=1.0, Isp=2000.0,
multi_objective = False, fb_rel_vel = 6, high_fidelity=False)
        - seq: list of PyKEP.planet defining the encounter sequence for the trajectory (including the initial planet)
- n_seg: list of integers containing the number of segments to be used for each leg (len(n_seg) = len(seq)-1)
- t0: list of PyKEP epochs defining the launch window
- tof: minimum and maximum time of each leg (days)
- vinf_dep: maximum launch hyperbolic velocity allowed (in km/sec)
- vinf_arr: maximum arrival hyperbolic velocity allowed (in km/sec)
- mass: spacecraft starting mass
- Tmax: maximum thrust
- Isp: engine specific impulse
- fb_rel_vel = determines the bounds on the maximum allowed relative velocity at all fly-bys (in km/sec)
- multi-objective: when True defines the problem as a multi-objective problem, returning total DV and time of flight
- high_fidelity = makes the trajectory computations slower, but actually dynamically feasible.
"""
# 1) We compute the problem dimensions .... and call the base problem constructor
self.__n_legs = len(seq) - 1
n_fb = self.__n_legs - 1
# 1a) The decision vector length
dim = 1 + self.__n_legs * 8 + sum(n_seg) * 3
# 1b) The total number of constraints (mismatch + fly-by + boundary + throttles
c_dim = self.__n_legs * 7 + n_fb * 2 + 2 + sum(n_seg)
# 1c) The number of inequality constraints (boundary + fly-by angle + throttles)
c_ineq_dim = 2 + n_fb + sum(n_seg)
# 1d) the number of objectives
f_dim = multi_objective + 1
# First we call the constructor for the base PyGMO problem
# As our problem is n dimensional, box-bounded (may be multi-objective), we write
# (dim, integer dim, number of obj, number of con, number of inequality con, tolerance on con violation)
super(mga_lt_nep, self).__init__(dim, 0, f_dim, c_dim, c_ineq_dim, 1e-4)
# 2) We then define some class data members
# public:
self.seq = seq
# private:
self.__n_seg = n_seg
self.__vinf_dep = vinf_dep * 1000
self.__vinf_arr = vinf_arr * 1000
self.__sc = spacecraft(mass, Tmax, Isp)
self.__leg = leg()
self.__leg.set_mu(MU_SUN)
self.__leg.set_spacecraft(self.__sc)
self.__leg.high_fidelity = high_fidelity
fb_rel_vel *= 1000
# 3) We compute the bounds
lb = [t0[0].mjd2000] + [0, mass / 2, -fb_rel_vel, -fb_rel_vel, -fb_rel_vel, -fb_rel_vel, -fb_rel_vel, -fb_rel_vel] * self.__n_legs + [-1, -1, -1] * sum(self.__n_seg)
ub = [t0[1].mjd2000] + [1, mass, fb_rel_vel, fb_rel_vel, fb_rel_vel, fb_rel_vel, fb_rel_vel, fb_rel_vel] * self.__n_legs + [1, 1, 1] * sum(self.__n_seg)
# 3a ... and account for the bounds on the vinfs......
lb[3:6] = [-self.__vinf_dep] * 3
ub[3:6] = [self.__vinf_dep] * 3
lb[-sum(self.__n_seg) * 3 - 3:-sum(self.__n_seg) * 3] = [-self.__vinf_arr] * 3
ub[-sum(self.__n_seg) * 3 - 3:-sum(self.__n_seg) * 3] = [self.__vinf_arr] * 3
# 3b... and for the time of flight
lb[1:1 + 8 * self.__n_legs:8] = [el[0] for el in tof]
ub[1:1 + 8 * self.__n_legs:8] = [el[1] for el in tof]
# 4) And we set the bounds
self.set_bounds(lb, ub)
# Objective function
def _objfun_impl(self, x):
if self.f_dimension == 1:
return (-x[2 + (self.__n_legs - 1) * 8],)
else:
return (-x[2 + (self.__n_legs - 1) * 8], sum(x[1:1 + 8 * self.__n_legs:8]))
# Constraints function
def _compute_constraints_impl(self, x):
# 1 - We decode the chromosome extracting the time of flights
T = list([0] * (self.__n_legs))
for i in range(self.__n_legs):
T[i] = x[1 + i * 8]
# 2 - We compute the epochs and ephemerides of the planetary encounters
t_P = list([None] * (self.__n_legs + 1))
r_P = list([None] * (self.__n_legs + 1))
v_P = list([None] * (self.__n_legs + 1))
for i, planet in enumerate(self.seq):
t_P[i] = epoch(x[0] + sum(T[0:i]))
r_P[i], v_P[i] = self.seq[i].eph(t_P[i])
# 3 - We iterate through legs to compute mismatches and throttles constraints
ceq = list()
cineq = list()
m0 = self.__sc.mass
for i in range(self.__n_legs):
# First Leg
v = [a + b for a, b in zip(v_P[i], x[(3 + i * 8):(6 + i * 8)])]
x0 = sc_state(r_P[i], v, m0)
v = [a + b for a, b in zip(v_P[i + 1], x[(6 + i * 8):(9 + i * 8)])]
xe = sc_state(r_P[i + 1], v, x[2 + i * 8])
throttles = x[(1 + 8 * self.__n_legs + 3 * sum(self.__n_seg[:i])):(1 + 8 * self.__n_legs + 3 * sum(self.__n_seg[:i]) + 3 * self.__n_seg[i])]
self.__leg.set(t_P[i], x0, throttles, t_P[i + 1], xe)
# update mass!
m0 = x[2 + 8 * i]
ceq.extend(self.__leg.mismatch_constraints())
cineq.extend(self.__leg.throttles_constraints())
# Adding the boundary constraints
# departure
v_dep_con = (x[3] ** 2 + x[4] ** 2 + x[5] ** 2 - self.__vinf_dep ** 2) / (EARTH_VELOCITY ** 2)
# arrival
v_arr_con = (x[6 + (self.__n_legs - 1) * 8] ** 2 + x[7 + (self.__n_legs - 1) * 8] ** 2 + x[8 + (self.__n_legs - 1) * 8] ** 2 - self.__vinf_arr ** 2) / (EARTH_VELOCITY ** 2)
cineq.append(v_dep_con * 100)
cineq.append(v_arr_con * 100)
# We add the fly-by constraints
for i in range(self.__n_legs - 1):
DV_eq, alpha_ineq = fb_con(x[6 + i * 8:9 + i * 8], x[11 + i * 8:14 + i * 8], self.seq[i + 1])
ceq.append(DV_eq / (EARTH_VELOCITY ** 2))
cineq.append(alpha_ineq)
# Making the mismatches non dimensional
for i in range(self.__n_legs):
ceq[0 + i * 7] /= AU
ceq[1 + i * 7] /= AU
ceq[2 + i * 7] /= AU
ceq[3 + i * 7] /= EARTH_VELOCITY
ceq[4 + i * 7] /= EARTH_VELOCITY
ceq[5 + i * 7] /= EARTH_VELOCITY
ceq[6 + i * 7] /= self.__sc.mass
# We assemble the constraint vector
retval = list()
retval.extend(ceq)
retval.extend(cineq)
return retval
# And this helps visualizing the trajectory
def plot(self, x, ax=None):
"""
ax = prob.plot(x, ax=None)
- x: encoded trajectory
- ax: matplotlib axis where to plot. If None figure and axis will be created
- [out] ax: matplotlib axis where to plot
Plots the trajectory represented by a decision vector x on the 3d axis ax
Example::
ax = prob.plot(x)
"""
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from PyKEP import epoch, AU
from PyKEP.sims_flanagan import sc_state
from PyKEP.orbit_plots import plot_planet, plot_sf_leg
# Creating the axis if necessary
if ax is None:
mpl.rcParams['legend.fontsize'] = 10
fig = plt.figure()
axis = fig.gca(projection='3d')
else:
axis = ax
# Plotting the Sun ........
axis.scatter([0], [0], [0], color='y')
# Plotting the legs .......
# 1 - We decode the chromosome extracting the time of flights
T = list([0] * (self.__n_legs))
for i in range(self.__n_legs):
T[i] = x[1 + i * 8]
# 2 - We compute the epochs and ephemerides of the planetary encounters
t_P = list([None] * (self.__n_legs + 1))
r_P = list([None] * (self.__n_legs + 1))
v_P = list([None] * (self.__n_legs + 1))
for i, planet in enumerate(self.seq):
t_P[i] = epoch(x[0] + sum(T[0:i]))
r_P[i], v_P[i] = self.seq[i].eph(t_P[i])
# 3 - We iterate through legs to compute mismatches and throttles constraints
ceq = list()
cineq = list()
m0 = self.__sc.mass
for i in range(self.__n_legs):
# First Leg
v = [a + b for a, b in zip(v_P[i], x[(3 + i * 8):(6 + i * 8)])]
x0 = sc_state(r_P[i], v, m0)
            v = [a + b for a, b in zip(v_P[i + 1], x[(6 + i * 8):(9 + i * 8)])]
xe = sc_state(r_P[i + 1], v, x[2 + i * 8])
throttles = x[(1 + 8 * self.__n_legs + 3 * sum(self.__n_seg[:i])):(1 + 8 * self.__n_legs + 3 * sum(self.__n_seg[:i]) + 3 * self.__n_seg[i])]
self.__leg.set(t_P[i], x0, throttles, t_P[i + 1], xe)
# update mass!
m0 = x[2 + 8 * i]
plot_sf_leg(self.__leg, units=AU, N=10, ax=axis)
# Plotting planets
for i, planet in enumerate(self.seq):
plot_planet(planet, t_P[i], units=AU, legend=True, color=(0.7, 0.7, 1), ax = axis)
plt.show()
return axis
def high_fidelity(self, boolean):
"""
prob.high_fidelity(status)
- status: either True or False (True sets high fidelity on)
Sets the trajectory high fidelity mode
Example::
prob.high_fidelity(True)
"""
# We avoid here that objfun and constraint are kept that have been evaluated wrt a different fidelity
self.reset_caches()
# We set the propagation fidelity
self.__leg.high_fidelity = boolean
def ic_from_mga_1dsm(self, x):
"""
x_lt = prob.ic_from_mga_1dsm(x_mga)
- x_mga: compatible trajectory as encoded by an mga_1dsm problem
        Returns an initial guess for the low-thrust trajectory, converting the mga_1dsm solution x_mga. The user
is responsible that x_mga makes sense (i.e. it is a viable mga_1dsm representation). The conversion is done by importing in the
low-thrust encoding a) the launch date b) all the legs durations, c) the in and out relative velocities at each planet.
All throttles are put to zero.
Example::
x_lt= prob.ic_from_mga_1dsm(x_mga)
"""
        from math import pi, cos, sin, acos, log
from scipy.linalg import norm
from PyKEP import propagate_lagrangian, lambert_problem, DAY2SEC, fb_prop
retval = list([0.0] * self.dimension)
# 1 - we 'decode' the chromosome recording the various times of flight (days) in the list T
T = list([0] * (self.__n_legs))
for i in range(len(T)):
T[i] = log(x[2 + 4 * i])
total = sum(T)
T = [x[1] * time / total for time in T]
retval[0] = x[0]
for i in range(self.__n_legs):
retval[1 + 8 * i] = T[i]
retval[2 + 8 * i] = self.__sc.mass
# 2 - We compute the epochs and ephemerides of the planetary encounters
t_P = list([None] * (self.__n_legs + 1))
r_P = list([None] * (self.__n_legs + 1))
v_P = list([None] * (self.__n_legs + 1))
DV = list([None] * (self.__n_legs + 1))
for i, planet in enumerate(self.seq):
t_P[i] = epoch(x[0] + sum(T[0:i]))
r_P[i], v_P[i] = self.seq[i].eph(t_P[i])
# 3 - We start with the first leg
theta = 2 * pi * x[1]
phi = acos(2 * x[2] - 1) - pi / 2
Vinfx = x[3] * cos(phi) * cos(theta)
Vinfy = x[3] * cos(phi) * sin(theta)
Vinfz = x[3] * sin(phi)
retval[3:6] = [Vinfx, Vinfy, Vinfz]
v0 = [a + b for a, b in zip(v_P[0], [Vinfx, Vinfy, Vinfz])]
r, v = propagate_lagrangian(r_P[0], v0, x[4] * T[0] * DAY2SEC, MU_SUN)
# Lambert arc to reach seq[1]
dt = (1 - x[4]) * T[0] * DAY2SEC
l = lambert_problem(r, r_P[1], dt, MU_SUN)
v_end_l = l.get_v2()[0]
v_beg_l = l.get_v1()[0]
retval[6:9] = [a - b for a, b in zip(v_end_l, v_P[1])]
# 4 - And we proceed with each successive leg
for i in range(1, self.__n_legs):
# Fly-by
v_out = fb_prop(v_end_l, v_P[i], x[7 + (i - 1) * 4] * self.seq[i].radius, x[6 + (i - 1) * 4], self.seq[i].mu_self)
retval[3 + i * 8:6 + i * 8] = [a - b for a, b in zip(v_out, v_P[i])]
# s/c propagation before the DSM
r, v = propagate_lagrangian(r_P[i], v_out, x[8 + (i - 1) * 4] * T[i] * DAY2SEC, MU_SUN)
# Lambert arc to reach Earth during (1-nu2)*T2 (second segment)
dt = (1 - x[8 + (i - 1) * 4]) * T[i] * DAY2SEC
l = lambert_problem(r, r_P[i + 1], dt, MU_SUN)
v_end_l = l.get_v2()[0]
v_beg_l = l.get_v1()[0]
            # DSM occurring at time nu2*T2
DV[i] = norm([a - b for a, b in zip(v_beg_l, v)])
retval[6 + i * 8:9 + i * 8] = [a - b for a, b in zip(v_end_l, v_P[i + 1])]
return retval
def double_segments(self, x):
"""
x_doubled = prob.double_segments(x)
- x: compatible trajectory as encoded by an mga_1dsm mga_lt_nep
        Returns the decision vector encoding a low-thrust trajectory having double the number of segments with respect to x
and a 'similar' throttle history. In case high fidelity is True, and x is a feasible trajectory, the returned decision vector
also encodes a feasible trajectory that can be further optimized
Example::
prob = traj.mga_lt_nep(nseg=[[10],[20]])
pop = population(prob,1)
.......OPTIMIZE.......
x = prob.double_segments(pop.champion.x)
prob = traj.mga_lt_nep(nseg=[[20],[40]])
pop = population(prob)
pop.push_back(x)
.......OPTIMIZE AGAIN......
"""
y = list()
y.extend(x[:-sum(self.__n_seg) * 3])
for i in range(sum(self.__n_seg)):
y.extend(x[-(sum(self.__n_seg) - i) * 3:-(sum(self.__n_seg) - 1 - i) * 3] * 2)
y.extend(x[-3:] * 2)
return y
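# A minimal usage sketch built from the examples in the docstrings above (assumes
# the PyGMO 1.x layout where `population` is importable from the package root; the
# optimiser itself is left out, any PyGMO algorithm could drive `pop`):
#
#     from PyGMO import population
#
#     prob = mga_lt_nep(seq=[jpl_lp('earth'), jpl_lp('venus'), jpl_lp('earth')],
#                       n_seg=[10] * 2)
#     pop = population(prob, 1)
#     # ... optimise pop ...
#     prob.high_fidelity(True)                   # switch to the feasible propagation model
#     x2 = prob.double_segments(pop.champion.x)  # initial guess for a problem built with n_seg=[20]*2
#     prob.plot(pop.champion.x)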
| krzysztof/pykep | PyKEP/trajopt/_mga_lt_nep.py | Python | gpl-3.0 | 15,675 |
# -*- coding: utf-8 -*-
"""
Contains a custom QTableWidget for easier displaying of CheckerMessages
"""
from pyqode.core.api.utils import memoized
from pyqode.core.modes import CheckerMessage, CheckerMessages
from pyqode.qt import QtCore, QtWidgets, QtGui
COL_TYPE = 0
COL_FILE_NAME = 1
COL_LINE_NBR = 2
COL_MSG = 3
class ErrorsTable(QtWidgets.QTableWidget):
"""
Extends a QtWidgets.QTableWidget to easily show
:class:`pyqode.core.modes.CheckerMessage`.
You add messages to the table using
:meth:`pyqode.core.widgets.ErrorsTable.add_message`.
You clear the table using :meth:`pyqode.core.widgets.ErrorsTable`.
"""
    #: Signal emitted when a message is activated; the activated message is passed
#: as a parameter
msg_activated = QtCore.Signal(CheckerMessage)
ICONS = {
CheckerMessages.INFO: ':/ide-icons/rc/accept.png',
CheckerMessages.WARNING: ':pyqode-icons/rc/dialog-warning.png',
CheckerMessages.ERROR: ':pyqode-icons/rc/dialog-error.png',
}
def __init__(self, parent=None):
QtWidgets.QTableWidget.__init__(self, parent)
self.setColumnCount(6)
self.setHorizontalHeaderLabels(
["Type", "File name", "Line", "Description", 'Details'])
try:
# pyqt4
self.horizontalHeader().setResizeMode(
QtWidgets.QHeaderView.ResizeToContents)
self.horizontalHeader().setResizeMode(
COL_MSG, QtWidgets.QHeaderView.Stretch)
except AttributeError:
# pyqt5
self.horizontalHeader().setSectionResizeMode(
QtWidgets.QHeaderView.ResizeToContents)
self.horizontalHeader().setSectionResizeMode(
COL_MSG, QtWidgets.QHeaderView.Stretch)
self.setMinimumSize(900, 200)
self.itemActivated.connect(self._on_item_activated)
self.setSelectionMode(self.SingleSelection)
self.setSelectionBehavior(self.SelectRows)
self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self._show_context_menu)
self.context_mnu = QtWidgets.QMenu()
self.action_details = QtWidgets.QAction('View details', self)
self.action_details.triggered.connect(self.showDetails)
self.action_copy = QtWidgets.QAction('Copy error', self)
self.action_copy.triggered.connect(self._copy_cell_text)
self.context_mnu.addAction(self.action_details)
self.context_mnu.addAction(self.action_copy)
self.clear()
def _copy_cell_text(self):
"""
Copies the description of the selected message to the clipboard
"""
txt = self.currentItem().text()
QtWidgets.QApplication.clipboard().setText(txt)
def _show_context_menu(self, pos):
""" Shows the context menu """
self.context_mnu.exec_(self.mapToGlobal(pos))
def clear(self):
"""
Clears the tables and the message list
"""
QtWidgets.QTableWidget.clear(self)
self.setRowCount(0)
self.setColumnCount(4)
self.setHorizontalHeaderLabels(
["Type", "File name", "Line", "Description"])
@classmethod
@memoized
def _make_icon(cls, status):
"""
Make icon from icon filename/tuple (if you want to use a theme)
"""
icon = cls.ICONS[status]
if isinstance(icon, tuple):
return QtGui.QIcon.fromTheme(
icon[0], QtGui.QIcon(icon[1]))
elif isinstance(icon, str):
return QtGui.QIcon(icon)
else:
return None
def add_message(self, msg):
"""
Adds a checker message to the table.
:param msg: The message to append
:type msg: pyqode.core.modes.CheckerMessage
"""
row = self.rowCount()
self.insertRow(row)
# type
item = QtWidgets.QTableWidgetItem(
self._make_icon(msg.status), msg.status_string)
item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
item.setData(QtCore.Qt.UserRole, msg)
self.setItem(row, COL_TYPE, item)
# filename
item = QtWidgets.QTableWidgetItem(
QtCore.QFileInfo(msg.path).fileName())
item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
item.setData(QtCore.Qt.UserRole, msg)
self.setItem(row, COL_FILE_NAME, item)
# line
if msg.line <= 0:
item = QtWidgets.QTableWidgetItem("-")
else:
item = QtWidgets.QTableWidgetItem(str(msg.line + 1))
item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
item.setData(QtCore.Qt.UserRole, msg)
self.setItem(row, COL_LINE_NBR, item)
# desc
item = QtWidgets.QTableWidgetItem(msg.description)
item.setFlags(QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable)
item.setData(QtCore.Qt.UserRole, msg)
self.setItem(row, COL_MSG, item)
def _on_item_activated(self, item):
"""
Emits the message activated signal
"""
msg = item.data(QtCore.Qt.UserRole)
self.msg_activated.emit(msg)
def showDetails(self):
"""
Shows the error details.
"""
msg = self.currentItem().data(QtCore.Qt.UserRole)
QtWidgets.QMessageBox.information(
self, 'Message details',
"""<p><b>Description:</b><br/>%s</p>
<i><p><b>File:</b><br/>%s</p>
<p><b>Line: </b>%d</p></i>
""" % (msg.description, msg.path, msg.line + 1, ))
| jmwright/cadquery-x | gui/libs/pyqode/core/widgets/errors_table.py | Python | lgpl-3.0 | 5,638 |
# Copyright 2013 Devsim LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#### Small Signal simulation
from devsim import *
from devsim.python_packages.simple_physics import *
import diode_common
import math
# This requires a circuit element to integrate the current
circuit_element(name="V1", n1=GetContactBiasName("top"), n2=0, value=0.0, acreal=1.0, acimag=0.0)
device="MyDevice"
region="MyRegion"
diode_common.CreateMesh2(device, region)
diode_common.SetParameters(device=device, region=region)
diode_common.SetNetDoping(device=device, region=region)
diode_common.InitialSolution(device, region, circuit_contacts="top")
# Initial DC solution
solve(type="dc", absolute_error=1.0, relative_error=1e-12, maximum_iterations=30)
diode_common.DriftDiffusionInitialSolution(device, region, circuit_contacts=["top"])
v=0.0
while v < 0.51:
circuit_alter(name="V1", value=v)
solve(type="dc", absolute_error=1e10, relative_error=1e-10, maximum_iterations=30)
#TODO: get out circuit information
# PrintCurrents(device, "top")
PrintCurrents(device, "bot")
solve(type="ac", frequency=1.0)
cap=get_circuit_node_value(node="V1.I", solution="ssac_imag")/ (-2*math.pi)
print("capacitance {0} {1}".format(v, cap))
v += 0.1
for x in get_circuit_node_list():
for y in get_circuit_solution_list():
z = get_circuit_node_value(node=x, solution=y)
print(("{0}\t{1}\t{2}".format(x, y, z)))
| devsim/devsim | examples/diode/ssac_diode.py | Python | apache-2.0 | 1,935 |
"""
Copyright (c) 2015 Civic Knowledge. This file is licensed under the terms of
the Revised BSD License, included in this distribution as LICENSE.txt
"""
import geoid
import geoid.acs
import geoid.census
import geoid.civick
import geoid.tiger
from rowgenerators.valuetype import (FailedValue, GeoMixin, IntDimension, FloatDimension, LabelValue, ROLE,
NoneValue, ValueType, LOM)
from rowgenerators.valuetype import StrDimension
class FailedGeoid(FailedValue):
def __str__(self):
return 'invalid'
class Geoid(StrDimension, GeoMixin):
"""General Geoid """
desc = 'General Geoid'
geoid_cls = None # Set in derived classes
geoid = None
def __new__(cls, *args, **kwargs):
import geoid
v = args[0]
if v is None or (isinstance(v, str) and v.strip() == ''):
return NoneValue
try:
if isinstance(v, geoid.core.Geoid):
_geoid = v
_geoid_str = str(v)
elif len(args) < 2: # Parse a string
_geoid = cls.geoid_cls.parse(v)
_geoid_str = v
else: # construct from individual state, county, etc, values
_geoid = cls.geoid_cls(*args, **kwargs)
_geoid_str = v
except ValueError as e:
return FailedValue(args[0], e)
o = super(Geoid, cls).__new__(cls, _geoid_str)
o.geoid = _geoid
return o
def __getattr__(self, item):
"""Allow getting attributes from the internal geoid"""
try:
return getattr(self.geoid, item)
except AttributeError:
return object.__getattribute__(self, item)
@property
def acs(self):
return self.geoid.convert(geoid.acs.AcsGeoid)
@property
def gvid(self):
return self.geoid.convert(geoid.civick.GVid)
@property
def census(self):
return self.geoid.convert(geoid.census.CensusGeoid)
@property
def tiger(self):
return self.geoid.convert(geoid.tiger.TigerGeoid)
class GeoLabel(LabelValue, GeoMixin):
role = ROLE.LABEL
vt_code = 'geo/label'
desc = 'Geographic Identifier Label'
class GeoAcs(Geoid):
role = ROLE.DIMENSION
vt_code = 'geoid'
desc = 'ACS Geoid'
geoid_cls = geoid.acs.AcsGeoid
class GeoidAcsCounty(GeoAcs):
"""An ACS Geoid for Counties """
desc = 'County ACS geoid'
geoid_cls = geoid.acs.County
class GeoidAcsTract(GeoAcs):
"""An ACS Geoid for Counties """
vt_code = 'geo/acs/tract'
desc = 'Tract ACS geoid'
geoid_cls = geoid.acs.Tract
class GeoidCensusTract(Geoid):
"""A Census Geoid for Counties """
desc = 'Census Tract geoid'
vt_code = "geoid/census/tract"
geoid_cls = geoid.census.Tract
@property
def dotted(self):
"""Return just the tract number, excluding the state and county, in the dotted format"""
v = str(self.geoid.tract).zfill(6)
return v[0:4] + '.' + v[4:]
def county(state, county):
return GeoidAcsCounty(state, county)
class GeoidTiger(Geoid):
role = ROLE.DIMENSION
vt_code = 'geo/tiger'
desc = 'Tigerline format Geoid'
geoid_cls = geoid.tiger.TigerGeoid
class GeoidTigerTract(Geoid):
role = ROLE.DIMENSION
vt_code = 'geo/tiger/tract'
desc = 'Tigerline format GeoidAcsTract'
geoid_cls = geoid.tiger.Tract
class GeoidCensus(Geoid):
role = ROLE.DIMENSION
vt_code = 'geoid/census/census'
desc = 'Census Geoid'
geoid_cls = geoid.census.CensusGeoid
@classmethod
def subclass(cls, vt_code, vt_args):
"""Return a dynamic subclass that has the extra parameters built in"""
        from geoid.core import get_class
import geoid.census
parser = get_class(geoid.census, vt_args.strip('/')).parse
cls = type(vt_code.replace('/', '_'), (cls,), {'vt_code': vt_code, 'parser': parser})
globals()[cls.__name__] = cls
assert cls.parser
return cls
class GeoidGvid(Geoid):
role = ROLE.DIMENSION
vt_code = 'geo/gvid'
desc = 'CK Geoid'
geoid_cls = geoid.civick.GVid
class ZipCode(IntDimension, GeoMixin):
"""A ZIP code"""
desc = 'ZIP Code'
vt_code = 'zip'
class ZipCodePlusFour(StrDimension, GeoMixin):
"""A ZIP code"""
desc = 'ZIP Code with 4 digit extension'
vt_code = 'zipp4'
class Stusab(StrDimension, GeoMixin):
"""A 2 character state abbreviation"""
desc = 'USPS State Code'
vt_code = 'geo/usps/state'
def __new__(cls, v):
if v is None:
return NoneValue
try:
return str.__new__(cls, str(v).lower())
except Exception as e:
return FailedValue(v, e)
class Fips(IntDimension, GeoMixin):
"""A FIPS Code"""
role = ROLE.DIMENSION
desc = 'Fips Code'
vt_code = 'fips'
class FipsState(IntDimension, GeoMixin):
"""A FIPS Code"""
role = ROLE.DIMENSION
desc = 'Fips State Code'
vt_code = 'fips/state'
@property
def geoid(self):
import geoid.census
v = geoid.census.State(int(self))
if not v:
return NoneValue
else:
return v
class GeoInt(IntDimension, GeoMixin):
"""General integer Geo identifier"""
role = ROLE.DIMENSION
desc = 'General integer Geo identifier'
vt_code = 'geo/int'
class GnisValue(IntDimension, GeoMixin):
"""An ANSI geographic code"""
role = ROLE.DIMENSION
desc = 'US Geographic Names Information System Code'
vt_code = 'gnis'
class CensusValue(IntDimension, GeoMixin):
"""An geographic code defined by the census"""
role = ROLE.DIMENSION
desc = 'Census Geographic Code'
vt_code = 'geo/census'
class WellKnownTextValue(StrDimension, GeoMixin):
"""Geographic shape in Well Known Text format"""
role = ROLE.DIMENSION
desc = 'Well Known Text'
vt_code = 'wkt'
# Not sure how to make this a general object, so it is a
# single element tuple
class ShapeValue(tuple, ValueType):
_pythontype = object
desc = 'Shape object'
vt_code = 'geometry'
lom = LOM.NOMINAL
def __new__(cls, v):
if v is None or v is NoneValue or v == '':
return NoneValue
try:
return tuple.__new__(cls, [v])
except Exception as e:
return FailedValue(v, e)
@property
def shape(self):
from shapely.wkt import loads
from shapely.geometry.base import BaseGeometry
if isinstance(self[0], BaseGeometry):
return self[0]
else:
return loads(self[0])
def __str__(self):
return str(self.shape)
class DecimalDegreesValue(FloatDimension, GeoMixin):
"""An geographic code defined by the census"""
role = ROLE.DIMENSION
desc = 'Geographic coordinate in decimal degrees'
geo_value_types = {
"label/geo": GeoLabel,
"geoid": GeoAcs, # acs_geoid
"geoid/census": GeoAcs, # acs_geoid
GeoidTigerTract.vt_code: GeoidTigerTract,
GeoidCensusTract.vt_code: GeoidCensusTract,
"geoid/tract": GeoidAcsTract,
GeoidAcsTract.vt_code: GeoidAcsTract,
"geoid/county": GeoidAcsCounty, # acs_geoid
GeoidAcsCounty.vt_code: GeoidAcsCounty, # acs_geoid
"gvid": GeoidGvid,
"fips": Fips,
"fips/state": FipsState, # fips_state
"fips/county": Fips, # fips_
"geo/int": GeoInt,
"gnis": GnisValue,
"census": CensusValue, # Census specific int code, like FIPS and ANSI, but for tracts, blockgroups and blocks
"zip": ZipCode, # zip
"zipp4": ZipCodePlusFour, # zip
"zcta": ZipCode, # zip
"stusab": Stusab, # stusab
"lat": DecimalDegreesValue, # Decimal degrees
"lon": DecimalDegreesValue, # Decimal degrees
"wkt": WellKnownTextValue # WKT Geometry String
}
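# A small lookup sketch (illustrative only; the mapping above is the source of truth):
#
#     vt_cls = geo_value_types['geoid/county']   # -> GeoidAcsCounty
#     vt_cls = geo_value_types.get('zip')        # -> ZipCode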
| CivicKnowledge/rowgenerators | rowgenerators/valuetype/geo.py | Python | mit | 7,843 |
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for IloPower module."""
import mock
from oslo.config import cfg
from oslo.utils import importutils
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.drivers.modules.ilo import common as ilo_common
from ironic.drivers.modules.ilo import deploy as ilo_deploy
from ironic.drivers.modules.ilo import power as ilo_power
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
ilo_client = importutils.try_import('proliantutils.ilo.ribcl')
INFO_DICT = db_utils.get_test_ilo_info()
CONF = cfg.CONF
@mock.patch.object(ilo_common, 'ilo_client')
@mock.patch.object(ilo_power, 'ilo_client')
class IloPowerInternalMethodsTestCase(db_base.DbTestCase):
def setUp(self):
super(IloPowerInternalMethodsTestCase, self).setUp()
driver_info = INFO_DICT
mgr_utils.mock_the_extension_manager(driver="fake_ilo")
n = db_utils.get_test_node(
driver='fake_ilo',
driver_info=driver_info,
instance_uuid='instance_uuid_123')
self.node = self.dbapi.create_node(n)
CONF.set_override('power_retry', 2, 'ilo')
CONF.set_override('power_wait', 0, 'ilo')
def test__get_power_state(self, power_ilo_client_mock,
common_ilo_client_mock):
ilo_mock_object = common_ilo_client_mock.IloClient.return_value
ilo_mock_object.get_host_power_status.return_value = 'ON'
self.assertEqual(
states.POWER_ON, ilo_power._get_power_state(self.node))
ilo_mock_object.get_host_power_status.return_value = 'OFF'
self.assertEqual(
states.POWER_OFF, ilo_power._get_power_state(self.node))
ilo_mock_object.get_host_power_status.return_value = 'ERROR'
self.assertEqual(states.ERROR, ilo_power._get_power_state(self.node))
def test__get_power_state_fail(self, power_ilo_client_mock,
common_ilo_client_mock):
power_ilo_client_mock.IloError = Exception
ilo_mock_object = common_ilo_client_mock.IloClient.return_value
ilo_mock_object.get_host_power_status.side_effect = [Exception()]
self.assertRaises(exception.IloOperationError,
ilo_power._get_power_state,
self.node)
ilo_mock_object.get_host_power_status.assert_called_once_with()
def test__set_power_state_invalid_state(self, power_ilo_client_mock,
common_ilo_client_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
power_ilo_client_mock.IloError = Exception
self.assertRaises(exception.IloOperationError,
ilo_power._set_power_state,
task,
states.ERROR)
def test__set_power_state_reboot_fail(self, power_ilo_client_mock,
common_ilo_client_mock):
power_ilo_client_mock.IloError = Exception
ilo_mock_object = common_ilo_client_mock.IloClient.return_value
ilo_mock_object.reset_server.side_effect = Exception()
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.IloOperationError,
ilo_power._set_power_state,
task,
states.REBOOT)
ilo_mock_object.reset_server.assert_called_once_with()
def test__set_power_state_reboot_ok(self, power_ilo_client_mock,
common_ilo_client_mock):
power_ilo_client_mock.IloError = Exception
ilo_mock_object = common_ilo_client_mock.IloClient.return_value
ilo_mock_object.get_host_power_status.side_effect = ['ON', 'OFF', 'ON']
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
ilo_power._set_power_state(task, states.REBOOT)
ilo_mock_object.reset_server.assert_called_once_with()
def test__set_power_state_off_fail(self, power_ilo_client_mock,
common_ilo_client_mock):
power_ilo_client_mock.IloError = Exception
ilo_mock_object = common_ilo_client_mock.IloClient.return_value
ilo_mock_object.get_host_power_status.return_value = 'ON'
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.PowerStateFailure,
ilo_power._set_power_state,
task,
states.POWER_OFF)
ilo_mock_object.get_host_power_status.assert_called_with()
ilo_mock_object.hold_pwr_btn.assert_called_once_with()
def test__set_power_state_on_ok(self, power_ilo_client_mock,
common_ilo_client_mock):
power_ilo_client_mock.IloError = Exception
ilo_mock_object = common_ilo_client_mock.IloClient.return_value
ilo_mock_object.get_host_power_status.side_effect = ['OFF', 'ON']
target_state = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
ilo_power._set_power_state(task, target_state)
ilo_mock_object.get_host_power_status.assert_called_with()
ilo_mock_object.set_host_power.assert_called_once_with('ON')
@mock.patch.object(ilo_common, 'set_boot_device')
@mock.patch.object(ilo_common, 'setup_vmedia_for_boot')
def test__attach_boot_iso(self, setup_vmedia_mock, set_boot_device_mock,
power_ilo_client_mock, common_ilo_client_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.instance_info['ilo_boot_iso'] = 'boot-iso'
ilo_power._attach_boot_iso(task)
setup_vmedia_mock.assert_called_once_with(task, 'boot-iso')
set_boot_device_mock.assert_called_once_with(task.node, 'CDROM')
class IloPowerTestCase(db_base.DbTestCase):
def setUp(self):
super(IloPowerTestCase, self).setUp()
driver_info = INFO_DICT
mgr_utils.mock_the_extension_manager(driver="fake_ilo")
self.node = obj_utils.create_test_node(self.context,
driver='fake_ilo',
driver_info=driver_info)
def test_get_properties(self):
expected = ilo_common.COMMON_PROPERTIES
expected.update(ilo_deploy.COMMON_PROPERTIES)
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertEqual(expected, task.driver.get_properties())
@mock.patch.object(ilo_common, 'parse_driver_info')
def test_validate(self, mock_drvinfo):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.power.validate(task)
mock_drvinfo.assert_called_once_with(task.node)
@mock.patch.object(ilo_common, 'parse_driver_info')
def test_validate_fail(self, mock_drvinfo):
side_effect = exception.InvalidParameterValue("Invalid Input")
mock_drvinfo.side_effect = side_effect
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.power.validate,
task)
@mock.patch.object(ilo_power, '_get_power_state')
def test_get_power_state(self, mock_get_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
mock_get_power.return_value = states.POWER_ON
self.assertEqual(states.POWER_ON,
task.driver.power.get_power_state(task))
mock_get_power.assert_called_once_with(task.node)
@mock.patch.object(ilo_power, '_set_power_state')
def test_set_power_state(self, mock_set_power):
mock_set_power.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.power.set_power_state(task, states.POWER_ON)
mock_set_power.assert_called_once_with(task, states.POWER_ON)
@mock.patch.object(ilo_power, '_set_power_state')
@mock.patch.object(ilo_power, '_get_power_state')
def test_reboot(self, mock_get_power, mock_set_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
mock_get_power.return_value = states.POWER_ON
mock_set_power.return_value = states.POWER_ON
task.driver.power.reboot(task)
mock_get_power.assert_called_once_with(task.node)
mock_set_power.assert_called_once_with(task, states.REBOOT)
| froyobin/ironic | ironic/tests/drivers/ilo/test_power.py | Python | apache-2.0 | 10,139 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
""" sysdiag
Pierre Haessig — September 2013
"""
from __future__ import division, print_function
def _create_name(name_list, base):
'''Returns a name (str) built on `base` that doesn't exist in `name_list`.
Useful for automatic creation of subsystems or wires
'''
base = str(base).strip()
if base == '':
# avoid having '' as name (although it would not break the code...)
raise ValueError('base name should not be empty!')
if base not in name_list:
return base
# Else: build another name by counting
i = 0
name = base + str(i)
while name in name_list:
i += 1
name = base + str(i)
return name
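# Illustrative sketch (not part of the original module): expected behaviour of the
# helper above when some candidate names are already taken.
#
#     _create_name(['W', 'W0'], 'W')   # -> 'W1' (counts upward past taken names)
#     _create_name([], 'relay')        # -> 'relay' (base name is free, returned as-is)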
class System(object):
'''Diagram description of a system
    a System is either an interconnection of subsystems
or an atomic element (a leaf of the tree)
'''
def __init__(self, name='root', parent=None):
self.name = name
# Parent system, if any (None for top-level):
self.parent = None
# Children systems, if any (None for leaf-level):
self.subsystems = []
self.wires = []
self.ports = []
self.params = {}
# If a parent system is provided, request its addition as a subsystem
if parent is not None:
parent.add_subsystem(self)
#end __init__()
def is_empty(self):
'''True if the System contains no subsystems and no wires'''
return (not self.subsystems) and (not self.wires)
@property
def ports_dict(self):
        '''dict of ports, whose keys are the names of the ports'''
return {p.name:p for p in self.ports}
@property
def subsystems_dict(self):
        '''dict of subsystems, whose keys are the names of the systems'''
return {s.name:s for s in self.subsystems}
def add_port(self, port, created_by_system = False):
'''add a Port to the System'''
if port in self.ports:
raise ValueError('port already added!')
# extract the port's name
name = port.name
port_names = [p.name for p in self.ports]
if name in port_names:
raise ValueError("port name '{}' already exists in {:s}!".format(
name, repr(self))
)
# Add parent relationship and add to the ports dict:
port.system = self
port._created_by_system = bool(created_by_system)
self.ports.append(port)
def del_port(self, port):
'''delete a Port of the System (and disconnect any connected wire)
'''
if (port.wire is not None) or (port.internal_wire is not None):
# TODO : implement the wire disconnection
raise NotImplementedError('Cannot yet delete a connected Port')
        # Remove from the ports list:
self.ports.remove(port)
def add_subsystem(self, subsys):
# 1) Check name uniqueness
name = subsys.name
subsys_names = [s.name for s in self.subsystems]
if name in subsys_names:
raise ValueError("system name '{}' already exists in {:s}!".format(
name, repr(self))
)
# 2) Add parent relationship and add to the system list
subsys.parent = self
self.subsystems.append(subsys)
def add_wire(self, wire):
# 1) Check name uniqueness
name = wire.name
wire_names = [w.name for w in self.wires]
if name in wire_names:
raise ValueError("wire name '{}' already exists in {:s}!".format(
name, repr(self))
)
# Add parent relationship and add to the ports dict:
wire.parent = self
self.wires.append(wire)
def create_name(self, category, base):
        '''Returns a name (str) built on `base` that doesn't exist
        within the names of `category`.
'''
if category == 'subsystem':
components = self.subsystems
elif category == 'wire':
components = self.wires
else:
raise ValueError("Unknown category '{}'!".format(str(category)))
name_list = [c.name for c in components]
return _create_name(name_list, base)
def __repr__(self):
cls_name = self.__class__.__name__
s = "{:s}('{.name}')".format(cls_name, self)
return s
def __str__(self):
s = repr(self)
if self.parent:
s += '\n Parent: {:s}'.format(repr(self.parent))
if self.params:
s += '\n Parameters: {:s}'.format(str(self.params))
if self.ports:
s += '\n Ports: {:s}'.format(str(self.ports))
if self.subsystems:
            s += '\n Subsystems: {:s}'.format(str(self.subsystems))
return s
def __eq__(self, other):
'''Systems compare equal if their class, `name` and `params` are equal.
and also their lists of ports and wires are *similar*
(see `_is_similar` methods of Port and Wire)
and finally their subsystems recursively compare equal.
parent systems are not compared (would generate infinite recursion).
'''
if not isinstance(other, System):
return NotImplemented
# Basic similarity
basic_sim = self.__class__ == other.__class__ and \
self.name == other.name and \
self.params == other.params
if not basic_sim:
return False
# Port similarity: (sensitive to the order)
ports_sim = all(p1._is_similar(p2) for (p1,p2)
in zip(self.ports, other.ports))
if not ports_sim:
return False
# Wires similarity
wires_sim = all(w1._is_similar(w2) for (w1,w2)
in zip(self.wires, other.wires))
if not wires_sim:
return False
print('equality at level {} is true'.format(self.name))
# Since everything matches, compare subsystems:
return self.subsystems == other.subsystems
# end __eq__()
def __ne__(self,other):
return not (self==other)
def _to_json(self):
'''convert the System instance to a JSON-serializable object
System is serialized with list of ports, subsystems and wires
but without connectivity information (e.g. no parent information)
ports created at the initialization of the system ("default ports")
are not serialized.
'''
# Filter out ports created at the initialization of the system
ports_list = [p for p in self.ports if not p._created_by_system]
cls_name = self.__module__ +'.'+ self.__class__.__name__
return {'__sysdiagclass__': 'System',
'__class__': cls_name,
'name':self.name,
'subsystems':self.subsystems,
'wires':self.wires,
'ports':ports_list,
'params':self.params
}
# end _to_json
def json_dump(self, output=None, indent=2, sort_keys=True):
'''dump (e.g. save) the System structure in json format
if `output` is None: return a json string
if `output` is a writable file: write in this file
'''
import json
if output is None:
return json.dumps(self, default=to_json, indent=indent, sort_keys=sort_keys)
else:
json.dump(self, output, default=to_json, indent=indent, sort_keys=sort_keys)
return
# end json_dump
class Port(object):
'''Port enables the connection of a System to a Wire
Each port has a `type` which only allows the connection of a Wire
of the same type.
    it also has a `direction` ('none', 'in', 'out') that is set
at the class level
private attribute `_created_by_system` tells whether the port was created
automatically by the system's class at initialization or by a custom code
(if True, the port is not serialized by its system).
'''
direction = 'none'
def __init__(self, name, ptype):
self.name = name
self.type = ptype
self.system = None
self.wire = None
self.internal_wire = None
self._created_by_system = False
def __repr__(self):
cls_name = self.__class__.__name__
s = '{:s}({:s}, {:s})'.format(cls_name, repr(self.name), repr(self.type))
return s
def __str__(self):
s = repr(self) + ' of ' + repr(self.system)
return s
def _is_similar(self, other):
'''Ports are *similar* if their class, `type` and `name` are equal.
(their parent system are not compared)
'''
if not isinstance(other, Port):
return NotImplemented
return self.__class__ == other.__class__ and \
self.type == other.type and \
self.name == other.name
def _to_json(self):
'''convert the Port instance to a JSON-serializable object
Ports are serialized without any connectivity information
'''
cls_name = self.__module__ +'.'+ self.__class__.__name__
return {'__sysdiagclass__': 'Port',
'__class__': cls_name,
'name':self.name,
'type':self.type
}
# end _to_json
class InputPort(Port):
'''Input Port'''
direction = 'in'
def __init__(self, name, ptype=''):
super(InputPort, self).__init__(name, ptype)
class OutputPort(Port):
'''Output Port'''
direction = 'out'
def __init__(self, name, ptype=''):
super(OutputPort, self).__init__(name, ptype)
class Wire(object):
'''Wire enables the interconnection of several Systems
through their Ports'''
def __init__(self, name, wtype, parent=None):
self.name = name
self.parent = None
self.type = wtype
self.ports = []
# If a parent system is provided, request its addition as a wire
if parent is not None:
parent.add_wire(self)
def is_connect_allowed(self, port, port_level, raise_error=False):
        '''Check that a connection between Wire `self` and a Port `port` is allowed.
Parameters
----------
`port`: the Port instance to connect to
`port_level`: whether `port` belongs to a 'sibling' (usual case) or a
'parent' system (to enable connections to the upper level)
`raise_error`: if True, raising an error replaces returning False
Returns
-------
allowed: True or False
'''
assert port_level in ['sibling', 'parent']
# Port availability (is there already a wire connected?):
if port_level == 'sibling':
connected_wire = port.wire
elif port_level == 'parent':
connected_wire = port.internal_wire
if connected_wire is not None:
if raise_error:
raise ValueError('port is already connected to '+\
'{:s}!'.format(repr(connected_wire)))
else:
return False
# Check parent relationship:
if port_level == 'sibling':
# Check that the wire and port.system are siblings:
if self.parent is not port.system.parent:
if raise_error:
raise ValueError('wire and port.system should have a common parent!')
else:
return False
elif port_level == 'parent':
# Check that the port.system is the parent of the wire:
if self.parent is not port.system:
if raise_error:
raise ValueError('port.system should be the parent of the wire!')
else:
return False
# Wire-Port Type checking:
if self.type == '':
# untyped wire: connection is always possible
return True
elif port.type == self.type:
return True
else:
# Incompatible types
if raise_error:
raise TypeError("Wire type '{:s}'".format(str(self.type)) + \
" and Port type '{:s}'".format(str(port.type)) + \
" are not compatible!")
else:
return False
def connect_port(self, port, port_level='sibling'):
'''Connect the Wire to a Port `port`'''
if port in self.ports:
            return  # Port is already connected
# Type checking:
self.is_connect_allowed(port, port_level, raise_error=True)
# Add parent relationship:
assert port_level in ['sibling', 'parent']
if port_level=='sibling':
port.wire = self
elif port_level == 'parent':
port.internal_wire = self
# Book keeping of ports:
self.ports.append(port)
@property
def ports_by_name(self):
'''triplet representation of port connections
(level, port.system.name, port.name)
(used for serialization)
'''
def port_triplet(p):
'''triplet representation (level, port.system.name, port.name)'''
if p.system is self.parent:
level = 'parent'
elif p.system.parent is self.parent:
level = 'sibling'
else:
raise ValueError('The system of Port {}'.format(repr(p)) +\
                    ' is neither a parent nor a sibling!')
return (level, p.system.name, p.name)
return [port_triplet(p) for p in self.ports]
def connect_by_name(self, s_name, p_name, level='sibling'):
'''Connects the ports named `p_name` of system named `s_name`
to be found at level `level` ('parent' or 'sibling' (default))
'''
# TODO (?) merge the notion of level in the name (make parent a reserved name)
assert level in ['sibling', 'parent']
# 1) find the system:
if level == 'parent':
syst = self.parent
assert self.parent.name == s_name
elif level == 'sibling':
syst = self.parent.subsystems_dict[s_name]
port = syst.ports_dict[p_name]
self.connect_port(port, level)
def __repr__(self):
cls_name = self.__class__.__name__
s = '{:s}({:s}, {:s})'.format(cls_name, repr(self.name), repr(self.type))
return s
def _is_similar(self, other):
'''Wires are *similar* if their class, `type` and `name` are equal
and if their connectivity (`ports_by_name`) is the same
(their parent system are not compared)
'''
if not isinstance(other, Wire):
return NotImplemented
return self.__class__ == other.__class__ and \
self.type == other.type and \
self.name == other.name and \
self.ports_by_name == other.ports_by_name
def _to_json(self):
'''convert the Wire instance to a JSON-serializable object
Wires are serialized with the port connectivity in tuples
(but parent relationship is not serialized)
'''
cls_name = self.__module__ +'.'+ self.__class__.__name__
return {'__sysdiagclass__': 'Wire',
'__class__': cls_name,
'name': self.name,
'type': self.type,
'ports': self.ports_by_name
}
# end _to_json
class SignalWire(Wire):
'''Signal Wire for the interconnection of several Systems
through their Input and Output Ports.
Each SignalWire can be connected to a unique Output Port (signal source)
and several Input Ports (signal sinks)
'''
def __init__(self, name, wtype='', parent=None):
super(SignalWire, self).__init__(name, wtype, parent)
def is_connect_allowed(self, port, port_level, raise_error=False):
        '''Check that a connection between SignalWire `self` and a Port `port`
is allowed.
Parameters
----------
`port`: the Port instance to connect to
`port_level`: whether `port` belongs to a 'sibling' (usual case) or a
'parent' system (to enable connections to the upper level)
`raise_error`: if True, raising an error replaces returning False
Returns
-------
allowed: True or False
'''
if port.direction not in ['in', 'out']:
if raise_error:
raise TypeError('Only Input/Output Port can be connected!')
else:
return False
def is_output(port, level):
'''an output port is either:
            * a sibling system's port with direction == 'out' or
            * a parent system's port with direction == 'in'
'''
if level=='detect':
wire = self
if wire.parent == port.system:
level = 'parent'
elif wire.parent == port.system.parent:
level = 'sibling'
else:
raise ValueError('Port is neither sibling nor parent')
is_out = (level=='sibling' and port.direction == 'out') or \
(level=='parent' and port.direction == 'in')
return is_out
# Now we have an I/O Port for sure:
if is_output(port, port_level):
# check that there is not already a signal source
other_ports = [p for p in self.ports if (is_output(p, 'detect')
and p is not port)]
if other_ports:
if raise_error:
raise ValueError('Only one output port can be connected!')
else:
return False
# Now the I/O aspect is fine. Launch some further checks:
return super(SignalWire, self).is_connect_allowed(port, port_level, raise_error)
def connect_systems(source, dest, s_pname, d_pname, wire_cls=Wire):
'''Connect systems `source` to `dest` using
port names `s_pname` and `d_pname`
with a wire of instance `wire_cls` (defaults to Wire)
The wire is created if necessary
Returns: the wire used for the connection
'''
# 1) find the ports
s_port = source.ports_dict[s_pname]
d_port = dest.ports_dict[d_pname]
    # 2) find a pre-existing wire:
w = None
if s_port.wire is not None:
w = s_port.wire
elif d_port.wire is not None:
w = d_port.wire
else:
parent = s_port.system.parent
wname = parent.create_name('wire','W')
wtype = s_port.type
w = wire_cls(wname, wtype, parent)
# 3) Make the connection:
w.connect_port(s_port)
w.connect_port(d_port)
return w
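# Hedged usage sketch (the block and port names below are illustrative, not taken from
# an existing model): wiring two sibling blocks inside a root system.
#
#     root = System('root')
#     src = System('source', parent=root)
#     snk = System('sink', parent=root)
#     src.add_port(OutputPort('out'))
#     snk.add_port(InputPort('in'))
#     w = connect_systems(src, snk, 'out', 'in', wire_cls=SignalWire)
#     w.ports_by_name
#     # -> [('sibling', 'source', 'out'), ('sibling', 'sink', 'in')]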
def to_json(py_obj):
'''convert `py_obj` to JSON-serializable objects
`py_obj` should be an instance of `System`, `Wire` or `Port`
'''
if isinstance(py_obj, System):
return py_obj._to_json()
if isinstance(py_obj, Wire):
return py_obj._to_json()
if isinstance(py_obj, Port):
return py_obj._to_json()
raise TypeError(repr(py_obj) + ' is not JSON serializable')
# end to_json
import sys
def _str_to_class(mod_class):
    '''retrieves the class from a "module.class" string'''
mod_name, cls_name = mod_class.split('.')
mod = sys.modules[mod_name]
return getattr(mod, cls_name)
def from_json(json_object):
'''deserializes a sysdiag json object'''
if '__sysdiagclass__' in json_object:
cls = _str_to_class(json_object['__class__'])
if json_object['__sysdiagclass__'] == 'Port':
port = cls(name = json_object['name'], ptype = json_object['type'])
return port
if json_object['__sysdiagclass__'] == 'System':
            # TODO: specialize the instantiation for each class using
# _from_json class methods
syst = cls(name = json_object['name'])
syst.params = json_object['params']
# add ports if any:
for p in json_object['ports']:
syst.add_port(p)
# add subsystems
for s in json_object['subsystems']:
syst.add_subsystem(s)
# add wires
for w_dict in json_object['wires']:
# 1) decode the wire:
w_cls = _str_to_class(w_dict['__class__'])
w = w_cls(name = w_dict['name'], wtype = w_dict['type'])
syst.add_wire(w)
# make the connections:
for level, s_name, p_name in w_dict['ports']:
w.connect_by_name(s_name, p_name, level)
# end for each wire
return syst
return json_object
def json_load(json_dump):
import json
syst = json.loads(json_dump, object_hook=from_json)
return syst
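# Round-trip sketch (assumes `root` was built as in the connect_systems example above,
# and that this module is importable so _str_to_class can resolve class names):
#
#     dump = root.json_dump()    # JSON string, since no output file is given
#     clone = json_load(dump)
#     clone == root              # expected True: __eq__ compares names, params,
#                                # ports, wires and subsystems recursively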
| pierre-haessig/sysdiag | sysdiag.py | Python | mit | 21,428 |
#!/usr/bin/env python
# encoding: utf-8
from celery.task import task
from colorama import Fore
from utils.constants import CP_NAME
from utils.constants import AGENCY
from utils.constants import ROUTE
from utils.constants import STOP
from utils.constants import TRIP
from utils.constants import STOPTIME
from utils.constants import ID
from utils.errors import APIKeyError
from utils.errors import CrawlerError
from utils.errors import OSTError
from utils.errors import FiWareError
from utils.utils import get_error_message
from crawler import Crawler
from importer import FiWare
@task(name='transfer_gtfs_cb', ignore_result=True)
def transfer_gtfs_cb(agency_name=None):
"""
Fetches CP data from OST APIs and puts it on ContextBroker
Uses the Crawler to fetch data and FiWare to import it.
# 1st) Agency == CP
# 2nd) CP Routes
# 3rd) CP Stops
# 4th) CP Trips
# 5th) CP StopTimes
"""
try:
crawler = Crawler()
fiware = FiWare()
if agency_name is None:
agency_name = CP_NAME
print '> Inserting Agency... ',
agency = crawler.get_agency(agency_name)
agency_id = agency.get(ID)
fiware.insert_data(agency, content_type=AGENCY)
print 'Done.'
# ROUTES
print '> Inserting Routes... ',
routes = crawler.get_data_by_agency(agency_id, content_type=ROUTE)
fiware.insert_data(routes, content_type=ROUTE)
routes_cb = fiware.get_data(content_type=ROUTE)['contextResponses']
print 'Done:', len(routes_cb)
# STOPS
print '> Inserting Stops... ',
stops = crawler.get_data_by_agency(agency_id, content_type=STOP)
fiware.insert_data(stops, content_type=STOP)
stops_cb = fiware.get_data(content_type=STOP)['contextResponses']
print 'Done:', len(stops_cb)
# TRIPS
route_ids = fiware.get_ids(fiware.get_data(content_type=ROUTE))
print '> Inserting Trips... ',
trips = crawler.get_data_from_routes(route_ids, content_type=TRIP)
fiware.insert_data(trips, content_type=TRIP)
trips_cb = fiware.get_data(content_type=TRIP)['contextResponses']
print 'Done:', len(trips_cb)
# STOPTIMES
print '> Inserting StopTimes...',
times = crawler.get_data_from_routes(route_ids, content_type=STOPTIME)
fiware.insert_data(times, content_type=STOPTIME)
times_cb = fiware.get_data(content_type=STOPTIME)['contextResponses']
print 'Done:', len(times_cb)
except (APIKeyError, CrawlerError, OSTError, FiWareError) as error:
message = get_error_message(error)
print(Fore.RED + str(error) + Fore.RESET + ':' + message)
if __name__ == '__main__':
transfer_gtfs_cb()
| OneStopTransport/OST-FiWare-Lisbon | fiware_lisbon/fiware/tasks.py | Python | mit | 2,779 |
# Copyright 2016 United States Government as represented by the Administrator
# of the National Aeronautics and Space Administration. All Rights Reserved.
#
# Portion of this code is Copyright Geoscience Australia, Licensed under the
# Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License
# at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# The CEOS 2 platform is licensed under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import gdal, osr
import collections
import gc
import numpy as np
import xarray as xr
from datetime import datetime
from collections import OrderedDict
import datacube
from . import dc_utilities as utilities
# Author: KMF
# Creation date: 2016-06-14
# Modified by: AHDS
# Last modified date:
def create_mosaic_iterative(dataset_in, clean_mask=None, no_data=-9999, intermediate_product=None):
"""
Description:
      Creates a mosaic of the input dataset that prefers the most recent pixel values,
      falling back to progressively older acquisitions. If no clean mask is given,
the 'cf_mask' variable must be included in the input dataset, as it will be used
to create a clean mask
-----
Inputs:
dataset_in (xarray.Dataset) - dataset retrieved from the Data Cube; should contain
coordinates: time, latitude, longitude
variables: variables to be mosaicked
If user does not provide a clean_mask, dataset_in must also include the cf_mask
variable
Optional Inputs:
clean_mask (nd numpy array with dtype boolean) - true for values user considers clean;
if user does not provide a clean mask, one will be created using cfmask
no_data (int/float) - no data pixel value; default: -9999
Output:
dataset_out (xarray.Dataset) - mosaicked data with
coordinates: latitude, longitude
variables: same as dataset_in
"""
# Create clean_mask from cfmask if none given
if clean_mask is None:
cfmask = dataset_in.cf_mask
clean_mask = utilities.create_cfmask_clean_mask(cfmask)
dataset_in = dataset_in.drop('cf_mask')
    # Masks data with clean_mask: all values where clean_mask == False are set to no_data.
for key in list(dataset_in.data_vars):
dataset_in[key].values[np.invert(clean_mask)] = no_data
if intermediate_product is not None:
dataset_out = intermediate_product.copy(deep=True)
else:
dataset_out = None
for index in reversed(range(len(clean_mask))):
dataset_slice = dataset_in.isel(time=index).astype("int16").drop('time')
if dataset_out is None:
dataset_out = dataset_slice.copy(deep=True)
#clear out the params as they can't be written to nc.
dataset_out.attrs = OrderedDict()
else:
for key in list(dataset_in.data_vars):
                dataset_out[key].values[dataset_out[key].values==no_data] = dataset_slice[key].values[dataset_out[key].values==no_data]
return dataset_out
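# Hedged usage sketch (the product and extent below are assumptions, not values from
# this module): building a latest-pixel mosaic from a Data Cube query that includes
# the cf_mask measurement.
#
#     import datacube
#     dc = datacube.Datacube(app='mosaic_example')
#     ds = dc.load(product='ls7_ledaps_kenya',
#                  measurements=['red', 'green', 'blue', 'nir', 'cf_mask'],
#                  latitude=(0.0, 0.5), longitude=(36.0, 36.5))
#     mosaic = create_mosaic_iterative(ds)   # cf_mask is used to derive the clean mask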
def create_median_mosaic(dataset_in, clean_mask=None, no_data=-9999, intermediate_product=None):
"""
Description:
Method for calculating the median pixel value for a given dataset.
-----
Input:
dataset_in (xarray dataset) - the set of data with clouds and no data removed.
Optional Inputs:
no_data (int/float) - no data value.
"""
# Create clean_mask from cfmask if none given
if clean_mask is None:
cfmask = dataset_in.cf_mask
clean_mask = utilities.create_cfmask_clean_mask(cfmask)
dataset_in = dataset_in.drop('cf_mask')
#required for np.nan
dataset_in = dataset_in.astype("float64")
for key in list(dataset_in.data_vars):
dataset_in[key].values[np.invert(clean_mask)] = no_data
dataset_out = dataset_in.isel(time=0).drop('time').copy(deep=True)
dataset_out.attrs = OrderedDict()
# Loop over every key.
for key in list(dataset_in.data_vars):
dataset_in[key].values[dataset_in[key].values==no_data] = np.nan
dataset_out[key].values = np.nanmedian(dataset_in[key].values, axis=0)
        dataset_out[key].values[np.isnan(dataset_out[key].values)] = no_data
return dataset_out.astype('int16')
def create_max_ndvi_mosaic(dataset_in, clean_mask=None, no_data=-9999, intermediate_product=None):
"""
Description:
Method for calculating the pixel value for the max ndvi value.
-----
Input:
dataset_in (xarray dataset) - the set of data with clouds and no data removed.
Optional Inputs:
no_data (int/float) - no data value.
"""
# Create clean_mask from cfmask if none given
if clean_mask is None:
cfmask = dataset_in.cf_mask
clean_mask = utilities.create_cfmask_clean_mask(cfmask)
dataset_in = dataset_in.drop('cf_mask')
for key in list(dataset_in.data_vars):
dataset_in[key].values[np.invert(clean_mask)] = no_data
if intermediate_product is not None:
dataset_out = intermediate_product.copy(deep=True)
else:
dataset_out = None
for timeslice in range(clean_mask.shape[0]):
dataset_slice = dataset_in.isel(time=timeslice).astype("float64").drop('time')
ndvi = (dataset_slice.nir - dataset_slice.red) / (dataset_slice.nir + dataset_slice.red)
ndvi.values[np.invert(clean_mask)[timeslice,::]] = -1000000000
dataset_slice['ndvi'] = ndvi
if dataset_out is None:
dataset_out = dataset_slice.copy(deep=True)
#clear out the params as they can't be written to nc.
dataset_out.attrs = OrderedDict()
else:
for key in list(dataset_slice.data_vars):
dataset_out[key].values[dataset_slice.ndvi.values > dataset_out.ndvi.values] = dataset_slice[key].values[dataset_slice.ndvi.values > dataset_out.ndvi.values]
return dataset_out
def create_min_ndvi_mosaic(dataset_in, clean_mask=None, no_data=-9999, intermediate_product=None):
"""
Description:
Method for calculating the pixel value for the min ndvi value.
-----
Input:
dataset_in (xarray dataset) - the set of data with clouds and no data removed.
Optional Inputs:
no_data (int/float) - no data value.
"""
# Create clean_mask from cfmask if none given
if clean_mask is None:
cfmask = dataset_in.cf_mask
clean_mask = utilities.create_cfmask_clean_mask(cfmask)
dataset_in = dataset_in.drop('cf_mask')
for key in list(dataset_in.data_vars):
dataset_in[key].values[np.invert(clean_mask)] = no_data
if intermediate_product is not None:
dataset_out = intermediate_product.copy(deep=True)
else:
dataset_out = None
for timeslice in range(clean_mask.shape[0]):
dataset_slice = dataset_in.isel(time=timeslice).astype("float64").drop('time')
ndvi = (dataset_slice.nir - dataset_slice.red) / (dataset_slice.nir + dataset_slice.red)
ndvi.values[np.invert(clean_mask)[timeslice,::]] = 1000000000
dataset_slice['ndvi'] = ndvi
if dataset_out is None:
dataset_out = dataset_slice.copy(deep=True)
#clear out the params as they can't be written to nc.
dataset_out.attrs = OrderedDict()
else:
for key in list(dataset_slice.data_vars):
dataset_out[key].values[dataset_slice.ndvi.values < dataset_out.ndvi.values] = dataset_slice[key].values[dataset_slice.ndvi.values < dataset_out.ndvi.values]
return dataset_out
| ceos-seo/Data_Cube_v2 | ui/django_site_v2/data_cube_ui/utils/dc_mosaic.py | Python | apache-2.0 | 7,933 |
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from collections import defaultdict
#***************************************************************
class Tarjan:
"""
Computes Tarjan's algorithm for finding strongly connected components (cycles) of a graph
Attributes:
edges: dictionary of edges such that edges[dep] = head
vertices: set of dependents
SCCs: list of sets of strongly connected components. Non-singleton sets are cycles.
"""
#=============================================================
def __init__(self, prediction, tokens):
"""
Inputs:
prediction: a predicted dependency tree where
prediction[dep_idx] = head_idx
tokens: the tokens we care about (i.e. exclude _GO, _EOS, and _PAD)
"""
self._edges = defaultdict(set)
self._vertices = set((0,))
for dep, head in enumerate(prediction[tokens]):
self._vertices.add(dep+1)
self._edges[head].add(dep+1)
self._indices = {}
self._lowlinks = {}
self._onstack = defaultdict(lambda: False)
self._SCCs = []
index = 0
stack = []
for v in self.vertices:
if v not in self.indices:
self.strongconnect(v, index, stack)
#=============================================================
def strongconnect(self, v, index, stack):
""""""
self._indices[v] = index
self._lowlinks[v] = index
index += 1
stack.append(v)
self._onstack[v] = True
for w in self.edges[v]:
if w not in self.indices:
self.strongconnect(w, index, stack)
self._lowlinks[v] = min(self._lowlinks[v], self._lowlinks[w])
elif self._onstack[w]:
self._lowlinks[v] = min(self._lowlinks[v], self._indices[w])
if self._lowlinks[v] == self._indices[v]:
self._SCCs.append(set())
while stack[-1] != v:
w = stack.pop()
self._onstack[w] = False
self._SCCs[-1].add(w)
w = stack.pop()
self._onstack[w] = False
self._SCCs[-1].add(w)
return
#======================
@property
def edges(self):
return self._edges
@property
def vertices(self):
return self._vertices
@property
def indices(self):
return self._indices
@property
def SCCs(self):
return self._SCCs | Northeaster/TargetSentimentAnalysis | lib/etc/tarjan.py | Python | apache-2.0 | 2,339 |
# Copyright 2020 Google
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# --- This file has been autogenerated --- #
# --- from docs/Readout-Data-Collection.ipynb --- #
# --- Do not edit this file directly --- #
import os
import numpy as np
import sympy
import cirq
import recirq
@recirq.json_serializable_dataclass(namespace='recirq.readout_scan',
registry=recirq.Registry,
frozen=True)
class ReadoutScanTask:
"""Scan over Ry(theta) angles from -pi/2 to 3pi/2 tracing out a sinusoid
which is primarily affected by readout error.
See Also:
:py:func:`run_readout_scan`
Attributes:
dataset_id: A unique identifier for this dataset.
device_name: The device to run on, by name.
n_shots: The number of repetitions for each theta value.
qubit: The qubit to benchmark.
resolution_factor: We select the number of points in the linspace
so that the special points: (-1/2, 0, 1/2, 1, 3/2) * pi are
always included. The total number of theta evaluations
is resolution_factor * 4 + 1.
"""
dataset_id: str
device_name: str
n_shots: int
qubit: cirq.GridQubit
resolution_factor: int
@property
def fn(self):
n_shots = _abbrev_n_shots(n_shots=self.n_shots)
qubit = _abbrev_grid_qubit(self.qubit)
return (f'{self.dataset_id}/'
f'{self.device_name}/'
f'q-{qubit}/'
f'ry_scan_{self.resolution_factor}_{n_shots}')
# Define the following helper functions to make nicer `fn` keys
# for the tasks:
def _abbrev_n_shots(n_shots: int) -> str:
"""Shorter n_shots component of a filename"""
if n_shots % 1000 == 0:
return f'{n_shots // 1000}k'
return str(n_shots)
def _abbrev_grid_qubit(qubit: cirq.GridQubit) -> str:
"""Formatted grid_qubit component of a filename"""
return f'{qubit.row}_{qubit.col}'
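# Illustrative sketch (dataset and device names are assumptions, not defaults of this
# module): the filename key derived from a concrete task.
#
#     task = ReadoutScanTask(dataset_id='2020-03-01-readout', device_name='Syc23-simulator',
#                            n_shots=40_000, qubit=cirq.GridQubit(2, 5), resolution_factor=6)
#     task.fn   # -> '2020-03-01-readout/Syc23-simulator/q-2_5/ry_scan_6_40k'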
EXPERIMENT_NAME = 'readout-scan'
DEFAULT_BASE_DIR = os.path.expanduser(f'~/cirq-results/{EXPERIMENT_NAME}')
def run_readout_scan(task: ReadoutScanTask,
base_dir=None):
"""Execute a :py:class:`ReadoutScanTask` task."""
if base_dir is None:
base_dir = DEFAULT_BASE_DIR
if recirq.exists(task, base_dir=base_dir):
print(f"{task} already exists. Skipping.")
return
# Create a simple circuit
theta = sympy.Symbol('theta')
circuit = cirq.Circuit([
cirq.ry(theta).on(task.qubit),
cirq.measure(task.qubit, key='z')
])
# Use utilities to map sampler names to Sampler objects
sampler = recirq.get_sampler_by_name(device_name=task.device_name)
# Use a sweep over theta values.
# Set up limits so we include (-1/2, 0, 1/2, 1, 3/2) * pi
# The total number of points is resolution_factor * 4 + 1
n_special_points: int = 5
resolution_factor = task.resolution_factor
theta_sweep = cirq.Linspace(theta, -np.pi / 2, 3 * np.pi / 2,
resolution_factor * (n_special_points - 1) + 1)
thetas = np.asarray([v for ((k, v),) in theta_sweep.param_tuples()])
flat_circuit, flat_sweep = cirq.flatten_with_sweep(circuit, theta_sweep)
# Run the jobs
print(f"Collecting data for {task.qubit}", flush=True)
results = sampler.run_sweep(program=flat_circuit, params=flat_sweep,
repetitions=task.n_shots)
# Save the results
recirq.save(task=task, data={
'thetas': thetas,
'all_bitstrings': [
recirq.BitArray(np.asarray(r.measurements['z']))
for r in results]
}, base_dir=base_dir)
| quantumlib/ReCirq | recirq/readout_scan/tasks.py | Python | apache-2.0 | 4,314 |
import click
from . import logging_helper, util
from .chrome_store import commands as chrome_commands
from .firefox_store import commands as firefox_commands
from .script_parser.parser import Parser
logger = logging_helper.get_logger(__file__)
@click.group()
@click.option('-v', '--verbose', count=True,
help="Much verbosity. May be repeated multiple times. More v's, more info!")
def main(verbose):
logging_helper.set_level(30 - verbose * 10)
logger.info("Logging into file: {}".format(logging_helper.log_file))
logger.debug("Using temporary directory: {}".format(util.build_dir))
@main.command('script')
@click.argument('file', required=True)
def script(file):
logger.info("Executing script {}".format(file))
p = Parser(script_fn=file)
p.execute()
main.add_command(chrome_commands.chrome)
main.add_command(firefox_commands.firefox)
# For any other platforms, add their commands here.
if __name__ == '__main__':
main()
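# Hedged CLI sketch (the console-script name is an assumption): verbosity flags stack
# on the group, and each store exposes its own subcommands.
#
#     $ webstore_manager -vv script release_script.txt
#     $ webstore_manager chrome --help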
| melkamar/webstore-manager | webstore_manager/manager.py | Python | mit | 972 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Demonstrates the goto-definition feature of Wing IDE 101. After opening this program in
Wing IDE 101 or Spyder, hold Ctrl and click on signal, pl, HasTraits, etc. to jump to
the code where they are defined.
"""
from scipy import signal
import pylab as pl
from enthought.traits.api import HasTraits, Instance
from enthought.traits.ui.api import View, Item
signal.lfilter
pl.plot
pl.title | Akagi201/akcode | python/gotodefine.py | Python | gpl-2.0 | 419 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014-present Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import uuid
from django.urls import reverse
from taiga.base.utils import json
from taiga.projects import choices as project_choices
from taiga.projects.models import Project
from taiga.projects.epics.serializers import EpicSerializer
from taiga.projects.epics.models import Epic
from taiga.projects.epics.utils import attach_extra_info as attach_epic_extra_info
from taiga.projects.utils import attach_extra_info as attach_project_extra_info
from taiga.permissions.choices import MEMBERS_PERMISSIONS, ANON_PERMISSIONS
from taiga.projects.occ import OCCResourceMixin
from tests import factories as f
from tests.utils import helper_test_http_method, reconnect_signals
from taiga.projects.votes.services import add_vote
from taiga.projects.notifications.services import add_watcher
from unittest import mock
import pytest
pytestmark = pytest.mark.django_db
def setup_function(function):
reconnect_signals()
@pytest.fixture
def data():
m = type("Models", (object,), {})
m.registered_user = f.UserFactory.create()
m.project_member_with_perms = f.UserFactory.create()
m.project_member_without_perms = f.UserFactory.create()
m.project_owner = f.UserFactory.create()
m.other_user = f.UserFactory.create()
m.public_project = f.ProjectFactory(is_private=False,
anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)) + ["comment_epic"],
owner=m.project_owner,
epics_csv_uuid=uuid.uuid4().hex)
m.public_project = attach_project_extra_info(Project.objects.all()).get(id=m.public_project.id)
m.private_project1 = f.ProjectFactory(is_private=True,
anon_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
public_permissions=list(map(lambda x: x[0], ANON_PERMISSIONS)),
owner=m.project_owner,
epics_csv_uuid=uuid.uuid4().hex)
m.private_project1 = attach_project_extra_info(Project.objects.all()).get(id=m.private_project1.id)
m.private_project2 = f.ProjectFactory(is_private=True,
anon_permissions=[],
public_permissions=[],
owner=m.project_owner,
epics_csv_uuid=uuid.uuid4().hex)
m.private_project2 = attach_project_extra_info(Project.objects.all()).get(id=m.private_project2.id)
m.blocked_project = f.ProjectFactory(is_private=True,
anon_permissions=[],
public_permissions=[],
owner=m.project_owner,
epics_csv_uuid=uuid.uuid4().hex,
blocked_code=project_choices.BLOCKED_BY_STAFF)
m.blocked_project = attach_project_extra_info(Project.objects.all()).get(id=m.blocked_project.id)
m.public_membership = f.MembershipFactory(
project=m.public_project,
user=m.project_member_with_perms,
role__project=m.public_project,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
m.private_membership1 = f.MembershipFactory(
project=m.private_project1,
user=m.project_member_with_perms,
role__project=m.private_project1,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(
project=m.private_project1,
user=m.project_member_without_perms,
role__project=m.private_project1,
role__permissions=[])
m.private_membership2 = f.MembershipFactory(
project=m.private_project2,
user=m.project_member_with_perms,
role__project=m.private_project2,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(
project=m.private_project2,
user=m.project_member_without_perms,
role__project=m.private_project2,
role__permissions=[])
m.blocked_membership = f.MembershipFactory(
project=m.blocked_project,
user=m.project_member_with_perms,
role__project=m.blocked_project,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=m.blocked_project,
user=m.project_member_without_perms,
role__project=m.blocked_project,
role__permissions=[])
f.MembershipFactory(project=m.public_project,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.private_project1,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.private_project2,
user=m.project_owner,
is_admin=True)
f.MembershipFactory(project=m.blocked_project,
user=m.project_owner,
is_admin=True)
m.public_epic = f.EpicFactory(project=m.public_project,
status__project=m.public_project)
m.public_epic = attach_epic_extra_info(Epic.objects.all()).get(id=m.public_epic.id)
m.private_epic1 = f.EpicFactory(project=m.private_project1,
status__project=m.private_project1)
m.private_epic1 = attach_epic_extra_info(Epic.objects.all()).get(id=m.private_epic1.id)
m.private_epic2 = f.EpicFactory(project=m.private_project2,
status__project=m.private_project2)
m.private_epic2 = attach_epic_extra_info(Epic.objects.all()).get(id=m.private_epic2.id)
m.blocked_epic = f.EpicFactory(project=m.blocked_project,
status__project=m.blocked_project)
m.blocked_epic = attach_epic_extra_info(Epic.objects.all()).get(id=m.blocked_epic.id)
m.public_us = f.UserStoryFactory(project=m.public_project)
m.private_us1 = f.UserStoryFactory(project=m.private_project1)
m.private_us2 = f.UserStoryFactory(project=m.private_project2)
m.blocked_us = f.UserStoryFactory(project=m.blocked_project)
m.public_related_us = f.RelatedUserStory(epic=m.public_epic, user_story=m.public_us)
m.private_related_us1 = f.RelatedUserStory(epic=m.private_epic1, user_story=m.private_us1)
m.private_related_us2 = f.RelatedUserStory(epic=m.private_epic2, user_story=m.private_us2)
m.blocked_related_us = f.RelatedUserStory(epic=m.blocked_epic, user_story=m.blocked_us)
m.public_project.default_epic_status = m.public_epic.status
m.public_project.save()
m.private_project1.default_epic_status = m.private_epic1.status
m.private_project1.save()
m.private_project2.default_epic_status = m.private_epic2.status
m.private_project2.save()
m.blocked_project.default_epic_status = m.blocked_epic.status
m.blocked_project.save()
return m
def test_epic_list(client, data):
url = reverse('epics-list')
response = client.get(url)
epics_data = json.loads(response.content.decode('utf-8'))
assert len(epics_data) == 2
assert response.status_code == 200
client.login(data.registered_user)
response = client.get(url)
epics_data = json.loads(response.content.decode('utf-8'))
assert len(epics_data) == 2
assert response.status_code == 200
client.login(data.project_member_with_perms)
response = client.get(url)
epics_data = json.loads(response.content.decode('utf-8'))
assert len(epics_data) == 4
assert response.status_code == 200
client.login(data.project_owner)
response = client.get(url)
epics_data = json.loads(response.content.decode('utf-8'))
assert len(epics_data) == 4
assert response.status_code == 200
def test_epic_retrieve(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url1, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url2, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epic_create(client, data):
url = reverse('epics-list')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
create_data = json.dumps({
"subject": "test",
"ref": 1,
"project": data.public_project.pk,
"status": data.public_project.epic_statuses.all()[0].pk,
})
results = helper_test_http_method(client, 'post', url, create_data, users)
assert results == [401, 403, 403, 201, 201]
create_data = json.dumps({
"subject": "test",
"ref": 2,
"project": data.private_project1.pk,
"status": data.private_project1.epic_statuses.all()[0].pk,
})
results = helper_test_http_method(client, 'post', url, create_data, users)
assert results == [401, 403, 403, 201, 201]
create_data = json.dumps({
"subject": "test",
"ref": 3,
"project": data.private_project2.pk,
"status": data.private_project2.epic_statuses.all()[0].pk,
})
results = helper_test_http_method(client, 'post', url, create_data, users)
assert results == [401, 403, 403, 201, 201]
create_data = json.dumps({
"subject": "test",
"ref": 3,
"project": data.blocked_project.pk,
"status": data.blocked_project.epic_statuses.all()[0].pk,
})
results = helper_test_http_method(client, 'post', url, create_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_put_update(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
with mock.patch.object(OCCResourceMixin, "_validate_and_update_version"):
epic_data = EpicSerializer(data.public_epic).data
epic_data["subject"] = "test"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', public_url, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.private_epic1).data
epic_data["subject"] = "test"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url1, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.private_epic2).data
epic_data["subject"] = "test"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url2, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.blocked_epic).data
epic_data["subject"] = "test"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', blocked_url, epic_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_put_comment(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
with mock.patch.object(OCCResourceMixin, "_validate_and_update_version"):
epic_data = EpicSerializer(data.public_epic).data
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', public_url, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.private_epic1).data
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url1, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.private_epic2).data
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url2, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.blocked_epic).data
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', blocked_url, epic_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_put_update_and_comment(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
with mock.patch.object(OCCResourceMixin, "_validate_and_update_version"):
epic_data = EpicSerializer(data.public_epic).data
epic_data["subject"] = "test"
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', public_url, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.private_epic1).data
epic_data["subject"] = "test"
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url1, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.private_epic2).data
epic_data["subject"] = "test"
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', private_url2, epic_data, users)
assert results == [401, 403, 403, 200, 200]
epic_data = EpicSerializer(data.blocked_epic).data
epic_data["subject"] = "test"
epic_data["comment"] = "test comment"
epic_data = json.dumps(epic_data)
results = helper_test_http_method(client, 'put', blocked_url, epic_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_put_update_with_project_change(client):
user1 = f.UserFactory.create()
user2 = f.UserFactory.create()
user3 = f.UserFactory.create()
user4 = f.UserFactory.create()
project1 = f.ProjectFactory()
project2 = f.ProjectFactory()
epic_status1 = f.EpicStatusFactory.create(project=project1)
epic_status2 = f.EpicStatusFactory.create(project=project2)
project1.default_epic_status = epic_status1
project2.default_epic_status = epic_status2
project1.save()
project2.save()
project1 = attach_project_extra_info(Project.objects.all()).get(id=project1.id)
project2 = attach_project_extra_info(Project.objects.all()).get(id=project2.id)
f.MembershipFactory(project=project1,
user=user1,
role__project=project1,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=project2,
user=user1,
role__project=project2,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=project1,
user=user2,
role__project=project1,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
f.MembershipFactory(project=project2,
user=user3,
role__project=project2,
role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
epic = f.EpicFactory.create(project=project1)
epic = attach_epic_extra_info(Epic.objects.all()).get(id=epic.id)
url = reverse('epics-detail', kwargs={"pk": epic.pk})
# Test user with permissions in both projects
client.login(user1)
epic_data = EpicSerializer(epic).data
epic_data["project"] = project2.id
epic_data = json.dumps(epic_data)
response = client.put(url, data=epic_data, content_type="application/json")
assert response.status_code == 200
epic.project = project1
epic.save()
    # Test user with permissions in only the origin project
client.login(user2)
epic_data = EpicSerializer(epic).data
epic_data["project"] = project2.id
epic_data = json.dumps(epic_data)
response = client.put(url, data=epic_data, content_type="application/json")
assert response.status_code == 403
epic.project = project1
epic.save()
    # Test user with permissions in only the destination project
client.login(user3)
epic_data = EpicSerializer(epic).data
epic_data["project"] = project2.id
epic_data = json.dumps(epic_data)
response = client.put(url, data=epic_data, content_type="application/json")
assert response.status_code == 403
epic.project = project1
epic.save()
# Test user without permissions in the projects
client.login(user4)
epic_data = EpicSerializer(epic).data
epic_data["project"] = project2.id
epic_data = json.dumps(epic_data)
response = client.put(url, data=epic_data, content_type="application/json")
assert response.status_code == 403
epic.project = project1
epic.save()
def test_epic_patch_update(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
with mock.patch.object(OCCResourceMixin, "_validate_and_update_version"):
patch_data = json.dumps({"subject": "test", "version": data.public_epic.version})
results = helper_test_http_method(client, 'patch', public_url, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"subject": "test", "version": data.private_epic1.version})
results = helper_test_http_method(client, 'patch', private_url1, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"subject": "test", "version": data.private_epic2.version})
results = helper_test_http_method(client, 'patch', private_url2, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"subject": "test", "version": data.blocked_epic.version})
results = helper_test_http_method(client, 'patch', blocked_url, patch_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_patch_comment(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
with mock.patch.object(OCCResourceMixin, "_validate_and_update_version"):
patch_data = json.dumps({"comment": "test comment", "version": data.public_epic.version})
results = helper_test_http_method(client, 'patch', public_url, patch_data, users)
assert results == [401, 200, 200, 200, 200]
patch_data = json.dumps({"comment": "test comment", "version": data.private_epic1.version})
results = helper_test_http_method(client, 'patch', private_url1, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"comment": "test comment", "version": data.private_epic2.version})
results = helper_test_http_method(client, 'patch', private_url2, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({"comment": "test comment", "version": data.blocked_epic.version})
results = helper_test_http_method(client, 'patch', blocked_url, patch_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_patch_update_and_comment(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
with mock.patch.object(OCCResourceMixin, "_validate_and_update_version"):
patch_data = json.dumps({
"subject": "test",
"comment": "test comment",
"version": data.public_epic.version
})
results = helper_test_http_method(client, 'patch', public_url, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({
"subject": "test",
"comment": "test comment",
"version": data.private_epic1.version
})
results = helper_test_http_method(client, 'patch', private_url1, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({
"subject": "test",
"comment": "test comment",
"version": data.private_epic2.version
})
results = helper_test_http_method(client, 'patch', private_url2, patch_data, users)
assert results == [401, 403, 403, 200, 200]
patch_data = json.dumps({
"subject": "test",
"comment": "test comment",
"version": data.blocked_epic.version
})
results = helper_test_http_method(client, 'patch', blocked_url, patch_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_delete(client, data):
public_url = reverse('epics-detail', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-detail', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-detail', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-detail', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
]
results = helper_test_http_method(client, 'delete', public_url, None, users)
assert results == [401, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private_url1, None, users)
assert results == [401, 403, 403, 204]
results = helper_test_http_method(client, 'delete', private_url2, None, users)
assert results == [401, 403, 403, 204]
results = helper_test_http_method(client, 'delete', blocked_url, None, users)
assert results == [401, 403, 403, 451]
def test_epic_action_bulk_create(client, data):
url = reverse('epics-bulk-create')
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
bulk_data = json.dumps({
"bulk_epics": "test1\ntest2",
"project_id": data.public_epic.project.pk,
})
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 200, 200]
bulk_data = json.dumps({
"bulk_epics": "test1\ntest2",
"project_id": data.private_epic1.project.pk,
})
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 200, 200]
bulk_data = json.dumps({
"bulk_epics": "test1\ntest2",
"project_id": data.private_epic2.project.pk,
})
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 200, 200]
bulk_data = json.dumps({
"bulk_epics": "test1\ntest2",
"project_id": data.blocked_epic.project.pk,
})
results = helper_test_http_method(client, 'post', url, bulk_data, users)
assert results == [401, 403, 403, 451, 451]
def test_epic_action_upvote(client, data):
public_url = reverse('epics-upvote', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-upvote', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-upvote', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-upvote', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'post', public_url, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url1, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url2, "", users)
assert results == [404, 404, 404, 200, 200]
results = helper_test_http_method(client, 'post', blocked_url, "", users)
assert results == [404, 404, 404, 451, 451]
def test_epic_action_downvote(client, data):
public_url = reverse('epics-downvote', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-downvote', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-downvote', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-downvote', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'post', public_url, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url1, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url2, "", users)
assert results == [404, 404, 404, 200, 200]
results = helper_test_http_method(client, 'post', blocked_url, "", users)
assert results == [404, 404, 404, 451, 451]
def test_epic_voters_list(client, data):
public_url = reverse('epic-voters-list', kwargs={"resource_id": data.public_epic.pk})
private_url1 = reverse('epic-voters-list', kwargs={"resource_id": data.private_epic1.pk})
private_url2 = reverse('epic-voters-list', kwargs={"resource_id": data.private_epic2.pk})
blocked_url = reverse('epic-voters-list', kwargs={"resource_id": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url1, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url2, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epic_voters_retrieve(client, data):
add_vote(data.public_epic, data.project_owner)
public_url = reverse('epic-voters-detail', kwargs={"resource_id": data.public_epic.pk,
"pk": data.project_owner.pk})
add_vote(data.private_epic1, data.project_owner)
private_url1 = reverse('epic-voters-detail', kwargs={"resource_id": data.private_epic1.pk,
"pk": data.project_owner.pk})
add_vote(data.private_epic2, data.project_owner)
private_url2 = reverse('epic-voters-detail', kwargs={"resource_id": data.private_epic2.pk,
"pk": data.project_owner.pk})
add_vote(data.blocked_epic, data.project_owner)
blocked_url = reverse('epic-voters-detail', kwargs={"resource_id": data.blocked_epic.pk,
"pk": data.project_owner.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url1, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url2, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epic_action_watch(client, data):
public_url = reverse('epics-watch', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-watch', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-watch', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-watch', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'post', public_url, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url1, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url2, "", users)
assert results == [404, 404, 404, 200, 200]
results = helper_test_http_method(client, 'post', blocked_url, "", users)
assert results == [404, 404, 404, 451, 451]
def test_epic_action_unwatch(client, data):
public_url = reverse('epics-unwatch', kwargs={"pk": data.public_epic.pk})
private_url1 = reverse('epics-unwatch', kwargs={"pk": data.private_epic1.pk})
private_url2 = reverse('epics-unwatch', kwargs={"pk": data.private_epic2.pk})
blocked_url = reverse('epics-unwatch', kwargs={"pk": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'post', public_url, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url1, "", users)
assert results == [401, 200, 200, 200, 200]
results = helper_test_http_method(client, 'post', private_url2, "", users)
assert results == [404, 404, 404, 200, 200]
results = helper_test_http_method(client, 'post', blocked_url, "", users)
assert results == [404, 404, 404, 451, 451]
def test_epic_watchers_list(client, data):
public_url = reverse('epic-watchers-list', kwargs={"resource_id": data.public_epic.pk})
private_url1 = reverse('epic-watchers-list', kwargs={"resource_id": data.private_epic1.pk})
private_url2 = reverse('epic-watchers-list', kwargs={"resource_id": data.private_epic2.pk})
blocked_url = reverse('epic-watchers-list', kwargs={"resource_id": data.blocked_epic.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url1, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url2, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epic_watchers_retrieve(client, data):
add_watcher(data.public_epic, data.project_owner)
public_url = reverse('epic-watchers-detail', kwargs={"resource_id": data.public_epic.pk,
"pk": data.project_owner.pk})
add_watcher(data.private_epic1, data.project_owner)
private_url1 = reverse('epic-watchers-detail', kwargs={"resource_id": data.private_epic1.pk,
"pk": data.project_owner.pk})
add_watcher(data.private_epic2, data.project_owner)
private_url2 = reverse('epic-watchers-detail', kwargs={"resource_id": data.private_epic2.pk,
"pk": data.project_owner.pk})
add_watcher(data.blocked_epic, data.project_owner)
blocked_url = reverse('epic-watchers-detail', kwargs={"resource_id": data.blocked_epic.pk,
"pk": data.project_owner.pk})
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', public_url, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url1, None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', private_url2, None, users)
assert results == [401, 403, 403, 200, 200]
results = helper_test_http_method(client, 'get', blocked_url, None, users)
assert results == [401, 403, 403, 200, 200]
def test_epics_csv(client, data):
url = reverse('epics-csv')
csv_public_uuid = data.public_project.epics_csv_uuid
csv_private1_uuid = data.private_project1.epics_csv_uuid
    csv_private2_uuid = data.private_project2.epics_csv_uuid
csv_blocked_uuid = data.blocked_project.epics_csv_uuid
users = [
None,
data.registered_user,
data.project_member_without_perms,
data.project_member_with_perms,
data.project_owner
]
results = helper_test_http_method(client, 'get', "{}?uuid={}".format(url, csv_public_uuid), None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', "{}?uuid={}".format(url, csv_private1_uuid), None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', "{}?uuid={}".format(url, csv_private2_uuid), None, users)
assert results == [200, 200, 200, 200, 200]
results = helper_test_http_method(client, 'get', "{}?uuid={}".format(url, csv_blocked_uuid), None, users)
assert results == [200, 200, 200, 200, 200]
| taigaio/taiga-back | tests/integration/resources_permissions/test_epics_resources.py | Python | agpl-3.0 | 38,177 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2016-2018 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
import os.path
import pytest
pytest.importorskip('PyQt5.QtWebEngineWidgets')
from qutebrowser.browser.webengine import webenginedownloads
from helpers import utils
@pytest.mark.parametrize('path, expected', [
(os.path.join('subfolder', 'foo'), 'foo'),
('foo(1)', 'foo'),
('foo(a)', 'foo(a)'),
('foo1', 'foo1'),
pytest.param('foo%20bar', 'foo bar', marks=utils.qt58),
pytest.param('foo%2Fbar', 'bar', marks=utils.qt58),
pytest.param('foo%20bar', 'foo%20bar', marks=utils.qt59),
pytest.param('foo%2Fbar', 'foo%2Fbar', marks=utils.qt59),
])
def test_get_suggested_filename(path, expected):
assert webenginedownloads._get_suggested_filename(path) == expected
| V155/qutebrowser | tests/unit/browser/webengine/test_webenginedownloads.py | Python | gpl-3.0 | 1,495 |
#
import numpy as np
import healpy as hp
import astropy.io.fits as pyfits
from multiprocessing import Pool
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from quicksipManera import *
import fitsio
from math import sqrt, log, pi  # explicit import of math helpers used below (two-arg log, sqrt, pi)
### ------------ A couple of useful conversions -----------------------
def zeropointToScale(zp):
return 10.**((zp - 22.5)/2.5)
def nanomaggiesToMag(nm):
return -2.5 * (log(nm,10.) - 9.)
def Magtonanomaggies(m):
return 10.**(-m/2.5+9.)
#-2.5 * (log(nm,10.) - 9.)
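# Quick sanity check of the conversions above (illustrative values, not from the
# original source): nanomaggiesToMag(1.0) == 22.5 and Magtonanomaggies(22.5) == 1.0,
# i.e. the two functions are inverses, while zeropointToScale(22.5) == 1.0.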
### ------------ SHARED CLASS: HARDCODED INPUTS GO HERE ------------------------
### Please, add here your own harcoded values if any, so other may use them
class mysample(object):
"""
    This class maintains the basic information of the sample
to minimize hardcoded parameters in the test functions
Everyone is meant to call mysample to obtain information like
- path to ccd-annotated files : ccds
- zero points : zp0
- magnitude limits (recm) : recm
- photoz requirements : phreq
        - extinction coefficient : extc
        - extinction index : be
- mask var eqv. to blacklist_ok : maskname
- predicted frac exposures : FracExp
    Current inputs are: survey, DR, band, localdir
survey: DECaLS, MZLS, BASS
DR: DR3, DR4
band: g,r,z
localdir: output directory
"""
def __init__(self,survey,DR,band,localdir,verb):
"""
Initialize image survey, data release, band, output path
Calculate variables and paths
"""
self.survey = survey
self.DR = DR
self.band = band
self.localdir = localdir
self.verbose =verb
# Check bands
if(self.band != 'g' and self.band !='r' and self.band!='z'):
raise RuntimeError("Band seems wrong options are 'g' 'r' 'z'")
# Check surveys
if(self.survey !='DECaLS' and self.survey !='BASS' and self.survey !='MZLS'):
raise RuntimeError("Survey seems wrong options are 'DECAaLS' 'BASS' MZLS' ")
# Annotated CCD paths
if(self.DR == 'DR3'):
inputdir = '/global/project/projectdirs/cosmo/data/legacysurvey/dr3/'
self.ccds =inputdir+'ccds-annotated-decals.fits.gz'
self.catalog = 'DECaLS_DR3'
if(self.survey != 'DECaLS'): raise RuntimeError("Survey name seems inconsistent")
elif(self.DR == 'DR4'):
inputdir = '/global/project/projectdirs/cosmo/data/legacysurvey/dr4/'
if (band == 'g' or band == 'r'):
#self.ccds = inputdir+'ccds-annotated-dr4-90prime.fits.gz'
self.ccds = inputdir+'ccds-annotated-bass.fits.gz'
self.catalog = 'BASS_DR4'
if(self.survey != 'BASS'): raise RuntimeError("Survey name seems inconsistent")
elif(band == 'z'):
#self.ccds = inputdir+'ccds-annotated-dr4-mzls.fits.gz'
self.ccds = inputdir+'ccds-annotated-mzls.fits.gz'
self.catalog = 'MZLS_DR4'
if(self.survey != 'MZLS'): raise RuntimeError("Survey name seems inconsistent")
else: raise RuntimeError("Input sample band seems inconsisent")
else: raise RuntimeError("Data Realease seems wrong")
# Predicted survey exposure fractions
if(self.survey =='DECaLS'):
# DECALS final survey will be covered by
# 1, 2, 3, 4, and 5 exposures in the following fractions:
self.FracExp=[0.02,0.24,0.50,0.22,0.02]
elif(self.survey == 'BASS'):
            # BASS coverage fractions for 1,2,3,4,5,6 exposures are:
self.FracExp=[0.0014,0.0586,0.8124,0.1203,0.0054,0.0019]
elif(self.survey == 'MZLS'):
# For MzLS fill factors of 100% with a coverage of at least 1,
# 99.5% with a coverage of at least 2, and 85% with a coverage of 3.
self.FracExp=[0.005,0.145,0.85,0,0]
else:
raise RuntimeError("Survey seems to have wrong options for fraction of exposures ")
#Bands inputs
if band == 'g':
self.be = 1
self.extc = 3.303 #/2.751
self.zp0 = 25.08
self.recm = 24.
self.phreq = 0.01
if band == 'r':
self.be = 2
self.extc = 2.285 #/2.751
self.zp0 = 25.29
self.recm = 23.4
self.phreq = 0.01
if band == 'z':
self.be = 4
self.extc = 1.263 #/2.751
self.zp0 = 24.92
self.recm = 22.5
self.phreq = 0.02
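# Illustrative (untested) use of the mysample class above; the output path is an
# assumption and must exist before running:
#   sample = mysample('DECaLS', 'DR3', 'r', '/path/to/output/', True)
#   print sample.ccds, sample.recm, sample.FracExp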
# ------------------------------------------------------------------
# ------------------------------------------------------------------
# ------------ VALIDATION TESTS ------------------------------------
# ------------------------------------------------------------------
# Note: part of the name of the function should startw with number valXpX
def val3p4c_depthfromIvar(sample):
"""
Requirement V3.4
90% filled to g=24, r=23.4 and z=22.5 and 95% and 98% at 0.3/0.6 mag shallower.
Produces extinction correction magnitude maps for visual inspection
MARCM stable version, improved from AJR quick hack
    This now includes extinction from the exposures
Uses quicksip subroutines from Boris, corrected
for a bug I found for BASS and MzLS ccd orientation
"""
nside = 1024 # Resolution of output maps
nsidesout = None # if you want full sky degraded maps to be written
ratiores = 1 # Superresolution/oversampling ratio, simp mode doesn't allow anything other than 1
mode = 1 # 1: fully sequential, 2: parallel then sequential, 3: fully parallel
pixoffset = 0 # How many pixels are being removed on the edge of each CCD? 15 for DES.
oversamp='1' # ratiores in string format
nsideSTR='1024' # same as nside but in string format
band = sample.band
catalogue_name = sample.catalog
fname = sample.ccds
localdir = sample.localdir
outroot = localdir
extc = sample.extc
#Read ccd file
tbdata = pyfits.open(fname)[1].data
# ------------------------------------------------------
# Obtain indices
auxstr='band_'+band
sample_names = [auxstr]
if(sample.DR == 'DR3'):
inds = np.where((tbdata['filter'] == band) & (tbdata['photometric'] == True) & (tbdata['blacklist_ok'] == True))
elif(sample.DR == 'DR4'):
inds = np.where((tbdata['filter'] == band) & (tbdata['photometric'] == True) & (tbdata['bitmask'] == 0))
#Read data
#obtain invnoisesq here, including extinction
nmag = Magtonanomaggies(tbdata['galdepth']-extc*tbdata['EBV'])/5.
ivar= 1./nmag**2.
# What properties do you want mapped?
# Each each tuple has [(quantity to be projected, weighting scheme, operation),(etc..)]
propertiesandoperations = [ ('ivar', '', 'total'), ]
# What properties to keep when reading the images?
# Should at least contain propertiesandoperations and the image corners.
# MARCM - actually no need for ra dec image corners.
    # It only needs ra0-ra3 and dec0-dec3 if the fast-track quicksip subroutines are used
propertiesToKeep = [ 'filter', 'AIRMASS', 'FWHM','mjd_obs'] \
+ ['RA', 'DEC', 'crval1', 'crval2', 'crpix1', 'crpix2', 'cd1_1', 'cd1_2', 'cd2_1', 'cd2_2','width','height']
# Create big table with all relevant properties.
tbdata = np.core.records.fromarrays([tbdata[prop] for prop in propertiesToKeep] + [ivar], names = propertiesToKeep + [ 'ivar'])
# Read the table, create Healtree, project it into healpix maps, and write these maps.
# Done with Quicksip library, note it has quite a few hardcoded values (use new version by MARCM for BASS and MzLS)
# project_and_write_maps_simp(mode, propertiesandoperations, tbdata, catalogue_name, outroot, sample_names, inds, nside)
project_and_write_maps(mode, propertiesandoperations, tbdata, catalogue_name, outroot, sample_names, inds, nside, ratiores, pixoffset, nsidesout)
# Read Haelpix maps from quicksip
prop='ivar'
op='total'
vmin=21.0
vmax=24.0
fname2=localdir+catalogue_name+'/nside'+nsideSTR+'_oversamp'+oversamp+'/'+\
catalogue_name+'_band_'+band+'_nside'+nsideSTR+'_oversamp'+oversamp+'_'+prop+'__'+op+'.fits.gz'
f = fitsio.read(fname2)
# HEALPIX DEPTH MAPS
# convert ivar to depth
import healpy as hp
from healpix import pix2ang_ring,thphi2radec
ral = []
decl = []
val = f['SIGNAL']
pix = f['PIXEL']
# Obtain values to plot
if (prop == 'ivar'):
myval = []
mylabel='depth'
below=0
for i in range(0,len(val)):
depth=nanomaggiesToMag(sqrt(1./val[i]) * 5.)
if(depth < vmin):
below=below+1
else:
myval.append(depth)
th,phi = hp.pix2ang(int(nside),pix[i])
ra,dec = thphi2radec(th,phi)
ral.append(ra)
decl.append(dec)
npix=len(f)
print 'Area is ', npix/(float(nside)**2.*12)*360*360./pi, ' sq. deg.'
print below, 'of ', npix, ' pixels are not plotted as their ', mylabel,' < ', vmin
print 'Within the plot, min ', mylabel, '= ', min(myval), ' and max ', mylabel, ' = ', max(myval)
# Plot depth
from matplotlib import pyplot as plt
import matplotlib.cm as cm
map = plt.scatter(ral,decl,c=myval, cmap=cm.rainbow,s=2., vmin=vmin, vmax=vmax, lw=0,edgecolors='none')
cbar = plt.colorbar(map)
plt.xlabel('r.a. (degrees)')
plt.ylabel('declination (degrees)')
plt.title('Map of '+ mylabel +' for '+catalogue_name+' '+band+'-band')
plt.xlim(0,360)
plt.ylim(-30,90)
mapfile=localdir+mylabel+'_'+band+'_'+catalogue_name+str(nside)+'.png'
print 'saving plot to ', mapfile
plt.savefig(mapfile)
plt.close()
#plt.show()
#cbar.set_label(r'5$\sigma$ galaxy depth', rotation=270,labelpad=1)
#plt.xscale('log')
return mapfile
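# Minimal sketch of how val3p4c_depthfromIvar is meant to be driven (assumes the
# NERSC input paths hardcoded in mysample are reachable and localdir exists):
#   sample = mysample('MZLS', 'DR4', 'z', '/path/to/output/', True)
#   mapfile = val3p4c_depthfromIvar(sample)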
def val3p4b_maghist_pred(sample,ndraw=1e5, nbin=100, vmin=21.0, vmax=25.0):
"""
Requirement V3.4
90% filled to g=24, r=23.4 and z=22.5 and 95% and 98% at 0.3/0.6 mag shallower.
MARCM
Makes histogram of predicted magnitudes
by MonteCarlo from exposures converving fraction of number of exposures
This produces the histogram for Dustin's processed galaxy depth
"""
import fitsio
from matplotlib import pyplot as plt
from numpy import zeros,array
from random import random
# Check fraction of number of exposures adds to 1.
if( abs(sum(sample.FracExp) - 1.0) > 1e-5 ):
raise ValueError("Fration of number of exposures don't add to one")
# Survey inputs
rel = sample.DR
catalogue_name = sample.catalog
band = sample.band
be = sample.be
zp0 = sample.zp0
recm = sample.recm
verbose = sample.verbose
f = fitsio.read(sample.ccds)
#read in magnitudes including extinction
counts2014 = 0
counts20 = 0
nl = []
for i in range(0,len(f)):
year = int(f[i]['date_obs'].split('-')[0])
if (year <= 2014): counts2014 = counts2014 + 1
if f[i]['dec'] < -20 : counts20 = counts20 + 1
if(sample.DR == 'DR3'):
if f[i]['filter'] == sample.band and f[i]['photometric'] == True and f[i]['blacklist_ok'] == True :
magext = f[i]['galdepth'] - f[i]['decam_extinction'][be]
nmag = Magtonanomaggies(magext)/5. #total noise
nl.append(nmag)
if(sample.DR == 'DR4'):
if f[i]['filter'] == sample.band and f[i]['photometric'] == True and f[i]['bitmask'] == 0 :
magext = f[i]['galdepth'] - f[i]['decam_extinction'][be]
nmag = Magtonanomaggies(magext)/5. #total noise
nl.append(nmag)
ng = len(nl)
print "-----------"
if(verbose) : print "Number of objects = ", len(f)
if(verbose) : print "Counts before or during 2014 = ", counts2014
if(verbose) : print "Counts with dec < -20 = ", counts20
print "Number of objects in the sample = ", ng
#Monte Carlo to predict magnitudes histogram
ndrawn = 0
nbr = 0
NTl = []
n = 0
for indx, f in enumerate(sample.FracExp,1) :
        Nexp = indx # indx starts at 1 because of the enumerate start argument, so it is the number of exposures
nd = int(round(ndraw * f))
ndrawn=ndrawn+nd
for i in range(0,nd):
detsigtoti = 0
for j in range(0,Nexp):
ind = int(random()*ng)
detsig1 = nl[ind]
detsigtoti += 1./detsig1**2.
detsigtot = sqrt(1./detsigtoti)
m = nanomaggiesToMag(detsigtot * 5.)
if m > recm: # pass requirement
nbr += 1.
NTl.append(m)
n += 1.
# Run some statistics
NTl=np.array(NTl)
mean = sum(NTl)/float(len(NTl))
std = sqrt(sum(NTl**2.)/float(len(NTl))-mean**2.)
NTl.sort()
if len(NTl)/2. != len(NTl)/2:
med = NTl[len(NTl)/2+1]
else:
med = (NTl[len(NTl)/2+1]+NTl[len(NTl)/2])/2.
print "Total images drawn with either 1,2,3,4,5 exposures", ndrawn
print "Mean = ", mean, "; Median = ", med ,"; Std = ", std
print 'percentage better than requirements = '+str(nbr/float(ndrawn))
    # Prepare histogram
minN = max(min(NTl),vmin)
maxN = max(NTl)+.0001
hl = zeros((nbin)) # histogram counts
lowcounts=0
for i in range(0,len(NTl)):
bin = int(nbin*(NTl[i]-minN)/(maxN-minN))
if(bin >= 0) :
hl[bin] += 1
else:
lowcounts +=1
Nl = [] # x bin centers
for i in range(0,len(hl)):
Nl.append(minN+i*(maxN-minN)/float(nbin)+0.5*(maxN-minN)/float(nbin))
NTl = array(NTl)
print "min,max depth = ",min(NTl), max(NTl)
print "counts below ", minN, " = ", lowcounts
    #### Plotting histogram
fname=sample.localdir+'validationplots/'+sample.catalog+sample.band+'_pred_exposures.png'
print "saving histogram plot in", fname
#--- pdf version ---
#from matplotlib.backends.backend_pdf import PdfPages
#pp = PdfPages(fname)
plt.clf()
plt.plot(Nl,hl,'k-')
plt.xlabel(r'5$\sigma$ '+sample.band+ ' depth')
plt.ylabel('# of images')
plt.title('MC combined exposure depth '+str(mean)[:5]+r'$\pm$'+str(std)[:4]+r', $f_{\rm pass}=$'+str(nbr/float(ndrawn))[:5]+'\n '+catalogue_name)
#plt.xscale('log') # --- pdf ---
plt.savefig(fname) #pp.savefig()
plt.close #pp.close()
return fname
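# The Monte Carlo above co-adds N single-exposure depths by summing inverse
# variances, detsigtot = sqrt(1/sum_i(1/detsig_i**2)), so two identical exposures
# gain about 2.5*log10(sqrt(2)) ~ 0.38 mag of depth. Illustrative (untested) call:
#   fname = val3p4b_maghist_pred(mysample('BASS', 'DR4', 'g', '/path/to/output/', True))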
# -------------------------------------------------------------------
# -------------------------------------------------------------------
# OLD STUFF
# -------------------------------------------------------------------
dir = '$HOME/' # obviously needs to be changed
#inputdir = '/project/projectdirs/cosmo/data/legacysurvey/dr3/' # where I get my data
inputdir= '/global/projecta/projectdirs/cosmo/work/dr4/'
localdir = '/global/homes/m/manera/DESI/validation-outputs/' #place for local DESI stuff
#extmap = np.loadtxt('/global/homes/m/manera/DESI/validation-outputs/healSFD_r_256_fullsky.dat') # extintion map remove it
### Plotting facilities
def plotPhotometryMap(band,vmin=0.0,vmax=1.0,mjdmax='',prop='zptvar',op='min',rel='DR0',survey='surveyname',nside='1024',oversamp='1'):
import fitsio
from matplotlib import pyplot as plt
import matplotlib.cm as cm
from numpy import zeros,array
import healpix
from healpix import pix2ang_ring,thphi2radec
import healpy as hp
# Survey inputs
mjdw=mjdmax
if rel == 'DR2':
fname =inputdir+'decals-ccds-annotated.fits'
catalogue_name = 'DECaLS_DR2'+mjdw
if rel == 'DR3':
inputdir = '/project/projectdirs/cosmo/data/legacysurvey/dr3/' # where I get my data
fname =inputdir+'ccds-annotated-decals.fits.gz'
catalogue_name = 'DECaLS_DR3'+mjdw
if rel == 'DR4':
inputdir= '/global/projecta/projectdirs/cosmo/work/dr4/'
if (band == 'g' or band == 'r'):
fname=inputdir+'ccds-annotated-dr4-90prime.fits.gz'
catalogue_name = '90prime_DR4'+mjdw
if band == 'z' :
fname = inputdir+'ccds-annotated-dr4-mzls.fits.gz'
catalogue_name = 'MZLS_DR4'+mjdw
fname=localdir+catalogue_name+'/nside'+nside+'_oversamp'+oversamp+'/'+catalogue_name+'_band_'+band+'_nside'+nside+'_oversamp'+oversamp+'_'+prop+'__'+op+'.fits.gz'
f = fitsio.read(fname)
ral = []
decl = []
val = f['SIGNAL']
pix = f['PIXEL']
# -------------- plot of values ------------------
    if( prop=='zptvar' and op == 'min' ):
print 'Plotting min zpt rms'
myval = []
for i in range(0,len(val)):
myval.append(1.086 * np.sqrt(val[i])) #1.086 converts dm into d(flux)
th,phi = hp.pix2ang(int(nside),pix[i])
ra,dec = thphi2radec(th,phi)
ral.append(ra)
decl.append(dec)
mylabel = 'min-zpt-rms-flux'
vmin = 0.0 #min(myval)
vmax = 0.03 #max(myval)
npix = len(myval)
below = 0
print 'Min and Max values of ', mylabel, ' values is ', min(myval), max(myval)
print 'Number of pixels is ', npix
print 'Number of pixels offplot with ', mylabel,' < ', vmin, ' is', below
print 'Area is ', npix/(float(nside)**2.*12)*360*360./pi, ' sq. deg.'
map = plt.scatter(ral,decl,c=myval, cmap=cm.rainbow,s=2., vmin=vmin, vmax=vmax, lw=0,edgecolors='none')
cbar = plt.colorbar(map)
plt.xlabel('r.a. (degrees)')
plt.ylabel('declination (degrees)')
plt.title('Map of '+ mylabel +' for '+catalogue_name+' '+band+'-band')
plt.xlim(0,360)
plt.ylim(-30,90)
plt.savefig(localdir+mylabel+'_'+band+'_'+catalogue_name+str(nside)+'.png')
plt.close()
# -------------- plot of status in udgrade maps of 1.406 deg pix size ------------------
#Bands inputs
if band == 'g':
phreq = 0.01
if band == 'r':
phreq = 0.01
if band == 'z':
phreq = 0.02
# Obtain values to plot
    if( prop=='zptvar' and op == 'min' ):
nside2 = 64 # 1.40625 deg per pixel
npix2 = hp.nside2npix(nside2)
myreq = np.zeros(npix2) # 0 off footprint, 1 at least one pass requirement, -1 none pass requirement
ral = np.zeros(npix2)
decl = np.zeros(npix2)
mylabel = 'photometric-pixels'
print 'Plotting photometric requirement'
for i in range(0,len(val)):
th,phi = hp.pix2ang(int(nside),pix[i])
ipix = hp.ang2pix(nside2,th,phi)
dF= 1.086 * (sqrt(val[i])) # 1.086 converts d(magnitudes) into d(flux)
if(dF < phreq):
myreq[ipix]=1
else:
if(myreq[ipix] == 0): myreq[ipix]=-1
for i in range(0,len(myreq)):
            th,phi = hp.pix2ang(int(nside2),i)  # i indexes the degraded nside2 map
ra,dec = thphi2radec(th,phi)
ral[i] = ra
decl[i] = dec
#myval = np.zeros(npix2)
#mycount = np.zeros(pix2)
#myval[ipix] += dF
#mycount[ipix] += 1.
below=sum( x for x in myreq if x < phreq)
print 'Number of pixels offplot with ', mylabel,' < ', phreq, ' is', below
vmin = min(myreq)
vmax = max(myreq)
map = plt.scatter(ral,decl,c=myreq, cmap=cm.rainbow,s=5., vmin=vmin, vmax=vmax, lw=0,edgecolors='none')
cbar = plt.colorbar(map)
plt.xlabel('r.a. (degrees)')
plt.ylabel('declination (degrees)')
plt.title('Map of '+ mylabel +' for '+catalogue_name+' '+band+'-band')
plt.xlim(0,360)
plt.ylim(-30,90)
plt.savefig(localdir+mylabel+'_'+band+'_'+catalogue_name+str(nside)+'.png')
plt.close()
#plt.show()
#cbar.set_label(r'5$\sigma$ galaxy depth', rotation=270,labelpad=1)
#plt.xscale('log')
return True
def plotPropertyMap(band,vmin=21.0,vmax=24.0,mjdmax='',prop='ivar',op='total',survey='surveyname',nside='1024',oversamp='1'):
import fitsio
from matplotlib import pyplot as plt
import matplotlib.cm as cm
from numpy import zeros,array
import healpix
from healpix import pix2ang_ring,thphi2radec
import healpy as hp
fname=localdir+survey+mjdmax+'/nside'+nside+'_oversamp'+oversamp+'/'+survey+mjdmax+'_band_'+band+'_nside'+nside+'_oversamp'+oversamp+'_'+prop+'__'+op+'.fits.gz'
f = fitsio.read(fname)
ral = []
decl = []
val = f['SIGNAL']
pix = f['PIXEL']
# Obtain values to plot
if (prop == 'ivar'):
myval = []
mylabel='depth'
print 'Converting ivar to depth.'
print 'Plotting depth'
below=0
for i in range(0,len(val)):
depth=nanomaggiesToMag(sqrt(1./val[i]) * 5.)
if(depth < vmin):
below=below+1
else:
myval.append(depth)
th,phi = hp.pix2ang(int(nside),pix[i])
ra,dec = thphi2radec(th,phi)
ral.append(ra)
decl.append(dec)
npix=len(f)
print 'Min and Max values of ', mylabel, ' values is ', min(myval), max(myval)
print 'Number of pixels is ', npix
print 'Number of pixels offplot with ', mylabel,' < ', vmin, ' is', below
print 'Area is ', npix/(float(nside)**2.*12)*360*360./pi, ' sq. deg.'
map = plt.scatter(ral,decl,c=myval, cmap=cm.rainbow,s=2., vmin=vmin, vmax=vmax, lw=0,edgecolors='none')
cbar = plt.colorbar(map)
plt.xlabel('r.a. (degrees)')
plt.ylabel('declination (degrees)')
plt.title('Map of '+ mylabel +' for '+survey+' '+band+'-band')
plt.xlim(0,360)
plt.ylim(-30,90)
plt.savefig(localdir+mylabel+'_'+band+'_'+survey+str(nside)+'.png')
plt.close()
#plt.show()
#cbar.set_label(r'5$\sigma$ galaxy depth', rotation=270,labelpad=1)
#plt.xscale('log')
return True
def depthfromIvar(band,rel='DR3',survey='survename'):
# ------------------------------------------------------
# MARCM stable version, improved from AJR quick hack
    # This now includes extinction from the exposures
# Uses quicksip subroutines from Boris
# (with a bug I corrected for BASS and MzLS ccd orientation)
# Produces depth maps from Dustin's annotated files
# ------------------------------------------------------
nside = 1024 # Resolution of output maps
nsidesout = None # if you want full sky degraded maps to be written
ratiores = 1 # Superresolution/oversampling ratio, simp mode doesn't allow anything other than 1
mode = 1 # 1: fully sequential, 2: parallel then sequential, 3: fully parallel
pixoffset = 0 # How many pixels are being removed on the edge of each CCD? 15 for DES.
mjd_max = 10e10
mjdw = ''
# Survey inputs
if rel == 'DR2':
fname =inputdir+'decals-ccds-annotated.fits'
catalogue_name = 'DECaLS_DR2'+mjdw
if rel == 'DR3':
inputdir = '/project/projectdirs/cosmo/data/legacysurvey/dr3/' # where I get my data
fname =inputdir+'ccds-annotated-decals.fits.gz'
catalogue_name = 'DECaLS_DR3'+mjdw
if rel == 'DR4':
inputdir= '/global/projecta/projectdirs/cosmo/work/dr4/'
if (band == 'g' or band == 'r'):
fname=inputdir+'ccds-annotated-dr4-90prime.fits.gz'
catalogue_name = '90prime_DR4'+mjdw
if band == 'z' :
fname = inputdir+'ccds-annotated-dr4-mzls.fits.gz'
catalogue_name = 'MZLS_DR4'+mjdw
#Bands inputs
if band == 'g':
be = 1
extc = 3.303 #/2.751
if band == 'r':
be = 2
extc = 2.285 #/2.751
if band == 'z':
be = 4
extc = 1.263 #/2.751
# Where to write the maps ? Make sure directory exists.
outroot = localdir
tbdata = pyfits.open(fname)[1].data
# ------------------------------------------------------
# Obtain indices
if band == 'g':
sample_names = ['band_g']
indg = np.where((tbdata['filter'] == 'g') & (tbdata['photometric'] == True) & (tbdata['blacklist_ok'] == True))
inds = indg #redundant
if band == 'r':
sample_names = ['band_r']
indr = np.where((tbdata['filter'] == 'r') & (tbdata['photometric'] == True) & (tbdata['blacklist_ok'] == True))
inds = indr #redundant
if band == 'z':
sample_names = ['band_z']
indz = np.where((tbdata['filter'] == 'z') & (tbdata['photometric'] == True) & (tbdata['blacklist_ok'] == True))
inds = indz # redundant
#Read data
#obtain invnoisesq here, including extinction
nmag = Magtonanomaggies(tbdata['galdepth']-extc*tbdata['EBV'])/5.
ivar= 1./nmag**2.
# What properties do you want mapped?
# Each each tuple has [(quantity to be projected, weighting scheme, operation),(etc..)]
propertiesandoperations = [ ('ivar', '', 'total'), ]
# What properties to keep when reading the images?
#Should at least contain propertiesandoperations and the image corners.
# MARCM - actually no need for ra dec image corners.
    # It only needs ra0-ra3 and dec0-dec3 if the fast-track quicksip subroutines are used
propertiesToKeep = [ 'filter', 'AIRMASS', 'FWHM','mjd_obs'] \
+ ['RA', 'DEC', 'crval1', 'crval2', 'crpix1', 'crpix2', 'cd1_1', 'cd1_2', 'cd2_1', 'cd2_2','width','height']
# Create big table with all relevant properties.
tbdata = np.core.records.fromarrays([tbdata[prop] for prop in propertiesToKeep] + [ivar], names = propertiesToKeep + [ 'ivar'])
# Read the table, create Healtree, project it into healpix maps, and write these maps.
# Done with Quicksip library, note it has quite a few hardcoded values (use new version by MARCM for BASS and MzLS)
# project_and_write_maps_simp(mode, propertiesandoperations, tbdata, catalogue_name, outroot, sample_names, inds, nside)
project_and_write_maps(mode, propertiesandoperations, tbdata, catalogue_name, outroot, sample_names, inds, nside, ratiores, pixoffset, nsidesout)
# ----- plot depth map -----
prop='ivar'
plotPropertyMap(band,survey=catalogue_name,prop=prop)
return True
def plotMaghist_pred(band,FracExp=[0,0,0,0,0],ndraw = 1e5,nbin=100,rel='DR3',vmin=21.0):
# MARCM Makes histogram of predicted magnitudes
    # by Monte Carlo from exposures, conserving the fraction of the number of exposures
# This produces the histogram for Dustin's processed galaxy depth
import fitsio
from matplotlib import pyplot as plt
from numpy import zeros,array
from random import random
# Check fraction of number of exposures adds to 1.
if( abs(sum(FracExp) - 1.0) > 1e-5 ):
print sum(FracExp)
raise ValueError("Fration of number of exposures don't add to one")
# Survey inputs
mjdw=''
if rel == 'DR2':
fname =inputdir+'decals-ccds-annotated.fits'
catalogue_name = 'DECaLS_DR2'+mjdw
if rel == 'DR3':
inputdir = '/project/projectdirs/cosmo/data/legacysurvey/dr3/' # where I get my data
fname =inputdir+'ccds-annotated-decals.fits.gz'
catalogue_name = 'DECaLS_DR3'+mjdw
if rel == 'DR4':
#inputdir= '/global/projecta/projectdirs/cosmo/work/dr4/'
inputdir='/project/projectdirs/cosmo/data/legacysurvey/dr4'
if (band == 'g' or band == 'r'):
fname=inputdir+'ccds-annotated-bass.fits.gz'
catalogue_name='BASS_DR4'+mjdw
#fname=inputdir+'ccds-annotated-dr4-90prime.fits.gz'
#catalogue_name = '90prime_DR4'+mjdw
if band == 'z' :
#fname = inputdir+'ccds-annotated-dr4-mzls.fits.gz'
fname = inputdir+'ccds-annotated-mzls.fits.gz'
catalogue_name = 'MZLS_DR4'+mjdw
# Bands info
if band == 'g':
be = 1
zp0 = 25.08
recm = 24.
if band == 'r':
be = 2
zp0 = 25.29
recm = 23.4
if band == 'z':
be = 4
zp0 = 24.92
recm = 22.5
f = fitsio.read(fname)
#read in magnitudes including extinction
counts2014 =0
n = 0
nl = []
for i in range(0,len(f)):
DS = 0
year = int(f[i]['date_obs'].split('-')[0])
if (year <= 2014): counts2014=counts2014+1
if year > 2014:
DS = 1 #enforce 2015 data
if f[i]['filter'] == band:
if DS == 1:
n += 1
if f[i]['dec'] > -20 and f[i]['photometric'] == True and f[i]['blacklist_ok'] == True :
magext = f[i]['galdepth'] - f[i]['decam_extinction'][be]
nmag = Magtonanomaggies(magext)/5. #total noise
nl.append(nmag)
ng = len(nl)
print "-----------"
print "Number of objects with DS=1", n
print "Number of objects in the sample", ng
print "Counts before or during 2014", counts2014
#Monte Carlo to predict magnitudes histogram
ndrawn = 0
nbr = 0
NTl = []
for indx, f in enumerate(FracExp,1) :
        Nexp = indx # indx starts at 1 because of the enumerate start argument, so it is the number of exposures
nd = int(round(ndraw * f))
ndrawn=ndrawn+nd
for i in range(0,nd):
detsigtoti = 0
for j in range(0,Nexp):
ind = int(random()*ng)
detsig1 = nl[ind]
detsigtoti += 1./detsig1**2.
detsigtot = sqrt(1./detsigtoti)
m = nanomaggiesToMag(detsigtot * 5.)
if m > recm: # pass requirement
nbr += 1.
NTl.append(m)
n += 1.
# Run some statistics
NTl=np.array(NTl)
mean = sum(NTl)/float(len(NTl))
std = sqrt(sum(NTl**2.)/float(len(NTl))-mean**2.)
NTl.sort()
if len(NTl)/2. != len(NTl)/2:
med = NTl[len(NTl)/2+1]
else:
med = (NTl[len(NTl)/2+1]+NTl[len(NTl)/2])/2.
print "Mean ", mean
print "Median ", med
print "Std ", std
print 'percentage better than requirements '+str(nbr/float(ndrawn))
    # Prepare histogram
minN = max(min(NTl),vmin)
maxN = max(NTl)+.0001
hl = zeros((nbin)) # histogram counts
lowcounts=0
for i in range(0,len(NTl)):
bin = int(nbin*(NTl[i]-minN)/(maxN-minN))
if(bin >= 0) :
hl[bin] += 1
else:
lowcounts +=1
Nl = [] # x bin centers
for i in range(0,len(hl)):
Nl.append(minN+i*(maxN-minN)/float(nbin)+0.5*(maxN-minN)/float(nbin))
NTl = array(NTl)
    #### Plotting histogram
print "Plotting the histogram now"
print "min,max depth ",min(NTl), max(NTl)
print "counts below ", vmin, "are ", lowcounts
from matplotlib.backends.backend_pdf import PdfPages
plt.clf()
pp = PdfPages(localdir+'validationplots/'+catalogue_name+band+'_pred_exposures.pdf')
plt.plot(Nl,hl,'k-')
plt.xlabel(r'5$\sigma$ '+band+ ' depth')
plt.ylabel('# of images')
plt.title('MC combined exposure depth '+str(mean)[:5]+r'$\pm$'+str(std)[:4]+r', $f_{\rm pass}=$'+str(nbr/float(ndrawn))[:5]+'\n '+catalogue_name)
#plt.xscale('log')
pp.savefig()
pp.close()
return True
def photometricReq(band,rel='DR3',survey='survename'):
# ------------------------------------------------------
# ------------------------------------------------------
nside = 1024 # Resolution of output maps
nsidesout = None # if you want full sky degraded maps to be written
ratiores = 1 # Superresolution/oversampling ratio, simp mode doesn't allow anything other than 1
mode = 1 # 1: fully sequential, 2: parallel then sequential, 3: fully parallel
pixoffset = 0 # How many pixels are being removed on the edge of each CCD? 15 for DES.
mjd_max = 10e10
mjdw = ''
# Survey inputs
if rel == 'DR2':
fname =inputdir+'decals-ccds-annotated.fits'
catalogue_name = 'DECaLS_DR2'+mjdw
if rel == 'DR3':
inputdir = '/project/projectdirs/cosmo/data/legacysurvey/dr3/' # where I get my data
fname =inputdir+'ccds-annotated-decals.fits.gz'
catalogue_name = 'DECaLS_DR3'+mjdw
if rel == 'DR4':
inputdir= '/global/projecta/projectdirs/cosmo/work/dr4/'
if (band == 'g' or band == 'r'):
fname=inputdir+'ccds-annotated-dr4-90prime.fits.gz'
catalogue_name = '90prime_DR4'+mjdw
if band == 'z' :
fname = inputdir+'ccds-annotated-dr4-mzls.fits.gz'
catalogue_name = 'MZLS_DR4'+mjdw
# Where to write the maps ? Make sure directory exists.
outroot = localdir
tbdata = pyfits.open(fname)[1].data
# ------------------------------------------------------
# Obtain indices
if band == 'g':
sample_names = ['band_g']
#indg = np.where((tbdata['filter'] == 'g') & (tbdata['photometric'] == True) & (tbdata['blacklist_ok'] == True))
indg = np.where((tbdata['filter'] == 'g') & (tbdata['blacklist_ok'] == True))
#indg = np.where((tbdata['filter'] == 'g') )
inds = indg #redundant
if band == 'r':
sample_names = ['band_r']
#indr = np.where((tbdata['filter'] == 'r') & (tbdata['photometric'] == True) & (tbdata['blacklist_ok'] == True))
indr = np.where((tbdata['filter'] == 'r') & (tbdata['blacklist_ok'] == True))
#indr = np.where((tbdata['filter'] == 'r') )
inds = indr #redundant
if band == 'z':
sample_names = ['band_z']
#indz = np.where((tbdata['filter'] == 'z') & (tbdata['photometric'] == True) & (tbdata['blacklist_ok'] == True))
indz = np.where((tbdata['filter'] == 'z') & (tbdata['blacklist_ok'] == True))
#indz = np.where((tbdata['filter'] == 'z') )
inds = indz # redundant
#Read data
#obtain invnoisesq here, including extinction
zptvar = tbdata['CCDPHRMS']**2/tbdata['CCDNMATCH']
zptivar = 1./zptvar
nccd = np.ones(len(tbdata))
# What properties do you want mapped?
# Each each tuple has [(quantity to be projected, weighting scheme, operation),(etc..)]
quicksipVerbose(sample.verbose)
propertiesandoperations = [ ('zptvar', '', 'total') , ('zptvar','','min') , ('nccd','','total') , ('zptivar','','total')]
# What properties to keep when reading the images?
#Should at least contain propertiesandoperations and the image corners.
# MARCM - actually no need for ra dec image corners.
    # It only needs ra0-ra3 and dec0-dec3 if the fast-track quicksip subroutines are used
propertiesToKeep = [ 'filter', 'AIRMASS', 'FWHM','mjd_obs'] \
+ ['RA', 'DEC', 'crval1', 'crval2', 'crpix1', 'crpix2', 'cd1_1', 'cd1_2', 'cd2_1', 'cd2_2','width','height']
# Create big table with all relevant properties.
tbdata = np.core.records.fromarrays([tbdata[prop] for prop in propertiesToKeep] + [zptvar,zptivar,nccd], names = propertiesToKeep + [ 'zptvar','zptivar','nccd'])
# Read the table, create Healtree, project it into healpix maps, and write these maps.
# Done with Quicksip library, note it has quite a few hardcoded values (use new version by MARCM for BASS and MzLS)
# project_and_write_maps_simp(mode, propertiesandoperations, tbdata, catalogue_name, outroot, sample_names, inds, nside)
project_and_write_maps(mode, propertiesandoperations, tbdata, catalogue_name, outroot, sample_names, inds, nside, ratiores, pixoffset, nsidesout)
# ----- plot depth map -----
#prop='ivar'
#plotPropertyMap(band,survey=catalogue_name,prop=prop)
return True
# ***********************************************************************
# ***********************************************************************
# --- run depth maps
#band='r'
#depthfromIvar(band,rel='DR3')
#
#band='g'
#depthfromIvar(band,rel='DR3')
#
#band='z'
#depthfromIvar(band,rel='DR3')
#band='r'
#depthfromIvar(band,rel='DR4')
#band='g'
#depthfromIvar(band,rel='DR4')
#band='z'
#depthfromIvar(band,rel='DR4')
# DECALS (DR3) the final survey will be covered by
# 1, 2, 3, 4, and 5 exposures in the following fractions:
#FracExp=[0.02,0.24,0.50,0.22,0.02]
#print "DECaLS depth histogram r-band"
#band='r'
#plotMaghist_pred(band,FracExp=FracExp,ndraw = 1e5,nbin=100,rel='DR3')
#print "DECaLS depth histogram g-band"
#band='g'
#plotMaghist_pred(band,FracExp=FracExp,ndraw = 1e5,nbin=100,rel='DR3')
#print "DECaLS depth histogram z-band"
#band='z'
#plotMaghist_pred(band,FracExp=FracExp,ndraw = 1e5,nbin=100,rel='DR3')
# For BASS (DR4) the coverage fractions for 1,2,3,4,5 exposures are:
#FracExp=[0.0014,0.0586,0.8124,0.1203,0.0054,0.0019]
#print "BASS depth histogram r-band"
#band='r'
#plotMaghist_pred(band,FracExp=FracExp,ndraw = 1e5,nbin=100,rel='DR4')
#print "BASS depth histogram g-band"
#band='g'
#plotMaghist_pred(band,FracExp=FracExp,ndraw = 1e5,nbin=100,rel='DR4')
# For MzLS fill factors of 100% with a coverage of at least 1,
# 99.5% with a coverage of at least 2, and 85% with a coverage of 3.
#FracExp=[0.005,0.145,0.85,0,0]
#print "MzLS depth histogram z-band"
#band='z'
#plotMaghist_pred(band,FracExp=FracExp,ndraw = 1e5,nbin=100,rel='DR4')
# --- run histogram deph peredictions
# prova
#photometricReq('g',rel='DR3',survey='survename')
#photometricReq('r',rel='DR3',survey='survename')
#photometricReq('z',rel='DR3',survey='survename')
#photometricReq('g',rel='DR4',survey='survename')
#photometricReq('r',rel='DR4',survey='survename')
#photometricReq('z',rel='DR4',survey='survename')
#prop = 'zptvar'
#opt = 'min'
#rel = 'DR3'
#band = 'g'
#plotPhotometryMap(band,prop=prop,op=opt,rel=rel)
#band = 'r'
#plotPhotometryMap(band,prop=prop,op=opt,rel=rel)
#band = 'z'
#plotPhotometryMap(band,prop=prop,op=opt,rel=rel)
#
#rel = 'DR4'
#band = 'g'
#plotPhotometryMap(band,prop=prop,op=opt,rel=rel)
#band = 'r'
#plotPhotometryMap(band,prop=prop,op=opt,rel=rel)
#band = 'z'
#plotPhotometryMap(band,prop=prop,op=opt,rel=rel)
| legacysurvey/pipeline | validationtests/DESIccdManera.py | Python | gpl-2.0 | 38,903 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from .index import TableIndices, TableLoc, TableILoc, TableLocIndices
import sys
from collections import OrderedDict
from collections.abc import Mapping
import warnings
from copy import deepcopy
import numpy as np
from numpy import ma
from astropy import log
from astropy.units import Quantity, QuantityInfo
from astropy.utils import isiterable, ShapedLikeNDArray
from astropy.utils.console import color_print
from astropy.utils.metadata import MetaData
from astropy.utils.data_info import BaseColumnInfo, MixinInfo, ParentDtypeInfo, DataInfo
from astropy.utils.decorators import format_doc
from astropy.utils.exceptions import AstropyDeprecationWarning, NoValue
from astropy.io.registry import UnifiedReadWriteMethod
from . import groups
from .pprint import TableFormatter
from .column import (BaseColumn, Column, MaskedColumn, _auto_names, FalseArray,
col_copy)
from .row import Row
from .np_utils import fix_column_name, recarray_fromrecords
from .info import TableInfo
from .index import Index, _IndexModeContext, get_index
from .connect import TableRead, TableWrite
from . import conf
_implementation_notes = """
This string has informal notes concerning Table implementation for developers.
Things to remember:
- Table has customizable attributes ColumnClass, Column, MaskedColumn.
Table.Column is normally just column.Column (same w/ MaskedColumn)
but in theory they can be different. Table.ColumnClass is the default
class used to create new non-mixin columns, and this is a function of
the Table.masked attribute. Column creation / manipulation in a Table
needs to respect these.
- Column objects that get inserted into the Table.columns attribute must
have the info.parent_table attribute set correctly. Beware just dropping
an object into the columns dict since an existing column may
be part of another Table and have parent_table set to point at that
table. Dropping that column into `columns` of this Table will cause
a problem for the old one so the column object needs to be copied (but
not necessarily the data).
Currently replace_column is always making a copy of both object and
data if parent_table is set. This could be improved but requires a
generic way to copy a mixin object but not the data.
- Be aware of column objects that have indices set.
- `cls.ColumnClass` is a property that effectively uses the `masked` attribute
to choose either `cls.Column` or `cls.MaskedColumn`.
"""
__doctest_skip__ = ['Table.read', 'Table.write', 'Table._read',
'Table.convert_bytestring_to_unicode',
'Table.convert_unicode_to_bytestring',
]
__doctest_requires__ = {'*pandas': ['pandas']}
_pprint_docs = """
{__doc__}
Parameters
----------
max_lines : int or `None`
Maximum number of lines in table output.
max_width : int or `None`
Maximum character width of output.
show_name : bool
Include a header row for column names. Default is True.
show_unit : bool
Include a header row for unit. Default is to show a row
for units only if one or more columns has a defined value
for the unit.
show_dtype : bool
Include a header row for column dtypes. Default is True.
align : str or list or tuple or `None`
Left/right alignment of columns. Default is right (None) for all
columns. Other allowed values are '>', '<', '^', and '0=' for
right, left, centered, and 0-padded, respectively. A list of
strings can be provided for alignment of tables with multiple
columns.
"""
_pformat_docs = """
{__doc__}
Parameters
----------
max_lines : int or `None`
Maximum number of rows to output
max_width : int or `None`
Maximum character width of output
show_name : bool
Include a header row for column names. Default is True.
show_unit : bool
Include a header row for unit. Default is to show a row
for units only if one or more columns has a defined value
for the unit.
show_dtype : bool
Include a header row for column dtypes. Default is True.
html : bool
Format the output as an HTML table. Default is False.
tableid : str or `None`
An ID tag for the table; only used if html is set. Default is
"table{id}", where id is the unique integer id of the table object,
id(self)
align : str or list or tuple or `None`
Left/right alignment of columns. Default is right (None) for all
columns. Other allowed values are '>', '<', '^', and '0=' for
right, left, centered, and 0-padded, respectively. A list of
strings can be provided for alignment of tables with multiple
columns.
tableclass : str or list of str or `None`
CSS classes for the table; only used if html is set. Default is
None.
Returns
-------
lines : list
Formatted table as a list of strings.
"""
class TableReplaceWarning(UserWarning):
"""
Warning class for cases when a table column is replaced via the
Table.__setitem__ syntax e.g. t['a'] = val.
This does not inherit from AstropyWarning because we want to use
stacklevel=3 to show the user where the issue occurred in their code.
"""
pass
def descr(col):
"""Array-interface compliant full description of a column.
This returns a 3-tuple (name, type, shape) that can always be
used in a structured array dtype definition.
"""
col_dtype = 'O' if (col.info.dtype is None) else col.info.dtype
col_shape = col.shape[1:] if hasattr(col, 'shape') else ()
return (col.info.name, col_dtype, col_shape)
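# For instance (illustrative, not part of the original source):
# descr(Column(name='a', data=[1, 2])) -> ('a', dtype('int64'), ()) on a 64-bit
# platform, and col_dtype falls back to object ('O') when col.info.dtype is None.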
def has_info_class(obj, cls):
return hasattr(obj, 'info') and isinstance(obj.info, cls)
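# Illustrative: has_info_class(Column([1]), BaseColumnInfo) is True, while a plain
# np.ndarray returns False because it has no ``info`` attribute.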
# Note to future maintainers: when transitioning this to dict
# be sure to change the OrderedDict ref(s) in Row and in __len__().
class TableColumns(OrderedDict):
"""OrderedDict subclass for a set of columns.
This class enhances item access to provide convenient access to columns
by name or index, including slice access. It also handles renaming
of columns.
The initialization argument ``cols`` can be a list of ``Column`` objects
or any structure that is valid for initializing a Python dict. This
includes a dict, list of (key, val) tuples or [key, val] lists, etc.
Parameters
----------
cols : dict, list, tuple; optional
Column objects as data structure that can init dict (see above)
"""
def __init__(self, cols={}):
if isinstance(cols, (list, tuple)):
# `cols` should be a list of two-tuples, but it is allowed to have
# columns (BaseColumn or mixins) in the list.
newcols = []
for col in cols:
if has_info_class(col, BaseColumnInfo):
newcols.append((col.info.name, col))
else:
newcols.append(col)
cols = newcols
super().__init__(cols)
def __getitem__(self, item):
"""Get items from a TableColumns object.
::
tc = TableColumns(cols=[Column(name='a'), Column(name='b'), Column(name='c')])
tc['a'] # Column('a')
tc[1] # Column('b')
tc['a', 'b'] # <TableColumns names=('a', 'b')>
tc[1:3] # <TableColumns names=('b', 'c')>
"""
if isinstance(item, str):
return OrderedDict.__getitem__(self, item)
elif isinstance(item, (int, np.integer)):
return self.values()[item]
elif (isinstance(item, np.ndarray) and item.shape == () and item.dtype.kind == 'i'):
return self.values()[item.item()]
elif isinstance(item, tuple):
return self.__class__([self[x] for x in item])
elif isinstance(item, slice):
return self.__class__([self[x] for x in list(self)[item]])
else:
raise IndexError('Illegal key or index value for {} object'
.format(self.__class__.__name__))
def __setitem__(self, item, value, validated=False):
"""
Set item in this dict instance, but do not allow directly replacing an
existing column unless it is already validated (and thus is certain to
not corrupt the table).
NOTE: it is easily possible to corrupt a table by directly *adding* a new
key to the TableColumns attribute of a Table, e.g.
``t.columns['jane'] = 'doe'``.
"""
if item in self and not validated:
raise ValueError("Cannot replace column '{}'. Use Table.replace_column() instead."
.format(item))
super().__setitem__(item, value)
def __repr__(self):
names = (f"'{x}'" for x in self.keys())
return "<{1} names=({0})>".format(",".join(names), self.__class__.__name__)
def _rename_column(self, name, new_name):
if name == new_name:
return
if new_name in self:
raise KeyError(f"Column {new_name} already exists")
mapper = {name: new_name}
new_names = [mapper.get(name, name) for name in self]
cols = list(self.values())
self.clear()
self.update(list(zip(new_names, cols)))
# Define keys and values for Python 2 and 3 source compatibility
def keys(self):
return list(OrderedDict.keys(self))
def values(self):
return list(OrderedDict.values(self))
def isinstance(self, cls):
"""
Return a list of columns which are instances of the specified classes.
Parameters
----------
cls : class or tuple of classes
Column class (including mixin) or tuple of Column classes.
Returns
-------
col_list : list of Columns
List of Column objects which are instances of given classes.
"""
cols = [col for col in self.values() if isinstance(col, cls)]
return cols
def not_isinstance(self, cls):
"""
Return a list of columns which are not instances of the specified classes.
Parameters
----------
cls : class or tuple of classes
Column class (including mixin) or tuple of Column classes.
Returns
-------
col_list : list of Columns
List of Column objects which are not instances of given classes.
"""
cols = [col for col in self.values() if not isinstance(col, cls)]
return cols
class TableReadWrite:
def __get__(self, instance, owner_cls):
if instance is None:
# This is an unbound descriptor on the class
info = self
info._parent_cls = owner_cls
else:
info = instance.__dict__.get('info')
if info is None:
info = instance.__dict__['info'] = self.__class__(bound=True)
info._parent = instance
return info
class Table:
"""A class to represent tables of heterogeneous data.
`~astropy.table.Table` provides a class for heterogeneous tabular data,
making use of a `numpy` structured array internally to store the data
values. A key enhancement provided by the `~astropy.table.Table` class is
the ability to easily modify the structure of the table by adding or
removing columns, or adding new rows of data. In addition table and column
metadata are fully supported.
`~astropy.table.Table` differs from `~astropy.nddata.NDData` by the
assumption that the input data consists of columns of homogeneous data,
where each column has a unique identifier and may contain additional
metadata such as the data unit, format, and description.
See also: http://docs.astropy.org/en/stable/table/
Parameters
----------
data : numpy ndarray, dict, list, Table, or table-like object, optional
Data to initialize table.
masked : bool, optional
Specify whether the table is masked.
names : list, optional
Specify column names.
dtype : list, optional
Specify column data types.
meta : dict, optional
Metadata associated with the table.
copy : bool, optional
Copy the input data. If the input is a Table the ``meta`` is always
copied regardless of the ``copy`` parameter.
Default is True.
rows : numpy ndarray, list of lists, optional
Row-oriented data for table instead of ``data`` argument.
copy_indices : bool, optional
Copy any indices in the input data. Default is True.
**kwargs : dict, optional
Additional keyword args when converting table-like object.
"""
meta = MetaData(copy=False)
# Define class attributes for core container objects to allow for subclass
# customization.
Row = Row
Column = Column
MaskedColumn = MaskedColumn
TableColumns = TableColumns
TableFormatter = TableFormatter
# Unified I/O read and write methods from .connect
read = UnifiedReadWriteMethod(TableRead)
write = UnifiedReadWriteMethod(TableWrite)
def as_array(self, keep_byteorder=False, names=None):
"""
Return a new copy of the table in the form of a structured np.ndarray or
np.ma.MaskedArray object (as appropriate).
Parameters
----------
keep_byteorder : bool, optional
By default the returned array has all columns in native byte
order. However, if this option is `True` this preserves the
byte order of all columns (if any are non-native).
        names : list, optional
List of column names to include for returned structured array.
Default is to include all table columns.
Returns
-------
table_array : np.ndarray (unmasked) or np.ma.MaskedArray (masked)
Copy of table as a numpy structured array
"""
masked = self.masked or self.has_masked_columns or self.has_masked_values
empty_init = ma.empty if masked else np.empty
if len(self.columns) == 0:
return empty_init(0, dtype=None)
sys_byteorder = ('>', '<')[sys.byteorder == 'little']
native_order = ('=', sys_byteorder)
dtype = []
cols = self.columns.values()
if names is not None:
cols = [col for col in cols if col.info.name in names]
for col in cols:
col_descr = descr(col)
byteorder = col.info.dtype.byteorder
if not keep_byteorder and byteorder not in native_order:
new_dt = np.dtype(col_descr[1]).newbyteorder('=')
col_descr = (col_descr[0], new_dt, col_descr[2])
dtype.append(col_descr)
data = empty_init(len(self), dtype=dtype)
for col in cols:
# When assigning from one array into a field of a structured array,
# Numpy will automatically swap those columns to their destination
# byte order where applicable
data[col.info.name] = col
# For masked output, masked mixin columns need to set the output mask attribute.
if masked and has_info_class(col, MixinInfo) and hasattr(col, 'mask'):
data[col.info.name].mask = col.mask
return data
def __init__(self, data=None, masked=False, names=None, dtype=None,
meta=None, copy=True, rows=None, copy_indices=True,
**kwargs):
# Set up a placeholder empty table
self._set_masked(masked)
self.columns = self.TableColumns()
self.formatter = self.TableFormatter()
self._copy_indices = True # copy indices from this Table by default
self._init_indices = copy_indices # whether to copy indices in init
self.primary_key = None
# Must copy if dtype are changing
if not copy and dtype is not None:
raise ValueError('Cannot specify dtype when copy=False')
# Row-oriented input, e.g. list of lists or list of tuples, list of
# dict, Row instance. Set data to something that the subsequent code
# will parse correctly.
is_list_of_dict = False
if rows is not None:
if data is not None:
raise ValueError('Cannot supply both `data` and `rows` values')
if all(isinstance(row, dict) for row in rows):
is_list_of_dict = True # Avoid doing the all(...) test twice.
data = rows
elif isinstance(rows, self.Row):
data = rows
else:
rec_data = recarray_fromrecords(rows)
data = [rec_data[name] for name in rec_data.dtype.names]
# Infer the type of the input data and set up the initialization
# function, number of columns, and potentially the default col names
default_names = None
if hasattr(data, '__astropy_table__'):
# Data object implements the __astropy_table__ interface method.
# Calling that method returns an appropriate instance of
# self.__class__ and respects the `copy` arg. The returned
# Table object should NOT then be copied.
data = data.__astropy_table__(self.__class__, copy, **kwargs)
copy = False
elif kwargs:
raise TypeError('__init__() got unexpected keyword argument {!r}'
.format(list(kwargs.keys())[0]))
if (isinstance(data, np.ndarray) and
data.shape == (0,) and
not data.dtype.names):
data = None
if isinstance(data, self.Row):
data = data._table[data._index:data._index + 1]
if isinstance(data, (list, tuple)):
init_func = self._init_from_list
if data and (is_list_of_dict or all(isinstance(row, dict) for row in data)):
n_cols = len(data[0])
else:
n_cols = len(data)
elif isinstance(data, np.ndarray):
if data.dtype.names:
init_func = self._init_from_ndarray # _struct
n_cols = len(data.dtype.names)
default_names = data.dtype.names
else:
init_func = self._init_from_ndarray # _homog
if data.shape == ():
raise ValueError('Can not initialize a Table with a scalar')
elif len(data.shape) == 1:
data = data[np.newaxis, :]
n_cols = data.shape[1]
elif isinstance(data, Mapping):
init_func = self._init_from_dict
default_names = list(data)
n_cols = len(default_names)
elif isinstance(data, Table):
# If user-input meta is None then use data.meta (if non-trivial)
if meta is None and data.meta:
# At this point do NOT deepcopy data.meta as this will happen after
# table init_func() is called. But for table input the table meta
# gets a key copy here if copy=False because later a direct object ref
# is used.
meta = data.meta if copy else data.meta.copy()
# Handle indices on input table. Copy primary key and don't copy indices
# if the input Table is in non-copy mode.
self.primary_key = data.primary_key
self._init_indices = self._init_indices and data._copy_indices
# Extract default names, n_cols, and then overwrite ``data`` to be the
# table columns so we can use _init_from_list.
default_names = data.colnames
n_cols = len(default_names)
data = list(data.columns.values())
init_func = self._init_from_list
elif data is None:
if names is None:
if dtype is None:
if meta is not None:
self.meta = deepcopy(meta) if copy else meta
return
try:
# No data nor names but dtype is available. This must be
# valid to initialize a structured array.
dtype = np.dtype(dtype)
names = dtype.names
dtype = [dtype[name] for name in names]
except Exception:
raise ValueError('dtype was specified but could not be '
'parsed for column names')
# names is guaranteed to be set at this point
init_func = self._init_from_list
n_cols = len(names)
data = [[]] * n_cols
else:
raise ValueError('Data type {} not allowed to init Table'
.format(type(data)))
# Set up defaults if names and/or dtype are not specified.
# A value of None means the actual value will be inferred
# within the appropriate initialization routine, either from
# existing specification or auto-generated.
if names is None:
names = default_names or [None] * n_cols
if dtype is None:
dtype = [None] * n_cols
# Numpy does not support bytes column names on Python 3, so fix them
# up now.
names = [fix_column_name(name) for name in names]
self._check_names_dtype(names, dtype, n_cols)
# Finally do the real initialization
init_func(data, names, dtype, n_cols, copy)
# Set table meta. If copy=True then deepcopy meta otherwise use the
# user-supplied meta directly.
if meta is not None:
self.meta = deepcopy(meta) if copy else meta
# Whatever happens above, the masked property should be set to a boolean
if self.masked not in (None, True, False):
raise TypeError("masked property must be None, True or False")
def __getstate__(self):
columns = OrderedDict((key, col if isinstance(col, BaseColumn) else col_copy(col))
for key, col in self.columns.items())
return (columns, self.meta)
def __setstate__(self, state):
columns, meta = state
self.__init__(columns, meta=meta)
@property
def mask(self):
# Dynamic view of available masks
if self.masked or self.has_masked_columns or self.has_masked_values:
mask_table = Table([getattr(col, 'mask', FalseArray(col.shape))
for col in self.itercols()],
names=self.colnames, copy=False)
# Set hidden attribute to force inplace setitem so that code like
# t.mask['a'] = [1, 0, 1] will correctly set the underlying mask.
# See #5556 for discussion.
mask_table._setitem_inplace = True
else:
mask_table = None
return mask_table
@mask.setter
def mask(self, val):
self.mask[:] = val
@property
def _mask(self):
"""This is needed so that comparison of a masked Table and a
MaskedArray works. The requirement comes from numpy.ma.core
so don't remove this property."""
return self.as_array().mask
def filled(self, fill_value=None):
"""Return copy of self, with masked values filled.
If input ``fill_value`` supplied then that value is used for all
masked entries in the table. Otherwise the individual
``fill_value`` defined for each table column is used.
Parameters
----------
fill_value : str
If supplied, this ``fill_value`` is used for all masked entries
in the entire table.
Returns
-------
filled_table : Table
New table with masked values filled
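Examples
--------
A minimal illustration with one masked entry (example data is arbitrary)::
>>> t = Table([[1, 2], [3, 4]], names=('a', 'b'), masked=True)
>>> t['a'].mask = [False, True]
>>> t.filled(99)['a'].tolist()
[1, 99]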
"""
if self.masked or self.has_masked_columns or self.has_masked_values:
# Get new columns with masked values filled, then create Table with those
# new cols (copy=False) but deepcopy the meta.
data = [col.filled(fill_value) if hasattr(col, 'filled') else col
for col in self.itercols()]
return self.__class__(data, meta=deepcopy(self.meta), copy=False)
else:
# Return copy of the original object.
return self.copy()
@property
def indices(self):
'''
Return the indices associated with columns of the table
as a TableIndices object.
'''
lst = []
for column in self.columns.values():
for index in column.info.indices:
if sum([index is x for x in lst]) == 0: # ensure uniqueness
lst.append(index)
return TableIndices(lst)
@property
def loc(self):
'''
Return a TableLoc object that can be used for retrieving
rows by index in a given data range. Note that both loc
and iloc work only with single-column indices.
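For example, after adding an index on column 'a' (example data is arbitrary)::
>>> t = Table([[10, 20, 30], ['x', 'y', 'z']], names=('a', 'b'))
>>> t.add_index('a')
>>> print(t.loc[20]['b'])
y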
'''
return TableLoc(self)
@property
def loc_indices(self):
"""
Return a TableLocIndices object that can be used for retrieving
the row indices corresponding to given table index key value or values.
"""
return TableLocIndices(self)
@property
def iloc(self):
'''
Return a TableILoc object that can be used for retrieving
indexed rows in the order they appear in the index.
'''
return TableILoc(self)
def add_index(self, colnames, engine=None, unique=False):
'''
Insert a new index among one or more columns.
If there are no indices, make this index the
primary table index.
Parameters
----------
colnames : str or list
List of column names (or a single column name) to index
engine : type or None
Indexing engine class to use, from among SortedArray, BST,
FastBST, FastRBT, and SCEngine. If the supplied argument is None
(by default), use SortedArray.
unique : bool
Whether the values of the index must be unique. Default is False.
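Examples
--------
A minimal sketch (column values are arbitrary)::
>>> t = Table([[3, 1, 2]], names=['x'])
>>> t.add_index('x')
>>> t.primary_key
('x',)
>>> len(t.indices)
1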
'''
if isinstance(colnames, str):
colnames = (colnames,)
columns = self.columns[tuple(colnames)].values()
# make sure all columns support indexing
for col in columns:
if not getattr(col.info, '_supports_indexing', False):
raise ValueError('Cannot create an index on column "{}", of '
'type "{}"'.format(col.info.name, type(col)))
index = Index(columns, engine=engine, unique=unique)
if not self.indices:
self.primary_key = colnames
for col in columns:
col.info.indices.append(index)
def remove_indices(self, colname):
'''
Remove all indices involving the given column.
If the primary index is removed, the new primary
index will be the most recently added remaining
index.
Parameters
----------
colname : str
Name of column
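Examples
--------
For illustration, add an index on column 'a' and then remove it::
>>> t = Table([[1, 2, 3]], names=['a'])
>>> t.add_index('a')
>>> t.remove_indices('a')
>>> len(t.indices)
0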
'''
col = self.columns[colname]
for index in self.indices:
try:
index.col_position(col.info.name)
except ValueError:
pass
else:
for c in index.columns:
c.info.indices.remove(index)
def index_mode(self, mode):
'''
Return a context manager for an indexing mode.
Parameters
----------
mode : str
Either 'freeze', 'copy_on_getitem', or 'discard_on_copy'.
In 'discard_on_copy' mode,
indices are not copied whenever columns or tables are copied.
In 'freeze' mode, indices are not modified whenever columns are
modified; at the exit of the context, indices refresh themselves
based on column values. This mode is intended for scenarios in
which one intends to make many additions or modifications in an
indexed column.
In 'copy_on_getitem' mode, indices are copied when taking column
slices as well as table slices, so col[i0:i1] will preserve
indices.
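Examples
--------
A sketch of the 'freeze' mode (example data is arbitrary)::
>>> t = Table([[1, 2, 3]], names=['a'])
>>> t.add_index('a')
>>> with t.index_mode('freeze'):
...     t['a'][0] = 100  # index updates are deferred inside the context
>>> print(t.loc[100]['a'])  # after exit the index has been refreshed
100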
'''
return _IndexModeContext(self, mode)
def __array__(self, dtype=None):
"""Support converting Table to np.array via np.array(table).
Coercion to a different dtype via np.array(table, dtype) is not
supported and will raise a ValueError.
"""
if dtype is not None:
raise ValueError('Datatype coercion is not allowed')
# This limitation is because of the following unexpected result that
# should have made a table copy while changing the column names.
#
# >>> d = astropy.table.Table([[1,2],[3,4]])
# >>> np.array(d, dtype=[('a', 'i8'), ('b', 'i8')])
# array([(0, 0), (0, 0)],
# dtype=[('a', '<i8'), ('b', '<i8')])
out = self.as_array()
return out.data if isinstance(out, np.ma.MaskedArray) else out
def _check_names_dtype(self, names, dtype, n_cols):
"""Make sure that names and dtype are both iterable and have
the same length as data.
"""
for inp_list, inp_str in ((dtype, 'dtype'), (names, 'names')):
if not isiterable(inp_list):
raise ValueError(f'{inp_str} must be a list or None')
if len(names) != n_cols or len(dtype) != n_cols:
raise ValueError(
'Arguments "names" and "dtype" must match number of columns')
def _init_from_list_of_dicts(self, data, names, dtype, n_cols, copy):
names_from_data = set()
for row in data:
names_from_data.update(row)
if (isinstance(data[0], OrderedDict) and
set(data[0].keys()) == names_from_data):
names_from_data = list(data[0].keys())
else:
names_from_data = sorted(names_from_data)
# Note: if set(data[0].keys()) != names_from_data, this will give an
# exception later, so NO need to catch here.
cols = {}
for name in names_from_data:
cols[name] = []
for i, row in enumerate(data):
try:
cols[name].append(row[name])
except KeyError:
raise ValueError(f'Row {i} has no value for column {name}')
if all(name is None for name in names):
names = names_from_data
self._init_from_dict(cols, names, dtype, n_cols, copy)
return
def _init_from_list(self, data, names, dtype, n_cols, copy):
"""Initialize table from a list of column data. A column can be a
Column object, np.ndarray, mixin, or any other iterable object.
"""
if data and all(isinstance(row, dict) for row in data):
self._init_from_list_of_dicts(data, names, dtype, n_cols, copy)
return
cols = []
default_names = _auto_names(n_cols)
for col, name, default_name, dtype in zip(data, names, default_names, dtype):
col = self._convert_data_to_col(col, copy, default_name, dtype, name)
cols.append(col)
self._init_from_cols(cols)
def _convert_data_to_col(self, data, copy=True, default_name=None, dtype=None, name=None):
"""
Convert any allowed sequence data ``col`` to a column object that can be used
directly in the self.columns dict. This could be a Column, MaskedColumn,
or mixin column.
The final column name is determined by::
name or data.info.name or default_name
If ``data`` has no ``info`` then ``name = name or default_name``.
The behavior of ``copy`` for Column objects is:
- copy=True: new class instance with a copy of data and deep copy of meta
- copy=False: new class instance with same data and a key-only copy of meta
For mixin columns:
- copy=True: new class instance with copy of data and deep copy of meta
- copy=False: original instance (no copy at all)
Parameters
----------
data : object (column-like sequence)
Input column data
copy : bool
Make a copy
default_name : str
Default name
dtype : np.dtype or None
Data dtype
name : str or None
Column name
Returns
-------
col : Column, MaskedColumn, mixin-column type
Object that can be used as a column in self
"""
is_mixin = self._is_mixin_for_table(data)
masked_col_cls = (self.ColumnClass
if issubclass(self.ColumnClass, self.MaskedColumn)
else self.MaskedColumn)
# Structured ndarray gets viewed as a mixin unless already a valid
# mixin class
if isinstance(data, np.ndarray) and len(data.dtype) > 1 and not is_mixin:
data = data.view(NdarrayMixin)
# Get the final column name using precedence. Some objects may not
# have an info attribute.
if not name:
if hasattr(data, 'info'):
name = data.info.name or default_name
else:
name = default_name
if isinstance(data, Column):
# If self.ColumnClass is a subclass of col, then "upgrade" to ColumnClass,
# otherwise just use the original class. The most common case is a
# table with masked=True and ColumnClass=MaskedColumn. Then a Column
# gets upgraded to MaskedColumn, but the converse (pre-4.0) behavior
# of downgrading from MaskedColumn to Column (for non-masked table)
# does not happen.
col_cls = self._get_col_cls_for_table(data)
elif self._is_mixin_for_table(data):
# Copy the mixin column attributes if they exist since the copy below
# may not get this attribute.
col = col_copy(data, copy_indices=self._init_indices) if copy else data
col.info.name = name
return col
elif isinstance(data, np.ma.MaskedArray):
# Require that col_cls be a subclass of MaskedColumn, remembering
# that ColumnClass could be a user-defined subclass (though more-likely
# could be MaskedColumn).
col_cls = masked_col_cls
elif not hasattr(data, 'dtype'):
# If value doesn't have a dtype then convert to a masked numpy array.
# Then check if there were any masked elements. This logic is handling
# normal lists like [1, 2] but also odd-ball cases like a list of masked
# arrays (see #8977). Use np.ma.array() to do the heavy lifting.
np_data = np.ma.array(data, dtype=dtype)
if np_data.ndim > 0 and len(np_data) == 0:
# Implies input was an empty list (e.g. initializing an empty table
# with pre-declared names and dtypes but no data). Here we need to
# fall through to initializing with the original data=[].
col_cls = self.ColumnClass
else:
if np_data.mask is np.ma.nomask:
data = np_data.data
col_cls = self.ColumnClass
else:
data = np_data
col_cls = masked_col_cls
copy = False
else:
# `data` is none of the above, so just go for it and try init'ing Column
col_cls = self.ColumnClass
try:
col = col_cls(name=name, data=data, dtype=dtype,
copy=copy, copy_indices=self._init_indices)
except Exception:
# Broad exception class since we don't know what might go wrong
raise ValueError('unable to convert data to Column for Table')
col = self._convert_col_for_table(col)
return col
def _init_from_ndarray(self, data, names, dtype, n_cols, copy):
"""Initialize table from an ndarray structured array"""
data_names = data.dtype.names or _auto_names(n_cols)
struct = data.dtype.names is not None
names = [name or data_names[i] for i, name in enumerate(names)]
cols = ([data[name] for name in data_names] if struct else
[data[:, i] for i in range(n_cols)])
self._init_from_list(cols, names, dtype, n_cols, copy)
def _init_from_dict(self, data, names, dtype, n_cols, copy):
"""Initialize table from a dictionary of columns"""
data_list = [data[name] for name in names]
self._init_from_list(data_list, names, dtype, n_cols, copy)
def _get_col_cls_for_table(self, col):
"""Get the correct column class to use for upgrading any Column-like object.
For a masked table, ensure any Column-like object is a subclass
of the table MaskedColumn.
For unmasked table, ensure any MaskedColumn-like object is a subclass
of the table MaskedColumn. If not a MaskedColumn, then ensure that any
Column-like object is a subclass of the table Column.
"""
col_cls = col.__class__
if self.masked:
if isinstance(col, Column) and not isinstance(col, self.MaskedColumn):
col_cls = self.MaskedColumn
else:
if isinstance(col, MaskedColumn):
if not isinstance(col, self.MaskedColumn):
col_cls = self.MaskedColumn
elif isinstance(col, Column) and not isinstance(col, self.Column):
col_cls = self.Column
return col_cls
def _convert_col_for_table(self, col):
"""
Make sure that all Column objects have correct base class for this type of
Table. For a base Table this most commonly means setting to
MaskedColumn if the table is masked. Table subclasses like QTable
override this method.
"""
if isinstance(col, Column) and not isinstance(col, self.ColumnClass):
col_cls = self._get_col_cls_for_table(col)
if col_cls is not col.__class__:
col = col_cls(col, copy=False)
return col
def _init_from_cols(self, cols):
"""Initialize table from a list of Column or mixin objects"""
lengths = set(len(col) for col in cols)
if len(lengths) > 1:
raise ValueError('Inconsistent data column lengths: {}'
.format(lengths))
# Make sure that all Column-based objects have correct class. For
# plain Table this is self.ColumnClass, but for instance QTable will
# convert columns with units to a Quantity mixin.
newcols = [self._convert_col_for_table(col) for col in cols]
self._make_table_from_cols(self, newcols)
# Deduplicate indices. It may happen that after pickling or when
# initing from an existing table that column indices which had been
# references to a single index object got *copied* into an independent
# object. This results in duplicates which will cause downstream problems.
index_dict = {}
for col in self.itercols():
for i, index in enumerate(col.info.indices or []):
names = tuple(ind_col.info.name for ind_col in index.columns)
if names in index_dict:
col.info.indices[i] = index_dict[names]
else:
index_dict[names] = index
def _new_from_slice(self, slice_):
"""Create a new table as a referenced slice from self."""
table = self.__class__(masked=self.masked)
if self.meta:
table.meta = self.meta.copy() # Shallow copy for slice
table.primary_key = self.primary_key
newcols = []
for col in self.columns.values():
newcol = col[slice_]
# Note in line below, use direct attribute access to col.indices for Column
# instances instead of the generic col.info.indices. This saves about 4 usec
# per column.
if (col if isinstance(col, Column) else col.info).indices:
# TODO : as far as I can tell the only purpose of setting _copy_indices
# here is to communicate that to the initial test in `slice_indices`.
# Why isn't that just sent as an arg to the function?
col.info._copy_indices = self._copy_indices
newcol = col.info.slice_indices(newcol, slice_, len(col))
# Don't understand why this is forcing a value on the original column.
# Normally col.info does not even have a _copy_indices attribute. Tests
# still pass if this line is deleted. (Each col.info attribute access
# is expensive).
col.info._copy_indices = True
newcols.append(newcol)
self._make_table_from_cols(table, newcols, verify=False, names=self.columns.keys())
return table
@staticmethod
def _make_table_from_cols(table, cols, verify=True, names=None):
"""
Make ``table`` in-place so that it represents the given list of ``cols``.
"""
if names is None:
names = [col.info.name for col in cols]
# Note: we do not test for len(names) == len(cols) if names is not None. In that
# case the function is being called from a "trusted" source (e.g. right above here)
# that is assumed to provide valid inputs. In that case verify=False.
if verify:
if None in names:
raise TypeError('Cannot have None for column name')
if len(set(names)) != len(names):
raise ValueError('Duplicate column names')
table.columns = table.TableColumns((name, col) for name, col in zip(names, cols))
for col in cols:
table._set_col_parent_table_and_mask(col)
def _set_col_parent_table_and_mask(self, col):
"""
Set ``col.parent_table = self`` and force ``col`` to have ``mask``
attribute if the table is masked and ``col.mask`` does not exist.
"""
# For Column instances it is much faster to do direct attribute access
# instead of going through .info
col_info = col if isinstance(col, Column) else col.info
col_info.parent_table = self
# Legacy behavior for masked table
if self.masked and not hasattr(col, 'mask'):
col.mask = FalseArray(col.shape)
def itercols(self):
"""
Iterate over the columns of this table.
Examples
--------
To iterate over the columns of a table::
>>> t = Table([[1], [2]])
>>> for col in t.itercols():
... print(col)
col0
----
1
col1
----
2
Using ``itercols()`` is similar to ``for col in t.columns.values()``
but is syntactically preferred.
"""
for colname in self.columns:
yield self[colname]
def _base_repr_(self, html=False, descr_vals=None, max_width=None,
tableid=None, show_dtype=True, max_lines=None,
tableclass=None):
if descr_vals is None:
descr_vals = [self.__class__.__name__]
if self.masked:
descr_vals.append('masked=True')
descr_vals.append('length={}'.format(len(self)))
descr = ' '.join(descr_vals)
if html:
from astropy.utils.xml.writer import xml_escape
descr = '<i>{}</i>\n'.format(xml_escape(descr))
else:
descr = f'<{descr}>\n'
if tableid is None:
tableid = 'table{id}'.format(id=id(self))
data_lines, outs = self.formatter._pformat_table(
self, tableid=tableid, html=html, max_width=max_width,
show_name=True, show_unit=None, show_dtype=show_dtype,
max_lines=max_lines, tableclass=tableclass)
out = descr + '\n'.join(data_lines)
return out
def _repr_html_(self):
return self._base_repr_(html=True, max_width=-1,
tableclass=conf.default_notebook_table_class)
def __repr__(self):
return self._base_repr_(html=False, max_width=None)
def __str__(self):
return '\n'.join(self.pformat())
def __bytes__(self):
return str(self).encode('utf-8')
@property
def has_mixin_columns(self):
"""
True if table has any mixin columns (defined as columns that are not Column
subclasses).
"""
return any(has_info_class(col, MixinInfo) for col in self.columns.values())
@property
def has_masked_columns(self):
"""True if table has any ``MaskedColumn`` columns.
This does not check for mixin columns that may have masked values, use the
``has_masked_values`` property in that case.
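For example::
>>> t = Table([[1, 2]], names=['a'], masked=True)
>>> t.has_masked_columns
True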
"""
return any(isinstance(col, MaskedColumn) for col in self.itercols())
@property
def has_masked_values(self):
"""True if column in the table has values which are masked.
This may be relatively slow for large tables as it requires checking the mask
values of each column.
"""
for col in self.itercols():
if hasattr(col, 'mask') and np.any(col.mask):
return True
else:
return False
def _is_mixin_for_table(self, col):
"""
Determine if ``col`` should be added to the table directly as
a mixin column.
"""
if isinstance(col, BaseColumn):
return False
# Is it a mixin but not a Quantity (which gets converted to Column with
# unit set).
return has_info_class(col, MixinInfo) and not has_info_class(col, QuantityInfo)
@format_doc(_pprint_docs)
def pprint(self, max_lines=None, max_width=None, show_name=True,
show_unit=None, show_dtype=False, align=None):
"""Print a formatted string representation of the table.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default is taken from the
configuration item ``astropy.conf.max_lines``. If a negative
value of ``max_lines`` is supplied then there is no line limit
applied.
The same applies for ``max_width`` except the configuration item is
``astropy.conf.max_width``.
"""
lines, outs = self.formatter._pformat_table(self, max_lines, max_width,
show_name=show_name, show_unit=show_unit,
show_dtype=show_dtype, align=align)
if outs['show_length']:
lines.append('Length = {} rows'.format(len(self)))
n_header = outs['n_header']
for i, line in enumerate(lines):
if i < n_header:
color_print(line, 'red')
else:
print(line)
@format_doc(_pprint_docs)
def pprint_all(self, max_lines=-1, max_width=-1, show_name=True,
show_unit=None, show_dtype=False, align=None):
"""Print a formatted string representation of the entire table.
This method is the same as `astropy.table.Table.pprint` except that
the default ``max_lines`` and ``max_width`` are both -1 so that by
default the entire table is printed instead of restricting to the size
of the screen terminal.
"""
return self.pprint(max_lines, max_width, show_name,
show_unit, show_dtype, align)
def _make_index_row_display_table(self, index_row_name):
if index_row_name not in self.columns:
idx_col = self.ColumnClass(name=index_row_name, data=np.arange(len(self)))
return self.__class__([idx_col] + self.columns.values(),
copy=False)
else:
return self
def show_in_notebook(self, tableid=None, css=None, display_length=50,
table_class='astropy-default', show_row_index='idx'):
"""Render the table in HTML and show it in the IPython notebook.
Parameters
----------
tableid : str or `None`
An html ID tag for the table. Default is ``table{id}-XXX``, where
id is the unique integer id of the table object, id(self), and XXX
is a random number to avoid conflicts when printing the same table
multiple times.
table_class : str or `None`
A string with a list of HTML classes used to style the table.
The special default string ('astropy-default') means that the string
will be retrieved from the configuration item
``astropy.table.default_notebook_table_class``. Note that these
table classes may make use of bootstrap, as this is loaded with the
notebook. See `this page <http://getbootstrap.com/css/#tables>`_
for the list of classes.
css : string
A valid CSS string declaring the formatting for the table. Defaults
to ``astropy.table.jsviewer.DEFAULT_CSS_NB``.
display_length : int, optional
Number of rows to show. Defaults to 50.
show_row_index : str or False
If this does not evaluate to False, a column with the given name
will be added to the version of the table that gets displayed.
This new column shows the index of the row in the table itself,
even when the displayed table is re-sorted by another column. Note
that if a column with this name already exists, this option will be
ignored. Defaults to "idx".
Notes
-----
Currently, unlike `show_in_browser` (with ``jsviewer=True``), this
method needs to access online javascript code repositories. This is due
to modern browsers' limitations on accessing local files. Hence, if you
call this method while offline (and don't have a cached version of
jquery and jquery.dataTables), you will not get the jsviewer features.
"""
from .jsviewer import JSViewer
from IPython.display import HTML
if tableid is None:
tableid = 'table{}-{}'.format(id(self),
np.random.randint(1, 1e6))
jsv = JSViewer(display_length=display_length)
if show_row_index:
display_table = self._make_index_row_display_table(show_row_index)
else:
display_table = self
if table_class == 'astropy-default':
table_class = conf.default_notebook_table_class
html = display_table._base_repr_(html=True, max_width=-1, tableid=tableid,
max_lines=-1, show_dtype=False,
tableclass=table_class)
columns = display_table.columns.values()
sortable_columns = [i for i, col in enumerate(columns)
if col.info.dtype.kind in 'iufc']
html += jsv.ipynb(tableid, css=css, sort_columns=sortable_columns)
return HTML(html)
def show_in_browser(self, max_lines=5000, jsviewer=False,
browser='default', jskwargs={'use_local_files': True},
tableid=None, table_class="display compact",
css=None, show_row_index='idx'):
"""Render the table in HTML and show it in a web browser.
Parameters
----------
max_lines : int
Maximum number of rows to export to the table (set low by default
to avoid memory issues, since the browser view requires duplicating
the table in memory). A negative value of ``max_lines`` indicates
no row limit.
jsviewer : bool
If `True`, prepends some javascript headers so that the table is
rendered as a `DataTables <https://datatables.net>`_ data table.
This allows in-browser searching & sorting.
browser : str
Any legal browser name, e.g. ``'firefox'``, ``'chrome'``,
``'safari'`` (for mac, you may need to use ``'open -a
"/Applications/Google Chrome.app" {}'`` for Chrome). If
``'default'``, will use the system default browser.
jskwargs : dict
Passed to the `astropy.table.JSViewer` init. Defaults to
``{'use_local_files': True}`` which means that the JavaScript
libraries will be served from local copies.
tableid : str or `None`
An html ID tag for the table. Default is ``table{id}``, where id
is the unique integer id of the table object, id(self).
table_class : str or `None`
A string with a list of HTML classes used to style the table.
Default is "display compact", and other possible values can be
found in https://www.datatables.net/manual/styling/classes
css : string
A valid CSS string declaring the formatting for the table. Defaults
to ``astropy.table.jsviewer.DEFAULT_CSS``.
show_row_index : str or False
If this does not evaluate to False, a column with the given name
will be added to the version of the table that gets displayed.
This new column shows the index of the row in the table itself,
even when the displayed table is re-sorted by another column. Note
that if a column with this name already exists, this option will be
ignored. Defaults to "idx".
"""
import os
import webbrowser
import tempfile
from .jsviewer import DEFAULT_CSS
from urllib.parse import urljoin
from urllib.request import pathname2url
if css is None:
css = DEFAULT_CSS
# We can't use NamedTemporaryFile here because it gets deleted as
# soon as it gets garbage collected.
tmpdir = tempfile.mkdtemp()
path = os.path.join(tmpdir, 'table.html')
with open(path, 'w') as tmp:
if jsviewer:
if show_row_index:
display_table = self._make_index_row_display_table(show_row_index)
else:
display_table = self
display_table.write(tmp, format='jsviewer', css=css,
max_lines=max_lines, jskwargs=jskwargs,
table_id=tableid, table_class=table_class)
else:
self.write(tmp, format='html')
try:
br = webbrowser.get(None if browser == 'default' else browser)
except webbrowser.Error:
log.error(f"Browser '{browser}' not found.")
else:
br.open(urljoin('file:', pathname2url(path)))
@format_doc(_pformat_docs, id="{id}")
def pformat(self, max_lines=None, max_width=None, show_name=True,
show_unit=None, show_dtype=False, html=False, tableid=None,
align=None, tableclass=None):
"""Return a list of lines for the formatted string representation of
the table.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default is taken from the
configuration item ``astropy.conf.max_lines``. If a negative
value of ``max_lines`` is supplied then there is no line limit
applied.
The same applies for ``max_width`` except the configuration item is
``astropy.conf.max_width``.
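For example, a one-column table gives a header line, a separator line and
one line per row (example data is arbitrary)::
>>> t = Table([[1, 2]], names=['a'])
>>> lines = t.pformat()
>>> len(lines)
4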
"""
lines, outs = self.formatter._pformat_table(
self, max_lines, max_width, show_name=show_name,
show_unit=show_unit, show_dtype=show_dtype, html=html,
tableid=tableid, tableclass=tableclass, align=align)
if outs['show_length']:
lines.append('Length = {} rows'.format(len(self)))
return lines
@format_doc(_pformat_docs, id="{id}")
def pformat_all(self, max_lines=-1, max_width=-1, show_name=True,
show_unit=None, show_dtype=False, html=False, tableid=None,
align=None, tableclass=None):
"""Return a list of lines for the formatted string representation of
the entire table.
If no value of ``max_lines`` is supplied then the height of the
screen terminal is used to set ``max_lines``. If the terminal
height cannot be determined then the default is taken from the
configuration item ``astropy.conf.max_lines``. If a negative
value of ``max_lines`` is supplied then there is no line limit
applied.
The same applies for ``max_width`` except the configuration item is
``astropy.conf.max_width``.
"""
return self.pformat(max_lines, max_width, show_name,
show_unit, show_dtype, html, tableid,
align, tableclass)
def more(self, max_lines=None, max_width=None, show_name=True,
show_unit=None, show_dtype=False):
"""Interactively browse table with a paging interface.
Supported keys::
f, <space> : forward one page
b : back one page
r : refresh same page
n : next row
p : previous row
< : go to beginning
> : go to end
q : quit browsing
h : print this help
Parameters
----------
max_lines : int
Maximum number of lines in table output
max_width : int or `None`
Maximum character width of output
show_name : bool
Include a header row for column names. Default is True.
show_unit : bool
Include a header row for unit. Default is to show a row
for units only if one or more columns has a defined value
for the unit.
show_dtype : bool
Include a header row for column dtypes. Default is False.
"""
self.formatter._more_tabcol(self, max_lines, max_width, show_name=show_name,
show_unit=show_unit, show_dtype=show_dtype)
def __getitem__(self, item):
if isinstance(item, str):
return self.columns[item]
elif isinstance(item, (int, np.integer)):
return self.Row(self, item)
elif (isinstance(item, np.ndarray) and item.shape == () and item.dtype.kind == 'i'):
return self.Row(self, item.item())
elif self._is_list_or_tuple_of_str(item):
out = self.__class__([self[x] for x in item],
copy_indices=self._copy_indices)
out._groups = groups.TableGroups(out, indices=self.groups._indices,
keys=self.groups._keys)
out.meta = self.meta.copy() # Shallow copy for meta
return out
elif ((isinstance(item, np.ndarray) and item.size == 0) or
(isinstance(item, (tuple, list)) and not item)):
# If item is an empty array/list/tuple then return the table with no rows
return self._new_from_slice([])
elif (isinstance(item, slice) or
isinstance(item, np.ndarray) or
isinstance(item, list) or
isinstance(item, tuple) and all(isinstance(x, np.ndarray)
for x in item)):
# here for the many ways to give a slice; a tuple of ndarray
# is produced by np.where, as in t[np.where(t['a'] > 2)]
# For all, a new table is constructed with slice of all columns
return self._new_from_slice(item)
else:
raise ValueError('Illegal type {} for table item access'
.format(type(item)))
def __setitem__(self, item, value):
# If the item is a string then it must be the name of a column.
# If that column doesn't already exist then create it now.
if isinstance(item, str) and item not in self.colnames:
self.add_column(value, name=item, copy=True)
else:
n_cols = len(self.columns)
if isinstance(item, str):
# Set an existing column by first trying to replace, and if
# this fails do an in-place update. See definition of mask
# property for discussion of the _setitem_inplace attribute.
if (not getattr(self, '_setitem_inplace', False)
and not conf.replace_inplace):
try:
self._replace_column_warnings(item, value)
return
except Exception:
pass
self.columns[item][:] = value
elif isinstance(item, (int, np.integer)):
self._set_row(idx=item, colnames=self.colnames, vals=value)
elif (isinstance(item, slice) or
isinstance(item, np.ndarray) or
isinstance(item, list) or
(isinstance(item, tuple) and # output from np.where
all(isinstance(x, np.ndarray) for x in item))):
if isinstance(value, Table):
vals = (col for col in value.columns.values())
elif isinstance(value, np.ndarray) and value.dtype.names:
vals = (value[name] for name in value.dtype.names)
elif np.isscalar(value):
import itertools
vals = itertools.repeat(value, n_cols)
else: # Assume this is an iterable that will work
if len(value) != n_cols:
raise ValueError('Right side value needs {} elements (one for each column)'
.format(n_cols))
vals = value
for col, val in zip(self.columns.values(), vals):
col[item] = val
else:
raise ValueError('Illegal type {} for table item access'
.format(type(item)))
def __delitem__(self, item):
if isinstance(item, str):
self.remove_column(item)
elif isinstance(item, (int, np.integer)):
self.remove_row(item)
elif (isinstance(item, (list, tuple, np.ndarray)) and
all(isinstance(x, str) for x in item)):
self.remove_columns(item)
elif (isinstance(item, (list, np.ndarray)) and
np.asarray(item).dtype.kind == 'i'):
self.remove_rows(item)
elif isinstance(item, slice):
self.remove_rows(item)
else:
raise IndexError('illegal key or index value')
def _ipython_key_completions_(self):
return self.colnames
def field(self, item):
"""Return column[item] for recarray compatibility."""
return self.columns[item]
@property
def masked(self):
return self._masked
@masked.setter
def masked(self, masked):
raise Exception('Masked attribute is read-only (use t = Table(t, masked=True)'
' to convert to a masked table)')
def _set_masked(self, masked):
"""
Set the table masked property.
Parameters
----------
masked : bool
State of table masking (`True` or `False`)
"""
if masked in [True, False, None]:
self._masked = masked
else:
raise ValueError("masked should be one of True, False, None")
self._column_class = self.MaskedColumn if self._masked else self.Column
@property
def ColumnClass(self):
if self._column_class is None:
return self.Column
else:
return self._column_class
@property
def dtype(self):
return np.dtype([descr(col) for col in self.columns.values()])
@property
def colnames(self):
return list(self.columns.keys())
@staticmethod
def _is_list_or_tuple_of_str(names):
"""Check that ``names`` is a tuple or list of strings"""
return (isinstance(names, (tuple, list)) and names and
all(isinstance(x, str) for x in names))
def keys(self):
return list(self.columns.keys())
def __len__(self):
# For performance reasons (esp. in Row) cache the first column name
# and use that subsequently for the table length. It might not be
# available yet or the column might be gone now, in which case
# try again in the except block.
try:
return len(OrderedDict.__getitem__(self.columns, self._first_colname))
except (AttributeError, KeyError):
if len(self.columns) == 0:
return 0
# Get the first column name
self._first_colname = next(iter(self.columns))
return len(self.columns[self._first_colname])
def index_column(self, name):
"""
Return the positional index of column ``name``.
Parameters
----------
name : str
column name
Returns
-------
index : int
Positional index of column ``name``.
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3], ['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
3 0.3 z
Get index of column 'b' of the table::
>>> t.index_column('b')
1
"""
try:
return self.colnames.index(name)
except ValueError:
raise ValueError(f"Column {name} does not exist")
def add_column(self, col, index=None, name=None, rename_duplicate=False, copy=True,
default_name=None):
"""
Add a new column to the table using ``col`` as input. If ``index``
is supplied then insert column before ``index`` position
in the list of columns, otherwise append column to the end
of the list.
The ``col`` input can be any data object which is acceptable as a
`~astropy.table.Table` column object or can be converted. This includes
mixin columns and scalar or length=1 objects which get broadcast to match
the table length.
To add several columns at once use ``add_columns()`` or simply call
``add_column()`` for each one. There is very little performance difference
in the two approaches.
Parameters
----------
col : object
Data object for the new column
index : int or `None`
Insert column before this position or at end (default).
name : str
Column name
rename_duplicate : bool
Uniquify column name if it already exists. Default is False.
copy : bool
Make a copy of the new column. Default is True.
default_name : str or `None`
Name to use if both ``name`` and ``col.info.name`` are not available.
Defaults to ``col{number_of_columns}``.
Examples
--------
Create a table with two columns 'a' and 'b', then create a third column 'c'
and append it to the end of the table::
>>> t = Table([[1, 2], [0.1, 0.2]], names=('a', 'b'))
>>> col_c = Column(name='c', data=['x', 'y'])
>>> t.add_column(col_c)
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
Add column 'd' at position 1. Note that the column is inserted
before the given index::
>>> t.add_column(['a', 'b'], name='d', index=1)
>>> print(t)
a d b c
--- --- --- ---
1 a 0.1 x
2 b 0.2 y
Add second column named 'b' with rename_duplicate::
>>> t = Table([[1, 2], [0.1, 0.2]], names=('a', 'b'))
>>> t.add_column(1.1, name='b', rename_duplicate=True)
>>> print(t)
a b b_1
--- --- ---
1 0.1 1.1
2 0.2 1.1
Add an unnamed column or mixin object in the table using a default name
or by specifying an explicit name with ``name``. Name can also be overridden::
>>> t = Table([[1, 2], [0.1, 0.2]], names=('a', 'b'))
>>> t.add_column(['a', 'b'])
>>> t.add_column(col_c, name='d')
>>> print(t)
a b col2 d
--- --- ---- ---
1 0.1 a x
2 0.2 b y
"""
if default_name is None:
default_name = 'col{}'.format(len(self.columns))
# Convert col data to acceptable object for insertion into self.columns.
# Note that along with the lines above and below, this allows broadcasting
# of scalars to the correct shape for adding to table.
col = self._convert_data_to_col(col, name=name, copy=copy,
default_name=default_name)
# Make col data shape correct for scalars. The second test is to allow
# broadcasting an N-d element to a column, e.g. t['new'] = [[1, 2]].
if (col.shape == () or col.shape[0] == 1) and len(self) > 0:
new_shape = (len(self),) + getattr(col, 'shape', ())[1:]
if isinstance(col, np.ndarray):
col = np.broadcast_to(col, shape=new_shape,
subok=True)
elif isinstance(col, ShapedLikeNDArray):
col = col._apply(np.broadcast_to, shape=new_shape,
subok=True)
# broadcast_to() results in a read-only array. Apparently it only changes
# the view to look like the broadcasted array. So copy.
col = col_copy(col)
name = col.info.name
# Ensure that new column is the right length
if len(self.columns) > 0 and len(col) != len(self):
raise ValueError('Inconsistent data column lengths')
if rename_duplicate:
orig_name = name
i = 1
while name in self.columns:
# Iterate until a unique name is found
name = orig_name + '_' + str(i)
i += 1
col.info.name = name
# Set col parent_table weakref and ensure col has mask attribute if table.masked
self._set_col_parent_table_and_mask(col)
# Add new column as last column
self.columns[name] = col
if index is not None:
# Move the other cols to the right of the new one
move_names = self.colnames[index:-1]
for move_name in move_names:
self.columns.move_to_end(move_name, last=True)
def add_columns(self, cols, indexes=None, names=None, copy=True, rename_duplicate=False):
"""
Add a list of new columns to the table using ``cols`` data objects. If a
corresponding list of ``indexes`` is supplied then insert column
before each ``index`` position in the *original* list of columns,
otherwise append columns to the end of the list.
The ``cols`` input can include any data objects which are acceptable as
`~astropy.table.Table` column objects or can be converted. This includes
mixin columns and scalar or length=1 objects which get broadcast to match
the table length.
From a performance perspective there is little difference between calling
this method once or looping over the new columns and calling ``add_column()``
for each column.
Parameters
----------
cols : list of objects
List of data objects for the new columns
indexes : list of ints or `None`
Insert column before this position or at end (default).
names : list of str
Column names
copy : bool
Make a copy of the new columns. Default is True.
rename_duplicate : bool
Uniquify new column names if they duplicate the existing ones.
Default is False.
Examples
--------
Create a table with two columns 'a' and 'b', then create columns 'c' and 'd'
and append them to the end of the table::
>>> t = Table([[1, 2], [0.1, 0.2]], names=('a', 'b'))
>>> col_c = Column(name='c', data=['x', 'y'])
>>> col_d = Column(name='d', data=['u', 'v'])
>>> t.add_columns([col_c, col_d])
>>> print(t)
a b c d
--- --- --- ---
1 0.1 x u
2 0.2 y v
Add column 'c' at position 0 and column 'd' at position 1. Note that
the columns are inserted before the given position::
>>> t = Table([[1, 2], [0.1, 0.2]], names=('a', 'b'))
>>> t.add_columns([['x', 'y'], ['u', 'v']], names=['c', 'd'],
... indexes=[0, 1])
>>> print(t)
c a d b
--- --- --- ---
x 1 u 0.1
y 2 v 0.2
Add second column 'b' and column 'c' with ``rename_duplicate``::
>>> t = Table([[1, 2], [0.1, 0.2]], names=('a', 'b'))
>>> t.add_columns([[1.1, 1.2], ['x', 'y']], names=('b', 'c'),
... rename_duplicate=True)
>>> print(t)
a b b_1 c
--- --- --- ---
1 0.1 1.1 x
2 0.2 1.2 y
Add unnamed columns or mixin objects in the table using default names
or by specifying explicit names with ``names``. Names can also be overridden::
>>> t = Table()
>>> col_b = Column(name='b', data=['u', 'v'])
>>> t.add_columns([[1, 2], col_b])
>>> t.add_columns([[3, 4], col_b], names=['c', 'd'])
>>> print(t)
col0 b c d
---- --- --- ---
1 u 3 u
2 v 4 v
"""
if indexes is None:
indexes = [len(self.columns)] * len(cols)
elif len(indexes) != len(cols):
raise ValueError('Number of indexes must match number of cols')
if names is None:
names = (None,) * len(cols)
elif len(names) != len(cols):
raise ValueError('Number of names must match number of cols')
default_names = ['col{}'.format(ii + len(self.columns))
for ii in range(len(cols))]
for ii in reversed(np.argsort(indexes)):
self.add_column(cols[ii], index=indexes[ii], name=names[ii],
default_name=default_names[ii],
rename_duplicate=rename_duplicate, copy=copy)
def _replace_column_warnings(self, name, col):
"""
Same as replace_column but issues warnings under various circumstances.
"""
warns = conf.replace_warnings
if 'refcount' in warns and name in self.colnames:
refcount = sys.getrefcount(self[name])
if name in self.colnames:
old_col = self[name]
# This may raise an exception (e.g. t['a'] = 1) in which case none of
# the downstream code runs.
self.replace_column(name, col)
if 'always' in warns:
warnings.warn(f"replaced column '{name}'",
TableReplaceWarning, stacklevel=3)
if 'slice' in warns:
try:
# Check for ndarray-subclass slice. An unsliced instance
# has an ndarray for the base while sliced has the same class
# as parent.
if isinstance(old_col.base, old_col.__class__):
msg = ("replaced column '{}' which looks like an array slice. "
"The new column no longer shares memory with the "
"original array.".format(name))
warnings.warn(msg, TableReplaceWarning, stacklevel=3)
except AttributeError:
pass
if 'refcount' in warns:
# Did reference count change?
new_refcount = sys.getrefcount(self[name])
if refcount != new_refcount:
msg = ("replaced column '{}' and the number of references "
"to the column changed.".format(name))
warnings.warn(msg, TableReplaceWarning, stacklevel=3)
if 'attributes' in warns:
# Any of the standard column attributes changed?
changed_attrs = []
new_col = self[name]
# Check base DataInfo attributes that any column will have
for attr in DataInfo.attr_names:
if getattr(old_col.info, attr) != getattr(new_col.info, attr):
changed_attrs.append(attr)
if changed_attrs:
msg = ("replaced column '{}' and column attributes {} changed."
.format(name, changed_attrs))
warnings.warn(msg, TableReplaceWarning, stacklevel=3)
def replace_column(self, name, col, copy=True):
"""
Replace column ``name`` with the new ``col`` object.
The behavior of ``copy`` for Column objects is:
- copy=True: new class instance with a copy of data and deep copy of meta
- copy=False: new class instance with same data and a key-only copy of meta
For mixin columns:
- copy=True: new class instance with copy of data and deep copy of meta
- copy=False: original instance (no copy at all)
Parameters
----------
name : str
Name of column to replace
col : column object (list, ndarray, Column, etc)
New column object to replace the existing column
copy : bool
Make copy of the input ``col``, default=True
Examples
--------
Replace column 'a' with a float version of itself::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3]], names=('a', 'b'))
>>> float_a = t['a'].astype(float)
>>> t.replace_column('a', float_a)
"""
if name not in self.colnames:
raise ValueError(f'column name {name} is not in the table')
if self[name].info.indices:
raise ValueError('cannot replace a table index column')
col = self._convert_data_to_col(col, name=name, copy=copy)
self._set_col_parent_table_and_mask(col)
# Ensure that new column is the right length, unless it is the only column
# in which case re-sizing is allowed.
if len(self.columns) > 1 and len(col) != len(self[name]):
raise ValueError('length of new column must match table length')
self.columns.__setitem__(name, col, validated=True)
def remove_row(self, index):
"""
Remove a row from the table.
Parameters
----------
index : int
Index of row to remove
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3], ['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
3 0.3 z
Remove row 1 from the table::
>>> t.remove_row(1)
>>> print(t)
a b c
--- --- ---
1 0.1 x
3 0.3 z
To remove several rows at the same time use remove_rows.
"""
# Check that the index is a plain integer; other row specifiers are handled by remove_rows()
if not isinstance(index, (int, np.integer)):
raise TypeError("Row index must be an integer")
self.remove_rows(index)
def remove_rows(self, row_specifier):
"""
Remove rows from the table.
Parameters
----------
row_specifier : slice, int, or array of ints
Specification for rows to remove
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3], ['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
3 0.3 z
Remove rows 0 and 2 from the table::
>>> t.remove_rows([0, 2])
>>> print(t)
a b c
--- --- ---
2 0.2 y
Note that there are no warnings if the slice operator extends
outside the data::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3], ['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> t.remove_rows(slice(10, 20, 1))
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
3 0.3 z
"""
# Update indices
for index in self.indices:
index.remove_rows(row_specifier)
keep_mask = np.ones(len(self), dtype=bool)
keep_mask[row_specifier] = False
columns = self.TableColumns()
for name, col in self.columns.items():
newcol = col[keep_mask]
newcol.info.parent_table = self
columns[name] = newcol
self._replace_cols(columns)
# Revert groups to default (ungrouped) state
if hasattr(self, '_groups'):
del self._groups
def remove_column(self, name):
"""
Remove a column from the table.
This can also be done with::
del table[name]
Parameters
----------
name : str
Name of column to remove
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3], ['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
3 0.3 z
Remove column 'b' from the table::
>>> t.remove_column('b')
>>> print(t)
a c
--- ---
1 x
2 y
3 z
To remove several columns at the same time use remove_columns.
"""
self.remove_columns([name])
def remove_columns(self, names):
'''
Remove several columns from the table.
Parameters
----------
names : list
A list containing the names of the columns to remove
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3], ['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
3 0.3 z
Remove columns 'b' and 'c' from the table::
>>> t.remove_columns(['b', 'c'])
>>> print(t)
a
---
1
2
3
Specifying only a single column also works. Remove column 'b' from the table::
>>> t = Table([[1, 2, 3], [0.1, 0.2, 0.3], ['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> t.remove_columns('b')
>>> print(t)
a c
--- ---
1 x
2 y
3 z
This gives the same as using remove_column.
'''
if isinstance(names, str):
names = [names]
for name in names:
if name not in self.columns:
raise KeyError(f"Column {name} does not exist")
for name in names:
self.columns.pop(name)
def _convert_string_dtype(self, in_kind, out_kind, encode_decode_func):
"""
Convert string-like columns to/from bytestring and unicode (internal only).
Parameters
----------
in_kind : str
Input dtype.kind
out_kind : str
Output dtype.kind
"""
for col in self.itercols():
if col.dtype.kind == in_kind:
try:
# This requires ASCII and is faster by a factor of up to ~8, so
# try that first.
newcol = col.__class__(col, dtype=out_kind)
except (UnicodeEncodeError, UnicodeDecodeError):
newcol = col.__class__(encode_decode_func(col, 'utf-8'))
# Quasi-manually copy info attributes. Unfortunately
# DataInfo.__set__ does not do the right thing in this case
# so newcol.info = col.info does not get the old info attributes.
for attr in col.info.attr_names - col.info._attrs_no_copy - set(['dtype']):
value = deepcopy(getattr(col.info, attr))
setattr(newcol.info, attr, value)
self[col.name] = newcol
def convert_bytestring_to_unicode(self):
"""
Convert bytestring columns (dtype.kind='S') to unicode (dtype.kind='U')
using UTF-8 encoding.
Internally this changes string columns to represent each character
in the string with a 4-byte UCS-4 equivalent, so it is inefficient
for memory but allows scripts to manipulate string arrays with
natural syntax.
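Examples
--------
A minimal example::
>>> t = Table([[b'abc', b'de']], names=['s'])
>>> t['s'].dtype.kind
'S'
>>> t.convert_bytestring_to_unicode()
>>> t['s'].dtype.kind
'U'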
"""
self._convert_string_dtype('S', 'U', np.char.decode)
def convert_unicode_to_bytestring(self):
"""
Convert unicode columns (dtype.kind='U') to bytestring (dtype.kind='S')
using UTF-8 encoding.
When exporting a unicode string array to a file, it may be desirable
to encode unicode columns as bytestrings.
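Examples
--------
A minimal example::
>>> t = Table([['abc', 'de']], names=['s'])
>>> t.convert_unicode_to_bytestring()
>>> t['s'].dtype.kind
'S'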
"""
self._convert_string_dtype('U', 'S', np.char.encode)
def keep_columns(self, names):
'''
Keep only the columns specified (remove the others).
Parameters
----------
names : list
A list containing the names of the columns to keep. All other
columns will be removed.
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1, 2, 3],[0.1, 0.2, 0.3],['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> print(t)
a b c
--- --- ---
1 0.1 x
2 0.2 y
3 0.3 z
Specifying only a single column name keeps only this column.
Keep only column 'a' of the table::
>>> t.keep_columns('a')
>>> print(t)
a
---
1
2
3
Specifying a list of column names is also possible.
Keep columns 'a' and 'c' of the table::
>>> t = Table([[1, 2, 3],[0.1, 0.2, 0.3],['x', 'y', 'z']],
... names=('a', 'b', 'c'))
>>> t.keep_columns(['a', 'c'])
>>> print(t)
a c
--- ---
1 x
2 y
3 z
'''
if isinstance(names, str):
names = [names]
for name in names:
if name not in self.columns:
raise KeyError(f"Column {name} does not exist")
remove = list(set(self.keys()) - set(names))
self.remove_columns(remove)
def rename_column(self, name, new_name):
'''
Rename a column.
This can also be done directly by setting the ``name`` attribute
for a column::
table[name].name = new_name
TODO: this won't work for mixins
Parameters
----------
name : str
The current name of the column.
new_name : str
The new name for the column
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1,2],[3,4],[5,6]], names=('a','b','c'))
>>> print(t)
a b c
--- --- ---
1 3 5
2 4 6
Renaming column 'a' to 'aa'::
>>> t.rename_column('a' , 'aa')
>>> print(t)
aa b c
--- --- ---
1 3 5
2 4 6
'''
if name not in self.keys():
raise KeyError(f"Column {name} does not exist")
self.columns[name].info.name = new_name
def rename_columns(self, names, new_names):
'''
Rename multiple columns.
Parameters
----------
names : list, tuple
A list or tuple of existing column names.
new_names : list, tuple
A list or tuple of new column names.
Examples
--------
Create a table with three columns 'a', 'b', 'c'::
>>> t = Table([[1,2],[3,4],[5,6]], names=('a','b','c'))
>>> print(t)
a b c
--- --- ---
1 3 5
2 4 6
Renaming columns 'a' to 'aa' and 'b' to 'bb'::
>>> names = ('a','b')
>>> new_names = ('aa','bb')
>>> t.rename_columns(names, new_names)
>>> print(t)
aa bb c
--- --- ---
1 3 5
2 4 6
'''
if not self._is_list_or_tuple_of_str(names):
raise TypeError("input 'names' must be a tuple or a list of column names")
if not self._is_list_or_tuple_of_str(new_names):
raise TypeError("input 'new_names' must be a tuple or a list of column names")
if len(names) != len(new_names):
raise ValueError("input 'names' and 'new_names' list arguments must be the same length")
for name, new_name in zip(names, new_names):
self.rename_column(name, new_name)
def _set_row(self, idx, colnames, vals):
try:
assert len(vals) == len(colnames)
except Exception:
raise ValueError('right hand side must be a sequence of values with '
'the same length as the number of selected columns')
# Keep track of original values before setting each column so that
# setting row can be transactional.
orig_vals = []
cols = self.columns
try:
for name, val in zip(colnames, vals):
orig_vals.append(cols[name][idx])
cols[name][idx] = val
except Exception:
# If anything went wrong first revert the row update then raise
for name, val in zip(colnames, orig_vals[:-1]):
cols[name][idx] = val
raise
def add_row(self, vals=None, mask=None):
"""Add a new row to the end of the table.
The ``vals`` argument can be:
sequence (e.g. tuple or list)
Column values in the same order as table columns.
mapping (e.g. dict)
Keys corresponding to column names. Missing values will be
filled with np.zeros for the column dtype.
`None`
All values filled with np.zeros for the column dtype.
This method requires that the Table object "owns" the underlying array
data. In particular one cannot add a row to a Table that was
initialized with copy=False from an existing array.
The ``mask`` attribute should give (if desired) the mask for the
values. The type of the mask should match that of the values, i.e. if
``vals`` is an iterable, then ``mask`` should also be an iterable
with the same length, and if ``vals`` is a mapping, then ``mask``
should be a dictionary.
Parameters
----------
vals : tuple, list, dict or `None`
Use the specified values in the new row
mask : tuple, list, dict or `None`
Use the specified mask values in the new row
Examples
--------
Create a table with three columns 'a', 'b' and 'c'::
>>> t = Table([[1,2],[4,5],[7,8]], names=('a','b','c'))
>>> print(t)
a b c
--- --- ---
1 4 7
2 5 8
Adding a new row with entries '3' in 'a', '6' in 'b' and '9' in 'c'::
>>> t.add_row([3,6,9])
>>> print(t)
a b c
--- --- ---
1 4 7
2 5 8
3 6 9
"""
self.insert_row(len(self), vals, mask)
def insert_row(self, index, vals=None, mask=None):
"""Add a new row before the given ``index`` position in the table.
The ``vals`` argument can be:
sequence (e.g. tuple or list)
Column values in the same order as table columns.
mapping (e.g. dict)
Keys corresponding to column names. Missing values will be
filled with np.zeros for the column dtype.
`None`
All values filled with np.zeros for the column dtype.
The ``mask`` attribute should give (if desired) the mask for the
values. The type of the mask should match that of the values, i.e. if
``vals`` is an iterable, then ``mask`` should also be an iterable
with the same length, and if ``vals`` is a mapping, then ``mask``
should be a dictionary.
Parameters
----------
vals : tuple, list, dict or `None`
Use the specified values in the new row
mask : tuple, list, dict or `None`
Use the specified mask values in the new row
"""
colnames = self.colnames
N = len(self)
if index < -N or index > N:
raise IndexError("Index {} is out of bounds for table with length {}"
.format(index, N))
if index < 0:
index += N
def _is_mapping(obj):
"""Minimal checker for mapping (dict-like) interface for obj"""
attrs = ('__getitem__', '__len__', '__iter__', 'keys', 'values', 'items')
return all(hasattr(obj, attr) for attr in attrs)
if _is_mapping(vals) or vals is None:
# From the vals and/or mask mappings create the corresponding lists
# that have entries for each table column.
if mask is not None and not _is_mapping(mask):
raise TypeError("Mismatch between type of vals and mask")
# Now check that the mask is specified for the same keys as the
# values, otherwise things get really confusing.
if mask is not None and set(vals.keys()) != set(mask.keys()):
raise ValueError('keys in mask should match keys in vals')
if vals and any(name not in colnames for name in vals):
raise ValueError('Keys in vals must all be valid column names')
vals_list = []
mask_list = []
for name in colnames:
if vals and name in vals:
vals_list.append(vals[name])
mask_list.append(False if mask is None else mask[name])
else:
col = self[name]
if hasattr(col, 'dtype'):
# Make a placeholder zero element of the right type which is masked.
# This assumes the appropriate insert() method will broadcast a
# numpy scalar to the right shape.
vals_list.append(np.zeros(shape=(), dtype=col.dtype))
# For masked table any unsupplied values are masked by default.
mask_list.append(self.masked and vals is not None)
else:
raise ValueError(f"Value must be supplied for column '{name}'")
vals = vals_list
mask = mask_list
if isiterable(vals):
if mask is not None and (not isiterable(mask) or _is_mapping(mask)):
raise TypeError("Mismatch between type of vals and mask")
if len(self.columns) != len(vals):
raise ValueError('Mismatch between number of vals and columns')
if mask is not None:
if len(self.columns) != len(mask):
raise ValueError('Mismatch between number of masks and columns')
else:
mask = [False] * len(self.columns)
else:
raise TypeError('Vals must be an iterable or mapping or None')
columns = self.TableColumns()
try:
# Insert val at index for each column
for name, col, val, mask_ in zip(colnames, self.columns.values(), vals, mask):
# If new val is masked and the existing column does not support masking
# then upgrade the column to a mask-enabled type: either the table-level
# default ColumnClass or else MaskedColumn.
if mask_ and isinstance(col, Column) and not isinstance(col, MaskedColumn):
col_cls = (self.ColumnClass
if issubclass(self.ColumnClass, self.MaskedColumn)
else self.MaskedColumn)
col = col_cls(col, copy=False)
newcol = col.insert(index, val, axis=0)
if len(newcol) != N + 1:
raise ValueError('Incorrect length for column {} after inserting {}'
' (expected {}, got {})'
                                 .format(name, val, N + 1, len(newcol)))
newcol.info.parent_table = self
# Set mask if needed and possible
if mask_:
if hasattr(newcol, 'mask'):
newcol[index] = np.ma.masked
else:
raise TypeError("mask was supplied for column '{}' but it does not "
"support masked values".format(col.info.name))
columns[name] = newcol
# insert row in indices
for table_index in self.indices:
table_index.insert_row(index, vals, self.columns.values())
except Exception as err:
raise ValueError("Unable to insert row because of exception in column '{}':\n{}"
.format(name, err))
else:
self._replace_cols(columns)
# Revert groups to default (ungrouped) state
if hasattr(self, '_groups'):
del self._groups
def _replace_cols(self, columns):
for col, new_col in zip(self.columns.values(), columns.values()):
new_col.info.indices = []
for index in col.info.indices:
index.columns[index.col_position(col.info.name)] = new_col
new_col.info.indices.append(index)
self.columns = columns
def argsort(self, keys=None, kind=None, reverse=False):
"""
Return the indices which would sort the table according to one or
more key columns. This simply calls the `numpy.argsort` function on
the table with the ``order`` parameter set to ``keys``.
Parameters
----------
keys : str or list of str
The column name(s) to order the table by
kind : {'quicksort', 'mergesort', 'heapsort'}, optional
Sorting algorithm.
reverse : bool
Sort in reverse order (default=False)
Returns
-------
index_array : ndarray, int
Array of indices that sorts the table by the specified key
column(s).
"""
if isinstance(keys, str):
keys = [keys]
# use index sorted order if possible
if keys is not None:
index = get_index(self, names=keys)
if index is not None:
return index.sorted_data()
kwargs = {}
if keys:
kwargs['order'] = keys
if kind:
kwargs['kind'] = kind
if keys:
data = self.as_array(names=keys)
else:
data = self.as_array()
idx = data.argsort(**kwargs)
if reverse:
return idx[::-1]
return idx
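        # Illustrative use (a sketch, not part of the original source):
        #   t = Table([[3, 1, 2], ['c', 'a', 'b']], names=('x', 'y'))
        #   order = t.argsort('x')          # -> array([1, 2, 0])
        #   t_sorted = t[order]             # new table ordered by column 'x'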
def sort(self, keys=None, reverse=False):
'''
Sort the table according to one or more keys. This operates
on the existing table and does not return a new table.
Parameters
----------
keys : str or list of str
The key(s) to order the table by. If None, use the
primary index of the Table.
reverse : bool
Sort in reverse order (default=False)
Examples
--------
Create a table with 3 columns::
>>> t = Table([['Max', 'Jo', 'John'], ['Miller', 'Miller', 'Jackson'],
... [12, 15, 18]], names=('firstname', 'name', 'tel'))
>>> print(t)
firstname name tel
--------- ------- ---
Max Miller 12
Jo Miller 15
John Jackson 18
Sorting according to standard sorting rules, first 'name' then 'firstname'::
>>> t.sort(['name', 'firstname'])
>>> print(t)
firstname name tel
--------- ------- ---
John Jackson 18
Jo Miller 15
Max Miller 12
Sorting according to standard sorting rules, first 'firstname' then 'tel',
in reverse order::
>>> t.sort(['firstname', 'tel'], reverse=True)
>>> print(t)
firstname name tel
--------- ------- ---
Max Miller 12
John Jackson 18
Jo Miller 15
'''
if keys is None:
if not self.indices:
raise ValueError("Table sort requires input keys or a table index")
keys = [x.info.name for x in self.indices[0].columns]
if isinstance(keys, str):
keys = [keys]
indexes = self.argsort(keys)
if reverse:
indexes = indexes[::-1]
sort_index = get_index(self, names=keys)
if sort_index is not None:
# avoid inefficient relabelling of sorted index
prev_frozen = sort_index._frozen
sort_index._frozen = True
for col in self.columns.values():
col[:] = col.take(indexes, axis=0)
if sort_index is not None:
# undo index freeze
sort_index._frozen = prev_frozen
# now relabel the sort index appropriately
sort_index.sort()
def reverse(self):
'''
Reverse the row order of table rows. The table is reversed
in place and there are no function arguments.
Examples
--------
Create a table with three columns::
>>> t = Table([['Max', 'Jo', 'John'], ['Miller','Miller','Jackson'],
... [12,15,18]], names=('firstname','name','tel'))
>>> print(t)
firstname name tel
--------- ------- ---
Max Miller 12
Jo Miller 15
John Jackson 18
Reversing order::
>>> t.reverse()
>>> print(t)
firstname name tel
--------- ------- ---
John Jackson 18
Jo Miller 15
Max Miller 12
'''
for col in self.columns.values():
col[:] = col[::-1]
for index in self.indices:
index.reverse()
def copy(self, copy_data=True):
'''
Return a copy of the table.
Parameters
----------
copy_data : bool
If `True` (the default), copy the underlying data array.
Otherwise, use the same data array. The ``meta`` is always
deepcopied regardless of the value for ``copy_data``.
'''
out = self.__class__(self, copy=copy_data)
# If the current table is grouped then do the same in the copy
if hasattr(self, '_groups'):
out._groups = groups.TableGroups(out, indices=self._groups._indices,
keys=self._groups._keys)
return out
def __deepcopy__(self, memo=None):
return self.copy(True)
def __copy__(self):
return self.copy(False)
def __lt__(self, other):
return super().__lt__(other)
def __gt__(self, other):
return super().__gt__(other)
def __le__(self, other):
return super().__le__(other)
def __ge__(self, other):
return super().__ge__(other)
def __eq__(self, other):
if isinstance(other, Table):
other = other.as_array()
if self.has_masked_columns:
if isinstance(other, np.ma.MaskedArray):
result = self.as_array() == other
else:
# If mask is True, then by definition the row doesn't match
# because the other array is not masked.
false_mask = np.zeros(1, dtype=[(n, bool) for n in self.dtype.names])
result = (self.as_array().data == other) & (self.mask == false_mask)
else:
if isinstance(other, np.ma.MaskedArray):
# If mask is True, then by definition the row doesn't match
# because the other array is not masked.
false_mask = np.zeros(1, dtype=[(n, bool) for n in other.dtype.names])
result = (self.as_array() == other.data) & (other.mask == false_mask)
else:
result = self.as_array() == other
return result
def __ne__(self, other):
return ~self.__eq__(other)
@property
def groups(self):
if not hasattr(self, '_groups'):
self._groups = groups.TableGroups(self)
return self._groups
def group_by(self, keys):
"""
Group this table by the specified ``keys``
This effectively splits the table into groups which correspond to unique
values of the ``keys`` grouping object. The output is a new
`~astropy.table.TableGroups` which contains a copy of this table but
sorted by row according to ``keys``.
The ``keys`` input to `group_by` can be specified in different ways:
- String or list of strings corresponding to table column name(s)
- Numpy array (homogeneous or structured) with same length as this table
- `~astropy.table.Table` with same length as this table
Parameters
----------
keys : str, list of str, numpy array, or `~astropy.table.Table`
Key grouping object
Returns
-------
out : `~astropy.table.Table`
New table with groups set
"""
return groups.table_group_by(self, keys)
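        # Illustrative use (a sketch, not part of the original source):
        #   t = Table([['a', 'b', 'a'], [1, 2, 3]], names=('key', 'val'))
        #   g = t.group_by('key')           # rows sorted and grouped by 'key'
        #   for group in g.groups:          # two groups: key 'a' and key 'b'
        #       print(group['val'].sum())   # -> 4, then 2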
def to_pandas(self, index=None):
"""
Return a :class:`pandas.DataFrame` instance
The index of the created DataFrame is controlled by the ``index``
argument. For ``index=True`` or the default ``None``, an index will be
specified for the DataFrame if there is a primary key index on the
Table *and* if it corresponds to a single column. If ``index=False``
then no DataFrame index will be specified. If ``index`` is the name of
a column in the table then that will be the DataFrame index.
        In addition to vanilla columns or masked columns, this supports Table
        mixin columns like Quantity, Time, or SkyCoord. In many cases these
        objects have no analog in pandas and will be converted to an "encoded"
representation using only Column or MaskedColumn. The exception is
Time or TimeDelta columns, which will be converted to the corresponding
representation in pandas using ``np.datetime64`` or ``np.timedelta64``.
See the example below.
        Parameters
        ----------
        index : None, bool, str
            Specify DataFrame index mode
        Returns
        -------
        dataframe : :class:`pandas.DataFrame`
            A pandas :class:`pandas.DataFrame` instance
Raises
------
ImportError
If pandas is not installed
ValueError
If the Table has multi-dimensional columns
Examples
--------
Here we convert a table with a few mixins to a
:class:`pandas.DataFrame` instance.
>>> import pandas as pd
>>> from astropy.table import QTable
>>> import astropy.units as u
>>> from astropy.time import Time, TimeDelta
>>> from astropy.coordinates import SkyCoord
>>> q = [1, 2] * u.m
>>> tm = Time([1998, 2002], format='jyear')
>>> sc = SkyCoord([5, 6], [7, 8], unit='deg')
>>> dt = TimeDelta([3, 200] * u.s)
>>> t = QTable([q, tm, sc, dt], names=['q', 'tm', 'sc', 'dt'])
>>> df = t.to_pandas(index='tm')
>>> with pd.option_context('display.max_columns', 20):
... print(df)
q sc.ra sc.dec dt
tm
1998-01-01 1.0 5.0 7.0 00:00:03
2002-01-01 2.0 6.0 8.0 00:03:20
"""
from pandas import DataFrame
if index is not False:
if index in (None, True):
# Default is to use the table primary key if available and a single column
if self.primary_key and len(self.primary_key) == 1:
index = self.primary_key[0]
else:
index = False
else:
if index not in self.colnames:
raise ValueError('index must be None, False, True or a table '
'column name')
def _encode_mixins(tbl):
"""Encode a Table ``tbl`` that may have mixin columns to a Table with only
astropy Columns + appropriate meta-data to allow subsequent decoding.
"""
from . import serialize
from astropy.utils.data_info import MixinInfo, serialize_context_as
from astropy.time import Time, TimeDelta
# Convert any Time or TimeDelta columns and pay attention to masking
time_cols = [col for col in tbl.itercols() if isinstance(col, Time)]
if time_cols:
# Make a light copy of table and clear any indices
new_cols = []
for col in tbl.itercols():
new_col = col_copy(col, copy_indices=False) if col.info.indices else col
new_cols.append(new_col)
tbl = tbl.__class__(new_cols, copy=False)
for col in time_cols:
if isinstance(col, TimeDelta):
# Convert to nanoseconds (matches astropy datetime64 support)
new_col = (col.sec * 1e9).astype('timedelta64[ns]')
nat = np.timedelta64('NaT')
else:
new_col = col.datetime64.copy()
nat = np.datetime64('NaT')
if col.masked:
new_col[col.mask] = nat
tbl[col.info.name] = new_col
# Convert the table to one with no mixins, only Column objects.
encode_tbl = serialize.represent_mixins_as_columns(tbl)
return encode_tbl
tbl = _encode_mixins(self)
badcols = [name for name, col in self.columns.items()
if (getattr(col, 'ndim', 1) > 1)]
if badcols:
raise ValueError(
"Cannot convert a table with multi-dimensional columns to a "
"pandas DataFrame. Offending columns are: {}".format(badcols))
out = OrderedDict()
for name, column in tbl.columns.items():
if isinstance(column, MaskedColumn) and np.any(column.mask):
if column.dtype.kind in ['i', 'u']:
out[name] = column.astype(float).filled(np.nan)
warnings.warn(
"converted column '{}' from integer to float".format(
name), TableReplaceWarning, stacklevel=3)
elif column.dtype.kind in ['f', 'c']:
out[name] = column.filled(np.nan)
else:
out[name] = column.astype(object).filled(np.nan)
else:
out[name] = column
if out[name].dtype.byteorder not in ('=', '|'):
out[name] = out[name].byteswap().newbyteorder()
kwargs = {'index': out.pop(index)} if index else {}
return DataFrame(out, **kwargs)
@classmethod
def from_pandas(cls, dataframe, index=False):
"""
Create a `~astropy.table.Table` from a :class:`pandas.DataFrame` instance
In addition to converting generic numeric or string columns, this supports
conversion of pandas Date and Time delta columns to `~astropy.time.Time`
and `~astropy.time.TimeDelta` columns, respectively.
Parameters
----------
dataframe : :class:`pandas.DataFrame`
A pandas :class:`pandas.DataFrame` instance
index : bool
Include the index column in the returned table (default=False)
Returns
-------
table : `~astropy.table.Table`
A `~astropy.table.Table` (or subclass) instance
Raises
------
ImportError
If pandas is not installed
Examples
--------
Here we convert a :class:`pandas.DataFrame` instance
to a `~astropy.table.QTable`.
>>> import numpy as np
>>> import pandas as pd
>>> from astropy.table import QTable
>>> time = pd.Series(['1998-01-01', '2002-01-01'], dtype='datetime64[ns]')
>>> dt = pd.Series(np.array([1, 300], dtype='timedelta64[s]'))
>>> df = pd.DataFrame({'time': time})
>>> df['dt'] = dt
>>> df['x'] = [3., 4.]
>>> with pd.option_context('display.max_columns', 20):
... print(df)
time dt x
0 1998-01-01 00:00:01 3.0
1 2002-01-01 00:05:00 4.0
>>> QTable.from_pandas(df)
<QTable length=2>
time dt x
object object float64
----------------------- ------ -------
1998-01-01T00:00:00.000 1.0 3.0
2002-01-01T00:00:00.000 300.0 4.0
"""
out = OrderedDict()
names = list(dataframe.columns)
columns = [dataframe[name] for name in names]
datas = [np.array(column) for column in columns]
masks = [np.array(column.isnull()) for column in columns]
if index:
index_name = dataframe.index.name or 'index'
while index_name in names:
index_name = '_' + index_name + '_'
names.insert(0, index_name)
columns.insert(0, dataframe.index)
datas.insert(0, np.array(dataframe.index))
masks.insert(0, np.zeros(len(dataframe), dtype=bool))
for name, column, data, mask in zip(names, columns, datas, masks):
if data.dtype.kind == 'O':
# If all elements of an object array are string-like or np.nan
# then coerce back to a native numpy str/unicode array.
string_types = (str, bytes)
nan = np.nan
if all(isinstance(x, string_types) or x is nan for x in data):
# Force any missing (null) values to b''. Numpy will
# upcast to str/unicode as needed.
data[mask] = b''
# When the numpy object array is represented as a list then
# numpy initializes to the correct string or unicode type.
data = np.array([x for x in data])
# Numpy datetime64
if data.dtype.kind == 'M':
from astropy.time import Time
out[name] = Time(data, format='datetime64')
if np.any(mask):
out[name][mask] = np.ma.masked
out[name].format = 'isot'
# Numpy timedelta64
elif data.dtype.kind == 'm':
from astropy.time import TimeDelta
data_sec = data.astype('timedelta64[ns]').astype(np.float64) / 1e9
out[name] = TimeDelta(data_sec, format='sec')
if np.any(mask):
out[name][mask] = np.ma.masked
else:
if np.any(mask):
out[name] = MaskedColumn(data=data, name=name, mask=mask)
else:
out[name] = Column(data=data, name=name)
return cls(out)
info = TableInfo()
class QTable(Table):
"""A class to represent tables of heterogeneous data.
`~astropy.table.QTable` provides a class for heterogeneous tabular data
which can be easily modified, for instance adding columns or new rows.
The `~astropy.table.QTable` class is identical to `~astropy.table.Table`
except that columns with an associated ``unit`` attribute are converted to
`~astropy.units.Quantity` objects.
See also:
- http://docs.astropy.org/en/stable/table/
- http://docs.astropy.org/en/stable/table/mixin_columns.html
Parameters
----------
data : numpy ndarray, dict, list, Table, or table-like object, optional
Data to initialize table.
masked : bool, optional
Specify whether the table is masked.
names : list, optional
Specify column names.
dtype : list, optional
Specify column data types.
meta : dict, optional
Metadata associated with the table.
copy : bool, optional
Copy the input data. Default is True.
rows : numpy ndarray, list of lists, optional
Row-oriented data for table instead of ``data`` argument.
copy_indices : bool, optional
Copy any indices in the input data. Default is True.
**kwargs : dict, optional
Additional keyword args when converting table-like object.
"""
def _is_mixin_for_table(self, col):
"""
Determine if ``col`` should be added to the table directly as
a mixin column.
"""
return has_info_class(col, MixinInfo)
def _convert_col_for_table(self, col):
if isinstance(col, Column) and getattr(col, 'unit', None) is not None:
# What to do with MaskedColumn with units: leave as MaskedColumn or
# turn into Quantity and drop mask? Assuming we have masking support
# in Quantity someday, let's drop the mask (consistent with legacy
# behavior) but issue a warning.
if isinstance(col, MaskedColumn) and np.any(col.mask):
warnings.warn("dropping mask in Quantity column '{}': "
"masked Quantity not supported".format(col.info.name))
# We need to turn the column into a quantity, or a subclass
# identified in the unit (such as u.mag()).
q_cls = getattr(col.unit, '_quantity_class', Quantity)
qcol = q_cls(col.data, col.unit, copy=False)
qcol.info = col.info
col = qcol
else:
col = super()._convert_col_for_table(col)
return col
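        # Illustrative consequence (a sketch, not part of the original source): for
        #   t = QTable([Column([1., 2.], unit='m')], names=['d'])
        # t['d'] is a Quantity, so t['d'].to('cm') works directly, whereas a plain
        # Table would keep it as a unit-tagged Column.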
class NdarrayMixin(np.ndarray):
"""
Mixin column class to allow storage of arbitrary numpy
ndarrays within a Table. This is a subclass of numpy.ndarray
and has the same initialization options as ndarray().
"""
info = ParentDtypeInfo()
def __new__(cls, obj, *args, **kwargs):
self = np.array(obj, *args, **kwargs).view(cls)
if 'info' in getattr(obj, '__dict__', ()):
self.info = obj.info
return self
def __array_finalize__(self, obj):
if obj is None:
return
if callable(super().__array_finalize__):
super().__array_finalize__(obj)
# Self was created from template (e.g. obj[slice] or (obj * 2))
# or viewcast e.g. obj.view(Column). In either case we want to
# init Column attributes for self from obj if possible.
if 'info' in getattr(obj, '__dict__', ()):
self.info = obj.info
def __reduce__(self):
# patch to pickle Quantity objects (ndarray subclasses), see
# http://www.mail-archive.com/[email protected]/msg02446.html
object_state = list(super().__reduce__())
object_state[2] = (object_state[2], self.__dict__)
return tuple(object_state)
def __setstate__(self, state):
# patch to unpickle NdarrayMixin objects (ndarray subclasses), see
# http://www.mail-archive.com/[email protected]/msg02446.html
nd_state, own_state = state
super().__setstate__(nd_state)
self.__dict__.update(own_state)
| bsipocz/astropy | astropy/table/table.py | Python | bsd-3-clause | 123,044 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 30 14:49:04 2017
@author: Sebastijan Mrak <[email protected]>
"""
import numpy as np
from mpl_toolkits.basemap import Basemap
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
def plotGpsPolarTrajectory(azimuth=[],elevation=[],labels=None,timelim=None):
"""
Draw a polar plot with trajectories of all given satellites. Azimuth and elevation
are obligatory inputs in a form of a list!
"""
fig = plt.figure(figsize=(8,8))
ax = fig.add_subplot(111, projection='polar')
for i in range(len(azimuth)):
if labels is not None:
ax.plot(np.radians(azimuth[i]), 90-elevation[i], label='sv'+str(labels[i]))
else:
ax.plot(np.radians(azimuth[i]), 90-elevation[i])
ax.set_theta_zero_location('N')
ax.set_theta_direction(-1)
ax.set_rmax(80)
ax.set_rticks([80, 60, 40, 20])
ax.set_yticklabels([20, 40, 60, 80])
plt.legend(bbox_to_anchor=(1.1, 1.1))
if timelim is not None:
plt.title('Boston: ' + timelim[0].strftime('%m/%d/%y-%H:%M :: ') + timelim[1].strftime('%m/%d/%y-%H:%M'))
plt.show()
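# Illustrative call (a sketch; the arrays below are made up, not from this module):
#   az = [np.linspace(10, 250, 100)]     # one satellite's azimuth track [deg]
#   el = [np.linspace(15, 80, 100)]      # matching elevation track [deg]
#   plotGpsPolarTrajectory(azimuth=az, elevation=el, labels=[7])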
def plotGpsMapTrajectory(lat=[], lon=[], rx=None, labels=None,
timelim=None, totalityc=[], totalityu=[], totalityd=[],
latlim=[41, 44], ms=10, color='k',
lonlim=[-74, -69], center=[42.36, -71.06],
parallels=[42,44], meridians = [-73, -70, -67],
ax=None, m=None):
"""
"""
if ax is None:
(fig,ax) = plt.subplots(1,1,facecolor='w')
m = Basemap(llcrnrlat=latlim[0],urcrnrlat=latlim[1],
llcrnrlon=lonlim[0],urcrnrlon=lonlim[1],
projection='merc', resolution='i', ax=ax)
m.drawparallels(parallels,labels=[False, True, True, True], linewidth=1)
m.drawmeridians(meridians,labels=[True,True,False,True], linewidth=1)
if len(totalityc) > 0:
x,y = m(totalityc[1], totalityc[0])
m.plot(x,y, lw=2, color='r')
if len(totalityu) > 0:
x,y = m(totalityu[1], totalityu[0])
m.plot(x,y, lw=2, color='b')
if len(totalityd) > 0:
x,y = m(totalityd[1], totalityd[0])
m.plot(x,y, lw=2, color='b')
if len(lat) > 0 and len(lon) > 0:
for i in range(len(lat)):
idx = np.where(np.isfinite(lon[i]))[0]
x,y = m(lon[i][idx], lat[i][idx])
if labels is not None:
m.plot(x,y, lw=2, label='sv'+str(labels[i]))
else:
m.plot(x,y, lw=2)
if (rx is not None) and isinstance(rx, np.ndarray):
if len(rx.shape) > 1:
for i in range(rx.shape[1]):
x,y = m(rx[1][i], rx[0][i])
m.scatter(x, y, marker='o', color=color, s=ms)
else:
x,y = m(rx[1], rx[0])
m.scatter(x, y, marker='o', color=color, s=ms)
m.drawcoastlines()
m.drawstates()
else:
if len(lat) > 0 and len(lon) > 0:
for i in range(len(lat)):
idx = np.where(np.isfinite(lon[i]))[0]
x,y = m(lon[i][idx], lat[i][idx])
if labels is not None:
m.plot(x,y, lw=2, label='sv'+str(labels[i]))
else:
m.plot(x,y, lw=2)
if (rx is not None) and isinstance(rx, np.ndarray):
if len(rx.shape) > 1:
for i in range(rx.shape[1]):
x,y = m(rx[1][i], rx[0][i])
m.scatter(x, y, marker='o', color=color, s=ms)
else:
x,y = m(rx[1], rx[0])
m.scatter(x, y, marker='o', color=color, s=ms)
return ax, m
def plotScatterTEC(lat=[], lon=[], z=[], latlim=[41, 44], ms=10, color='k',
lonlim=[-74, -69], center=[42.36, -71.06],
parallels=[42,44], meridians = [-73, -70, -67],
ax=None, m=None):
if ax is None:
(fig,ax) = plt.subplots(1,1,facecolor='w')
m = Basemap(llcrnrlat=latlim[0],urcrnrlat=latlim[1],
llcrnrlon=lonlim[0],urcrnrlon=lonlim[1],
projection='merc', resolution='i', ax=ax)
m.drawparallels(parallels,labels=[False, True, True, True], linewidth=1)
m.drawmeridians(meridians,labels=[True,True,False,True], linewidth=1)
if len(lat) > 0 and len(lon) > 0:
x,y = m(lon, lat)
m.scatter(x, y, marker='o', color=color, s=ms)
return ax, m
# plt.tight_layout()
# plt.legend(bbox_to_anchor=(1.1, 1.1))
#plotGpsMapTrajectory() | aldebaran1/gsit | gsit/pyGpsUtils.py | Python | mit | 4,890 |
__author__ = 'ntrepid8'
import agilebot.cmd_util
import logging
from logging import NullHandler
import json
import sys
import argparse
from agilebot import util
from functools import partial
import os
logger = logging.getLogger('agilebot.slack')
logger.addHandler(NullHandler())
def create_bot(args, conf):
# update trello arguments
conf = util.update_config_group('slack', args, conf)
# create the bot
return agilebot.cmd_util.create_bot(conf, logger)
def cmd_slack_post(args, conf):
bot = create_bot(args, conf)
try:
resp = bot.slack.post_msg(
text=args.text,
webhook_url=args.webhook_url,
channel=args.channel,
icon_emoji=args.icon_emoji,
username=args.username
)
except Exception as e:
logger.error('{}'.format(e))
sys.exit(1)
else:
print(json.dumps(resp))
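# Illustrative invocation handled by this sub-command (a sketch; the command name,
# channel and webhook URL are made up, not taken from the project docs):
#   agilebot slack post -t "build green" --channel "#ci" --username agilebot \
#       --webhook-url https://hooks.slack.com/services/T000/B000/XXXX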
def cmd_slack_help(parser, text=None):
t = text or 'slack'
logger.debug('show {} help'.format(t))
parser.print_help()
def sub_command(main_subparsers):
# slack sub-command
slack_parser = main_subparsers.add_parser('slack', help='slack interaction')
subparsers = slack_parser.add_subparsers(help='sub-commands', dest='subparser_1')
slack_parser.set_defaults(func_help=partial(cmd_slack_help, slack_parser, 'slack'))
# SUB-COMMAND: post (p)
p_desc = 'Post a message to a slack channel.'
p_parser = subparsers.add_parser(
'post',
aliases=['p'],
description=p_desc,
formatter_class=argparse.MetavarTypeHelpFormatter,
help=p_desc)
# p required arguments
p_req_group = p_parser.add_argument_group(
'required arguments',
)
p_req_group.add_argument('--text', '-t', required=True, type=str, help='text content of the message')
# p additional required arguments
p_add_group = p_parser.add_argument_group(
'additional required arguments',
'Required and may be specified here or in the configuration file.'
)
p_add_group.add_argument('--channel', type=str, help='Slack channel name')
p_add_group.add_argument('--username', type=str, help='username of the bot')
p_add_group.add_argument('--webhook-url', type=str, help='Slack url to POST the message to')
p_add_group.set_defaults(
channel=os.environ.get('SLACK_CHANNEL'),
username=os.environ.get('SLACK_USERNAME'),
webhook_url=os.environ.get('SLACK_WEBHOOK_URL'),
)
# p optional arguments
p_opt_group = p_parser.add_argument_group(
'additional optional arguments',
'Optional and may be specified here or in the configuration file.'
)
p_opt_group.add_argument('--icon-emoji', default=':ghost:', type=str, help='emoji to use for the bot icon')
p_opt_group.set_defaults(
icon_emoji=os.environ.get('SLACK_ICON_EMOJI'),
)
# p defaults
p_parser.set_defaults(func=cmd_slack_post)
return slack_parser
| ntrepid8/agilebot | agilebot/cmd_slack.py | Python | mit | 2,977 |
import hashlib
from django.core.cache import cache
from django.utils import six
from django.template.defaultfilters import slugify
try:
from django.utils.encoding import force_bytes
except ImportError:
force_bytes = str
try:
from django.contrib.auth import get_user_model
except ImportError:
from django.contrib.auth.models import User
def get_user_model():
return User
custom_user_model = False
else:
custom_user_model = True
from avatar.conf import settings
cached_funcs = set()
def get_username(user):
""" Return username of a User instance """
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
def get_user(username):
""" Return user from a username/ish identifier """
if custom_user_model:
return get_user_model().objects.get_by_natural_key(username)
else:
return get_user_model().objects.get(username=username)
def get_cache_key(user_or_username, size, prefix):
"""
    Returns a cache key consisting of a username and image size.
"""
if isinstance(user_or_username, get_user_model()):
user_or_username = get_username(user_or_username)
key = six.u('%s_%s_%s') % (prefix, user_or_username, size)
return six.u('%s_%s') % (slugify(key)[:100],
hashlib.md5(force_bytes(key)).hexdigest())
def cache_set(key, value):
cache.set(key, value, settings.AVATAR_CACHE_TIMEOUT)
return value
def cache_result(default_size=settings.AVATAR_DEFAULT_SIZE):
"""
Decorator to cache the result of functions that take a ``user`` and a
``size`` value.
"""
if settings.AVATAR_DISABLE_CACHE:
def decorator(func):
return func
return decorator
def decorator(func):
def cached_func(user, size=None):
prefix = func.__name__
cached_funcs.add(prefix)
key = get_cache_key(user, size or default_size, prefix=prefix)
result = cache.get(key)
if result is None:
result = func(user, size or default_size)
cache_set(key, result)
return result
return cached_func
return decorator
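# Illustrative use of the decorator (a sketch, not part of the original module;
# ``build_avatar_url`` is a hypothetical helper):
#   @cache_result()
#   def build_avatar_url(user, size):
#       return "..."   # expensive lookup/render keyed on (user, size)
# The first call computes and caches the value per (user, size); later calls hit
# the cache until invalidate_cache(user) is run, e.g. after uploading a new avatar.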
def invalidate_cache(user, size=None):
"""
Function to be called when saving or changing an user's avatars.
"""
sizes = set(settings.AVATAR_AUTO_GENERATE_SIZES)
if size is not None:
sizes.add(size)
for prefix in cached_funcs:
for size in sizes:
cache.delete(get_cache_key(user, size, prefix))
def get_default_avatar_url():
base_url = getattr(settings, 'STATIC_URL', None)
if not base_url:
base_url = getattr(settings, 'MEDIA_URL', '')
    # Don't use base_url if the default url starts with http:// or https://
if settings.AVATAR_DEFAULT_URL.startswith(('http://', 'https://')):
return settings.AVATAR_DEFAULT_URL
# We'll be nice and make sure there are no duplicated forward slashes
ends = base_url.endswith('/')
begins = settings.AVATAR_DEFAULT_URL.startswith('/')
if ends and begins:
base_url = base_url[:-1]
elif not ends and not begins:
return '%s/%s' % (base_url, settings.AVATAR_DEFAULT_URL)
return '%s%s' % (base_url, settings.AVATAR_DEFAULT_URL)
def get_primary_avatar(user, size=settings.AVATAR_DEFAULT_SIZE):
User = get_user_model()
if not isinstance(user, User):
try:
user = get_user(user)
except User.DoesNotExist:
return None
try:
# Order by -primary first; this means if a primary=True avatar exists
# it will be first, and then ordered by date uploaded, otherwise a
# primary=False avatar will be first. Exactly the fallback behavior we
# want.
avatar = user.avatar_set.order_by("-primary", "-date_uploaded")[0]
except IndexError:
avatar = None
if avatar:
if not avatar.thumbnail_exists(size):
avatar.create_thumbnail(size)
return avatar
| barbuza/django-avatar | avatar/util.py | Python | bsd-3-clause | 4,051 |
# Copyright 2019 the gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for cancellation example."""
import contextlib
import os
import signal
import socket
import subprocess
import unittest
_BINARY_DIR = os.path.realpath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
_SERVER_PATH = os.path.join(_BINARY_DIR, 'server')
_CLIENT_PATH = os.path.join(_BINARY_DIR, 'client')
@contextlib.contextmanager
def _get_port():
sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 0:
raise RuntimeError("Failed to set SO_REUSEPORT.")
sock.bind(('', 0))
try:
yield sock.getsockname()[1]
finally:
sock.close()
def _start_client(server_port,
desired_string,
ideal_distance,
interesting_distance=None):
interesting_distance_args = () if interesting_distance is None else (
'--show-inferior', interesting_distance)
return subprocess.Popen((_CLIENT_PATH, desired_string, '--server',
'localhost:{}'.format(server_port),
'--ideal-distance', str(ideal_distance)) +
interesting_distance_args)
class CancellationExampleTest(unittest.TestCase):
def test_successful_run(self):
with _get_port() as test_port:
server_process = subprocess.Popen(
(_SERVER_PATH, '--port', str(test_port)))
try:
client_process = _start_client(test_port, 'aa', 0)
client_return_code = client_process.wait()
self.assertEqual(0, client_return_code)
self.assertIsNone(server_process.poll())
finally:
server_process.kill()
server_process.wait()
def test_graceful_sigint(self):
with _get_port() as test_port:
server_process = subprocess.Popen(
(_SERVER_PATH, '--port', str(test_port)))
try:
client_process1 = _start_client(test_port, 'aaaaaaaaaa', 0)
client_process1.send_signal(signal.SIGINT)
client_process1.wait()
client_process2 = _start_client(test_port, 'aa', 0)
client_return_code = client_process2.wait()
self.assertEqual(0, client_return_code)
self.assertIsNone(server_process.poll())
finally:
server_process.kill()
server_process.wait()
if __name__ == '__main__':
unittest.main(verbosity=2)
| grpc/grpc-ios | native_src/examples/python/cancellation/test/_cancellation_example_test.py | Python | apache-2.0 | 3,200 |
# -*- coding:utf-8 -*-
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import vm
vm_ops = None
def test():
global vm_ops
vm_ops = vm.VM()
vm_ops.cancel_create_operation(res_type='vm', close=False)
vm_ops.cancel_create_operation(res_type='vm', close=True)
vm_ops.check_browser_console_log()
test_util.test_pass('Cancel VM Creation Test Successful')
def env_recover():
global vm_ops
vm_ops.close()
#Will be called only if exception happens in test().
def error_cleanup():
global vm_ops
try:
vm_ops.close()
except:
pass
| zstackio/zstack-woodpecker | integrationtest/vm/e2e_mini/vm/test_cancel_create_vm.py | Python | apache-2.0 | 627 |
# This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from blinker import Namespace
_signals = Namespace()
registered = _signals.signal('registered', """
Called once a user registers (either locally or joins through a provider). The
*sender* is the new user object.
""")
merged = _signals.signal('merged', """
Called when two users are merged. The *sender* is the main user while the merged
user (i.e. the one being deleted in the merge) is passed via the *source* kwarg.
""")
email_added = _signals.signal('email-added', """
Called when a new email address is added to a user. The *sender* is
the user object and the email address is passed in the `email` kwarg.
""")
profile_sidemenu = _signals.signal('profile-sidemenu', """
Expected to return `MenuItem` instances to be added to the user profile side menu.
The *sender* is the user whose profile is currently being displayed.
""")
preferences = _signals.signal('preferences', """
Expected to return a `ExtraUserPreferences` subclass which implements extra
preferences for the user preference page. The *sender* is the user for whom the
preferences page is being shown which might not be the currently logged-in
user!
""")
| belokop/indico_bare | indico/core/signals/users.py | Python | gpl-3.0 | 1,899 |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.exceptions import StackStormBaseException
class WorkflowDefinitionException(StackStormBaseException):
pass
| dennybaa/st2 | st2common/st2common/exceptions/workflow.py | Python | apache-2.0 | 908 |
# Yith Library Server is a password storage server.
# Copyright (C) 2012-2013 Yaco Sistemas
# Copyright (C) 2012-2013 Alejandro Blanco Escudero <[email protected]>
# Copyright (C) 2012-2013 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
import uuid
import requests
from pyramid.httpexceptions import HTTPBadRequest, HTTPFound, HTTPUnauthorized
from yithlibraryserver.compat import urlparse, url_encode
def oauth2_step1(request, auth_uri, client_id, redirect_url, scope):
state = str(uuid.uuid4())
request.session['state'] = state
params = {
'response_type': 'code',
'client_id': client_id,
'redirect_uri': redirect_url,
'scope': scope,
'state': state,
}
if 'next_url' in request.params:
request.session['next_url'] = request.params['next_url']
return HTTPFound(location=auth_uri + '?' + url_encode(params))
def oauth2_step2(request, token_uri, client_id, client_secret, redirect_url,
scope):
try:
code = request.params['code']
except KeyError:
return HTTPBadRequest('Missing required code')
try:
state = request.params['state']
except KeyError:
return HTTPBadRequest('Missing required state')
try:
my_state = request.session['state']
if state != my_state:
return HTTPUnauthorized('State parameter does not match internal '
'state. You may be a victim of CSRF')
else:
del request.session['state']
except KeyError:
return HTTPUnauthorized('Missing internal state. '
'You may be a victim of CSRF')
params = {
'grant_type': 'authorization_code',
'client_id': client_id,
'client_secret': client_secret,
'code': code,
'redirect_uri': redirect_url,
'scope': scope,
}
response = requests.post(token_uri, data=params)
if response.status_code != 200:
return HTTPUnauthorized(response.text)
response_json = response.json()
if response_json is None:
response_json = dict(urlparse.parse_qsl(response.text))
return response_json['access_token']
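# Illustrative end-to-end flow (a sketch with made-up endpoint URLs, not from this
# module): a "login" view calls
#   oauth2_step1(request, 'https://provider.example/auth', client_id, callback_url, 'email')
# which redirects the browser with a random ``state`` stored in the session; the
# provider then redirects back to a "callback" view that calls
#   oauth2_step2(request, 'https://provider.example/token', client_id, client_secret,
#                callback_url, 'email')
# to verify the state, exchange the code, and obtain the access token that
# get_user_info() then sends as a Bearer credential.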
def get_user_info(info_uri, access_token):
headers = {
'Authorization': 'Bearer %s' % access_token,
}
response = requests.get(info_uri, headers=headers)
if response.status_code != 200:
return HTTPUnauthorized(response.text)
return response.json()
| lorenzogil/yith-library-server | yithlibraryserver/oauth2/client.py | Python | agpl-3.0 | 3,226 |
"""
Transitland Python Client.
This library implements an interface for the Transitland Feed Registry, Transitland Datastore, and working with Onestop IDs:
https://github.com/transit-land/onestop-id
Modules:
registry - Feed Registry reader
entities - Transitland entities
geom - Geometry utilities
util - Other utilities
errors - Exceptions
bootstrap - Create Transitland Feed from GTFS URL
fetch - Feed aggregator
"""
__version__ = 'dev' | transitland/transitland-python-client | transitland/__init__.py | Python | mit | 463 |
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Publishes a set of extensions to the webstore.
Given an unpacked extension, compresses and sends to the Chrome webstore.
Releasing to the webstore should involve the following manual steps before
running this script:
1. clean the output directory.
2. make a release build.
3. run manual smoke tests.
4. run automated tests.
'''
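# Illustrative invocation (a sketch; the secret and path are placeholders, the
# extension id below is the ChromeVox id already defined in this script):
#   publish_webstore_extension.py --client_secret <client_secret> \
#       kgejglhpjiefppelpmljglcjbhoiplfn:/path/to/unpacked/chromevox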
import webstore_extension_util
import generate_manifest
import json
import optparse
import os
import sys
import tempfile
from zipfile import ZipFile
_CHROMEVOX_ID = 'kgejglhpjiefppelpmljglcjbhoiplfn'
_SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
_CHROME_SOURCE_DIR = os.path.normpath(
os.path.join(
_SCRIPT_DIR, *[os.path.pardir] * 6))
sys.path.insert(
0, os.path.join(_CHROME_SOURCE_DIR, 'build', 'util'))
import version
# A list of files (or directories) to exclude from the webstore build.
EXCLUDE_PATHS = [
'manifest.json',
'manifest_guest.json',
]
def CreateOptionParser():
parser = optparse.OptionParser(description=__doc__)
parser.usage = (
'%prog --client_secret <client_secret> extension_id:extension_path ...')
parser.add_option('-c', '--client_secret', dest='client_secret',
action='store', metavar='CLIENT_SECRET')
parser.add_option('-p', '--publish', action='store_true',
help='publish the extension(s)')
return parser
def GetVersion():
'''Returns the chrome version string.'''
filename = os.path.join(_CHROME_SOURCE_DIR, 'chrome', 'VERSION')
values = version.fetch_values([filename])
return version.subst_template('@MAJOR@.@MINOR@.@BUILD@.@PATCH@', values)
def MakeChromeVoxManifest():
'''Create a manifest for the webstore.
Returns:
Temporary file with generated manifest.
'''
new_file = tempfile.NamedTemporaryFile(mode='w+a', bufsize=0)
in_file_name = os.path.join(_SCRIPT_DIR, os.path.pardir,
'manifest.json.jinja2')
context = {
'is_guest_manifest': '0',
'is_js_compressed': '1',
'is_webstore': '1',
'set_version': GetVersion()
}
generate_manifest.processJinjaTemplate(in_file_name, new_file.name, context)
return new_file
def RunInteractivePrompt(client_secret, output_path):
input = ''
while True:
print 'u upload'
print 'g get upload status'
print 't publish trusted tester'
print 'p publish public'
print 'q quit'
input = raw_input('Please select an option: ')
input = input.strip()
if input == 'g':
print ('Upload status: %s' %
webstore_extension_util.GetUploadStatus(client_secret).read())
elif input == 'u':
print ('Uploaded with status: %s' %
webstore_extension_util.PostUpload(output_path.name, client_secret))
elif input == 't':
print ('Published to trusted testers with status: %s' %
webstore_extension_util.PostPublishTrustedTesters(
client_secret).read())
elif input == 'p':
print ('Published to public with status: %s' %
webstore_extension_util.PostPublish(client_secret).read())
elif input == 'q':
sys.exit()
else:
print 'Unrecognized option: %s' % input
def main():
options, args = CreateOptionParser().parse_args()
if len(args) < 1 or not options.client_secret:
print 'Expected at least one argument and --client_secret flag'
print str(args)
sys.exit(1)
client_secret = options.client_secret
for extension in args:
webstore_extension_util.g_app_id, extension_path = extension.split(':')
output_path = tempfile.NamedTemporaryFile()
extension_path = os.path.expanduser(extension_path)
is_chromevox = webstore_extension_util.g_app_id == _CHROMEVOX_ID
with ZipFile(output_path, 'w') as zip:
for root, dirs, files in os.walk(extension_path):
rel_path = os.path.join(os.path.relpath(root, extension_path), '')
if is_chromevox and rel_path in EXCLUDE_PATHS:
continue
for extension_file in files:
if is_chromevox and extension_file in EXCLUDE_PATHS:
continue
zip.write(os.path.join(root, extension_file),
os.path.join(rel_path, extension_file))
if is_chromevox:
manifest_file = MakeChromeVoxManifest()
zip.write(manifest_file.name, 'manifest.json')
print 'Created extension zip file in %s' % output_path.name
print 'Please run manual smoke tests before proceeding.'
if options.publish:
print('Uploading...%s' %
webstore_extension_util.PostUpload(output_path.name, client_secret))
print('publishing...%s' %
webstore_extension_util.PostPublish(client_secret).read())
else:
RunInteractivePrompt(client_secret, output_path)
if __name__ == '__main__':
main()
| danakj/chromium | chrome/browser/resources/chromeos/chromevox/tools/publish_webstore_extension.py | Python | bsd-3-clause | 4,983 |
import receipt
import account_voucher
import receiptbook
import receipt_pay
import report
| pronexo-odoo/odoo-argentina | l10n_ar_receipt/__init__.py | Python | agpl-3.0 | 97 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('bookqa',
url(r'^$', 'views.qa_list', {'category_id':"0",'page_num':"0"}),
url(r'^qa_list/(\d+)/(\d+)/$', 'views.qa_list', name='qa_list'),
url(r'^topic_detail/(\d+)/(\d+)/$', 'views.topic_detail', name='topic_detail'),
url(r'^comment_detail/(\d+)/(\d+)/$', 'views.comment_detail', name='comment_detail'),
url(r'^qa_add_question/$', 'views.qa_add_question', name='qa_add_question'),
url(r'^tag_list/(\d+)/$', 'views.tag_list', name='tag_list'),
#url(r'^sharebook/$', 'views.sharebook', name='sharebook'),
# Examples:
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
| jintianfree/bookbook | bookqa/urls.py | Python | gpl-3.0 | 1,015 |
# Copyright 2017, Google LLC All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
def add_single_feature_methods(cls):
"""Custom decorator intended for :class:`~vision.helpers.VisionHelpers`.
    This decorator adds a `{feature}` method for every feature
defined on the Feature enum.
"""
# Sanity check: This only makes sense if we are building the GAPIC
# subclass and have Feature enums already attached.
if not hasattr(cls, "Feature"):
return cls
# Add each single-feature method to the class.
for feature in cls.Feature.Type:
# Sanity check: Do not make a method for the falsy feature.
if feature.name == "TYPE_UNSPECIFIED":
continue
# Assign the appropriate metadata to the function.
detect = _create_single_feature_method(feature)
# Assign a qualified name to the function, and perform module
# replacement on the docstring.
detect.__qualname__ = "{cls}.{name}".format(
cls=cls.__name__, name=detect.__name__
)
detect.__doc__ = detect.__doc__.format(module=cls.__module__)
# Place the function on the class being created.
setattr(cls, detect.__name__, detect)
# Done; return the class.
return cls
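# Illustrative effect of the decorator (a sketch, not from the original module):
# once applied to the GAPIC client class, every Feature.Type member gains a
# helper method, e.g.
#   client = vision.ImageAnnotatorClient()
#   response = client.face_detection(image=image, max_results=5)
# which builds a single-feature AnnotateImageRequest and calls annotate_image().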
def _create_single_feature_method(feature):
"""Return a function that will detect a single feature.
Args:
feature (enum): A specific feature defined as a member of
:class:`~Feature.Type`.
Returns:
function: A helper function to detect just that feature.
"""
# Define the function properties.
fx_name = feature.name.lower()
if "detection" in fx_name:
fx_doc = "Perform {0}.".format(fx_name.replace("_", " "))
else:
fx_doc = "Return {desc} information.".format(desc=fx_name.replace("_", " "))
# Provide a complete docstring with argument and return value
# information.
fx_doc += """
Args:
image (:class:`~.{module}.Image`): The image to analyze.
max_results (int):
Number of results to return, does not apply for
TEXT_DETECTION, DOCUMENT_TEXT_DETECTION, or CROP_HINTS.
retry (int): Number of retries to do before giving up.
timeout (int): Number of seconds before timing out.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
kwargs (dict): Additional properties to be set on the
:class:`~.{module}.types.AnnotateImageRequest`.
Returns:
:class:`~.{module}.AnnotateImageResponse`: The API response.
"""
# Get the actual feature value to send.
feature_value = {"type_": feature}
# Define the function to be returned.
def inner(
self,
image,
*,
max_results=None,
retry=None,
timeout=None,
metadata=(),
**kwargs
):
"""Return a single feature annotation for the given image.
Intended for use with functools.partial, to create the particular
single-feature methods.
"""
copied_features = feature_value.copy()
if max_results is not None:
copied_features["max_results"] = max_results
request = dict(image=image, features=[copied_features], **kwargs)
response = self.annotate_image(
request, retry=retry, timeout=timeout, metadata=metadata
)
return response
# Set the appropriate function metadata.
inner.__name__ = fx_name
inner.__doc__ = fx_doc
# Return the final function.
return inner
| googleapis/python-vision | google/cloud/vision_helpers/decorators.py | Python | apache-2.0 | 4,181 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
#
# FreeType high-level python API - Copyright 2011-2012 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
#
# -----------------------------------------------------------------------------
"""
An enumeration used to specify character sets supported by charmaps. Used in
the FT_Select_Charmap API function.
FT_ENCODING_NONE
The encoding value 0 is reserved.
FT_ENCODING_UNICODE
Corresponds to the Unicode character set. This value covers all versions of
the Unicode repertoire, including ASCII and Latin-1. Most fonts include a
Unicode charmap, but not all of them.
For example, if you want to access Unicode value U+1F028 (and the font
contains it), use value 0x1F028 as the input value for FT_Get_Char_Index.
FT_ENCODING_MS_SYMBOL
Corresponds to the Microsoft Symbol encoding, used to encode mathematical
symbols in the 32..255 character code range. For more information, see
'http://www.ceviz.net/symbol.htm'.
FT_ENCODING_SJIS
  Corresponds to Japanese SJIS encoding. More info at
'http://langsupport.japanreference.com/encoding.shtml'. See note on
multi-byte encodings below.
FT_ENCODING_GB2312
  Corresponds to an encoding system for Simplified Chinese as used in
mainland China.
FT_ENCODING_BIG5
Corresponds to an encoding system for Traditional Chinese as used in Taiwan
and Hong Kong.
FT_ENCODING_WANSUNG
Corresponds to the Korean encoding system known as Wansung. For more
information see 'http://www.microsoft.com/typography/unicode/949.txt'.
FT_ENCODING_JOHAB
The Korean standard character set (KS C 5601-1992), which corresponds to MS
Windows code page 1361. This character set includes all possible Hangeul
character combinations.
FT_ENCODING_ADOBE_LATIN_1
Corresponds to a Latin-1 encoding as defined in a Type 1 PostScript font. It
is limited to 256 character codes.
FT_ENCODING_ADOBE_STANDARD
Corresponds to the Adobe Standard encoding, as found in Type 1, CFF, and
OpenType/CFF fonts. It is limited to 256 character codes.
FT_ENCODING_ADOBE_EXPERT
Corresponds to the Adobe Expert encoding, as found in Type 1, CFF, and
OpenType/CFF fonts. It is limited to 256 character codes.
FT_ENCODING_ADOBE_CUSTOM
Corresponds to a custom encoding, as found in Type 1, CFF, and OpenType/CFF
fonts. It is limited to 256 character codes.
FT_ENCODING_APPLE_ROMAN
Corresponds to the 8-bit Apple roman encoding. Many TrueType and OpenType
fonts contain a charmap for this encoding, since older versions of Mac OS are
able to use it.
FT_ENCODING_OLD_LATIN_2
This value is deprecated and was never used nor reported by FreeType. Don't
use or test for it.
"""
def _FT_ENC_TAG(a,b,c,d):
return ( ord(a) << 24 | ord(b) << 16 | ord(c) << 8 | ord(d) )
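# Illustrative check (a sketch, not part of the original module):
#   _FT_ENC_TAG('u', 'n', 'i', 'c') == 0x756E6963   # the 'unic' tag for FT_ENCODING_UNICODE
# i.e. each character fills one byte of the 32-bit tag, most significant byte first.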
FT_ENCODINGS = {'FT_ENCODING_NONE' : _FT_ENC_TAG('\0','\0','\0','\0'),
'FT_ENCODING_MS_SYMBOL' : _FT_ENC_TAG( 's','y','m','b' ),
'FT_ENCODING_UNICODE' : _FT_ENC_TAG( 'u','n','i','c' ),
'FT_ENCODING_SJIS' : _FT_ENC_TAG( 's','j','i','s' ),
'FT_ENCODING_GB2312' : _FT_ENC_TAG( 'g','b',' ',' ' ),
'FT_ENCODING_BIG5' : _FT_ENC_TAG( 'b','i','g','5' ),
'FT_ENCODING_WANSUNG' : _FT_ENC_TAG( 'w','a','n','s' ),
'FT_ENCODING_JOHAB' : _FT_ENC_TAG( 'j','o','h','a' ),
'FT_ENCODING_ADOBE_STANDARD' : _FT_ENC_TAG( 'A','D','O','B' ),
'FT_ENCODING_ADOBE_EXPERT' : _FT_ENC_TAG( 'A','D','B','E' ),
'FT_ENCODING_ADOBE_CUSTOM' : _FT_ENC_TAG( 'A','D','B','C' ),
'FT_ENCODING_ADOBE_LATIN1' : _FT_ENC_TAG( 'l','a','t','1' ),
'FT_ENCODING_OLD_LATIN2' : _FT_ENC_TAG( 'l','a','t','2' ),
'FT_ENCODING_APPLE_ROMAN' : _FT_ENC_TAG( 'a','r','m','n' ) }
globals().update(FT_ENCODINGS)
| duyuan11/glumpy | glumpy/ext/freetype/ft_enums/ft_encodings.py | Python | bsd-3-clause | 4,049 |
import doctest
import pytest
from insights.parsers import ParseException, SkipException
from insights.parsers import sctp
from insights.parsers.sctp import SCTPEps
from insights.parsers.sctp import SCTPAsc, SCTPAsc7
from insights.parsers.sctp import SCTPSnmp
from insights.tests import context_wrap
SCTP_EPS_DETAILS = """
ENDPT SOCK STY SST HBKT LPORT UID INODE LADDRS
ffff88017e0a0200 ffff880299f7fa00 2 10 29 11165 200 299689357 10.0.0.102 10.0.0.70
ffff880612e81c00 ffff8803c28a1b00 2 10 30 11166 200 273361203 10.0.0.102 10.0.0.70 172.31.1.2
ffff88061fba9800 ffff88061f8a3180 2 10 31 11167 200 273361145 10.0.0.102 10.0.0.70
ffff88031e6f1a00 ffff88031dbdb180 2 10 32 11168 200 273365974 10.0.0.102 10.0.0.70 192.168.11.2
ffff88031e6f1a00 ffff88031dbdb180 2 10 32 11168 200 273365974 192.168.11.12
""".strip()
SCTP_EPS_DETAILS_NO = """
ENDPT SOCK STY SST LPORT UID INODE LADDRS
ffff88017e0a0200 ffff880299f7fa00 2 10 11165 200 299689357 10.0.0.102 10.0.0.70
ffff880612e81c00 ffff8803c28a1b00 2 10 11166 200 273361203 10.0.0.102 10.0.0.70 172.31.1.2
ffff88061fba9800 ffff88061f8a3180 2 10 11167 200 273361145 10.0.0.102 10.0.0.70
ffff88031e6f1a00 ffff88031dbdb180 2 10 11168 200 273365974 10.0.0.102 10.0.0.70 192.168.11.2
""".strip()
SCTP_EPS_DETAILS_DOC = """
ENDPT SOCK STY SST HBKT LPORT UID INODE LADDRS
ffff88017e0a0200 ffff880299f7fa00 2 10 29 11165 200 299689357 10.0.0.102 10.0.0.70
ffff880612e81c00 ffff8803c28a1b00 2 10 30 11166 200 273361203 10.0.0.102 10.0.0.70 172.31.1.2
""".strip()
SCTP_EPS_DETAILS_NO_2 = """
""".strip()
SCTP_ASSOC = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff88045ac7e000 ffff88062077aa00 2 1 4 1205 963 0 0 200 273361167 11567 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff88061fbf2000 ffff88060ff92500 2 1 4 1460 942 0 0 200 273360669 11566 11167 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff8803217b9000 ffff8801c6321580 2 1 4 1675 977 0 0 200 273361369 11565 11168 10.0.0.102 10.0.0.70 192.168.11.2 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff8803db908000 ffff88061e4a00c0 2 1 4 2229 967 0 0 200 273361177 12067 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.110 10.0.0.78 1000 2 2 10 0 0 0
ffff88062258f000 ffff88060fffaa40 2 1 4 2485 953 0 0 200 273360681 12066 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.103 10.0.0.71 1000 2 2 10 0 0 0
ffff8801ce686000 ffff8801c7083ac0 2 1 4 2741 982 0 0 200 273361381 12065 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.112 10.0.0.80 1000 2 2 10 0 0 0
ffff88031e1f4000 ffff8801c6fd9b00 2 1 4 7092 1005 0 0 200 273366011 11567 11167 10.0.0.102 10.0.0.70 <-> *10.0.0.111 10.0.0.79 1000 2 2 10 0 0 0
""".strip()
SCTP_ASSOC_2 = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff8804239ca000 ffff8804238c6040 2 1 4 3091 1 0 0 500 90293 37379 3868 10.0.200.114 10.0.201.114 2010:0010:0000:0200:0000:0000:0000:0114 2010:0010:0000:0201:0000:0000:0000:0114 <-> *10.0.100.94 10.0.101.94 2010:0010:0000:0100:0000:0000:0000:0094 2010:0010:0000:0101:0000:0000:0000:0094 1000 5 5 10 0 0 0
""".strip()
SCTP_ASSOC_DOC = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff88045ac7e000 ffff88062077aa00 2 1 4 1205 963 0 0 200 273361167 11567 11166 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
ffff88061fbf2000 ffff88060ff92500 2 1 4 1460 942 0 0 200 273360669 11566 11167 10.0.0.102 10.0.0.70 <-> *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
""".strip()
SCTP_ASSOC_NO = """
""".strip()
SCTP_ASSOC_NO_2 = """
SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC
ffff88045ac7e000 ffff88062077aa00 2 1 4 1205 963 0 0 200 273361167 11567 11166 10.0.0.102 10.0.0.70 *10.0.0.109 10.0.0.77 1000 2 2 10 0 0 0
""".strip()
SCTP_SNMP = """
SctpCurrEstab 5380
SctpActiveEstabs 12749
SctpPassiveEstabs 55
SctpAborteds 2142
SctpShutdowns 5295
SctpOutOfBlues 36786
SctpChecksumErrors 0
SctpOutCtrlChunks 1051492
SctpOutOrderChunks 17109
SctpOutUnorderChunks 0
SctpInCtrlChunks 1018398
SctpInOrderChunks 17033
SctpInUnorderChunks 0
SctpFragUsrMsgs 0
SctpReasmUsrMsgs 0
SctpOutSCTPPacks 1068678
""".strip()
SCTP_SNMP_NO_1 = """
""".strip()
SCTP_SNMP_NO_2 = """
SctpCurrEstab 5380 SctpActiveEstabs 12749 SctpPassiveEstabs 55 SctpAborteds 2142 SctpShutdowns 5295 SctpOutOfBlues 36786 SctpChecksumErrors 0 SctpOutCtrlChunks 1051492 SctpOutOrderChunks 17109
""".strip()
SCTP_SNMP_DOC = """
SctpCurrEstab 5380
SctpActiveEstabs 12749
SctpPassiveEstabs 55
SctpAborteds 2142
SctpShutdowns 5295
SctpOutOfBlues 36786
SctpChecksumErrors 0
SctpOutCtrlChunks 1051492
"""
SCTP_ASC_7 = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC wmema wmemq sndbuf rcvbuf
ffff8805d36b3000 ffff880f8911f380 0 10 3 0 12754 0 0 0 496595 3868 3868 10.131.222.5 <-> *10.131.160.81 10.131.176.81 30000 17 10 10 0 0 0 11 12 1000000 2000000
ffff8805f17e1000 ffff881004aff380 0 10 3 0 12728 0 0 0 532396 3868 3868 10.131.222.3 <-> *10.131.160.81 10.131.176.81 30000 17 10 10 0 0 0 13 14 3000000 4000000
ffff8805f17e0000 ffff880f8a117380 0 10 3 0 12727 0 0 0 582963 3868 3868 10.131.222.8 <-> *10.131.160.81 10.131.176.81 30000 17 10 10 0 0 0 15 16 5000000 6000000
ffff88081d0bc000 ffff880f6fa66300 0 10 3 0 12726 0 0 0 582588 3868 3868 10.131.222.2 <-> *10.131.160.81 10.131.176.81 30000 17 10 10 0 0 0 17 18 7000000 8000000
ffff88081d0f5000 ffff880f00a99600 0 10 3 0 12725 0 0 0 578082 3868 3868 10.131.222.1 <-> *10.131.160.81 10.131.176.81 30000 17 10 10 0 0 0 19 20 9000000 10000000
""".strip()
SCTP_ASSOC_RHEL_7_DOC = """
ASSOC SOCK STY SST ST HBKT ASSOC-ID TX_QUEUE RX_QUEUE UID INODE LPORT RPORT LADDRS <-> RADDRS HBINT INS OUTS MAXRT T1X T2X RTXC wmema wmemq sndbuf rcvbuf
ffff8805d36b3000 ffff880f8911f380 0 10 3 0 12754 0 0 0 496595 3868 3868 10.131.222.5 <-> *10.131.160.81 10.131.176.81 30000 17 10 10 0 0 0 11 12 1000000 2000000
ffff8805f17e1000 ffff881004aff380 0 10 3 0 12728 0 0 0 532396 3868 3868 10.131.222.3 <-> *10.131.160.81 10.131.176.81 30000 17 10 10 0 0 0 13 14 3000000 4000000
""".strip()
def test_sctp_eps():
sctp_info = SCTPEps(context_wrap(SCTP_EPS_DETAILS))
assert sorted(sctp_info.sctp_local_ports) == sorted(['11165', '11166', '11167', '11168'])
assert sorted(sctp_info.sctp_local_ips) == sorted(['10.0.0.102', '10.0.0.70', '172.31.1.2', '192.168.11.2', '192.168.11.12'])
assert sctp_info.sctp_eps_ips == {'ffff88017e0a0200': ['10.0.0.102', '10.0.0.70'],
'ffff880612e81c00': ['10.0.0.102', '10.0.0.70', '172.31.1.2'],
'ffff88061fba9800': ['10.0.0.102', '10.0.0.70'],
'ffff88031e6f1a00': ['10.0.0.102', '10.0.0.70', '192.168.11.2', '192.168.11.12']}
assert len(sctp_info.search(local_port='11165')) == 1
def test_sctp_asc():
sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC))
assert sorted(sctp_asc.sctp_local_ports) == sorted(['11567', '11566', '11565', '12067', '12065', '12066'])
assert sorted(sctp_asc.search(local_port='11565')) == sorted([{'init_chunks_send': '0', 'uid': '200', 'shutdown_chunks_send': '0', 'max_outstream': '2', 'tx_que': '0', 'inode': '273361369', 'hrtbt_intrvl': '1000', 'sk_type': '2', 'remote_addr': ['*10.0.0.109', '10.0.0.77'], 'data_chunks_retrans': '0', 'local_addr': ['10.0.0.102', '10.0.0.70', '192.168.11.2'], 'asc_id': '977', 'max_instream': '2', 'remote_port': '11168', 'asc_state': '4', 'max_retrans_atmpt': '10', 'sk_state': '1', 'socket': 'ffff8801c6321580', 'asc_struct': 'ffff8803217b9000', 'local_port': '11565', 'hash_bkt': '1675', 'rx_que': '0'}])
assert len(sctp_asc.search(local_port='11567')) == 2
assert sorted(sctp_asc.sctp_local_ips) == sorted(['10.0.0.102', '10.0.0.70', '192.168.11.2'])
assert sorted(sctp_asc.sctp_remote_ips) == sorted(['*10.0.0.109', '10.0.0.77', '*10.0.0.110', '10.0.0.78', '*10.0.0.103', '10.0.0.71', '*10.0.0.112', '10.0.0.80', '*10.0.0.111', '10.0.0.79'])
sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC_2))
assert sorted(sctp_asc.sctp_local_ips) == sorted(['10.0.200.114', '10.0.201.114', '2010:0010:0000:0200:0000:0000:0000:0114', '2010:0010:0000:0201:0000:0000:0000:0114'])
assert sorted(sctp_asc.sctp_remote_ips) == sorted(['*10.0.100.94', '10.0.101.94', '2010:0010:0000:0100:0000:0000:0000:0094', '2010:0010:0000:0101:0000:0000:0000:0094'])
sctp_asc = SCTPAsc7(context_wrap(SCTP_ASC_7))
assert sctp_asc.sctp_local_ips == sorted(['10.131.222.5', '10.131.222.3', '10.131.222.8', '10.131.222.2', '10.131.222.1'])
assert sctp_asc.data[0]['rcvbuf'] == '2000000'
assert sctp_asc.data[1]['wmemq'] == '14'
assert sctp_asc.data[1]['rcvbuf'] == '4000000'
def test_sctp_eps_exceptions():
with pytest.raises(ParseException) as exc:
sctp_obj = SCTPEps(context_wrap(SCTP_EPS_DETAILS_NO))
assert sctp_obj is None # Just added to remove flake8 warnings
assert 'The following line is not compatible with this parser' in str(exc)
with pytest.raises(SkipException) as exc:
sctp_obj = SCTPEps(context_wrap(SCTP_EPS_DETAILS_NO_2))
assert sctp_obj is None # Just added to remove flake8 warnings
assert 'No Contents' in str(exc)
def test_sctp_asc_exceptions():
with pytest.raises(ParseException) as exc:
sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC_NO_2))
assert sctp_asc is None
assert 'The following line is not compatible with this parser' in str(exc)
with pytest.raises(SkipException) as exc:
sctp_asc = SCTPAsc(context_wrap(SCTP_ASSOC_NO))
assert sctp_asc is None
assert 'No Contents' in str(exc)
def test_sctp_doc_examples():
env = {
'sctp_info': SCTPEps(context_wrap(SCTP_EPS_DETAILS_DOC)),
'sctp_asc': SCTPAsc(context_wrap(SCTP_ASSOC_DOC)),
'sctp_asc_7': SCTPAsc7(context_wrap(SCTP_ASSOC_RHEL_7_DOC)),
'sctp_snmp': SCTPSnmp(context_wrap(SCTP_SNMP_DOC))
}
failed, total = doctest.testmod(sctp, globs=env)
assert failed == 0
def test_sctp_snmp():
sctp_snmp = SCTPSnmp(context_wrap(SCTP_SNMP))
assert sorted(sctp_snmp) == sorted({'SctpCurrEstab': 5380, 'SctpActiveEstabs': 12749, 'SctpPassiveEstabs': 55, 'SctpAborteds': 2142, 'SctpShutdowns': 5295, 'SctpOutOfBlues': 36786, 'SctpChecksumErrors': 0, 'SctpOutCtrlChunks': 1051492, 'SctpOutOrderChunks': 17109, 'SctpOutUnorderChunks': 0, 'SctpInCtrlChunks': 1018398, 'SctpInOrderChunks': 17033, 'SctpInUnorderChunks': 0, 'SctpFragUsrMsgs': 0, 'SctpReasmUsrMsgs': 0, 'SctpOutSCTPPacks': 1068678})
assert sctp_snmp.get('SctpCurrEstab') == 5380
assert sctp_snmp.get('SctpReasmUsrMsgs') == 0
assert sctp_snmp.get('something_else') is None
def test_sctp_snmp_exceptions():
with pytest.raises(SkipException) as exc:
sctp_snmp = SCTPSnmp(context_wrap(SCTP_SNMP_NO_1))
assert sctp_snmp is None
assert 'No Contents' in str(exc)
with pytest.raises(ParseException) as exc:
sctp_snmp = SCTPSnmp(context_wrap(SCTP_SNMP_NO_2))
assert sctp_snmp is None
assert 'Contents are not compatible to this parser' in str(exc)
| RedHatInsights/insights-core | insights/parsers/tests/test_sctp.py | Python | apache-2.0 | 13,044 |
##########################################################################
#
# Copyright (c) 2019, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import GafferImageUI # For ImageReaderPathPreview
import GafferSceneUI # For SceneReaderPathPreview
try :
import GafferCortexUI
except ImportError :
pass | lucienfostier/gaffer | startup/view/previews.py | Python | bsd-3-clause | 1,959 |
#! /usr/bin/python2.6
"""
"PYSTONE" Benchmark Program
Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes)
Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013.
Translated from ADA to C by Rick Richardson.
Every method to preserve ADA-likeness has been used,
at the expense of C-ness.
Translated from C to Python by Guido van Rossum.
Version History:
Version 1.1 corrects two bugs in version 1.0:
First, it leaked memory: in Proc1(), NextRecord ends
up having a pointer to itself. I have corrected this
by zapping NextRecord.PtrComp at the end of Proc1().
Second, Proc3() used the operator != to compare a
record to None. This is rather inefficient and not
true to the intention of the original benchmark (where
a pointer comparison to None is intended; the !=
operator attempts to find a method __cmp__ to do value
comparison of the record). Version 1.1 runs 5-10
percent faster than version 1.0, so benchmark figures
of different versions can't be compared directly.
"""
LOOPS = 50000
from time import clock
__version__ = "1.1"
[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6)
class Record:
def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0,
IntComp = 0, StringComp = 0):
self.PtrComp = PtrComp
self.Discr = Discr
self.EnumComp = EnumComp
self.IntComp = IntComp
self.StringComp = StringComp
def copy(self):
return Record(self.PtrComp, self.Discr, self.EnumComp,
self.IntComp, self.StringComp)
TRUE = 1
FALSE = 0
def main(loops=LOOPS):
benchtime, stones = pystones(loops)
print "Pystone(%s) time for %d passes = %g" % \
(__version__, loops, benchtime)
print "This machine benchmarks at %g pystones/second" % stones
def pystones(loops=LOOPS):
return Proc0(loops)
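# Illustrative programmatic use (sketch, not part of the benchmark itself):
#   from test.pystone import pystones
#   benchtime, stones = pystones(loops=100000)   # loop count is an example value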
IntGlob = 0
BoolGlob = FALSE
Char1Glob = '\0'
Char2Glob = '\0'
Array1Glob = [0]*51
Array2Glob = map(lambda x: x[:], [Array1Glob]*51)
PtrGlb = None
PtrGlbNext = None
def Proc0(loops=LOOPS):
global IntGlob
global BoolGlob
global Char1Glob
global Char2Glob
global Array1Glob
global Array2Glob
global PtrGlb
global PtrGlbNext
starttime = clock()
for i in range(loops):
pass
nulltime = clock() - starttime
PtrGlbNext = Record()
PtrGlb = Record()
PtrGlb.PtrComp = PtrGlbNext
PtrGlb.Discr = Ident1
PtrGlb.EnumComp = Ident3
PtrGlb.IntComp = 40
PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING"
String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING"
Array2Glob[8][7] = 10
starttime = clock()
for i in range(loops):
Proc5()
Proc4()
IntLoc1 = 2
IntLoc2 = 3
String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING"
EnumLoc = Ident2
BoolGlob = not Func2(String1Loc, String2Loc)
while IntLoc1 < IntLoc2:
IntLoc3 = 5 * IntLoc1 - IntLoc2
IntLoc3 = Proc7(IntLoc1, IntLoc2)
IntLoc1 = IntLoc1 + 1
Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3)
PtrGlb = Proc1(PtrGlb)
CharIndex = 'A'
while CharIndex <= Char2Glob:
if EnumLoc == Func1(CharIndex, 'C'):
EnumLoc = Proc6(Ident1)
CharIndex = chr(ord(CharIndex)+1)
IntLoc3 = IntLoc2 * IntLoc1
IntLoc2 = IntLoc3 / IntLoc1
IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1
IntLoc1 = Proc2(IntLoc1)
benchtime = clock() - starttime - nulltime
if benchtime == 0.0:
loopsPerBenchtime = 0.0
else:
loopsPerBenchtime = (loops / benchtime)
return benchtime, loopsPerBenchtime
def Proc1(PtrParIn):
PtrParIn.PtrComp = NextRecord = PtrGlb.copy()
PtrParIn.IntComp = 5
NextRecord.IntComp = PtrParIn.IntComp
NextRecord.PtrComp = PtrParIn.PtrComp
NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
if NextRecord.Discr == Ident1:
NextRecord.IntComp = 6
NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
NextRecord.PtrComp = PtrGlb.PtrComp
NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
else:
PtrParIn = NextRecord.copy()
NextRecord.PtrComp = None
return PtrParIn
def Proc2(IntParIO):
IntLoc = IntParIO + 10
while 1:
if Char1Glob == 'A':
IntLoc = IntLoc - 1
IntParIO = IntLoc - IntGlob
EnumLoc = Ident1
if EnumLoc == Ident1:
break
return IntParIO
def Proc3(PtrParOut):
global IntGlob
if PtrGlb is not None:
PtrParOut = PtrGlb.PtrComp
else:
IntGlob = 100
PtrGlb.IntComp = Proc7(10, IntGlob)
return PtrParOut
def Proc4():
global Char2Glob
BoolLoc = Char1Glob == 'A'
BoolLoc = BoolLoc or BoolGlob
Char2Glob = 'B'
def Proc5():
global Char1Glob
global BoolGlob
Char1Glob = 'A'
BoolGlob = FALSE
def Proc6(EnumParIn):
EnumParOut = EnumParIn
if not Func3(EnumParIn):
EnumParOut = Ident4
if EnumParIn == Ident1:
EnumParOut = Ident1
elif EnumParIn == Ident2:
if IntGlob > 100:
EnumParOut = Ident1
else:
EnumParOut = Ident4
elif EnumParIn == Ident3:
EnumParOut = Ident2
elif EnumParIn == Ident4:
pass
elif EnumParIn == Ident5:
EnumParOut = Ident3
return EnumParOut
def Proc7(IntParI1, IntParI2):
IntLoc = IntParI1 + 2
IntParOut = IntParI2 + IntLoc
return IntParOut
def Proc8(Array1Par, Array2Par, IntParI1, IntParI2):
global IntGlob
IntLoc = IntParI1 + 5
Array1Par[IntLoc] = IntParI2
Array1Par[IntLoc+1] = Array1Par[IntLoc]
Array1Par[IntLoc+30] = IntLoc
for IntIndex in range(IntLoc, IntLoc+2):
Array2Par[IntLoc][IntIndex] = IntLoc
Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1
Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc]
IntGlob = 5
def Func1(CharPar1, CharPar2):
CharLoc1 = CharPar1
CharLoc2 = CharLoc1
if CharLoc2 != CharPar2:
return Ident1
else:
return Ident2
def Func2(StrParI1, StrParI2):
IntLoc = 1
while IntLoc <= 1:
if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
CharLoc = 'A'
IntLoc = IntLoc + 1
if CharLoc >= 'W' and CharLoc <= 'Z':
IntLoc = 7
if CharLoc == 'X':
return TRUE
else:
if StrParI1 > StrParI2:
IntLoc = IntLoc + 7
return TRUE
else:
return FALSE
def Func3(EnumParIn):
EnumLoc = EnumParIn
if EnumLoc == Ident3: return TRUE
return FALSE
if __name__ == '__main__':
import sys
def error(msg):
print >>sys.stderr, msg,
print >>sys.stderr, "usage: %s [number_of_loops]" % sys.argv[0]
sys.exit(100)
nargs = len(sys.argv) - 1
if nargs > 1:
error("%d arguments are too many;" % nargs)
elif nargs == 1:
try: loops = int(sys.argv[1])
except ValueError:
error("Invalid argument %r;" % sys.argv[1])
else:
loops = LOOPS
main(loops)
| 2ndy/RaspIM | usr/lib/python2.6/test/pystone.py | Python | gpl-2.0 | 7,366 |
#!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# +------------------------------------------------------------------+
# | ____ _ _ __ __ _ __ |
# | / ___| |__ ___ ___| | __ | \/ | |/ / |
# | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / |
# | | |___| | | | __/ (__| < | | | | . \ |
# | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ |
# | |
# | Copyright Mathias Kettner 2014 [email protected] |
# +------------------------------------------------------------------+
#
# This file is part of Check_MK.
# The official homepage is at http://mathias-kettner.de/check_mk.
#
# check_mk is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation in version 2. check_mk is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; with-
# out even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU General Public License for more de-
# tails. You should have received a copy of the GNU General Public
# License along with GNU Make; see the file COPYING. If not, write
# to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
# Boston, MA 02110-1301 USA.
mkeventd_enabled = False
mkeventd_pprint_rules = False
mkeventd_notify_contactgroup = None
mkeventd_notify_facility = 16
mkeventd_notify_remotehost = None
mkeventd_connect_timeout = 10
debug_mkeventd_queries = False
log_level = 0
log_rulehits = False
rule_optimizer = True
mkeventd_service_levels = [
(0, _("(no Service level)")),
(10, _("Silver")),
(20, _("Gold")),
(30, _("Platinum")),
]
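# Illustrative sketch (not part of the shipped defaults): a site could
# override this list in its own configuration, keeping the (number, label)
# tuple format used above, e.g. adding a hypothetical extra level:
#
#   mkeventd_service_levels = [
#       (0, _("(no Service level)")),
#       (10, _("Silver")),
#       (20, _("Gold")),
#       (30, _("Platinum")),
#       (40, _("Diamond")),
#   ]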
| xorpaul/check_mk | mkeventd/web/plugins/config/mkeventd.py | Python | gpl-2.0 | 1,968 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "si_py.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
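# Typical command-line invocations (illustrative):
#   python manage.py migrate
#   python manage.py runserver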
| xuanthuong/python-myocr | si_py/manage.py | Python | mit | 803 |
# -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2016 mrknow
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os,sys,re,json,urllib,urlparse,base64,datetime
try: action = dict(urlparse.parse_qsl(sys.argv[2].replace('?','')))['action']
except: action = None
from resources.lib.libraries import trakt
from resources.lib.libraries import control
from resources.lib.libraries import client
from resources.lib.libraries import cache
from resources.lib.libraries import metacache
from resources.lib.libraries import favourites
from resources.lib.libraries import workers
from resources.lib.libraries import views
class tvshows:
def __init__(self):
self.list = []
#self.tmdb_link = 'http://api.themoviedb.org'
self.trakt_link = 'http://api-v2launch.trakt.tv'
self.imdb_link = 'http://www.imdb.com'
#self.tmdb_key = control.tmdb_key
self.tvdb_key = control.tvdb_key
self.datetime = (datetime.datetime.utcnow() - datetime.timedelta(hours = 5))
self.today_date = (self.datetime).strftime('%Y-%m-%d')
self.week_date = (self.datetime - datetime.timedelta(days = 7)).strftime('%Y-%m-%d')
self.month_date = (self.datetime - datetime.timedelta(days = 30)).strftime('%Y-%m-%d')
self.year_date = (self.datetime - datetime.timedelta(days = 365)).strftime('%Y-%m-%d')
self.trakt_user = control.setting('trakt.user')
self.imdb_user = control.setting('imdb_user').replace('ur', '')
self.info_lang = control.setting('infoLang') or 'en'
#self.tmdb_info_link = 'http://api.themoviedb.org/3/tv/%s?api_key=%s&language=%s&append_to_response=credits,content_ratings,external_ids' % ('%s', self.tmdb_key, self.info_lang)
self.tvdb_info_link = 'http://thetvdb.com/api/%s/series/%s/%s.xml' % (self.tvdb_key, '%s', re.sub('bg', 'en', self.info_lang))
#self.tmdb_by_imdb = 'http://api.themoviedb.org/3/find/%s?api_key=%s&external_source=imdb_id' % ('%s', self.tmdb_key)
self.tvdb_by_imdb = 'http://thetvdb.com/api/GetSeriesByRemoteID.php?imdbid=%s'
self.imdb_by_query = 'http://www.omdbapi.com/?t=%s&y=%s'
#self.tmdb_image = 'http://image.tmdb.org/t/p/original'
#self.tmdb_poster = 'http://image.tmdb.org/t/p/w500'
self.tvdb_image = 'http://thetvdb.com/banners/'
#self.persons_link = 'http://api.themoviedb.org/3/search/person?api_key=%s&query=%s&include_adult=false&page=1' % (self.tmdb_key, '%s')
#self.genres_link = 'http://api.themoviedb.org/3/genre/tv/list?api_key=%s&language=%s' % (self.tmdb_key, self.info_lang)
#self.popular_link = 'http://api.themoviedb.org/3/tv/popular?api_key=%s&page=1'
#self.popular_link = 'https://www.themoviedb.org/remote/tv?language=%s&list_style=poster_card&page=1' % (self.info_lang)
#self.popular_link = 'http://www.imdb.com/search/title?production_status=released&title_type=tv_series'
self.popular_link = 'http://www.imdb.com/search/title?title_type=tv_series,mini_series&languages=en&num_votes=100,sort=moviemeter,asc&count=20&start=1'
self.year_link = 'http://www.imdb.com/search/title?title_type=tv_series,mini_series&languages=en&num_votes=100,sort=moviemeter,asc&count=20&start=1&year=%s'
self.rating_link = 'http://www.imdb.com/search/title?at=0&count=20&languages=en&num_votes=500,&sort=user_rating&title_type=tv_series,mini_series&start=1'
self.views_link = 'http://www.imdb.com/search/title?at=0&count=20&languages=en&sort=num_votes&title_type=tv_series,mini_series&start=1'
self.genre_link = 'http://www.imdb.com/search/title?title_type=tv_series,mini_series&languages=en&num_votes=100,&genres=%s&sort=moviemeter,asc&count=20&start=1'
self.airing_link = 'https://api-v2launch.trakt.tv/calendars/all/shows/%s/1?limit=20' % self.today_date
#self.airing_link = 'http://api.themoviedb.org/3/tv/airing_today?api_key=%s&page=1'
#self.airing_link = 'https://www.themoviedb.org/remote/tv/airing-today?list_style=poster_card&page=1'
#self.premiere_link = 'http://api.themoviedb.org/3/discover/tv?api_key=%s&first_air_date.gte=%s&first_air_date.lte=%s&page=1' % ('%s', self.year_date, self.today_date)
self.premiere_link = 'https://api-v2launch.trakt.tv/calendars/all/shows/premieres/%s/7?limit=20' % self.week_date
#self.active_link = 'http://api.themoviedb.org/3/tv/on_the_air?api_key=%s&page=1'
#self.active_link = 'https://www.themoviedb.org/remote/tv/on-the-air?list_style=poster_card&page=1'
#self.person_link = 'http://api.themoviedb.org/3/person/%s?api_key=%s&append_to_response=tv_credits'
#self.genre_link = 'http://api.themoviedb.org/3/discover/tv?api_key=%s&with_genres=%s&air_date.gte=%s&air_date.lte=%s&page=1' % ('%s', '%s', self.year_date, self.today_date)
#self.network_link = 'http://api.themoviedb.org/3/discover/tv?api_key=%s&with_networks=%s&air_date.gte=%s&air_date.lte=%s&page=1' % ('%s', '%s', self.year_date, self.today_date)
#self.network_link = 'http://www.imdb.com/search/title?title_type=tv_series,mini_series&release_date=,%S&company=%s&sort=moviemeter,asc&count=40&start=1' % (self.today_date, %s)
self.trending_link = 'http://api-v2launch.trakt.tv/shows/trending?limit=20&page=1'
self.search_link = 'http://api-v2launch.trakt.tv/search?type=show&query=%s'
self.traktlists_link = 'http://api-v2launch.trakt.tv/users/%s/lists' % self.trakt_user
self.traktlist_link = 'http://api-v2launch.trakt.tv/users/%s/lists/%s/items' % (self.trakt_user, '%s')
self.traktcollection_link = 'http://api-v2launch.trakt.tv/users/%s/collection/shows' % self.trakt_user
self.traktwatchlist_link = 'http://api-v2launch.trakt.tv/users/%s/watchlist/shows' % self.trakt_user
self.traktfeatured_link = 'http://api-v2launch.trakt.tv/recommendations/shows?limit=20'
self.traktratings_link = 'https://api-v2launch.trakt.tv/users/%s/ratings/shows' % self.trakt_user
self.imdblists_link = 'http://www.imdb.com/user/ur%s/lists?tab=all&sort=modified:desc&filter=titles' % self.imdb_user
self.imdblist_link = 'http://www.imdb.com/list/%s/?view=detail&sort=title:asc&title_type=tv_series,mini_series&start=1'
self.imdbwatchlist_link = 'http://www.imdb.com/user/ur%s/watchlist' % self.imdb_user
self.genres_tab = [('Action', 'action'), ('Adventure', 'adventure'), ('Animation', 'animation'),('Biography', 'biography'),
('Comedy', 'comedy'), ('Crime', 'crime'), ('Drama', 'drama'),('Family', 'family'), ('Fantasy', 'fantasy'),
('History', 'history'), ('Horror', 'horror'),('Music ', 'music'), ('Musical', 'musical'), ('Mystery', 'mystery'),
('Romance', 'romance'),('Science Fiction', 'sci_fi'), ('Sport', 'sport'), ('Thriller', 'thriller'), ('War', 'war'),('Western', 'western')]
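    # Illustrative call sketch (hypothetical; the add-on's URL router normally
    # drives this): tvshows().get('trending') resolves self.trending_link,
    # fetches metadata via worker() and renders the list with tvshowDirectory().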
def get(self, url, idx=True):
try:
try: url = getattr(self, url + '_link')
except: pass
try: u = urlparse.urlparse(url).netloc.lower()
except: pass
#if u in self.tmdb_link:
# self.list = cache.get(self.tmdb_list, 24, url)
# self.worker()
#elif u in self.tmdb_link2:
# self.list = cache.get(self.tmdb_list2, 24, url)
# self.worker()
if u in self.trakt_link and '/users/' in url:
self.list = cache.get(self.trakt_list, 0, url)
self.list = sorted(self.list, key=lambda k: k['title'])
if idx == True: self.worker()
elif u in self.trakt_link:
self.list = cache.get(self.trakt_list, 24, url)
if idx == True: self.worker()
elif u in self.imdb_link and ('/user/' in url or '/list/' in url):
self.list = cache.get(self.imdb_list, 0, url, idx)
self.worker()
elif u in self.imdb_link and '/search/title' in url:
control.log("><><><><> ******************** %s" % url)
self.list = cache.get(self.imdb_list2, 24, url)
self.worker()
elif u in self.imdb_link:
control.log("><><><><> ******************** %s" % u)
self.list = cache.get(self.imdb_list, 24, url)
self.worker()
if idx == True: self.tvshowDirectory(self.list)
return self.list
except:
pass
def favourites(self):
try:
items = favourites.getFavourites('tvshows')
self.list = [i[1] for i in items]
for i in self.list:
if not 'name' in i: i['name'] = i['title']
try: i['title'] = i['title'].encode('utf-8')
except: pass
try: i['name'] = i['name'].encode('utf-8')
except: pass
if not 'year' in i: i['year'] = '0'
if not 'duration' in i: i['duration'] = '0'
if not 'imdb' in i: i['imdb'] = '0'
if not 'tmdb' in i: i['tmdb'] = '0'
if not 'tvdb' in i: i['tvdb'] = '0'
if not 'tvrage' in i: i['tvrage'] = '0'
if not 'poster' in i: i['poster'] = '0'
if not 'banner' in i: i['banner'] = '0'
if not 'fanart' in i: i['fanart'] = '0'
self.worker()
self.list = sorted(self.list, key=lambda k: k['title'])
self.tvshowDirectory(self.list)
except:
return
def search(self, query=None):
try:
if query == None:
t = control.lang(30231).encode('utf-8')
k = control.keyboard('', t) ; k.doModal()
self.query = k.getText() if k.isConfirmed() else None
else:
self.query = query
if (self.query == None or self.query == ''): return
url = self.search_link % urllib.quote_plus(self.query)
self.list = cache.get(self.trakt_list, 0, url)
self.worker()
self.tvshowDirectory(self.list)
return self.list
except:
return
def person(self, query=None):
try:
if query == None:
t = control.lang(30231).encode('utf-8')
k = control.keyboard('', t) ; k.doModal()
self.query = k.getText() if k.isConfirmed() else None
else:
self.query = query
if (self.query == None or self.query == ''): return
url = self.persons_link % urllib.quote_plus(self.query)
self.list = cache.get(self.tmdb_person_list, 0, url)
for i in range(0, len(self.list)): self.list[i].update({'action': 'tvshows'})
self.addDirectory(self.list)
return self.list
except:
return
def genres(self):
try:
for i in self.genres_tab:
self.list.append({'name': i[0], 'url': self.genre_link % i[1], 'image': 'tvGenres.png', 'action': 'tvshows'})
self.addDirectory(self.list)
return self.list
except:
return
def networks(self):
networks = [
('ABC', '2'), ('CBS', '16'), ('NBC', '6|582'), ('FOX', '19|303'), ('CW', '71|194'), ('A&E', '129|567|891'),
('ABC Family', '75'), ('AMC', '174'), ('Animal Planet', '91'), ('Bravo', '74|312|485'),
('Cartoon Network', '56|217|262'), ('Cinemax', '359'), ('Comedy Central', '47|278'),
('Disney Channel', '54|515|539|730'), ('Disney XD', '44'), ('Discovery Channel', '64|106|755'),
('E! Entertainment', '76|407|645'), ('FX', '88'), ('Hallmark', '384'), ('HBO', '49'), ('HGTV', '210|482'),
('History Channel', '65|238|893'), ('Discovery ID', '244'), ('Lifetime', '34|892'), ('MTV', '33|335|488'),
('National Geographic', '43|799'), ('Nickelodeon', '13|35|234|259|416'), ('Showtime', '67|643'),
('Spike', '55'), ('Starz', '318'), ('Syfy', '77|586'), ('TBS', '68'), ('TLC', '84'), ('TNT', '41|613|939'),
('Travel Channel', '209'), ('TV Land', '397'), ('USA', '30'), ('VH1', '158')]
for i in networks: self.list.append({'name': i[0], 'url': self.network_link % ('%s', i[1]), 'image': 'tvshows.jpg', 'action': 'tvshows'})
self.addDirectory(self.list)
return self.list
def years(self):
year = (self.datetime.strftime('%Y'))
for i in range(int(year)-0, int(year)-50, -1): self.list.append({'name': str(i), 'url': self.year_link % (str(i)), 'image': 'tvshows.jpg', 'action': 'tvshows'})
self.addDirectory(self.list)
return self.list
def userlists(self):
try:
userlists = []
if trakt.getTraktCredentials() == False: raise Exception()
userlists += cache.get(self.trakt_user_list, 0, self.traktlists_link)
except:
pass
try:
self.list = []
if self.imdb_user == '': raise Exception()
userlists += cache.get(self.imdb_user_list, 0, self.imdblists_link)
except:
pass
self.list = userlists
for i in range(0, len(self.list)): self.list[i].update({'image': 'tvUserlists.jpg', 'action': 'tvshows'})
self.addDirectory(self.list)
return self.list
def imdb_list2(self, url, idx=True):
#control.log("><><><><> imdb_list2 ******************** %s" % idx)
#http://ia.media-imdb.com/images/M/MV5BMTg3OTc0NzkyOV5BMl5BanBnXkFtZTgwMDMwMTM3MjE@._V1_SX640_SY720_.jpg
try:
if url == self.imdbwatchlist_link:
def imdb_watchlist_id(url):
return re.compile('/export[?]list_id=(ls\d*)').findall(client.request(url))[0]
url = cache.get(imdb_watchlist_id, 8640, url)
url = self.imdblist_link % url
headers = {'Accept-Language': 'en-US'}
result = str(client.request(url,headers=headers))
try:
if idx == True: raise Exception()
pages = client.parseDOM(result, 'div', attrs = {'class': 'desc'})[0]
pages = re.compile('Page \d+? of (\d*)').findall(pages)[0]
for i in range(1, int(pages)):
u = url.replace('&start=1', '&start=%s' % str(i*100+1))
result += str(client.request(u))
except:
pass
result = result.replace('\n','')
result = result.decode('iso-8859-1').encode('utf-8')
items = client.parseDOM(result, 'tr', attrs = {'class': '.+?'})
items += client.parseDOM(result, 'div', attrs = {'class': 'list_item.+?'})
except:
return
try:
next = client.parseDOM(result, 'span', attrs = {'class': 'pagination'})
next += client.parseDOM(result, 'div', attrs = {'class': 'pagination'})
name = client.parseDOM(next[-1], 'a')[-1]
if 'laquo' in name: raise Exception()
next = client.parseDOM(next, 'a', ret='href')[-1]
next = url.replace(urlparse.urlparse(url).query, urlparse.urlparse(next).query)
next = client.replaceHTMLCodes(next)
next = next.encode('utf-8')
except:
next = ''
for item in items:
try:
try: title = client.parseDOM(item, 'a')[1]
except: pass
try: title = client.parseDOM(item, 'a', attrs = {'onclick': '.+?'})[-1]
except: pass
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
year = client.parseDOM(item, 'span', attrs = {'class': 'year_type'})[0]
year = re.compile('(\d{4})').findall(year)[-1]
year = year.encode('utf-8')
if int(year) > int((self.datetime).strftime('%Y')): raise Exception()
name = '%s (%s)' % (title, year)
try: name = name.encode('utf-8')
except: pass
imdb = client.parseDOM(item, 'a', ret='href')[0]
imdb = 'tt' + re.sub('[^0-9]', '', imdb.rsplit('tt', 1)[-1])
imdb = imdb.encode('utf-8')
poster = '0'
try: poster = client.parseDOM(item, 'img', ret='src')[0]
except: pass
try: poster = client.parseDOM(item, 'img', ret='loadlate')[0]
except: pass
if not ('_SX' in poster or '_SY' in poster): poster = '0'
poster = re.sub('_SX\d*|_SY\d*|_CR\d+?,\d+?,\d+?,\d*','_SX500', poster)
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
genre = client.parseDOM(item, 'span', attrs = {'class': 'genre'})
genre = client.parseDOM(genre, 'a')
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
try: duration = re.compile('(\d+?) mins').findall(item)[-1]
except: duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
try: rating = client.parseDOM(item, 'span', attrs = {'class': 'rating-rating'})[0]
except: rating = '0'
try: rating = client.parseDOM(rating, 'span', attrs = {'class': 'value'})[0]
except: rating = '0'
if rating == '' or rating == '-': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: votes = client.parseDOM(item, 'div', ret='title', attrs = {'class': 'rating rating-list'})[0]
except: votes = '0'
try: votes = re.compile('[(](.+?) votes[)]').findall(votes)[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
try: mpaa = client.parseDOM(item, 'span', attrs = {'class': 'certificate'})[0]
except: mpaa = '0'
try: mpaa = client.parseDOM(mpaa, 'span', ret='title')[0]
except: mpaa = '0'
if mpaa == '' or mpaa == 'NOT_RATED': mpaa = '0'
mpaa = mpaa.replace('_', '-')
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
director = client.parseDOM(item, 'span', attrs = {'class': 'credit'})
director += client.parseDOM(item, 'div', attrs = {'class': 'secondary'})
try: director = [i for i in director if 'Director:' in i or 'Dir:' in i][0]
except: director = '0'
director = director.split('With:', 1)[0].strip()
director = client.parseDOM(director, 'a')
director = ' / '.join(director)
if director == '': director = '0'
director = client.replaceHTMLCodes(director)
director = director.encode('utf-8')
cast = client.parseDOM(item, 'span', attrs = {'class': 'credit'})
cast += client.parseDOM(item, 'div', attrs = {'class': 'secondary'})
try: cast = [i for i in cast if 'With:' in i or 'Stars:' in i][0]
except: cast = '0'
cast = cast.split('With:', 1)[-1].strip()
cast = client.replaceHTMLCodes(cast)
cast = cast.encode('utf-8')
cast = client.parseDOM(cast, 'a')
if cast == []: cast = '0'
plot = '0'
try: plot = client.parseDOM(item, 'span', attrs = {'class': 'outline'})[0]
except: pass
try: plot = client.parseDOM(item, 'div', attrs = {'class': 'item_description'})[0]
except: pass
plot = plot.rsplit('<span>', 1)[0].strip()
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
tagline = re.compile('[.!?][\s]{1,2}(?=[A-Z])').split(plot)[0]
try: tagline = tagline.encode('utf-8')
except: pass
self.list.append({'title': title, 'originaltitle': title, 'year': year, 'premiered': '0', 'studio': '0', 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'director': director, 'writer': '0', 'cast': cast, 'plot': plot, 'tagline': tagline, 'name': name, 'code': imdb, 'imdb': imdb, 'tmdb': '0', 'tvdb': '0', 'tvrage': '0', 'poster': poster, 'banner': '0', 'fanart': '0', 'next': next})
#self.list.append({'title': title, 'originaltitle': title, 'year': year, 'premiered': premiered, 'studio': '0', 'genre': '0', 'duration': '0', 'rating': rating, 'votes': votes, 'mpaa': '0', 'cast': '0', 'plot': plot, 'name': title, 'code': '0', 'imdb': '0', 'tmdb': tmdb, 'tvdb': '0', 'tvrage': '0', 'poster': poster, 'banner': '0', 'fanart': fanart, 'next': next})
except:
pass
return self.list
def trakt_list(self, url):
try:
q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
q.update({'extended': 'full,images'})
q = (urllib.urlencode(q)).replace('%2C', ',')
u = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q
result = trakt.getTrakt(u)
result = json.loads(result)
items = []
for i in result:
try: items.append(i['show'])
except: pass
if len(items) == 0:
items = result
except:
return
try:
q = dict(urlparse.parse_qsl(urlparse.urlsplit(url).query))
p = str(int(q['page']) + 1)
if p == '5': raise Exception()
q.update({'page': p})
q = (urllib.urlencode(q)).replace('%2C', ',')
next = url.replace('?' + urlparse.urlparse(url).query, '') + '?' + q
next = next.encode('utf-8')
except:
next = ''
for item in items:
try:
title = item['title']
title = re.sub('\s(|[(])(UK|US|AU|\d{4})(|[)])$', '', title)
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
year = item['year']
year = re.sub('[^0-9]', '', str(year))
year = year.encode('utf-8')
if int(year) > int((self.datetime).strftime('%Y')): raise Exception()
tmdb = item['ids']['tmdb']
if tmdb == None or tmdb == '': tmdb = '0'
tmdb = re.sub('[^0-9]', '', str(tmdb))
tmdb = tmdb.encode('utf-8')
tvrage = item['ids']['tvrage']
if tvrage == None or tvrage == '': tvrage = '0'
tvrage = re.sub('[^0-9]', '', str(tvrage))
tvrage = tvrage.encode('utf-8')
imdb = item['ids']['imdb']
if imdb == None or imdb == '': imdb = '0'
else: imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
imdb = imdb.encode('utf-8')
tvdb = item['ids']['tvdb']
if tvdb == None or tvdb == '': raise Exception()
tvdb = re.sub('[^0-9]', '', str(tvdb))
tvdb = tvdb.encode('utf-8')
poster = '0'
try: poster = item['images']['poster']['medium']
except: pass
if poster == None or not '/posters/' in poster: poster = '0'
poster = poster.rsplit('?', 1)[0]
poster = poster.encode('utf-8')
banner = poster
try: banner = item['images']['banner']['full']
except: pass
if banner == None or not '/banners/' in banner: banner = '0'
banner = banner.rsplit('?', 1)[0]
banner = banner.encode('utf-8')
fanart = '0'
try: fanart = item['images']['fanart']['full']
except: pass
if fanart == None or not '/fanarts/' in fanart: fanart = '0'
fanart = fanart.rsplit('?', 1)[0]
fanart = fanart.encode('utf-8')
try: premiered = item['first_aired']
except: premiered = '0'
try: premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(premiered)[0]
except: premiered = '0'
premiered = premiered.encode('utf-8')
try: studio = item['network']
except: studio = '0'
if studio == None: studio = '0'
studio = studio.encode('utf-8')
try: genre = item['genres']
except: genre = '0'
genre = [i.title() for i in genre]
if genre == []: genre = '0'
genre = ' / '.join(genre)
genre = genre.encode('utf-8')
try: duration = str(item['runtime'])
except: duration = '0'
if duration == None: duration = '0'
duration = duration.encode('utf-8')
try: rating = str(item['rating'])
except: rating = '0'
if rating == None or rating == '0.0': rating = '0'
rating = rating.encode('utf-8')
try: votes = str(item['votes'])
except: votes = '0'
try: votes = str(format(int(votes),',d'))
except: pass
if votes == None: votes = '0'
votes = votes.encode('utf-8')
try: mpaa = item['certification']
except: mpaa = '0'
if mpaa == None: mpaa = '0'
mpaa = mpaa.encode('utf-8')
try: plot = item['overview']
except: plot = '0'
if plot == None: plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
self.list.append({'title': title, 'originaltitle': title, 'year': year, 'premiered': premiered, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'cast': '0', 'plot': plot, 'name': title, 'code': imdb, 'imdb': imdb, 'tmdb': tmdb, 'tvrage': tvrage, 'tvdb': tvdb, 'poster': poster, 'banner': banner, 'fanart': fanart, 'next': next})
except:
pass
return self.list
def trakt_user_list(self, url):
try:
result = trakt.getTrakt(url)
items = json.loads(result)
except:
pass
for item in items:
try:
name = item['name']
name = client.replaceHTMLCodes(name)
name = name.encode('utf-8')
url = self.traktlist_link % item['ids']['slug']
url = url.encode('utf-8')
self.list.append({'name': name, 'url': url, 'context': url})
except:
pass
return self.list
def imdb_list(self, url, idx=True):
try:
if url == self.imdbwatchlist_link:
def imdb_watchlist_id(url):
return re.compile('/export[?]list_id=(ls\d*)').findall(client.request(url))[0]
url = cache.get(imdb_watchlist_id, 8640, url)
url = self.imdblist_link % url
result = str(client.request(url))
try:
if idx == True: raise Exception()
pages = client.parseDOM(result, 'div', attrs = {'class': 'desc'})[0]
pages = re.compile('Page \d+? of (\d*)').findall(pages)[0]
for i in range(1, int(pages)):
u = url.replace('&start=1', '&start=%s' % str(i*100+1))
result += str(client.request(u))
except:
pass
result = result.replace('\n','')
result = result.decode('iso-8859-1').encode('utf-8')
items = client.parseDOM(result, 'div', attrs = {'class': 'list_item.+?'})
except:
return
try:
next = client.parseDOM(result, 'div', attrs = {'class': 'pagination'})[-1]
name = client.parseDOM(next, 'a')[-1]
if 'laquo' in name: raise Exception()
next = client.parseDOM(next, 'a', ret='href')[-1]
next = '%s%s' % (url.split('?', 1)[0], next)
next = client.replaceHTMLCodes(next)
next = next.encode('utf-8')
except:
next = ''
for item in items:
try:
title = client.parseDOM(item, 'a', attrs = {'onclick': '.+?'})[-1]
title = client.replaceHTMLCodes(title)
title = title.encode('utf-8')
year = client.parseDOM(item, 'span', attrs = {'class': 'year_type'})[0]
year = re.compile('(\d{4})').findall(year)[-1]
year = year.encode('utf-8')
if int(year) > int((self.datetime).strftime('%Y')): raise Exception()
imdb = client.parseDOM(item, 'a', ret='href')[0]
imdb = 'tt' + re.sub('[^0-9]', '', imdb.rsplit('tt', 1)[-1])
imdb = imdb.encode('utf-8')
poster = '0'
try: poster = client.parseDOM(item, 'img', ret='src')[0]
except: pass
try: poster = client.parseDOM(item, 'img', ret='loadlate')[0]
except: pass
if not ('_SX' in poster or '_SY' in poster): poster = '0'
poster = re.sub('_SX\d*|_SY\d*|_CR\d+?,\d+?,\d+?,\d*','_SX500', poster)
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
try: rating = client.parseDOM(item, 'span', attrs = {'class': 'rating-rating'})[0]
except: rating = '0'
try: rating = client.parseDOM(item, 'span', attrs = {'class': 'value'})[0]
except: rating = '0'
if rating == '' or rating == '-': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
try: plot = client.parseDOM(item, 'div', attrs = {'class': 'item_description'})[0]
except: plot = '0'
plot = plot.rsplit('<span>', 1)[0].strip()
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
self.list.append({'title': title, 'originaltitle': title, 'year': year, 'premiered': '0', 'studio': '0', 'genre': '0', 'duration': '0', 'rating': rating, 'votes': '0', 'mpaa': '0', 'cast': '0', 'plot': plot, 'name': title, 'code': imdb, 'imdb': imdb, 'tmdb': '0', 'tvdb': '0', 'tvrage': '0', 'poster': poster, 'banner': '0', 'fanart': '0'})
except:
pass
return self.list
def imdb_user_list(self, url):
try:
result = client.request(url)
result = result.decode('iso-8859-1').encode('utf-8')
items = client.parseDOM(result, 'div', attrs = {'class': 'list_name'})
except:
pass
for item in items:
try:
name = client.parseDOM(item, 'a')[0]
name = client.replaceHTMLCodes(name)
name = name.encode('utf-8')
url = client.parseDOM(item, 'a', ret='href')[0]
url = url.split('/list/', 1)[-1].replace('/', '')
url = self.imdblist_link % url
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
self.list.append({'name': name, 'url': url, 'context': url})
except:
pass
return self.list
def worker(self):
self.meta = []
total = len(self.list)
for i in range(0, total): self.list[i].update({'metacache': False})
self.list = metacache.fetch(self.list, self.info_lang)
for r in range(0, total, 25):
threads = []
for i in range(r, r+25):
                if i < total: threads.append(workers.Thread(self.super_info, i))
[i.start() for i in threads]
[i.join() for i in threads]
#self.list = [i for i in self.list if not i['tvdb'] == '0']
if len(self.meta) > 0: metacache.insert(self.meta)
def super_info(self, i):
#control.log("##################><><><><> super_info START %s" % i)
try:
if self.list[i]['metacache'] == True: raise Exception()
try: imdb = self.list[i]['imdb']
except: imdb = '0'
try: tmdb = self.list[i]['tmdb']
except: tmdb = '0'
try: tvdb = self.list[i]['tvdb']
except: tvdb = '0'
try: tvrage = self.list[i]['tvrage']
except: tvrage = '0'
"""
if not tmdb == '0':
tmdb = re.sub('[^0-9]', '', str(tmdb))
tmdb = tmdb.encode('utf-8')
url = self.tmdb_info_link % tmdb
item = client.request(url, timeout='10')
if item == None: raise Exception()
item = json.loads(item)
if tvdb == '0':
tvdb = item['external_ids']['tvdb_id']
if tvdb == '' or tvdb == None: tvdb = '0'
tvdb = re.sub('[^0-9]', '', str(tvdb))
tvdb = tvdb.encode('utf-8')
self.list[i].update({'tvdb': tvdb})
if tvrage == '0':
tvrage = item['external_ids']['tvrage_id']
if tvrage == '' or tvrage == None: tvrage = '0'
tvrage = re.sub('[^0-9]', '', str(tvrage))
tvrage = tvrage.encode('utf-8')
self.list[i].update({'tvrage': tvrage})
if imdb == '0':
imdb = item['external_ids']['imdb_id']
if imdb == '' or imdb == None: imdb = '0'
if not imdb == '0': imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
imdb = imdb.encode('utf-8')
self.list[i].update({'imdb': imdb})
elif not imdb == '0':
url = self.tmdb_by_imdb % imdb
result = client.request(url, timeout='10')
result = json.loads(result)
tmdb = result['tv_results'][0]['id']
if tmdb == '' or tmdb == None: tmdb = '0'
tmdb = re.sub('[^0-9]', '', str(tmdb))
tmdb = tmdb.encode('utf-8')
self.list[i].update({'tmdb': tmdb})
if not tmdb == '0':
url = self.tmdb_info_link % tmdb
item = client.request(url, timeout='10')
if item == None: raise Exception()
item = json.loads(item)
tvdb = item['external_ids']['tvdb_id']
if tvdb == '' or tvdb == None: tvdb = '0'
tvdb = re.sub('[^0-9]', '', str(tvdb))
tvdb = tvdb.encode('utf-8')
self.list[i].update({'tvdb': tvdb})
tvrage = item['external_ids']['tvrage_id']
if tvrage == '' or tvrage == None: tvrage = '0'
tvrage = re.sub('[^0-9]', '', str(tvrage))
tvrage = tvrage.encode('utf-8')
self.list[i].update({'tvrage': tvrage})
"""
if tvdb == '0' and not imdb == '0':
url = self.tvdb_by_imdb % imdb
result = client.request(url, timeout='10')
try: tvdb = client.parseDOM(result, 'seriesid')[0]
except: tvdb = '0'
try: name = client.parseDOM(result, 'SeriesName')[0]
except: name = '0'
dupe = re.compile('[***]Duplicate (\d*)[***]').findall(name)
if len(dupe) > 0: tvdb = str(dupe[0])
if tvdb == '': tvdb = '0'
self.list[i].update({'tvdb': tvdb})
if not tvdb == '0':
url = self.tvdb_info_link % tvdb
item2 = client.request(url, timeout='10')
if imdb == '0':
try: imdb = client.parseDOM(item2, 'IMDB_ID')[0]
except: pass
if imdb == '': imdb = '0'
imdb = imdb.encode('utf-8')
self.list[i].update({'imdb': imdb})
if imdb == '0':
url = self.imdb_by_query % (urllib.quote_plus(self.list[i]['title']), self.list[i]['year'])
item3 = client.request(url, timeout='10')
item3 = json.loads(item3)
imdb = item3['imdbID']
if imdb == None or imdb == '' or imdb == 'N/A': imdb = '0'
else: imdb = 'tt' + re.sub('[^0-9]', '', str(imdb))
imdb = imdb.encode('utf-8')
self.list[i].update({'imdb': imdb})
try: poster = item['poster_path']
except: poster = ''
if poster == '' or poster == None: poster = '0'
if not poster == '0': poster = '%s%s' % (self.tmdb_poster, poster)
if poster == '0':
try: poster = client.parseDOM(item2, 'poster')[0]
except: poster = '0'
if not poster == '0': poster = self.tvdb_image + poster
poster = client.replaceHTMLCodes(poster)
poster = poster.encode('utf-8')
if not poster == '0': self.list[i].update({'poster': poster})
try: banner = client.parseDOM(item2, 'banner')[0]
except: banner = ''
if not banner == '': banner = self.tvdb_image + banner
else: banner = '0'
banner = client.replaceHTMLCodes(banner)
banner = banner.encode('utf-8')
if not banner == '0': self.list[i].update({'banner': banner})
try: fanart = item['backdrop_path']
except: fanart = ''
if fanart == '' or fanart == None: fanart = '0'
if not fanart == '0': fanart = '%s%s' % (self.tmdb_image, fanart)
if fanart == '0':
try: fanart = client.parseDOM(item2, 'fanart')[0]
except: fanart = '0'
if not fanart == '0': fanart = self.tvdb_image + fanart
fanart = client.replaceHTMLCodes(fanart)
fanart = fanart.encode('utf-8')
if not fanart == '0' and self.list[i]['fanart'] == '0': self.list[i].update({'fanart': fanart})
try: premiered = item['first_air_date']
except: premiered = ''
try: premiered = re.compile('(\d{4}-\d{2}-\d{2})').findall(premiered)[0]
except: premiered = ''
if premiered == '' or premiered == None:
try: premiered = client.parseDOM(item2, 'FirstAired')[0]
except: premiered = '0'
if premiered == '': premiered = '0'
premiered = client.replaceHTMLCodes(premiered)
premiered = premiered.encode('utf-8')
if not premiered == '0': self.list[i].update({'premiered': premiered})
try: studio = item['networks'][0]['name']
except: studio = ''
if studio == '' or studio == None:
try: studio = client.parseDOM(item2, 'Network')[0]
except: studio = ''
if studio == '': studio = '0'
studio = client.replaceHTMLCodes(studio)
studio = studio.encode('utf-8')
if not studio == '0': self.list[i].update({'studio': studio})
try: genre = item['genres']
except: genre = []
try: genre = [x['name'] for x in genre]
except: genre = []
if genre == '' or genre == None or genre == []:
try: genre = client.parseDOM(item2, 'Genre')[0]
except: genre = ''
genre = [x for x in genre.split('|') if not x == '']
genre = ' / '.join(genre)
if genre == '': genre = '0'
genre = client.replaceHTMLCodes(genre)
genre = genre.encode('utf-8')
if not genre == '0': self.list[i].update({'genre': genre})
try: duration = str(item['episode_run_time'][0])
except: duration = ''
if duration == '' or duration == None:
try: duration = client.parseDOM(item2, 'Runtime')[0]
except: duration = ''
if duration == '': duration = '0'
duration = client.replaceHTMLCodes(duration)
duration = duration.encode('utf-8')
if not duration == '0': self.list[i].update({'duration': duration})
try: rating = str(item['vote_average'])
except: rating = ''
if rating == '' or rating == None:
try: rating = client.parseDOM(item2, 'Rating')[0]
except: rating = ''
if rating == '': rating = '0'
rating = client.replaceHTMLCodes(rating)
rating = rating.encode('utf-8')
if not rating == '0': self.list[i].update({'rating': rating})
try: votes = str(item['vote_count'])
except: votes = ''
try: votes = str(format(int(votes),',d'))
except: pass
if votes == '' or votes == None:
try: votes = client.parseDOM(item2, 'RatingCount')[0]
except: votes = '0'
if votes == '': votes = '0'
votes = client.replaceHTMLCodes(votes)
votes = votes.encode('utf-8')
if not votes == '0': self.list[i].update({'votes': votes})
try: mpaa = item['content_ratings']['results'][-1]['rating']
except: mpaa = ''
if mpaa == '' or mpaa == None:
try: mpaa = client.parseDOM(item2, 'ContentRating')[0]
except: mpaa = ''
if mpaa == '': mpaa = '0'
mpaa = client.replaceHTMLCodes(mpaa)
mpaa = mpaa.encode('utf-8')
if not mpaa == '0': self.list[i].update({'mpaa': mpaa})
try: cast = item['credits']['cast']
except: cast = []
try: cast = [(x['name'].encode('utf-8'), x['character'].encode('utf-8')) for x in cast]
except: cast = []
if cast == []:
try: cast = client.parseDOM(item2, 'Actors')[0]
except: cast = ''
cast = [x for x in cast.split('|') if not x == '']
try: cast = [(x.encode('utf-8'), '') for x in cast]
except: cast = []
if len(cast) > 0: self.list[i].update({'cast': cast})
try: plot = item['overview']
except: plot = ''
if plot == '' or plot == None:
try: plot = client.parseDOM(item2, 'Overview')[0]
except: plot = ''
if plot == '': plot = '0'
plot = client.replaceHTMLCodes(plot)
plot = plot.encode('utf-8')
if not plot == '0': self.list[i].update({'plot': plot})
self.meta.append({'imdb': imdb, 'tmdb': tmdb, 'tvdb': tvdb, 'lang': self.info_lang, 'item': {'code': imdb, 'imdb': imdb, 'tmdb': tmdb, 'tvdb': tvdb, 'tvrage': tvrage, 'poster': poster, 'banner': banner, 'fanart': fanart, 'premiered': premiered, 'studio': studio, 'genre': genre, 'duration': duration, 'rating': rating, 'votes': votes, 'mpaa': mpaa, 'cast': cast, 'plot': plot}})
except:
pass
def tvshowDirectory(self, items):
#control.log("******************** tvshowDirectory %s" % items)
if items == None or len(items) == 0: return
isFolder = True if control.setting('autoplay') == 'false' and control.setting('host_select') == '1' else False
isFolder = False if control.window.getProperty('PseudoTVRunning') == 'True' else isFolder
traktMode = False if trakt.getTraktCredentials() == False else True
addonPoster, addonBanner = control.addonPoster(), control.addonBanner()
addonFanart, settingFanart = control.addonFanart(), control.setting('fanart')
sysaddon = sys.argv[0]
try:
favitems = favourites.getFavourites('tvshows')
favitems = [i[0] for i in favitems]
except:
pass
try:
if traktMode == False: raise Exception()
indicators = trakt.syncTVShows(timeout=720)
indicators = json.loads(indicators)
except:
pass
for i in items:
try:
label = i['name']
systitle = sysname = urllib.quote_plus(i['title'])
sysimage = urllib.quote_plus(i['poster'])
imdb, tmdb, tvdb, tvrage, year = i['imdb'], i['tmdb'], i['tvdb'], i['tvrage'], i['year']
poster, banner, fanart = i['poster'], i['banner'], i['fanart']
if poster == '0': poster = addonPoster
if banner == '0' and poster == '0': banner = addonBanner
elif banner == '0': banner = poster
meta = dict((k,v) for k, v in i.iteritems() if not v == '0')
meta.update({'trailer': '%s?action=trailer&name=%s' % (sysaddon, sysname)})
if i['duration'] == '0': meta.update({'duration': '60'})
try: meta.update({'duration': str(int(meta['duration']) * 60)})
except: pass
sysmeta = urllib.quote_plus(json.dumps(meta))
url = '%s?action=seasons&tvshowtitle=%s&year=%s&imdb=%s&tmdb=%s&tvdb=%s&tvrage=%s' % (sysaddon, systitle, year, imdb, tmdb, tvdb, tvrage)
cm = []
if isFolder == False:
cm.append((control.lang(30232).encode('utf-8'), 'RunPlugin(%s?action=queueItem)' % sysaddon))
cm.append((control.lang(30233).encode('utf-8'), 'Action(Info)'))
if not action == 'tvSearch':
cm.append((control.lang(30234).encode('utf-8'), 'RunPlugin(%s?action=tvPlaycount&name=%s&year=%s&imdb=%s&tvdb=%s&query=7)' % (sysaddon, systitle, year, imdb, tvdb)))
cm.append((control.lang(30235).encode('utf-8'), 'RunPlugin(%s?action=tvPlaycount&name=%s&year=%s&imdb=%s&tvdb=%s&query=6)' % (sysaddon, systitle, year, imdb, tvdb)))
if action == 'tvFavourites':
cm.append((control.lang(30238).encode('utf-8'), 'RunPlugin(%s?action=deleteFavourite&meta=%s&content=tvshows)' % (sysaddon, sysmeta)))
elif action.startswith('tvSearch'):
cm.append((control.lang(30237).encode('utf-8'), 'RunPlugin(%s?action=addFavourite&meta=%s&query=0&content=tvshows)' % (sysaddon, sysmeta)))
else:
if not imdb in favitems and not tvdb in favitems: cm.append((control.lang(30237).encode('utf-8'), 'RunPlugin(%s?action=addFavourite&meta=%s&content=tvshows)' % (sysaddon, sysmeta)))
else: cm.append((control.lang(30238).encode('utf-8'), 'RunPlugin(%s?action=deleteFavourite&meta=%s&content=tvshows)' % (sysaddon, sysmeta)))
cm.append((control.lang(30239).encode('utf-8'), 'RunPlugin(%s?action=tvshowToLibrary&tvshowtitle=%s&year=%s&imdb=%s&tmdb=%s&tvdb=%s&tvrage=%s)' % (sysaddon, systitle, year, imdb, tmdb, tvdb, tvrage)))
cm.append((control.lang(30240).encode('utf-8'), 'RunPlugin(%s?action=addView&content=tvshows)' % sysaddon))
item = control.item(label=label, iconImage=poster, thumbnailImage=poster)
try: item.setArt({'poster': poster, 'tvshow.poster': poster, 'season.poster': poster, 'banner': banner, 'tvshow.banner': banner, 'season.banner': banner})
except: pass
if settingFanart == 'true' and not fanart == '0':
item.setProperty('Fanart_Image', fanart)
elif not addonFanart == None:
item.setProperty('Fanart_Image', addonFanart)
item.setInfo(type='Video', infoLabels = meta)
item.setProperty('Video', 'true')
item.addContextMenuItems(cm, replaceItems=True)
control.addItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=True)
except:
pass
try:
url = items[0]['next']
if url == '': raise Exception()
url = '%s?action=tvshows&url=%s' % (sysaddon, urllib.quote_plus(url))
addonNext = control.addonNext()
item = control.item(label=control.lang(30241).encode('utf-8'), iconImage=addonNext, thumbnailImage=addonNext)
item.addContextMenuItems([], replaceItems=False)
if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
control.addItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=True)
except:
pass
control.content(int(sys.argv[1]), 'tvshows')
control.directory(int(sys.argv[1]), cacheToDisc=True)
views.setView('tvshows', {'skin.confluence': 500})
def addDirectory(self, items):
if items == None or len(items) == 0: return
sysaddon = sys.argv[0]
addonFanart = control.addonFanart()
addonThumb = control.addonThumb()
artPath = control.artPath()
for i in items:
try:
try: name = control.lang(i['name']).encode('utf-8')
except: name = i['name']
if i['image'].startswith('http://'): thumb = i['image']
elif not artPath == None: thumb = os.path.join(artPath, i['image'])
else: thumb = addonThumb
url = '%s?action=%s' % (sysaddon, i['action'])
try: url += '&url=%s' % urllib.quote_plus(i['url'])
except: pass
cm = []
try: cm.append((control.lang(30239).encode('utf-8'), 'RunPlugin(%s?action=tvshowsToLibrary&url=%s)' % (sysaddon, urllib.quote_plus(i['context']))))
except: pass
item = control.item(label=name, iconImage=thumb, thumbnailImage=thumb)
item.addContextMenuItems(cm, replaceItems=False)
if not addonFanart == None: item.setProperty('Fanart_Image', addonFanart)
control.addItem(handle=int(sys.argv[1]), url=url, listitem=item, isFolder=True)
except:
pass
control.directory(int(sys.argv[1]), cacheToDisc=True)
| mrknow/filmkodi | plugin.video.mrknowtv/resources/lib/indexers/tvshows.py | Python | apache-2.0 | 51,870 |
# coding=utf-8
def a(m,n):
n.append(m)
print(n)
if __name__ == "__main__":
a([9,8,7],[])
a([4,4,4],[])
| bosichong/17python.com | deftest/deftest.py | Python | apache-2.0 | 121 |
import sys
import matplotlib.pyplot as plt
import numpy as np
a = np.fromfile(sys.stdin, dtype='>u2')
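# Assumption (comment added for clarity, not in the original): the stream holds
# 4 interleaved channels of big-endian 16-bit samples, so a[ch::4] below picks
# every 4th sample to de-interleave channel ch.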
for ch in xrange(4):
plt.plot(a[ch::4], alpha=.7, label='ch%d'%ch)
plt.legend()
plt.show() | thotypous/AcqSys | src/emu/plotdac.py | Python | mit | 197 |
import json
from discord.ext import commands
from audio import AudioPlayer
def load_credentials():
with open('creds.json') as f:
return json.load(f)
cmd_extensions = [
'cogs.ping',
'cogs.foam',
'cogs.bizkit',
'cogs.response'
]
description = "The ultimate annoy bot"
cmd_prefix = '--'
bot = commands.Bot(description=description, command_prefix=cmd_prefix)
bot.add_cog(AudioPlayer(bot)) # required for all audio-based cogs
for extension in cmd_extensions:
bot.load_extension(extension)
creds = load_credentials()
bot.run(creds['token'])
| Flamacue/foambot | foambot/bot.py | Python | apache-2.0 | 573 |
#!/usr/bin/python
# -*-coding:utf8-*-
"""
Decompress all the zip and rar files in the specified directory.
"""
import os
import zipfile
import traceback
import argparse
from pprint import pprint
def find_path_file(specific_file, search_directory):
"""
result_path_filename
"""
result_path_filename = list()
result_path_filename.extend(
[os.path.join(dirpath, filename) for dirpath, dirnames, filenames in os.walk(search_directory) for filename in
filenames if os.path.splitext(filename)[1] == ('.' + specific_file)])
return result_path_filename
def Decompression_rar(specific_file):
"""
Decompression_rar
    if you want to use this function, you need to install unrar; for Ubuntu:
    sudo apt-get install unrar
    another decompression method is to use rarfile; for help you can visit:
http://www.pythonclub.org/python-files/rar
"""
cmd = 'unrar x "' + specific_file + '"' + ' "' + os.path.split(specific_file)[0] + '"'
os.system(cmd)
def Decompression_zip(specific_file):
"""
Decompression_zip
"""
if zipfile.is_zipfile(specific_file):
try:
zipfile.ZipFile(specific_file).extractall(os.path.split(specific_file)[0])
except Exception as err:
traceback.print_exc()
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--delsource", action='store_true', help="delete the source file(rar or zip)")
args = parser.parse_args()
path = os.path.abspath(os.path.dirname(__file__)) + "/../media/book_files"
_rar = find_path_file('rar', path)
for i in _rar:
Decompression_rar(i)
_zip = find_path_file('zip', path)
for i in _zip:
Decompression_zip(i)
if args.delsource:
_delete_rar = find_path_file('rar', path)
_delete_zip = find_path_file('zip', path)
for i in _delete_rar:
os.remove(i)
for i in _delete_zip:
os.remove(i)
| openslack/openslack-crawler | scripts/decomperssion.py | Python | apache-2.0 | 2,025 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
@author: RaNaN
"""
from imp import find_module
from os.path import join, exists
from urllib import quote
ENGINE = ""
DEBUG = False
JS = False
PYV8 = False
NODE = False
RHINO = False
JS2PY = False
if not ENGINE:
try:
import js2py
out = js2py.eval_js("(23+19).toString()")
#integrity check
if out.strip() == "42":
ENGINE = "js2py"
JS2PY = True
except:
pass
if not ENGINE or DEBUG:
try:
import subprocess
subprocess.Popen(["js", "-v"], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
p = subprocess.Popen(["js", "-e", "print(23+19)"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
#integrity check
if out.strip() == "42":
ENGINE = "js"
JS = True
except:
pass
if not ENGINE or DEBUG:
try:
find_module("PyV8")
ENGINE = "pyv8"
PYV8 = True
except:
pass
if not ENGINE or DEBUG:
try:
import subprocess
subprocess.Popen(["node", "-v"], bufsize=-1, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
p = subprocess.Popen(["node", "-e", "console.log(23+19)"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
#integrity check
if out.strip() == "42":
ENGINE = "node"
NODE = True
except:
pass
if not ENGINE or DEBUG:
try:
path = "" #path where to find rhino
if exists("/usr/share/java/js.jar"):
path = "/usr/share/java/js.jar"
elif exists("js.jar"):
path = "js.jar"
        elif exists(join(pypath, "js.jar")): # may raise an exception, but js.jar wasn't found anyway
path = join(pypath, "js.jar")
if not path:
raise Exception
import subprocess
p = subprocess.Popen(["java", "-cp", path, "org.mozilla.javascript.tools.shell.Main", "-e", "print(23+19)"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
#integrity check
if out.strip() == "42":
ENGINE = "rhino"
RHINO = True
except:
pass
class JsEngine():
def __init__(self):
self.engine = ENGINE
self.init = False
def __nonzero__(self):
return False if not ENGINE else True
def eval(self, script):
if not self.init:
if ENGINE == "pyv8" or (DEBUG and PYV8):
import PyV8
global PyV8
self.init = True
if type(script) == unicode:
script = script.encode("utf8")
if not ENGINE:
raise Exception("No JS Engine")
if not DEBUG:
if ENGINE == "pyv8":
return self.eval_pyv8(script)
elif ENGINE == "js2py":
return self.eval_js2py(script)
elif ENGINE == "js":
return self.eval_js(script)
elif ENGINE == "node":
return self.eval_node(script)
elif ENGINE == "rhino":
return self.eval_rhino(script)
else:
results = []
if PYV8:
res = self.eval_pyv8(script)
print "PyV8:", res
results.append(res)
if JS2PY:
res = self.eval_js2py(script)
print "js2py:", res
results.append(res)
if JS:
res = self.eval_js(script)
print "JS:", res
results.append(res)
if NODE:
res = self.eval_node(script)
print "NODE:", res
results.append(res)
if RHINO:
res = self.eval_rhino(script)
print "Rhino:", res
results.append(res)
warning = False
for x in results:
for y in results:
if x != y:
warning = True
if warning: print "### WARNING ###: Different results"
return results[0]
def eval_pyv8(self, script):
rt = PyV8.JSContext()
rt.enter()
return rt.eval(script)
def eval_js(self, script):
script = "print(eval(unescape('%s')))" % quote(script)
p = subprocess.Popen(["js", "-e", script], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=-1)
out, err = p.communicate()
res = out.strip()
return res
def eval_js2py(self, script):
script = "(eval(unescape('%s'))).toString()" % quote(script)
res = js2py.eval_js(script).strip()
return res
def eval_node(self, script):
script = "console.log(eval(unescape('%s')))" % quote(script)
p = subprocess.Popen(["node", "-e", script], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=-1)
out, err = p.communicate()
res = out.strip()
return res
def eval_rhino(self, script):
script = "print(eval(unescape('%s')))" % quote(script)
p = subprocess.Popen(["java", "-cp", path, "org.mozilla.javascript.tools.shell.Main", "-e", script],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=-1)
out, err = p.communicate()
res = out.strip()
return res.decode("utf8").encode("ISO-8859-1")
def error(self):
return _("No js engine detected, please install either js2py, Spidermonkey, ossp-js, pyv8, nodejs or rhino")
if __name__ == "__main__":
js = JsEngine()
test = u'"ü"+"ä"'
js.eval(test) | TheBraveWarrior/pyload | module/common/JsEngine.py | Python | gpl-3.0 | 6,373 |
from scipy.stats import mstats
from pylab import plot, title, xlabel, ylabel, show
from numpy import sqrt, log
import numpy as np
# NOTE: mmm_data, spy_data and t (the MMM/SPY price series and the time axis)
# are expected to be defined elsewhere before this script runs; they are not
# created in this file.
MEAN_REVERTING_PERIOD = 60
# epsilon is a placeholder for the innovation series; it is empty here, as in
# the original script, so X_t is a list of zeros.
epsilon = []
X_t = [sum(epsilon[0:i]) for i in range(0, MEAN_REVERTING_PERIOD - 1)]
# Gradient & intercept of the regression mmm ~ beta0 + beta * spy
# a = 0.000194054, b = 0.840379173
beta0 = 0.000194054
beta = 0.840379173
# residuals of the regression
n = 1964
residual = np.zeros(n)
for i in np.arange(0, n):
    residual[i] = mmm_data[i] - beta0 - beta * spy_data[i]
    print residual[i]
a = 0.000194054  # gradient
b = 0.840379173  # intercept
k = -log(b) * 252
m = a / (1 - b)
# lagged differences of the residual series
residual_lag = np.zeros(n - 1)
for j in np.arange(0, n - 1):
    residual_lag[j] = residual[j + 1] - residual[j]
# s-score style quantities, following the formulas in the original script
residual_quantile = mstats.mquantiles(residual_lag)
sigma = sqrt(residual_quantile * 2 * k / (1 - b ** 2))
sigma_eq = sqrt(residual_quantile / (1 - b ** 2))
s = (residual - m) / sigma_eq
s_mod = s - beta0 / (k * sigma_eq)
plot(t, residual, '-+r')
plot(t, s, '-b')
plot(t, s_mod, '-g')
title('Simulations')
xlabel('time')
ylabel('S-score')
show()
| AvengersPy/MyPairs | Simulation/OU.py | Python | apache-2.0 | 1,045 |
# =============================================================================
# Authors: PAR Government
# Organization: DARPA
#
# Copyright (c) 2016 PAR Government
# All rights reserved.
#==============================================================================
from os.path import expanduser
import new
from types import MethodType
import logging
from threading import RLock
class Proxy(object):
def __init__(self, target):
self._target = target
def __getattr__(self, name):
target = self._target
f = getattr(target, name)
if isinstance(f, MethodType):
# Rebind the method to the target.
return new.instancemethod(f.im_func, self, target.__class__)
else:
return f
def removeValue(obj, path):
part = path
splitpos = path.find(".")
if splitpos > 0:
part = path[0:splitpos]
path = path[splitpos + 1:]
else:
path = None
bpos = part.find('[')
pos = 0
if bpos > 0:
pos = int(part[bpos + 1:-1])
part = part[0:bpos]
if part in obj:
current_value = obj[part]
if path is None:
if type(current_value) is list or type(current_value) is tuple :
obj[part] = tuple(list(current_value[:pos]) + list(current_value[pos+1:]))
return current_value[pos]
else:
return obj.pop(part)
else:
if bpos > 0:
current_value = current_value[pos]
return removeValue(current_value,path)
def setPathValue(d, path, value):
pos = path.find('.')
lbracket = path.find('[')
listpos = None
nextpath = path[pos + 1:] if pos > 0 else None
if lbracket > 0 and (pos < 0 or lbracket < pos):
rbracket = path.find(']')
listpos = int(path[lbracket + 1:rbracket])
pos = lbracket
if pos < 0:
if listpos is not None:
d[path][listpos] = value
elif value is None and path in d:
d.pop(path)
elif value is not None:
d[path] = value
elif listpos is not None:
setPathValue(d[path[0:pos]][listpos], nextpath, value)
else:
if path[0:pos] not in d:
d[path[0:pos]] = {}
setPathValue(d[path[0:pos]], nextpath, value)
def getPathValuesFunc(path):
from functools import partial
def getValuePath(path, d, **kwargs):
return getPathValues(d, path)
return partial(getValuePath, path)
def getPathValues(d, path):
"""
    Given a nested structure,
    return all the values referenced by the given path.
    Always returns a list.
    If the value is not found, the list is empty.
    NOTE: processing a list is its own recursion.
"""
pos = path.find('.')
currentpath = path[0:pos] if pos > 0 else path
nextpath = path[pos + 1:] if pos > 0 else None
lbracket = path.find('[')
itemnum = None
if lbracket >= 0 and (pos < 0 or lbracket < pos):
rbracket = path.find(']')
itemnum = int(path[lbracket + 1:rbracket])
currentpath = path[0:lbracket]
# keep the bracket for the next recurive depth
nextpath = path[lbracket:] if lbracket > 0 else nextpath
if type(d) is list:
result = []
if itemnum is not None:
result.extend(getPathValues(d[itemnum], nextpath))
else:
for item in d:
# still on the current path node
result.extend(getPathValues(item, path))
return result
if pos < 0:
if currentpath == '*':
result = []
for k, v in d.iteritems():
result.append(v)
return result
return [d[currentpath]] if currentpath in d and d[currentpath] else []
else:
if currentpath == '*':
result = []
for k, v in d.iteritems():
result.extend(getPathValues(v, nextpath))
return result
return getPathValues(d[currentpath], nextpath) if currentpath in d else []
def getValue(obj, path, defaultValue=None, convertFunction=None):
""""Return the value as referenced by the path in the embedded set of dictionaries as referenced by an object
obj is a node or edge
path is a dictionary path: a.b.c
convertFunction converts the value
This function recurses
"""
if obj is None:
return defaultValue
if not path:
return convertFunction(obj) if convertFunction and obj is not None else (defaultValue if obj is None else obj)
current = obj
part = path
splitpos = path.find(".")
if splitpos > 0:
part = path[0:splitpos]
path = path[splitpos + 1:]
else:
path = None
bpos = part.find('[')
pos = 0
if bpos > 0:
pos = int(part[bpos + 1:-1])
part = part[0:bpos]
if part in current:
current = current[part]
if type(current) is list or type(current) is tuple:
if bpos > 0:
current = current[pos]
else:
result = []
for item in current:
v = getValue(item, path, defaultValue=defaultValue, convertFunction=convertFunction)
if v is not None:
result.append(v)
return result
return getValue(current, path, defaultValue=defaultValue, convertFunction=convertFunction)
return defaultValue
class MaskgenThreadPool:
def __init__(self,size):
from multiprocessing.pool import ThreadPool
if size > 1:
self.thread_pool = ThreadPool(size)
else:
self.thread_pool = None
def apply_async(self, func, args=(), kwds={}):
if self.thread_pool is not None:
return self.thread_pool.apply_async(func, args=args, kwds=kwds)
else:
from multiprocessing.pool import AsyncResult
result = AsyncResult({},False)
result._set(0,(True,func(*args, **kwds)))
return result
class ModuleStatus:
def __init__(self,system_name, module_name, component, percentage):
self.system_name = system_name
self.module_name = module_name
self.component = component
self.percentage = percentage
class StatusTracker:
def __init__(self, system_name='System', module_name='?', amount=100, status_cb=None):
self.amount = amount
self.system_name = system_name
self.module_name = module_name
self.current = 0
self.lock = RLock()
self.status_cb = status_cb
self.logger = logging.getLogger('maskgen')
def post(self, module_status):
"""
:param module_status:
:return:
@type module_status : ModuleStatus
"""
if self.status_cb is None:
self.logger.info(
'{} module {} for component {}: {}% Complete'.format(module_status.system_name,
module_status.module_name,
module_status.component,
module_status.percentage))
else:
self.status_cb(module_status)
def complete(self):
self.post(ModuleStatus(self.system_name, self.module_name, 'Complete',100.0))
def next(self,id):
with self.lock:
self.post(ModuleStatus(self.system_name, self.module_name, id, (float(self.current)/self.amount)*100.0))
self.current += 1
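# Illustrative demo (not part of the original module; the dictionary below is
# made up) of the path helpers defined above.
if __name__ == '__main__':
    d = {'a': {'b': [{'c': 1}, {'c': 2}]}}
    print(getPathValues(d, 'a.b.c'))     # [1, 2]
    print(getPathValues(d, 'a.b[1].c'))  # [2]
    setPathValue(d, 'a.x', 3)
    print(getValue(d, 'a.x'))            # 3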
| rwgdrummer/maskgen | maskgen/support.py | Python | bsd-3-clause | 7,641 |
import os, sys, shlex
from base import *
from task import *
from tmux import *
__all__ = [ 'create_layout', 'create_window', 'layout2tmux', 'all_context_templates', 'site_dir_contexts', 'user_dir_contexts', 'all_windows' ]
#DEBUG ONLY
__all__ += [ 'layout', 'reg_order' ]
all_context_templates = {}
T_OPEN = '('
T_CLOSE = ')'
T_SYM = 'sym'
T_VERT = '|'
T_HORIZ = '--'
T_ERROR = '!'
T_NUM = '0'
sym_alphabet = set()
num_alphabet = set()
for c in xrange(ord('a'), ord('z')+1):
sym_alphabet.add(c)
for c in xrange(ord('A'), ord('Z')+1):
sym_alphabet.add(c)
for c in xrange(ord('0'), ord('9')+1):
sym_alphabet.add(c)
num_alphabet.add(c)
sym_alphabet.add(ord('_'))
sym_alphabet.add(ord('-'))
def layout_scanner(txt):
txt = txt.lstrip()
if txt.startswith(T_OPEN):
return T_OPEN, '', txt[len(T_OPEN):]
if txt.startswith(T_CLOSE):
return T_CLOSE, '', txt[len(T_CLOSE):]
if txt.startswith(T_VERT):
return T_VERT, '', txt[len(T_VERT):]
if txt.startswith(T_HORIZ):
return T_HORIZ, '', txt[len(T_HORIZ):]
i=0
while ord(txt[i]) in num_alphabet:
i=i+1
if i>0:
return T_NUM, txt[:i], txt[i:]
while ord(txt[i]) in sym_alphabet:
i=i+1
if i>0:
return T_SYM, get_task_prefix()+txt[:i], txt[i:]
while ord('0') <= ord(txt[i]) <= ord('9'):
i=i+1
if i>0:
return T_NUM, txt[:i], txt[i:]
raise Exception("Scan failed around '%s'"%txt)
def make_pane(v):
if v in all_embedded:
return subcontext(all_embedded[v]['layout'])
else:
return pane(v)
def accept(tok, toktype):
t, v = tok
if t is not toktype:
raise ValueError(t)
def parse_layout(tokiter):
t, v = tokiter.next()
if t is T_OPEN:
left = parse_layout(tokiter)
elif t is T_SYM:
left = make_pane(v)
else:
raise ValueError(t)
t, v = tokiter.next()
if t is T_HORIZ:
mode='-v'
elif t is T_VERT:
mode='-h'
else:
raise ValueError(t)
t, v = tokiter.next()
if t is not T_NUM:
raise ValueError(t)
sz = "-p "+v
t, v = tokiter.next()
if t is T_OPEN:
right = parse_layout(tokiter)
elif t is T_SYM:
right = make_pane(v)
else:
raise ValueError(t)
accept(tokiter.next(), T_CLOSE)
return splitter(mode, sz, left, right)
class pane(object):
def __init__(self, name):
self.name = name
self.task = all_tasks[name]
self.index = -1
self.opts = None
# def set_index(self):
# pass
def first_pane(self):
return self
def __str__(self):
return self.task['T']
def __repr__(self):
return self.task['T']
class splitter(object):
top_index = 0
@classmethod
def alloc_index(cls):
cls.top_index += 1
return cls.top_index
def __init__(self, mode, sz, a, b):
self.opts = mode+" "+sz
self.a = a
self.b = b
self.index = 0
# def set_index(self):
# self.a.index = self.index
# self.b.index = splitter.alloc_index()
# self.a.set_index()
# self.b.set_index()
def first_pane(self):
return self.a.first_pane()
def flat_pane_list(self, parent=None, parent_opts = None):
l = []
if type(self.a) in (splitter, subcontext):
l += self.a.flat_pane_list(parent, parent_opts)
else:
l += [ (self.a.index, parent, parent_opts, self.a.task.tmux_shell_cmd()) ]
if type(self.b) in (splitter, subcontext):
l += self.b.flat_pane_list(self.a.index, self.opts)
else:
l += [ (self.a.index, self.index, self.opts, self.b.task.tmux_shell_cmd()) ]
return sorted(l, None, lambda x: x[0])
def __str__(self):
if self.opts.startswith("-v"):
sep = "--"+self.opts[6:]
else:
sep = "|"+self.opts[6:]
return "(%s %s %s)"%(str(self.a), sep, str(self.b))
def __repr__(self):
return str(self)
class subcontext(splitter):
def __init__(self, l):
#print l, l.opts
mode, t, sz = shlex.split(l.opts)
splitter.__init__(self, mode, ' '.join((t, sz)), l.a, l.b)
layout = None
def pane2cmd(x, p, opts):
if x.index==-1:
x.index = splitter.alloc_index()
return [(p and p.index, opts, x.task)]
def reg_order(l, window_index):
def reg_if_new(x, v, p, opts):
if x.task.window_index!=-1:
return
x.index = v()
x.task.task_index = x.index
x.task.opts = opts
x.task.parent = p
x.task.window_index = window_index
def rec_reg(l):
if type(l) in (splitter, subcontext):
fp = l.first_pane()
sp = l.b.first_pane()
reg_if_new(fp, splitter.alloc_index, None, None)
l.index = fp.task.pane_index
reg_if_new(sp, splitter.alloc_index, fp.task, l.opts)
rec_reg(l.a)
rec_reg(l.b)
def pane_order(l):
first = l.first_pane()
L = []
def rec(l):
#print "rec", l
if type(l) is pane:
L.append(l)
elif type(l) in (splitter, subcontext):
rec(l.a)
rec(l.b)
rec(l)
#print len(L), L, len(all_tasks)
for i in xrange(len(L)):
L[i].pane_index = i
return L
#for x in all_tasks.values():
# x.task_index = -1
# x.pane_index = -1
splitter.top_index=-1
#print pane_order(l)
rec_reg(l)
def create_layout(l, name=app_name):
#global layout
tokens = []
splitter.top_index=0
try:
while len(l)>0:
t, v, l = layout_scanner(l)
tokens.append((t, v))
except Exception, e:
#print e
pass
#print tokens
tokiter = iter(tokens)
#accept(tokiter.next(), T_OPEN)
t, v = tokiter.next()
if t is T_OPEN:
layout = parse_layout(tokiter)
elif t is T_SYM:
layout = make_pane(v)
reg_order(layout, name)
return layout
all_windows = {}
def create_window(l):
i = l.find(' ')
bad = l.find('(')
if i==-1 or (-1 < bad < i):
raise ValueError("line must contain one word before layout data : "+l)
wname, l = l[:i], l[i+1:]
all_windows[get_task_prefix()+wname] = create_layout(l, wname)
return all_windows
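# Illustrative layout line (assumption -- the task names are placeholders that
# must already exist in all_tasks): a call such as
#   create_window('main (editor | 70 (shell -- 50 log))')
# builds a tmux window named 'main'; '|' maps to tmux's -h split (panes side by
# side), '--' to -v (panes stacked), and the number is passed to split-window
# as '-p <percent>'.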
def layout2tmux():
tasks_by_window = dict([ (app_name, list()) ] + [ (k, list()) for k in all_windows ])
for t in all_tasks.values():
if t.task_index!=-1:
tasks_by_window[t.window_index].append(t)
L = sorted((t for t in tasks_by_window[app_name]), None, lambda x: x.task_index)
cmds = [ tmux_session(L[0].tmux_shell_cmd()) ] + [ tmux_split(l.parent.task_index, l.opts, l.tmux_shell_cmd()) for l in L[1:] ]
#print app_name, tasks_by_window[app_name], L
del tasks_by_window[app_name]
for tk in tasks_by_window:
tw = tasks_by_window[tk]
L = sorted((t for t in tw), None, lambda x: x.task_index)
#print tk, tasks_by_window[tk], L
cmds += [ tmux_window(L[0].tmux_shell_cmd(), tk) ] + [ tmux_split(l.parent.task_index, l.opts, l.tmux_shell_cmd(), tk) for l in L[1:] ]
return cmds
##print L
##return [ tmux_window(L[0].tmux_shell_cmd()) ] + [ tmux_split(l.parent.pane_index, l.opts, l.tmux_shell_cmd()) for l in L[1:] ]
#return [ tmux_window(L[0].tmux_shell_cmd()) ] + [ tmux_split(l.parent.task_index, l.opts, l.tmux_shell_cmd()) for l in L[1:] ]
# static module init
site_dir_contexts = hackide_root+'/contexts/'
user_dir_contexts = os.getenv('HOME')+'/.hackide/contexts'
os.path.isdir(user_dir_contexts) or os.makedirs(user_dir_contexts)
for contextdefdir in (site_dir_contexts, user_dir_contexts):
for context_def in os.listdir(contextdefdir):
if not context_def.endswith('.hackide'):
continue
all_context_templates[context_def[:-8]] = open(contextdefdir+context_def).readlines()
| bl0b/hack-ide | py/layout.py | Python | bsd-3-clause | 8,034 |
# coding=utf8
#
# Copyright 2013 Dreamlab Onet.pl
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 3.0.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, visit
#
# http://www.gnu.org/licenses/lgpl.txt
#
class Spec(object):
def __init__(self):
self.mod_callbacks = []
def apply(self, rmock):
pass
def set_modified(self):
for callback in self.mod_callbacks:
callback()
def add_modified_callback(self, callback):
self.mod_callbacks.append(callback)
def reset(self):
pass
| tikan/rmock | src/rmock/core/spec.py | Python | lgpl-3.0 | 1,016 |
"""Project Euler - Problem 1 - http://projecteuler.net/problem=1"""
import sys
import time
import tools.timeutils as timeutils
def sum_numbers():
"""
Sums all natural numbers below 1000 that are multiples of 3 or 5
Returns: int
"""
a, b = 1, 0
while a < 1000:
if (a % 3 == 0) or (a % 5 == 0):
b += a
a += 1
return b
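# Equivalent closed form (illustrative): by inclusion-exclusion the answer is
#   S(3) + S(5) - S(15), where S(k) = k * m * (m + 1) // 2 and m = 999 // k.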
def main():
"""Main entry point for the script"""
start = time.time()
print(sum_numbers())
timeutils.elapsed_time(time.time() - start)
if __name__ == '__main__':
sys.exit(main())
| rentes/Euler | problem1.py | Python | mit | 577 |
"""This module provides the blueprint for some basic API endpoints.
For more information please refer to the documentation: http://bigchaindb.com/http-api
"""
import logging
from flask import current_app, request, jsonify
from flask_restful import Resource, reqparse
from bigchaindb.common.exceptions import SchemaValidationError, ValidationError
from bigchaindb.models import Transaction
from bigchaindb.web.views.base import make_error
from bigchaindb.web.views import parameters
logger = logging.getLogger(__name__)
class TransactionApi(Resource):
def get(self, tx_id):
"""API endpoint to get details about a transaction.
Args:
tx_id (str): the id of the transaction.
Return:
A JSON string containing the data about the transaction.
"""
pool = current_app.config['bigchain_pool']
with pool() as bigchain:
tx, status = bigchain.get_transaction(tx_id, include_status=True)
if not tx or status is not bigchain.TX_VALID:
return make_error(404)
return tx.to_dict()
class TransactionListApi(Resource):
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('operation', type=parameters.valid_operation)
parser.add_argument('asset_id', type=parameters.valid_txid,
required=True)
args = parser.parse_args()
with current_app.config['bigchain_pool']() as bigchain:
txs = bigchain.get_transactions_filtered(**args)
return [tx.to_dict() for tx in txs]
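    # Illustrative request (assumption -- the URL prefix comes from the route
    # registration, which is not part of this file):
    #   GET .../transactions?asset_id=<transaction id>&operation=CREATE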
def post(self):
"""API endpoint to push transactions to the Federation.
Return:
A ``dict`` containing the data about the transaction.
"""
pool = current_app.config['bigchain_pool']
# `force` will try to format the body of the POST request even if the
# `content-type` header is not set to `application/json`
tx = request.get_json(force=True)
try:
tx_obj = Transaction.from_dict(tx)
except SchemaValidationError as e:
return make_error(
400,
message='Invalid transaction schema: {}'.format(
e.__cause__.message)
)
except ValidationError as e:
return make_error(
400,
'Invalid transaction ({}): {}'.format(type(e).__name__, e)
)
with pool() as bigchain:
bigchain.statsd.incr('web.tx.post')
try:
bigchain.validate_transaction(tx_obj)
except ValidationError as e:
return make_error(
400,
'Invalid transaction ({}): {}'.format(type(e).__name__, e)
)
else:
bigchain.write_transaction(tx_obj)
response = jsonify(tx)
response.status_code = 202
# NOTE: According to W3C, sending a relative URI is not allowed in the
# Location Header:
# - https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html
#
# Flask is autocorrecting relative URIs. With the following command,
# we're able to prevent this.
response.autocorrect_location_header = False
status_monitor = '../statuses?transaction_id={}'.format(tx_obj.id)
response.headers['Location'] = status_monitor
return response
| stanta/darfchain | darfchain_docker_vagrant/bigchaindb/web/views/transactions.py | Python | gpl-3.0 | 3,446 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import codecs
import shutil
import sys
import os
import commands
from time import time
ratio_10=0
tiempo_10=0
ratio_100=0
tiempo_100=0
ratio_1000=0
tiempo_1000=0
ratio_10000=0
tiempo_10000=0
ratio_100000=0
tiempo_100000=0
file=open('bzip2.txt')
for line in file:
line=line.split(' ')
if((line[0])[:-10]=='10'):
ratio_10=ratio_10+float(line[3])/5
tiempo_10=tiempo_10+float(line[4])/5
if((line[0])[:-10]=='100'):
ratio_100=ratio_100+float(line[3])/5
tiempo_100=tiempo_100+float(line[4])/5
if((line[0])[:-10]=='1000'):
ratio_1000=ratio_1000+float(line[3])/5
tiempo_1000=tiempo_1000+float(line[4])/5
if((line[0])[:-10]=='10000'):
ratio_10000=ratio_10000+float(line[3])/5
tiempo_10000=tiempo_10000+float(line[4])/5
if((line[0])[:-10]=='100000'):
ratio_100000=ratio_100000+float(line[3])/5
tiempo_100000=tiempo_100000+float(line[4])/5
file.close()
print('bzip2 10 '+ str(ratio_10) + ' '+ str(tiempo_10))
print('bzip2 100 '+ str(ratio_100) + ' '+ str(tiempo_100))
print('bzip2 1000 '+ str(ratio_1000) + ' '+ str(tiempo_1000))
print('bzip2 10000 '+ str(ratio_10000) + ' '+ str(tiempo_10000))
print('bzip2 100000 '+ str(ratio_100000) + ' '+ str(tiempo_100000))
ratio_10=0
tiempo_10=0
ratio_100=0
tiempo_100=0
ratio_1000=0
tiempo_1000=0
ratio_10000=0
tiempo_10000=0
ratio_100000=0
tiempo_100000=0
file=open('brotli.txt')
for line in file:
line=line.split(' ')
if((line[0])[:-10]=='10'):
ratio_10=ratio_10+float(line[3])/5
tiempo_10=tiempo_10+float(line[4])/5
if((line[0])[:-10]=='100'):
ratio_100=ratio_100+float(line[3])/5
tiempo_100=tiempo_100+float(line[4])/5
if((line[0])[:-10]=='1000'):
ratio_1000=ratio_1000+float(line[3])/5
tiempo_1000=tiempo_1000+float(line[4])/5
if((line[0])[:-10]=='10000'):
ratio_10000=ratio_10000+float(line[3])/5
tiempo_10000=tiempo_10000+float(line[4])/5
if((line[0])[:-10]=='100000'):
ratio_100000=ratio_100000+float(line[3])/5
tiempo_100000=tiempo_100000+float(line[4])/5
file.close()
print('bro 10 '+ str(ratio_10) + ' '+ str(tiempo_10))
print('bro 100 '+ str(ratio_100) + ' '+ str(tiempo_100))
print('bro 1000 '+ str(ratio_1000) + ' '+ str(tiempo_1000))
print('bro 10000 '+ str(ratio_10000) + ' '+ str(tiempo_10000))
print('bro 100000 '+ str(ratio_100000) + ' '+ str(tiempo_100000))
ratio_10=0
tiempo_10=0
ratio_100=0
tiempo_100=0
ratio_1000=0
tiempo_1000=0
ratio_10000=0
tiempo_10000=0
ratio_100000=0
tiempo_100000=0
file=open('LZMA.txt')
for line in file:
line=line.split(' ')
if((line[0])[:-9]=='10'):
ratio_10=ratio_10+float(line[3])/5
tiempo_10=tiempo_10+float(line[4])/5
if((line[0])[:-9]=='100'):
ratio_100=ratio_100+float(line[3])/5
tiempo_100=tiempo_100+float(line[4])/5
if((line[0])[:-9]=='1000'):
ratio_1000=ratio_1000+float(line[3])/5
tiempo_1000=tiempo_1000+float(line[4])/5
if((line[0])[:-9]=='10000'):
ratio_10000=ratio_10000+float(line[3])/5
tiempo_10000=tiempo_10000+float(line[4])/5
if((line[0])[:-9]=='100000'):
ratio_100000=ratio_100000+float(line[3])/5
tiempo_100000=tiempo_100000+float(line[4])/5
file.close()
print('LZMA 10 '+ str(ratio_10) + ' '+ str(tiempo_10))
print('LZMA 100 '+ str(ratio_100) + ' '+ str(tiempo_100))
print('LZMA 1000 '+ str(ratio_1000) + ' '+ str(tiempo_1000))
print('LZMA 10000 '+ str(ratio_10000) + ' '+ str(tiempo_10000))
print('LZMA 100000 '+ str(ratio_100000) + ' '+ str(tiempo_100000))
ratio_10=0
tiempo_10=0
ratio_100=0
tiempo_100=0
ratio_1000=0
tiempo_1000=0
ratio_10000=0
tiempo_10000=0
ratio_100000=0
tiempo_100000=0
file=open('zopfli.txt')
for line in file:
line=line.split(' ')
if((line[0])[:-9]=='10'):
ratio_10=ratio_10+float(line[3])/5
tiempo_10=tiempo_10+float(line[4])/5
if((line[0])[:-9]=='100'):
ratio_100=ratio_100+float(line[3])/5
tiempo_100=tiempo_100+float(line[4])/5
if((line[0])[:-9]=='1000'):
ratio_1000=ratio_1000+float(line[3])/5
tiempo_1000=tiempo_1000+float(line[4])/5
if((line[0])[:-9]=='10000'):
ratio_10000=ratio_10000+float(line[3])/5
tiempo_10000=tiempo_10000+float(line[4])/5
if((line[0])[:-9]=='100000'):
ratio_100000=ratio_100000+float(line[3])/5
tiempo_100000=tiempo_100000+float(line[4])/5
file.close()
print('Zopfli 10 '+ str(ratio_10) + ' '+ str(tiempo_10))
print('Zopfli 100 '+ str(ratio_100) + ' '+ str(tiempo_100))
print('Zopfli 1000 '+ str(ratio_1000) + ' '+ str(tiempo_1000))
print('Zopfli 10000 '+ str(ratio_10000) + ' '+ str(tiempo_10000))
print('Zopfli 100000 '+ str(ratio_100000) + ' '+ str(tiempo_100000))
ratio_10=0
tiempo_10=0
ratio_100=0
tiempo_100=0
ratio_1000=0
tiempo_1000=0
ratio_10000=0
tiempo_10000=0
ratio_100000=0
tiempo_100000=0
file=open('gzip.txt')
for line in file:
line=line.split(' ')
if((line[0])[:-10]=='10'):
ratio_10=ratio_10+float(line[3])/5
tiempo_10=tiempo_10+float(line[4])/5
if((line[0])[:-10]=='100'):
ratio_100=ratio_100+float(line[3])/5
tiempo_100=tiempo_100+float(line[4])/5
if((line[0])[:-10]=='1000'):
ratio_1000=ratio_1000+float(line[3])/5
tiempo_1000=tiempo_1000+float(line[4])/5
if((line[0])[:-10]=='10000'):
ratio_10000=ratio_10000+float(line[3])/5
tiempo_10000=tiempo_10000+float(line[4])/5
if((line[0])[:-10]=='100000'):
ratio_100000=ratio_100000+float(line[3])/5
tiempo_100000=tiempo_100000+float(line[4])/5
file.close()
print('gzip 10 '+ str(ratio_10) + ' '+ str(tiempo_10))
print('gzip 100 '+ str(ratio_100) + ' '+ str(tiempo_100))
print('gzip 1000 '+ str(ratio_1000) + ' '+ str(tiempo_1000))
print('gzip 10000 '+ str(ratio_10000) + ' '+ str(tiempo_10000))
print('gzip 100000 '+ str(ratio_100000) + ' '+ str(tiempo_100000))
ratio_10=0
tiempo_10=0
ratio_100=0
tiempo_100=0
ratio_1000=0
tiempo_1000=0
ratio_10000=0
tiempo_10000=0
ratio_100000=0
tiempo_100000=0
file=open('lzhma.txt')
for line in file:
line=line.split(' ')
if((line[0])[:-12]=='10'):
ratio_10=ratio_10+float(line[3])/5
tiempo_10=tiempo_10+float(line[4])/5
if((line[0])[:-12]=='100'):
ratio_100=ratio_100+float(line[3])/5
tiempo_100=tiempo_100+float(line[4])/5
if((line[0])[:-12]=='1000'):
ratio_1000=ratio_1000+float(line[3])/5
tiempo_1000=tiempo_1000+float(line[4])/5
if((line[0])[:-12]=='10000'):
ratio_10000=ratio_10000+float(line[3])/5
tiempo_10000=tiempo_10000+float(line[4])/5
if((line[0])[:-12]=='100000'):
ratio_100000=ratio_100000+float(line[3])/5
tiempo_100000=tiempo_100000+float(line[4])/5
file.close()
print('lzhma 10 '+ str(ratio_10) + ' '+ str(tiempo_10))
print('lzhma 100 '+ str(ratio_100) + ' '+ str(tiempo_100))
print('lzhma 1000 '+ str(ratio_1000) + ' '+ str(tiempo_1000))
print('lzhma 10000 '+ str(ratio_10000) + ' '+ str(tiempo_10000))
print('lzhma 100000 '+ str(ratio_100000) + ' '+ str(tiempo_100000))
| pasilvagh/U | compresionTexto/extract.py | Python | mit | 7,682 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""References:
Szegedy, Christian, Wei Liu, Yangqing Jia, Pierre Sermanet, Scott Reed, Dragomir
Anguelov, Dumitru Erhan, Vincent Vanhoucke, and Andrew Rabinovich. "Going deeper
with convolutions." arXiv preprint arXiv:1409.4842 (2014).
"""
import mxnet as mx
def ConvFactory(data, num_filter, kernel, stride=(1,1), pad=(0, 0), name=None, suffix=''):
conv = mx.symbol.Convolution(data=data, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad, name='conv_%s%s' %(name, suffix))
act = mx.symbol.Activation(data=conv, act_type='relu', name='relu_%s%s' %(name, suffix))
return act
def InceptionFactory(data, num_1x1, num_3x3red, num_3x3, num_d5x5red, num_d5x5, pool, proj, name):
# 1x1
c1x1 = ConvFactory(data=data, num_filter=num_1x1, kernel=(1, 1), name=('%s_1x1' % name))
# 3x3 reduce + 3x3
c3x3r = ConvFactory(data=data, num_filter=num_3x3red, kernel=(1, 1), name=('%s_3x3' % name), suffix='_reduce')
c3x3 = ConvFactory(data=c3x3r, num_filter=num_3x3, kernel=(3, 3), pad=(1, 1), name=('%s_3x3' % name))
# double 3x3 reduce + double 3x3
cd5x5r = ConvFactory(data=data, num_filter=num_d5x5red, kernel=(1, 1), name=('%s_5x5' % name), suffix='_reduce')
cd5x5 = ConvFactory(data=cd5x5r, num_filter=num_d5x5, kernel=(5, 5), pad=(2, 2), name=('%s_5x5' % name))
# pool + proj
pooling = mx.symbol.Pooling(data=data, kernel=(3, 3), stride=(1, 1), pad=(1, 1), pool_type=pool, name=('%s_pool_%s_pool' % (pool, name)))
cproj = ConvFactory(data=pooling, num_filter=proj, kernel=(1, 1), name=('%s_proj' % name))
# concat
concat = mx.symbol.Concat(*[c1x1, c3x3, cd5x5, cproj], name='ch_concat_%s_chconcat' % name)
return concat
def get_symbol(num_classes = 1000, **kwargs):
data = mx.sym.Variable("data")
conv1 = ConvFactory(data, 64, kernel=(7, 7), stride=(2,2), pad=(3, 3), name="conv1")
pool1 = mx.sym.Pooling(conv1, kernel=(3, 3), stride=(2, 2), pool_type="max")
conv2 = ConvFactory(pool1, 64, kernel=(1, 1), stride=(1,1), name="conv2")
conv3 = ConvFactory(conv2, 192, kernel=(3, 3), stride=(1, 1), pad=(1,1), name="conv3")
pool3 = mx.sym.Pooling(conv3, kernel=(3, 3), stride=(2, 2), pool_type="max")
in3a = InceptionFactory(pool3, 64, 96, 128, 16, 32, "max", 32, name="in3a")
in3b = InceptionFactory(in3a, 128, 128, 192, 32, 96, "max", 64, name="in3b")
pool4 = mx.sym.Pooling(in3b, kernel=(3, 3), stride=(2, 2), pool_type="max")
in4a = InceptionFactory(pool4, 192, 96, 208, 16, 48, "max", 64, name="in4a")
in4b = InceptionFactory(in4a, 160, 112, 224, 24, 64, "max", 64, name="in4b")
in4c = InceptionFactory(in4b, 128, 128, 256, 24, 64, "max", 64, name="in4c")
in4d = InceptionFactory(in4c, 112, 144, 288, 32, 64, "max", 64, name="in4d")
in4e = InceptionFactory(in4d, 256, 160, 320, 32, 128, "max", 128, name="in4e")
pool5 = mx.sym.Pooling(in4e, kernel=(3, 3), stride=(2, 2), pool_type="max")
in5a = InceptionFactory(pool5, 256, 160, 320, 32, 128, "max", 128, name="in5a")
in5b = InceptionFactory(in5a, 384, 192, 384, 48, 128, "max", 128, name="in5b")
pool6 = mx.sym.Pooling(in5b, kernel=(7, 7), stride=(1,1), pool_type="avg")
flatten = mx.sym.Flatten(data=pool6)
fc1 = mx.sym.FullyConnected(data=flatten, num_hidden=num_classes)
softmax = mx.symbol.SoftmaxOutput(data=fc1, name='softmax')
return softmax
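# Illustrative usage (not part of the original file; the class count is just an
# example): build the network symbol and list its argument names.
if __name__ == '__main__':
    net = get_symbol(num_classes=1000)
    print(net.list_arguments())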
| ucloud/uai-sdk | examples/mxnet/train/imagenet/code/symbols/googlenet.py | Python | apache-2.0 | 4,152 |
# Generated by Django 3.0.4 on 2020-10-21 15:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('books', '0121_auto_20201020_1707'),
]
operations = [
migrations.AlterField(
model_name='book',
name='savings',
field=models.IntegerField(blank=True, null=True),
),
]
| openstax/openstax-cms | books/migrations/0122_auto_20201021_1050.py | Python | agpl-3.0 | 394 |
import inspect
# from collections import OrderedDict as odict
import numpy as np
from galry import Manager, TextVisual, get_color, NavigationEventProcessor, \
DefaultEventProcessor, EventProcessor, GridEventProcessor, ordict, \
log_debug, log_info, log_warn
__all__ = ['InteractionManager']
class InteractionManager(Manager):
"""This class implements the processing of the raised interaction events.
    To be overridden.
"""
# Initialization methods
# ----------------------
def __init__(self, parent):
super(InteractionManager, self).__init__(parent)
self.cursor = None
self.prev_event = None
self.processors = ordict()
self.initialize_default(
constrain_navigation=self.parent.constrain_navigation,
momentum=self.parent.momentum)
self.initialize()
def initialize(self):
"""Initialize the InteractionManager.
        To be overridden.
"""
pass
def initialize_default(self, **kwargs):
pass
# Processor methods
# -----------------
def get_processors(self):
"""Return all processors."""
return self.processors
def get_processor(self, name):
"""Return a processor from its name."""
if name is None:
name = 'processor0'
return self.processors.get(name, None)
def add_processor(self, cls, *args, **kwargs):
"""Add a new processor, which handles processing of interaction events.
Several processors can be defined in an InteractionManager instance.
One event can be handled by several processors.
"""
# get the name of the visual from kwargs
name = kwargs.pop('name', 'processor%d' % (len(self.get_processors())))
if self.get_processor(name):
raise ValueError("Processor name '%s' already exists." % name)
activated = kwargs.pop('activated', True)
processor = cls(self, *args, **kwargs)
self.processors[name] = processor
processor.activate(activated)
return processor
def add_default_processor(self):
"""Add a default processor, useful to add handlers for events
in the InteractionManager without explicitely creating a new
processor."""
return self.add_processor(EventProcessor, name='default_processor')
def register(self, event, method):
"""Register a new handler for an event, using the manager's default
processor."""
processor = self.get_processor('default_processor')
if processor is None:
processor = self.add_default_processor()
processor.register(event, method)
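    # Illustrative usage (assumption -- not from the original module): a
    # subclass typically registers its handlers in initialize(), e.g.
    #
    #     class MyInteractionManager(InteractionManager):
    #         def initialize(self):
    #             self.register('MyEvent', self.process_my_event)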
# Event processing methods
# ------------------------
def process_event(self, event, parameter):
"""Process an event.
This is the main method of this class. It is called as soon as an
interaction event is raised by an user action.
Arguments:
* event: the event to process, an InteractionEvent string.
* parameter: the parameter returned by the param_getter function
specified in the related binding.
"""
# process None events in all processors
if event is None and self.prev_event is not None:
for name, processor in self.get_processors().iteritems():
processor.process_none()
self.cursor = None
# process events in all processors
if event is not None:
for name, processor in self.get_processors().iteritems():
if processor.activated and processor.registered(event):
# print name, event
processor.process(event, parameter)
cursor = processor.get_cursor()
if self.cursor is None:
self.cursor = cursor
self.prev_event = event
def get_cursor(self):
return self.cursor
| DavidTingley/ephys-processing-pipeline | installation/klustaviewa-0.3.0/galry/interactionmanager.py | Python | gpl-3.0 | 4,278 |
fib1 = 1
fib2 = 2
temp = 0
count = 3
while(len(str(fib2)) < 1000):
temp = fib2
fib2 = fib1 + fib2
fib1 = temp
count += 1
print count
| dlakata/projecteuler | p25.py | Python | gpl-2.0 | 138 |
#!/usr/bin/env python
'''Parser for the plain text version of congressional record documents
outputs the text marked up with xml
'''
import datetime
import os
import argparse
from .fdsys.cr_parser import parse_directory, parse_single
from .fdsys.simple_scrape import find_fdsys
def daterange(start, end, date_format=None):
delta = end - start
for i in range(abs(delta.days) + 1):
date = start + datetime.timedelta(days=i)
if date_format:
date = datetime.datetime.strftime(date, date_format)
yield date
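# Illustrative only: daterange(parsedate('2016-01-01'), parsedate('2016-01-03'), "%Y-%m-%d")
# yields '2016-01-01', '2016-01-02' and '2016-01-03' (parsedate is defined below).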
def parsedate(s):
return datetime.datetime.strptime(s.strip(), "%Y-%m-%d")
def main():
default_outdir = os.path.join(os.getcwd(), 'output')
parser = argparse.ArgumentParser(
prog="parsecr",
description='Parse arguments for the Congressional Record Parser \n\
University of Tennessee (UTK) Parser Updates by: \n\
Jace Prince\n\
Dr. Nathan Kelly\n')
parser.add_argument('days', type=str, nargs='*',
help='A positional argument for dates. This can be a single date, a list \
                        of dates or a range of dates. Records will be retrieved for each date given. Make sure dates are in \
YYYY-MM-DD format. Date ranges should be given as start date then end \
date YYYY-MM-DD:YYYY-MM-DD. For several specific days, write out the \
dates in the correct format with a space between each date.\n\
The parser will look for a previous file to see if it has been downloaded, \
if not, it will download the file from fdsys.')
parser.add_argument('-f', '--infile', dest='infile', action='store',
help='Parse a single txt or htm file.')
parser.add_argument('-id', '--indir', dest='indir', action='store',
help='An entire directory to traverse and parse, can replace infile')
parser.add_argument('-od', '--outdir', dest='outdir', action='store',
help='An output directory for the parsed content')
parser.add_argument('-l', '--logdir', dest='logdir', action='store',
help='An output directory for logs')
parser.add_argument('--interactive', dest='interactive', action='store_true',
help='Step through files and decide whether or not to parse each one')
parser.add_argument('--force', dest='force', action='store_true',
                        help='Force documents to be downloaded even if the txt files already exist.')
parser.add_argument('--ntf', '-no_text_files', dest='notext', action='store_true',
                        help='Remove the text version of the documents. (The .htm version is automatically removed.) \
                        EVERYTHING in the indir folder will be removed.')
args = parser.parse_args()
# Scrapes files and creates a directory from FDsys if no file exists in source folder
if args.days:
if not args.outdir:
args.outdir = default_outdir
no_record = []
dates = []
for date_arg in args.days:
if ':' in date_arg:
start_end = date_arg.split(':')
if len(start_end) == 1:
                    dates.append(start_end[0])  # lone date despite the ':'
else:
begin = parsedate(start_end[0])
end = parsedate(start_end[1])
dates.extend(daterange(begin, end, "%Y-%m-%d"))
else:
dates.append(date_arg)
for day in dates:
doc_path = find_fdsys(day, force=args.force, outdir=args.outdir)
# did not return records
if doc_path is None:
no_record.append(day)
else:
file_path = os.path.dirname(doc_path)
if not args.logdir:
args.logdir = os.path.realpath(os.path.join(file_path, '__log'))
parsed_path = os.path.realpath(os.path.join(file_path, '__parsed'))
parse_directory(doc_path, interactive=args.interactive,
logdir=args.logdir, outdir=parsed_path)
if args.notext:
for filename in os.listdir(doc_path):
if filename.endswith('.txt') or filename.endswith('.xml') or filename.endswith('.htm'):
file_path = os.path.join(doc_path, filename)
os.remove(file_path)
os.rmdir(doc_path)
if len(no_record) > 0:
print "No results were found for the following day/s: %s " % (no_record)
# Deal with directory case:
elif args.indir:
if not args.logdir:
args.logdir = os.path.realpath(os.path.join(args.indir, '__log'))
if not args.outdir:
args.outdir = os.path.realpath(os.path.join(args.indir, '__parsed'))
parse_directory(args.indir, interactive=args.interactive,
logdir=args.logdir, outdir=args.outdir)
if args.notext:
for filename in os.listdir(doc_path):
if filename.endswith('.txt') or filename.endswith('.xml') or filename.endswith('.htm'):
file_path = os.path.join(doc_path, filename)
os.remove(file_path)
os.rmdir(doc_path)
# Deal with single file case:
elif args.infile:
if not args.logdir:
args.logdir = os.path.realpath(os.path.join(os.path.dirname(args.infile), '__log'))
if not args.outdir:
args.outdir = os.path.realpath(os.path.join(os.path.dirname(args.infile), '__parsed'))
parse_single(args.infile, logdir=args.logdir, outdir=args.outdir)
if args.notext:
os.remove(args.infile)
else:
msg = 'Either a date (YYYY-MM-DD), --infile argument or the --indir flag is required!'
parser.error(msg)
if __name__ == '__main__':
main()
| jprinc16/congressional-record | congressionalrecord/cli.py | Python | bsd-3-clause | 6,024 |
'''This gives a main() function that serves as a nice wrapper
around other commands and presents the ability to serve up multiple
command-line functions from a single python script.
'''
import os, os.path, tempfile, sys, shutil, logging, argparse
import util.version
__author__ = "[email protected]"
__version__ = util.version.get_version()
log = logging.getLogger()
tmpDir = None
def setup_logger(log_level):
loglevel = getattr(logging, log_level.upper(), None)
assert loglevel, "unrecognized log level: %s" % log_level
log.setLevel(loglevel)
h = logging.StreamHandler()
h.setFormatter(logging.Formatter("%(asctime)s - %(module)s:%(lineno)d:%(funcName)s - %(levelname)s - %(message)s"))
log.addHandler(h)
def script_name():
return os.path.basename(sys.argv[0]).rsplit('.',1)[0]
def common_args(parser, arglist=(('tmpDir',None), ('loglevel',None))):
for k,v in arglist:
if k=='loglevel':
if not v:
v = 'DEBUG'
parser.add_argument("--loglevel", dest="loglevel",
help="Verboseness of output. [default: %(default)s]",
default=v,
choices=('DEBUG','INFO','WARNING','ERROR','CRITICAL','EXCEPTION'))
elif k=='tmpDir':
if not v:
v = find_tmpDir()
parser.add_argument("--tmpDir", dest="tmpDir",
help="Base directory for temp files. [default: %(default)s]",
default=v)
parser.add_argument("--tmpDirKeep",
action="store_true", dest="tmpDirKeep",
help="""Keep the tmpDir if an exception occurs while
running. Default is to delete all temp files at
the end, even if there's a failure.""",
default=False)
elif k=='version':
if not v:
v=__version__
parser.add_argument('--version', '-V', action='version', version=v)
else:
raise Exception("unrecognized argument %s" % k)
return parser
def main_command(mainfunc):
''' This wraps a python method in another method that can be called
with an argparse.Namespace object. When called, it will pass all
the values of the object on as parameters to the function call.
'''
def _main(args):
args2 = dict((k,v) for k,v in vars(args).items() if k not in ('loglevel','tmpDir','tmpDirKeep','version','func_main','command'))
mainfunc(**args2)
_main.__doc__ = mainfunc.__doc__
return _main
def attach_main(parser, cmd_main, split_args=False):
''' This attaches the main function call to a parser object.
'''
if split_args:
cmd_main = main_command(cmd_main)
parser.description = cmd_main.__doc__
parser.set_defaults(func_main=cmd_main)
return parser
def make_parser(commands, description):
''' commands: a list of pairs containing the following:
1. name of command (string, no whitespace)
2. method to call (no arguments) that returns an argparse parser.
If commands contains exactly one member and the name of the
only command is None, then we get rid of the whole multi-command
thing and just present the options for that one function.
description: a long string to present as a description of your script
as a whole if the script is run with no arguments
'''
if len(commands)==1 and commands[0][0]==None:
# only one (nameless) command in this script, simplify
parser = commands[0][1]()
parser.set_defaults(command='')
else:
# multiple commands available
parser = argparse.ArgumentParser(description=description,
usage='%(prog)s subcommand', add_help=False)
parser.add_argument('--help', '-h', action='help', help=argparse.SUPPRESS)
parser.add_argument('--version', '-V', action='version', version=__version__, help=argparse.SUPPRESS)
subparsers = parser.add_subparsers(title='subcommands', dest='command')
for cmd_name, cmd_parser in commands:
p = subparsers.add_parser(cmd_name, help=cmd_parser.__doc__)
cmd_parser(p)
return parser
def main_argparse(commands, description):
parser = make_parser(commands, description)
# if called with no arguments, print help
if len(sys.argv)==1:
parser.parse_args(['--help'])
elif len(sys.argv)==2 and (len(commands)>1 or commands[0][0]!=None):
parser.parse_args([sys.argv[1], '--help'])
args = parser.parse_args()
setup_logger(not hasattr(args, 'loglevel') and 'DEBUG' or args.loglevel)
log.info("software version: %s, python version: %s", __version__, sys.version)
log.info("command: %s %s %s",
sys.argv[0], sys.argv[1],
' '.join(["%s=%s" % (k,v) for k,v in vars(args).items() if k not in ('command', 'func_main')]))
if hasattr(args, 'tmpDir'):
"""
If this command has a tmpDir option, use that as a base directory
and create a subdirectory within it which we will then destroy at
the end of execution.
"""
proposed_dir = 'tmp-%s-%s' % (script_name(),args.command)
if 'LSB_JOBID' in os.environ:
proposed_dir = 'tmp-%s-%s-%s-%s' % (script_name(),args.command,os.environ['LSB_JOBID'],os.environ['LSB_JOBINDEX'])
tempfile.tempdir = tempfile.mkdtemp(prefix='%s-'%proposed_dir, dir=args.tmpDir)
log.debug("using tempDir: %s" % tempfile.tempdir)
os.environ['TMPDIR'] = tempfile.tempdir # this is for running R
try:
ret = args.func_main(args)
except:
if hasattr(args, 'tmpDirKeep') and args.tmpDirKeep and not (tempfile.tempdir.startswith('/tmp') or tempfile.tempdir.startswith('/local')):
log.exception("Exception occurred while running %s, saving tmpDir at %s" , args.command, tempfile.tempdir)
else:
shutil.rmtree(tempfile.tempdir)
raise
else:
shutil.rmtree(tempfile.tempdir)
else:
# otherwise just run the command
ret = args.func_main(args)
if ret is None:
ret = 0
return ret
def find_tmpDir():
''' This provides a suggested base directory for a temp dir for use in your
argparse-based tmpDir option.
'''
tmpdir = '/tmp'
if os.access('/local/scratch', os.X_OK | os.W_OK | os.R_OK) and os.path.isdir('/local/scratch'):
tmpdir = '/local/scratch'
if 'LSB_JOBID' in os.environ:
# this directory often exists for LSF jobs, but not always.
# for example, if the job is part of a job array, this directory is called
# something unpredictable and unfindable, so just use /local/scratch
proposed_dir = '/local/scratch/%s.tmpdir' % os.environ['LSB_JOBID']
if os.access(proposed_dir, os.X_OK | os.W_OK | os.R_OK):
tmpdir = proposed_dir
elif 'TMPDIR' in os.environ and os.path.isdir(os.environ['TMPDIR']):
tmpdir = os.environ['TMPDIR']
return tmpdir
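# Illustrative usage sketch (not part of the original module; the subcommand
# and function names below are examples only): a script defines one
# parser-building function per subcommand and hands the list to main_argparse().
if __name__ == '__main__':
    def main_hello(name):
        '''Print a greeting.'''
        print('hello %s' % name)
    def parser_hello(parser):
        parser.add_argument('name', help='who to greet')
        common_args(parser, (('loglevel', None), ('tmpDir', None)))
        return attach_main(parser, main_hello, split_args=True)
    __commands__ = [('hello', parser_hello)]
    sys.exit(main_argparse(__commands__, 'demo of this command wrapper'))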
| broadinstitute/cms | cms/util/cmd.py | Python | bsd-2-clause | 7,134 |
import os
import mimetypes
import logging
from datetime import datetime, date
from dateutil.tz import tzutc
import dateutil.parser
from sqlalchemy import and_, or_, func, asc as ascending, desc as descending, event
from sqlalchemy.types import *
from sqlalchemy.sql.functions import coalesce
from sqlalchemy.orm import scoped_session, sessionmaker, relationship, aliased, mapper
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.schema import UniqueConstraint, Table, Column
from batteries.model import Model, initialize_model
from batteries.model.hashable import Hashable, HashableReference, HashableKey, HashableAssociation
from batteries.model.serializable import Serializable
from batteries.model.identifiable import Identifiable
from batteries.model.recordable import Recordable
from batteries.model.types import UTCDateTime, Ascii
logger = logging.getLogger(__name__)
_session = None
Model.metadata.naming_convention = {
'ix': 'ix_%(column_0_label)s',
'uq': 'uq_%(table_name)s_%(column_0_name)s',
'ck': 'ck_%(table_name)s_%(constraint_name)s',
'fk': 'fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s',
'pk': 'pk_%(table_name)s'
}
def get_session():
return _session
def initialize(engine, create=False, drop=False):
global _session
_session = scoped_session(sessionmaker())
initialize_model(_session, engine)
if drop:
logger.warning("dropping all tables in {engine.url!s}".format(engine=engine))
Model.metadata.drop_all()
if create:
logger.info("creating tables in {engine.url!s}".format(engine=engine))
Model.metadata.create_all(engine)
return _session
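# Illustrative bootstrap sketch (the SQLite URL is a placeholder; any
# SQLAlchemy engine works):
#
#   from sqlalchemy import create_engine
#   engine = create_engine('sqlite:///:memory:')
#   session = initialize(engine, create=True)   # create tables on first run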
def _handle_property(instance, name):
mapper = instance.__mapper__
if (mapper.has_property(name) or
hasattr(instance.__class__, name) or
not hasattr(instance, '_sa_instance_state') or
'AssociationProxy' in name):
return False
else:
return True
class PropertyContainer(object):
def __getattr__(self, k):
if _handle_property(self, k):
if k in self._properties:
return self._properties[k].value
else:
raise AttributeError(k)
else:
return Model.__getattribute__(self, k)
def __setattr__(self, k, v):
if _handle_property(self, k):
if k in self._properties:
self._properties[k].value = v
else:
self._properties[k] = Property(name=k, value=v)
else:
Model.__setattr__(self, k, v)
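# Illustrative behaviour of PropertyContainer's attribute dispatch (attribute
# names below are made up): on a mapped subclass such as Content, attributes
# that are not mapped columns fall through to the _properties collection, so
#
#   content.author = u'Someone'   # stores a Property row named 'author'
#   content.author                # returns the stored Property value
#
# while mapped columns like content.title behave as ordinary SQLAlchemy fields.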
class Site(Hashable, Identifiable, Model):
__identifiers__ = ('slug', 'name')
named_with = ('slug',)
_key = HashableKey()
_slug = Column('slug', Ascii(100), unique=True)
name = Column(UnicodeText, unique=True)
url = Column(UnicodeText)
_content = relationship('Content', lazy='dynamic')
_assets = relationship('Asset', lazy='dynamic')
_properties = relationship('Property',
secondary='site_property',
collection_class=attribute_mapped_collection('name'),
single_parent=True,
cascade='all, delete-orphan')
@property
def content(self):
return PropertyQuery(self._content, Content, content_property)
@property
def assets(self):
return PropertyQuery(self._assets, Asset, asset_property)
@property
def tags(self):
return Tag.query.distinct().\
join(Tag.content).\
filter(Content.site_key == self.key).\
order_by(Tag.name.asc())
class Content(Hashable, Identifiable, PropertyContainer, Model, Recordable):
__identifiers__ = ('slug', 'title')
named_with = ('title',)
_key = HashableKey()
site_key = HashableReference('site', name='site_key_constraint')
_slug = Column('slug', Ascii(100), unique=True)
title = Column(UnicodeText, nullable=False)
body = Column(UnicodeText, nullable=False)
publish_time = Column(UTCDateTime)
path = Column(UnicodeText, nullable=False)
# checksum = Column(Integer)
site = relationship('Site')
tags = relationship('Tag', secondary='content_tag')
_properties = relationship('Property',
secondary='content_property',
collection_class=attribute_mapped_collection('name'),
single_parent=True,
cascade='all, delete-orphan')
def as_dict(self):
d = {}
for k in ('key', 'slug', 'title', 'body', 'publish_time', 'path'):
d[k] = getattr(self, k)
d['tags'] = [t.name for t in self.tags]
# d.update(self.properties)
d.update({k: p.value for k, p in self._properties.items()})
return d
class Asset(Hashable, Identifiable, PropertyContainer, Model, Recordable):
__identifiers__ = ('slug', 'path')
named_with = ('filename',)
_key = HashableKey()
site_key = HashableReference('site', name='site_key_constraint')
_slug = Column('slug', Ascii(100), unique=True)
type = Column(Ascii(100))
body = Column(UnicodeText)
path = Column(UnicodeText, nullable=False)
checksum = Column(Integer, nullable=False)
site = relationship('Site')
tags = relationship('Tag', secondary='asset_tag')
_properties = relationship('Property',
secondary='asset_property',
collection_class=attribute_mapped_collection('name'),
single_parent=True,
cascade='all, delete-orphan')
@property
def filename(self):
basename = os.path.basename(self.path)
return os.path.splitext(basename)
@property
def mimetype(self):
m = mimetypes.guess_type('.'.join(self.filename))
return m
def as_dict(self):
d = {}
for k in ('key', 'slug', 'body', 'path', 'mimetype'):
d[k] = getattr(self, k)
d['tags'] = [t.name for t in self.tags]
d.update({k: p.value for k, p in self._properties.items()})
return d
@staticmethod
def on_before_insert(mapper, connection, target):
m = target.mimetype[0]
if m:
target.type = m.split('/')[0]
class Tag(Hashable, Identifiable, Model):
__identifiers__ = ('slug', 'name')
named_with = ('name',)
_key = HashableKey()
_slug = Column('slug', Ascii(100), unique=True)
name = Column(UnicodeText, nullable=False)
content = relationship('Content', secondary='content_tag')
def as_dict(self):
d = {}
for k in ('key', 'slug', 'name'):
d[k] = getattr(self, k)
return d
class Property(Hashable, Model):
__identifiers__ = ('name', 'value')
serializable = ('content_key', 'name', 'value', 'type')
_key = HashableKey()
name = Column(Unicode(100), nullable=False, primary_key=True)
bool_value = Column(Boolean(name='bool_value_constraint'))
int_value = Column(Integer)
float_value = Column(Numeric(24, scale=6))
date_value = Column(Date)
datetime_value = Column(UTCDateTime)
str_value = Column(UnicodeText)
@property
def type(self):
        if self.bool_value is not None:
            return bool
        if self.int_value is not None:
            return int
        if self.float_value is not None:
            return float
        if self.date_value is not None:
            return date
        if self.datetime_value is not None:
            return datetime
        if self.str_value is not None:
            return unicode
def _reset_value(self):
for k in ('bool', 'date', 'datetime', 'int', 'float', 'str'):
prop = k + '_value'
setattr(self, prop, None)
@hybrid_property
def value(self):
for k in ('bool', 'date', 'datetime', 'int', 'float', 'str'):
prop = k + '_value'
v = getattr(self, prop)
if v is not None:
return v
return None
@value.setter
def value(self, v):
self._reset_value()
# null
if v is None:
return
# bool
elif isinstance(v, bool):
self.bool_value = v
return
elif isinstance(v, basestring):
            # literal false values
            if v.lower() in ('false', 'no', 'off'):
                self.bool_value = False
                return
            # literal true values
            elif v.lower() in ('true', 'yes', 'on'):
                self.bool_value = True
                return
# quoted string
elif (v.startswith('"') and v.endswith('"')) or\
(v.startswith("'") and v.endswith("'")):
self.str_value = v[1:-1]
return
# int
try:
v = int(v)
self.int_value = v
return
except ValueError:
pass
# float
try:
v = float(v)
self.float_value = v
return
except ValueError:
pass
# date
try:
v = datetime.strptime(v, '%Y-%m-%d').date()
self.date_value = v
return
except ValueError:
pass
# datetime
try:
v = dateutil.parser.parse(v)
if v.tzinfo is None:
v = v.replace(tzinfo=tzutc())
self.datetime_value = v
return
except ValueError:
pass
# default str
self.str_value = v
def as_dict(self):
d = {}
for k in ('content_key', 'name', 'value', 'type'):
d[k] = getattr(self, k)
return d
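# Illustrative examples of the value-coercion rules in the setter above
# (inputs are placeholders):
#
#   p = Property(name=u'flag');      p.value = 'yes'          # -> bool_value True
#   p = Property(name=u'count');     p.value = '42'           # -> int_value 42
#   p = Property(name=u'published'); p.value = '2014-07-01'   # -> date_value
#   p = Property(name=u'label');     p.value = '"42"'         # quoted -> str_value '42'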
class PropertyQuery(object):
def __init__(self, relationship, model, assoc_table):
self.query = relationship.join(model._properties)
self.model = model
self.assoc_table = assoc_table
@classmethod
def _derive_property_type(cls, v):
if isinstance(v, bool):
return Property.bool_value
if isinstance(v, int):
return Property.int_value
if isinstance(v, float):
return Property.float_value
if isinstance(v, date):
return Property.date_value
if isinstance(v, datetime):
return Property.datetime_value
if isinstance(v, basestring):
return Property.str_value
if isinstance(v, (list, tuple)):
return map(cls._derive_property_type, v)[0]
if isinstance(v, dict):
return map(cls._derive_property_type, v.values())[0]
raise ValueError(type(v))
@classmethod
def _parse_numeric_criteria(cls, column, c):
if isinstance(c, (int, float)):
return column == c
if isinstance(c, list):
return column.in_(c)
if isinstance(c, dict):
clauses = []
for k, v in c.items():
if k == 'eq':
clauses.append(column == v)
if k == 'neq':
clauses.append(column != v)
if k == 'gt':
clauses.append(column > v)
if k == 'gteq':
clauses.append(column >= v)
if k == 'lt':
clauses.append(column < v)
if k == 'lteq':
clauses.append(column <= v)
return and_(*clauses)
raise ValueError(c)
@classmethod
def _parse_temporal_criteria(cls, column, c):
if isinstance(c, basestring):
c = dateutil.parser.parse(c)
return column == c
if isinstance(c, (date, datetime)):
return column == c
if isinstance(c, list):
return column.in_(c)
if isinstance(c, dict):
clauses = []
for k, v in c.items():
if k == 'is':
clauses.append(column == v)
if k == 'isnot':
clauses.append(column != v)
if k == 'after':
clauses.append(column > v)
if k == 'on_after':
clauses.append(column >= v)
if k == 'before':
clauses.append(column < v)
if k == 'on_before':
clauses.append(column <= v)
return and_(*clauses)
raise ValueError(c)
@classmethod
def _parse_textual_criteria(cls, column, c):
if isinstance(c, (basestring)):
return column == c
if isinstance(c, list):
return column.in_(c)
if isinstance(c, dict):
insensitive = c.get('insensitive')
if insensitive:
column = func.lower(column)
clauses = []
for k, v in c.items():
if k == 'is':
if insensitive:
v = v.lower()
clauses.append(column == v)
if k == 'in':
v = [s.lower() if insensitive else s for s in v]
clauses.append(column.in_(v))
if k == 'contains':
if insensitive:
v = v.lower()
clauses.append(column.contains(v))
if k == 'startswith':
if insensitive:
v = v.lower()
clauses.append(column.startswith(v))
                if k == 'endswith':
                    if insensitive:
                        v = v.lower()
                    clauses.append(column.endswith(v))
return and_(*clauses)
raise ValueError(c)
@classmethod
def _parse_criteria(cls, column, c):
if column.key == 'bool_value':
return column == c
if column.key in ('int_value', 'float_value'):
return cls._parse_numeric_criteria(column, c)
if column.key in ('date_value', 'datetime_value'):
return cls._parse_temporal_criteria(column, c)
        if column.key in ('str_value',):
return cls._parse_textual_criteria(column, c)
if column.key in ('key',):
if isinstance(c, list):
return column.in_(c)
return column == c
if column.key in ('title', 'body', 'path'):
return cls._parse_textual_criteria(column, c)
if column.key in ('publish_time',):
return cls._parse_temporal_criteria(column, c)
if column.key in ('type',):
return and_(column == c)
raise ValueError(column.key)
def filter(self, **kwargs):
for k, v in kwargs.items():
if self.model.__mapper__.has_property(k):
if k not in ('tags',):
column = getattr(self.model, k)
c = PropertyQuery._parse_criteria(column, v)
self.query = self.query.filter(c)
elif k in ('tags',):
if not isinstance(v, list):
v = [v]
tag_keys = []
for t in v:
if isinstance(t, Tag):
tag_keys.append(t.key)
else:
t = Tag.get(slug=t.lower())
if t:
tag_keys.append(t.key)
clauses = [Tag.key == k for k in tag_keys]
clause = and_(*(self.model.tags.any(c) for c in clauses))
self.query = self.query.filter(clause).group_by(self.model.key)
else:
clause = []
clause.append(Property.name == k)
column = PropertyQuery._derive_property_type(v)
clause.append(PropertyQuery._parse_criteria(column, v))
self.query = self.query.filter(and_(*clause))
return self
def all(self):
return self.query.all()
def one(self):
return self.query.one()
def limit(self, l):
self.query = self.query.limit(l)
return self
def offset(self, o):
self.query = self.query.offset(o)
return self
def order_by(self, asc=None, desc=None):
if asc:
if not isinstance(asc, list):
asc = [asc]
else:
asc = []
if desc:
if not isinstance(desc, list):
desc = [desc]
else:
desc = []
sorts = []
for c in asc + desc:
if self.model.__mapper__.has_property(c):
column = getattr(self.model, c)
sorts.append(column)
else:
aliased_assoc = aliased(self.assoc_table)
aliased_property = aliased(Property, name=c)
fk_name = '_'.join(self.assoc_table.name.split('_')[:-1] + ['key'])
fk = getattr(aliased_assoc.c, fk_name)
self.query = self.query.\
outerjoin(aliased_assoc,
fk == self.model.key).\
outerjoin(aliased_property,
and_(aliased_assoc.c.property_key == Property.key,
Property.name == c))
sorts.append((c, aliased_property))
for s in sorts:
if isinstance(s, tuple):
c, alias = s
if c in asc:
f = ascending
if c in desc:
f = descending
ordering = f(coalesce(
alias.bool_value,
alias.int_value,
alias.float_value,
alias.date_value,
alias.datetime_value,
alias.str_value))
self.query = self.query.order_by(ordering)
else:
if s.key in asc:
self.query = self.query.order_by(s.asc())
if s.key in desc:
self.query = self.query.order_by(s.desc())
return self
def __unicode__(self):
return unicode(self.query)
def __str__(self):
return str(self.query)
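# Illustrative query sketch for the PropertyQuery wrapper above (tag name and
# dates are placeholders): filter a site's content by tag and publish window,
# then order newest first.
#
#   posts = site.content.filter(
#       tags=['python'],
#       publish_time={'after': datetime(2014, 1, 1, tzinfo=tzutc())},
#   ).order_by(desc='publish_time').limit(10).all()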
content_tag = HashableAssociation('content', 'tag')
asset_tag = HashableAssociation('asset', 'tag')
content_property = HashableAssociation('content', 'property')
asset_property = HashableAssociation('asset', 'property')
site_property = HashableAssociation('site', 'property')
| jessedhillon/roxy | roxy/model.py | Python | mit | 18,838 |
# -*- coding: utf-8 -*-
from mock import Mock, patch
from flask.ext.login import login_user
from feedback_test.unit.test_base import BaseTestCase
from feedback_test.unit.util import insert_a_user
from feedback.user.models import User
class TestLoginAuth(BaseTestCase):
render_template = True
def setUp(self):
super(TestLoginAuth, self).setUp()
self.email = '[email protected]'
insert_a_user(email=self.email)
def test_login_route(self):
'''
        Test that the login route works properly
'''
request = self.client.get('/login')
self.assert200(request)
self.assert_template_used('user/login.html')
@patch('urllib2.urlopen')
def test_auth_persona_failure(self, urlopen):
'''
Test that we reject when persona throws bad statuses to us
'''
mock_open = Mock()
mock_open.read.side_effect = ['{"status": "error"}']
urlopen.return_value = mock_open
post = self.client.post('/auth', data=dict(
assertion='test'
))
self.assert403(post)
@patch('urllib2.urlopen')
def test_auth_no_user(self, urlopen):
'''
Test that we reject bad email addresses
'''
mock_open = Mock()
mock_open.read.side_effect = ['{"status": "okay", "email": "not_a_valid_email"}']
urlopen.return_value = mock_open
post = self.client.post('/auth', data=dict(
assertion='test'
))
self.assert403(post)
@patch('urllib2.urlopen')
def test_logout(self, urlopen):
'''
Test that we can logout properly
'''
login_user(User.query.all()[0])
logout = self.client.get('/logout', follow_redirects=True)
self.assertTrue('You are logged out' in logout.data)
self.assert_template_used('user/logout.html')
login_user(User.query.all()[0])
logout = self.client.post('/logout?persona=True', follow_redirects=True)
        self.assertEqual(logout.data, 'OK')
| codeforamerica/mdc-feedback | feedback_test/unit/public/test_public.py | Python | mit | 2,042 |
"""Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import tkinter
from tkinter.constants import *
tk = tkinter.Tk()
frame = tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
__version__ = "$Revision: 67095 $"
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
from tkinter import _fix
import _tkinter # If this fails your Python may not be configured for Tk
TclError = _tkinter.TclError
from tkinter.constants import *
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
# These are not always defined, e.g. not on Win32 with Tk 8.0 :-(
try: _tkinter.createfilehandler
except AttributeError: _tkinter.createfilehandler = None
try: _tkinter.deletefilehandler
except AttributeError: _tkinter.deletefilehandler = None
def _flatten(seq):
"""Internal function."""
res = ()
for item in seq:
if isinstance(item, (tuple, list)):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
"""Internal function."""
if isinstance(cnfs, dict):
return cnfs
elif isinstance(cnfs, (type(None), str)):
return cnfs
else:
cnf = {}
for c in _flatten(cnfs):
try:
cnf.update(c)
except (AttributeError, TypeError) as msg:
print("_cnfmerge: fallback due to:", msg)
for k, v in c.items():
cnf[k] = v
return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
class Event:
"""Container for the properties of an event.
Instances of this type are generated if one of the following events occurs:
KeyPress, KeyRelease - for keyboard events
ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
Colormap, Gravity, Reparent, Property, Destroy, Activate,
Deactivate - for window events.
If a callback function for one of these events is registered
using bind, bind_all, bind_class, or tag_bind, the callback is
called with an Event as first argument. It will have the
following attributes (in braces are the event types for which
the attribute is valid):
serial - serial number of event
num - mouse button pressed (ButtonPress, ButtonRelease)
focus - whether the window has the focus (Enter, Leave)
height - height of the exposed window (Configure, Expose)
width - width of the exposed window (Configure, Expose)
keycode - keycode of the pressed key (KeyPress, KeyRelease)
state - state of the event as a number (ButtonPress, ButtonRelease,
Enter, KeyPress, KeyRelease,
Leave, Motion)
state - state as a string (Visibility)
time - when the event occurred
x - x-position of the mouse
y - y-position of the mouse
x_root - x-position of the mouse on the screen
(ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
y_root - y-position of the mouse on the screen
(ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
char - pressed character (KeyPress, KeyRelease)
send_event - see X/Windows documentation
keysym - keysym of the event as a string (KeyPress, KeyRelease)
keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
type - type of the event as a number
widget - widget in which the event occurred
delta - delta of wheel movement (MouseWheel)
"""
pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
"""Inhibit setting of default root window.
    Call this function to prevent the first instance of
    Tk from being used for windows without an explicit parent window.
"""
global _support_default_root
_support_default_root = 0
global _default_root
_default_root = None
del _default_root
def _tkerror(err):
"""Internal function."""
pass
def _exit(code='0'):
"""Internal function. Calling it will throw the exception SystemExit."""
raise SystemExit(code)
_varnum = 0
class Variable:
"""Class to define value holders for e.g. buttons.
Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
that constrain the type of the value returned from get()."""
_default = ""
def __init__(self, master=None, value=None, name=None):
"""Construct a variable
MASTER can be given as master widget.
VALUE is an optional value (defaults to "")
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
global _varnum
if not master:
master = _default_root
self._master = master
self._tk = master.tk
if name:
self._name = name
else:
self._name = 'PY_VAR' + repr(_varnum)
_varnum += 1
if value is not None:
self.set(value)
elif not self._tk.call("info", "exists", self._name):
self.set(self._default)
def __del__(self):
"""Unset the variable in Tcl."""
self._tk.globalunsetvar(self._name)
def __str__(self):
"""Return the name of the variable in Tcl."""
return self._name
def set(self, value):
"""Set the variable to VALUE."""
return self._tk.globalsetvar(self._name, value)
def get(self):
"""Return value of variable."""
return self._tk.globalgetvar(self._name)
def trace_variable(self, mode, callback):
"""Define a trace callback for the variable.
MODE is one of "r", "w", "u" for read, write, undefine.
CALLBACK must be a function which is called when
the variable is read, written or undefined.
Return the name of the callback.
"""
cbname = self._master._register(callback)
self._tk.call("trace", "variable", self._name, mode, cbname)
return cbname
trace = trace_variable
def trace_vdelete(self, mode, cbname):
"""Delete the trace callback for a variable.
MODE is one of "r", "w", "u" for read, write, undefine.
CBNAME is the name of the callback returned from trace_variable or trace.
"""
self._tk.call("trace", "vdelete", self._name, mode, cbname)
self._master.deletecommand(cbname)
def trace_vinfo(self):
"""Return all trace callback information."""
return map(self._tk.split, self._tk.splitlist(
self._tk.call("trace", "vinfo", self._name)))
def __eq__(self, other):
"""Comparison for equality (==).
Note: if the Variable's master matters to behavior
also compare self._master == other._master
"""
return self.__class__.__name__ == other.__class__.__name__ \
and self._name == other._name
class StringVar(Variable):
"""Value holder for strings variables."""
_default = ""
def __init__(self, master=None, value=None, name=None):
"""Construct a string variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to "")
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return value of variable as string."""
value = self._tk.globalgetvar(self._name)
if isinstance(value, str):
return value
return str(value)
class IntVar(Variable):
"""Value holder for integer variables."""
_default = 0
def __init__(self, master=None, value=None, name=None):
"""Construct an integer variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to 0)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def set(self, value):
"""Set the variable to value, converting booleans to integers."""
if isinstance(value, bool):
value = int(value)
return Variable.set(self, value)
def get(self):
"""Return the value of the variable as an integer."""
return getint(self._tk.globalgetvar(self._name))
class DoubleVar(Variable):
"""Value holder for float variables."""
_default = 0.0
def __init__(self, master=None, value=None, name=None):
"""Construct a float variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to 0.0)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a float."""
return getdouble(self._tk.globalgetvar(self._name))
class BooleanVar(Variable):
"""Value holder for boolean variables."""
_default = False
def __init__(self, master=None, value=None, name=None):
"""Construct a boolean variable.
MASTER can be given as master widget.
VALUE is an optional value (defaults to False)
NAME is an optional Tcl name (defaults to PY_VARnum).
If NAME matches an existing variable and VALUE is omitted
then the existing value is retained.
"""
Variable.__init__(self, master, value, name)
def get(self):
"""Return the value of the variable as a bool."""
return self._tk.getboolean(self._tk.globalgetvar(self._name))
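# Illustrative use of the Variable subclasses above (requires a running Tk
# root so a Tcl interpreter exists; names are placeholders):
#
#   root = Tk()
#   name = StringVar(master=root, value="initial")
#   count = IntVar(master=root)
#   count.set(3)
#   Label(root, textvariable=name).pack()
#   name.get()     # -> "initial"
#   count.get()    # -> 3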
def mainloop(n=0):
"""Run the main loop of Tcl."""
_default_root.tk.mainloop(n)
getint = int
getdouble = float
def getboolean(s):
"""Convert true and false to integer values 1 and 0."""
return _default_root.tk.getboolean(s)
# Methods defined on both toplevel and interior widgets
class Misc:
"""Internal class.
Base class which defines methods common for interior widgets."""
# XXX font command?
_tclCommands = None
def destroy(self):
"""Internal function.
Delete all Tcl commands created for
this widget in the Tcl interpreter."""
if self._tclCommands is not None:
for name in self._tclCommands:
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
self._tclCommands = None
def deletecommand(self, name):
"""Internal function.
Delete the Tcl command provided in NAME."""
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
try:
self._tclCommands.remove(name)
except ValueError:
pass
def tk_strictMotif(self, boolean=None):
"""Set Tcl internal variable, whether the look and feel
should adhere to Motif.
A parameter of 1 means adhere to Motif (e.g. no color
change if mouse passes over slider).
Returns the set value."""
return self.tk.getboolean(self.tk.call(
'set', 'tk_strictMotif', boolean))
def tk_bisque(self):
"""Change the color scheme to light brown as used in Tk 3.6 and before."""
self.tk.call('tk_bisque')
def tk_setPalette(self, *args, **kw):
"""Set a new color scheme for all widget elements.
        A single color as argument will cause all colors of Tk
        widget elements to be derived from it.
Alternatively several keyword parameters and its associated
colors can be given. The following keywords are valid:
activeBackground, foreground, selectColor,
activeForeground, highlightBackground, selectBackground,
background, highlightColor, selectForeground,
disabledForeground, insertBackground, troughColor."""
self.tk.call(('tk_setPalette',)
+ _flatten(args) + _flatten(kw.items()))
def tk_menuBar(self, *args):
"""Do not use. Needed in Tk 3.6 and earlier."""
pass # obsolete since Tk 4.0
def wait_variable(self, name='PY_VAR'):
"""Wait until the variable is modified.
A parameter of type IntVar, StringVar, DoubleVar or
BooleanVar must be given."""
self.tk.call('tkwait', 'variable', name)
waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
def wait_visibility(self, window=None):
"""Wait until the visibility of a WIDGET changes
(e.g. it appears).
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'visibility', window._w)
def setvar(self, name='PY_VAR', value='1'):
"""Set Tcl variable NAME to VALUE."""
self.tk.setvar(name, value)
def getvar(self, name='PY_VAR'):
"""Return value of Tcl variable NAME."""
return self.tk.getvar(name)
getint = int
getdouble = float
def getboolean(self, s):
"""Return a boolean value for Tcl boolean values true and false given as parameter."""
return self.tk.getboolean(s)
def focus_set(self):
"""Direct input focus to this widget.
If the application currently does not have the focus
this widget will get the focus if the application gets
the focus through the window manager."""
self.tk.call('focus', self._w)
focus = focus_set # XXX b/w compat?
def focus_force(self):
"""Direct input focus to this widget even if the
application does not have the focus. Use with
caution!"""
self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_displayof(self):
"""Return the widget which has currently the focus on the
display where this widget is located.
Return None if the application does not have the focus."""
name = self.tk.call('focus', '-displayof', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def focus_lastfor(self):
"""Return the widget which would have the focus if top level
for this widget gets the focus from the window manager."""
name = self.tk.call('focus', '-lastfor', self._w)
if name == 'none' or not name: return None
return self._nametowidget(name)
def tk_focusFollowsMouse(self):
"""The widget under mouse will get automatically focus. Can not
be disabled easily."""
self.tk.call('tk_focusFollowsMouse')
def tk_focusNext(self):
"""Return the next widget in the focus order which follows
widget which has currently the focus.
The focus order first goes to the next child, then to
the children of the child recursively and then to the
next sibling which is higher in the stacking order. A
widget is omitted if it has the takefocus resource set
to 0."""
name = self.tk.call('tk_focusNext', self._w)
if not name: return None
return self._nametowidget(name)
def tk_focusPrev(self):
"""Return previous widget in the focus order. See tk_focusNext for details."""
name = self.tk.call('tk_focusPrev', self._w)
if not name: return None
return self._nametowidget(name)
def after(self, ms, func=None, *args):
"""Call function once after given time.
MS specifies the time in milliseconds. FUNC gives the
function which shall be called. Additional parameters
are given as parameters to the function call. Return
identifier to cancel scheduling with after_cancel."""
if not func:
# I'd rather use time.sleep(ms*0.001)
self.tk.call('after', ms)
else:
def callit():
try:
func(*args)
finally:
try:
self.deletecommand(name)
except TclError:
pass
name = self._register(callit)
return self.tk.call('after', ms, name)
def after_idle(self, func, *args):
"""Call FUNC once if the Tcl main loop has no event to
process.
Return an identifier to cancel the scheduling with
after_cancel."""
return self.after('idle', func, *args)
def after_cancel(self, id):
"""Cancel scheduling of function identified with ID.
Identifier returned by after or after_idle must be
given as first parameter."""
try:
data = self.tk.call('after', 'info', id)
# In Tk 8.3, splitlist returns: (script, type)
# In Tk 8.4, splitlist may return (script, type) or (script,)
script = self.tk.splitlist(data)[0]
self.deletecommand(script)
except TclError:
pass
self.tk.call('after', 'cancel', id)
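    # Illustrative use of after()/after_cancel() (callback name is a
    # placeholder):
    #
    #   def tick():
    #       print("one second later")
    #   job = widget.after(1000, tick)   # run tick() after ~1000 ms
    #   widget.after_cancel(job)         # or cancel it before it fires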
def bell(self, displayof=0):
"""Ring a display's bell."""
self.tk.call(('bell',) + self._displayof(displayof))
# Clipboard handling:
def clipboard_get(self, **kw):
"""Retrieve data from the clipboard on window's display.
The window keyword defaults to the root window of the Tkinter
application.
The type keyword specifies the form in which the data is
to be returned and should be an atom name such as STRING
or FILE_NAME. Type defaults to STRING.
This command is equivalent to:
selection_get(CLIPBOARD)
"""
return self.tk.call(('clipboard', 'get') + self._options(kw))
def clipboard_clear(self, **kw):
"""Clear the data in the Tk clipboard.
A widget specified for the optional displayof keyword
argument specifies the target display."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'clear') + self._options(kw))
def clipboard_append(self, string, **kw):
"""Append STRING to the Tk clipboard.
A widget specified at the optional displayof keyword
argument specifies the target display. The clipboard
can be retrieved with selection_get."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('clipboard', 'append') + self._options(kw)
+ ('--', string))
# XXX grab current w/o window argument
def grab_current(self):
"""Return widget which has currently the grab in this application
or None."""
name = self.tk.call('grab', 'current', self._w)
if not name: return None
return self._nametowidget(name)
def grab_release(self):
"""Release grab for this widget if currently set."""
self.tk.call('grab', 'release', self._w)
def grab_set(self):
"""Set grab for this widget.
A grab directs all events to this and descendant
widgets in the application."""
self.tk.call('grab', 'set', self._w)
def grab_set_global(self):
"""Set global grab for this widget.
A global grab directs all events to this and
descendant widgets on the display. Use with caution -
other applications do not get events anymore."""
self.tk.call('grab', 'set', '-global', self._w)
def grab_status(self):
"""Return None, "local" or "global" if this widget has
no, a local or a global grab."""
status = self.tk.call('grab', 'status', self._w)
if status == 'none': status = None
return status
def option_add(self, pattern, value, priority = None):
"""Set a VALUE (second parameter) for an option
PATTERN (first parameter).
An optional third parameter gives the numeric priority
(defaults to 80)."""
self.tk.call('option', 'add', pattern, value, priority)
def option_clear(self):
"""Clear the option database.
It will be reloaded if option_add is called."""
self.tk.call('option', 'clear')
def option_get(self, name, className):
"""Return the value for an option NAME for this widget
with CLASSNAME.
Values with higher priority override lower values."""
return self.tk.call('option', 'get', self._w, name, className)
def option_readfile(self, fileName, priority = None):
"""Read file FILENAME into the option database.
An optional second parameter gives the numeric
priority."""
self.tk.call('option', 'readfile', fileName, priority)
def selection_clear(self, **kw):
"""Clear the current X selection."""
if 'displayof' not in kw: kw['displayof'] = self._w
self.tk.call(('selection', 'clear') + self._options(kw))
def selection_get(self, **kw):
"""Return the contents of the current X selection.
A keyword parameter selection specifies the name of
the selection and defaults to PRIMARY. A keyword
parameter displayof specifies a widget on the display
to use."""
if 'displayof' not in kw: kw['displayof'] = self._w
return self.tk.call(('selection', 'get') + self._options(kw))
def selection_handle(self, command, **kw):
"""Specify a function COMMAND to call if the X
selection owned by this widget is queried by another
application.
This function must return the contents of the
selection. The function will be called with the
arguments OFFSET and LENGTH which allows the chunking
of very long selections. The following keyword
parameters can be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
name = self._register(command)
self.tk.call(('selection', 'handle') + self._options(kw)
+ (self._w, name))
def selection_own(self, **kw):
"""Become owner of X selection.
A keyword parameter selection specifies the name of
the selection (default PRIMARY)."""
self.tk.call(('selection', 'own') +
self._options(kw) + (self._w,))
def selection_own_get(self, **kw):
"""Return owner of X selection.
The following keyword parameter can
be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
if 'displayof' not in kw: kw['displayof'] = self._w
name = self.tk.call(('selection', 'own') + self._options(kw))
if not name: return None
return self._nametowidget(name)
def send(self, interp, cmd, *args):
"""Send Tcl command CMD to different interpreter INTERP to be executed."""
return self.tk.call(('send', interp, cmd) + args)
def lower(self, belowThis=None):
"""Lower this widget in the stacking order."""
self.tk.call('lower', self._w, belowThis)
def tkraise(self, aboveThis=None):
"""Raise this widget in the stacking order."""
self.tk.call('raise', self._w, aboveThis)
lift = tkraise
def colormodel(self, value=None):
"""Useless. Not implemented in Tk."""
return self.tk.call('tk', 'colormodel', self._w, value)
def winfo_atom(self, name, displayof=0):
"""Return integer which represents atom NAME."""
args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
return getint(self.tk.call(args))
def winfo_atomname(self, id, displayof=0):
"""Return name of atom with identifier ID."""
args = ('winfo', 'atomname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_cells(self):
"""Return number of cells in the colormap for this widget."""
return getint(
self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
def winfo_class(self):
"""Return window class name of this widget."""
return self.tk.call('winfo', 'class', self._w)
def winfo_colormapfull(self):
"""Return true if at the last color request the colormap was full."""
return self.tk.getboolean(
self.tk.call('winfo', 'colormapfull', self._w))
def winfo_containing(self, rootX, rootY, displayof=0):
"""Return the widget which is at the root coordinates ROOTX, ROOTY."""
args = ('winfo', 'containing') \
+ self._displayof(displayof) + (rootX, rootY)
name = self.tk.call(args)
if not name: return None
return self._nametowidget(name)
def winfo_depth(self):
"""Return the number of bits per pixel."""
return getint(self.tk.call('winfo', 'depth', self._w))
def winfo_exists(self):
"""Return true if this widget exists."""
return getint(
self.tk.call('winfo', 'exists', self._w))
def winfo_fpixels(self, number):
"""Return the number of pixels for the given distance NUMBER
(e.g. "3c") as float."""
return getdouble(self.tk.call(
'winfo', 'fpixels', self._w, number))
def winfo_geometry(self):
"""Return geometry string for this widget in the form "widthxheight+X+Y"."""
return self.tk.call('winfo', 'geometry', self._w)
def winfo_height(self):
"""Return height of this widget."""
return getint(
self.tk.call('winfo', 'height', self._w))
def winfo_id(self):
"""Return identifier ID for this widget."""
return self.tk.getint(
self.tk.call('winfo', 'id', self._w))
def winfo_interps(self, displayof=0):
"""Return the name of all Tcl interpreters for this display."""
args = ('winfo', 'interps') + self._displayof(displayof)
return self.tk.splitlist(self.tk.call(args))
def winfo_ismapped(self):
"""Return true if this widget is mapped."""
return getint(
self.tk.call('winfo', 'ismapped', self._w))
def winfo_manager(self):
"""Return the window mananger name for this widget."""
return self.tk.call('winfo', 'manager', self._w)
def winfo_name(self):
"""Return the name of this widget."""
return self.tk.call('winfo', 'name', self._w)
def winfo_parent(self):
"""Return the name of the parent of this widget."""
return self.tk.call('winfo', 'parent', self._w)
def winfo_pathname(self, id, displayof=0):
"""Return the pathname of the widget given by ID."""
args = ('winfo', 'pathname') \
+ self._displayof(displayof) + (id,)
return self.tk.call(args)
def winfo_pixels(self, number):
"""Rounded integer value of winfo_fpixels."""
return getint(
self.tk.call('winfo', 'pixels', self._w, number))
def winfo_pointerx(self):
"""Return the x coordinate of the pointer on the root window."""
return getint(
self.tk.call('winfo', 'pointerx', self._w))
def winfo_pointerxy(self):
"""Return a tuple of x and y coordinates of the pointer on the root window."""
return self._getints(
self.tk.call('winfo', 'pointerxy', self._w))
def winfo_pointery(self):
"""Return the y coordinate of the pointer on the root window."""
return getint(
self.tk.call('winfo', 'pointery', self._w))
def winfo_reqheight(self):
"""Return requested height of this widget."""
return getint(
self.tk.call('winfo', 'reqheight', self._w))
def winfo_reqwidth(self):
"""Return requested width of this widget."""
return getint(
self.tk.call('winfo', 'reqwidth', self._w))
def winfo_rgb(self, color):
"""Return tuple of decimal values for red, green, blue for
COLOR in this widget."""
return self._getints(
self.tk.call('winfo', 'rgb', self._w, color))
def winfo_rootx(self):
"""Return x coordinate of upper left corner of this widget on the
root window."""
return getint(
self.tk.call('winfo', 'rootx', self._w))
def winfo_rooty(self):
"""Return y coordinate of upper left corner of this widget on the
root window."""
return getint(
self.tk.call('winfo', 'rooty', self._w))
def winfo_screen(self):
"""Return the screen name of this widget."""
return self.tk.call('winfo', 'screen', self._w)
def winfo_screencells(self):
"""Return the number of the cells in the colormap of the screen
of this widget."""
return getint(
self.tk.call('winfo', 'screencells', self._w))
def winfo_screendepth(self):
"""Return the number of bits per pixel of the root window of the
screen of this widget."""
return getint(
self.tk.call('winfo', 'screendepth', self._w))
def winfo_screenheight(self):
"""Return the number of pixels of the height of the screen of this widget
in pixel."""
return getint(
self.tk.call('winfo', 'screenheight', self._w))
def winfo_screenmmheight(self):
"""Return the number of pixels of the height of the screen of
this widget in mm."""
return getint(
self.tk.call('winfo', 'screenmmheight', self._w))
def winfo_screenmmwidth(self):
"""Return the number of pixels of the width of the screen of
this widget in mm."""
return getint(
self.tk.call('winfo', 'screenmmwidth', self._w))
def winfo_screenvisual(self):
"""Return one of the strings directcolor, grayscale, pseudocolor,
staticcolor, staticgray, or truecolor for the default
colormodel of this screen."""
return self.tk.call('winfo', 'screenvisual', self._w)
def winfo_screenwidth(self):
"""Return the number of pixels of the width of the screen of
this widget in pixel."""
return getint(
self.tk.call('winfo', 'screenwidth', self._w))
def winfo_server(self):
"""Return information of the X-Server of the screen of this widget in
the form "XmajorRminor vendor vendorVersion"."""
return self.tk.call('winfo', 'server', self._w)
def winfo_toplevel(self):
"""Return the toplevel widget of this widget."""
return self._nametowidget(self.tk.call(
'winfo', 'toplevel', self._w))
def winfo_viewable(self):
"""Return true if the widget and all its higher ancestors are mapped."""
return getint(
self.tk.call('winfo', 'viewable', self._w))
def winfo_visual(self):
"""Return one of the strings directcolor, grayscale, pseudocolor,
staticcolor, staticgray, or truecolor for the
colormodel of this widget."""
return self.tk.call('winfo', 'visual', self._w)
def winfo_visualid(self):
"""Return the X identifier for the visual for this widget."""
return self.tk.call('winfo', 'visualid', self._w)
def winfo_visualsavailable(self, includeids=0):
"""Return a list of all visuals available for the screen
of this widget.
Each item in the list consists of a visual name (see winfo_visual), a
depth and if INCLUDEIDS=1 is given also the X identifier."""
data = self.tk.split(
self.tk.call('winfo', 'visualsavailable', self._w,
includeids and 'includeids' or None))
if isinstance(data, str):
data = [self.tk.split(data)]
return map(self.__winfo_parseitem, data)
def __winfo_parseitem(self, t):
"""Internal function."""
return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
def __winfo_getint(self, x):
"""Internal function."""
return int(x, 0)
def winfo_vrootheight(self):
"""Return the height of the virtual root window associated with this
widget in pixels. If there is no virtual root window return the
height of the screen."""
return getint(
self.tk.call('winfo', 'vrootheight', self._w))
def winfo_vrootwidth(self):
"""Return the width of the virtual root window associated with this
    widget in pixels. If there is no virtual root window return the
width of the screen."""
return getint(
self.tk.call('winfo', 'vrootwidth', self._w))
def winfo_vrootx(self):
"""Return the x offset of the virtual root relative to the root
window of the screen of this widget."""
return getint(
self.tk.call('winfo', 'vrootx', self._w))
def winfo_vrooty(self):
"""Return the y offset of the virtual root relative to the root
window of the screen of this widget."""
return getint(
self.tk.call('winfo', 'vrooty', self._w))
def winfo_width(self):
"""Return the width of this widget."""
return getint(
self.tk.call('winfo', 'width', self._w))
def winfo_x(self):
"""Return the x coordinate of the upper left corner of this widget
in the parent."""
return getint(
self.tk.call('winfo', 'x', self._w))
def winfo_y(self):
"""Return the y coordinate of the upper left corner of this widget
in the parent."""
return getint(
self.tk.call('winfo', 'y', self._w))
def update(self):
"""Enter event loop until all pending events have been processed by Tcl."""
self.tk.call('update')
def update_idletasks(self):
"""Enter event loop until all idle callbacks have been called. This
will update the display of windows but not process events caused by
the user."""
self.tk.call('update', 'idletasks')
def bindtags(self, tagList=None):
"""Set or get the list of bindtags for this widget.
With no argument return the list of all bindtags associated with
this widget. With a list of strings as argument the bindtags are
set to this list. The bindtags determine in which order events are
processed (see bind)."""
if tagList is None:
return self.tk.splitlist(
self.tk.call('bindtags', self._w))
else:
self.tk.call('bindtags', self._w, tagList)
def _bind(self, what, sequence, func, add, needcleanup=1):
"""Internal function."""
if isinstance(func, str):
self.tk.call(what + (sequence, func))
elif func:
funcid = self._register(func, self._substitute,
needcleanup)
cmd = ('%sif {"[%s %s]" == "break"} break\n'
%
(add and '+' or '',
funcid, self._subst_format_str))
self.tk.call(what + (sequence, cmd))
return funcid
elif sequence:
return self.tk.call(what + (sequence,))
else:
return self.tk.splitlist(self.tk.call(what))
def bind(self, sequence=None, func=None, add=None):
"""Bind to this widget at event SEQUENCE a call to function FUNC.
SEQUENCE is a string of concatenated event
patterns. An event pattern is of the form
<MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
        Button1, B1, Mod5, M5, Button2, B2, Meta, M, Button3,
        B3, Alt, Button4, B4, Double, Button5, B5, Triple,
        Mod1, M1. TYPE is one of Activate, Enter, Map,
        ButtonPress, Button, Expose, Motion, ButtonRelease,
        FocusIn, MouseWheel, Circulate, FocusOut, Property,
        Colormap, Gravity, Reparent, Configure, KeyPress, Key,
        Unmap, Deactivate, KeyRelease, Visibility, Destroy,
Leave and DETAIL is the button number for ButtonPress,
ButtonRelease and DETAIL is the Keysym for KeyPress and
KeyRelease. Examples are
<Control-Button-1> for pressing Control and mouse button 1 or
<Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
An event pattern can also be a virtual event of the form
<<AString>> where AString can be arbitrary. This
event can be generated by event_generate.
If events are concatenated they must appear shortly
after each other.
FUNC will be called if the event sequence occurs with an
instance of Event as argument. If the return value of FUNC is
"break" no further bound function is invoked.
An additional boolean parameter ADD specifies whether FUNC will
be called additionally to the other bound function or whether
it will replace the previous function.
Bind will return an identifier to allow deletion of the bound function with
unbind without memory leak.
If FUNC or SEQUENCE is omitted the bound function or list
of bound events are returned."""
return self._bind(('bind', self._w), sequence, func, add)
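    # Illustrative bind() patterns for the event syntax described above
    # (callback names are placeholders):
    #
    #   def on_click(event):
    #       print(event.x, event.y)
    #   widget.bind('<Button-1>', on_click)            # left mouse button
    #   widget.bind('<Control-Return>', on_submit)     # Control + Enter
    #   widget.bind('<<MyVirtualEvent>>', on_custom)   # user-defined virtual event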
def unbind(self, sequence, funcid=None):
"""Unbind for this widget for event SEQUENCE the
function identified with FUNCID."""
self.tk.call('bind', self._w, sequence, '')
if funcid:
self.deletecommand(funcid)
def bind_all(self, sequence=None, func=None, add=None):
"""Bind to all widgets at an event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will
be called additionally to the other bound function or whether
it will replace the previous function. See bind for the return value."""
return self._bind(('bind', 'all'), sequence, func, add, 0)
def unbind_all(self, sequence):
"""Unbind for all widgets for event SEQUENCE all functions."""
self.tk.call('bind', 'all' , sequence, '')
def bind_class(self, className, sequence=None, func=None, add=None):
"""Bind to widgets with bindtag CLASSNAME at event
SEQUENCE a call of function FUNC. An additional
boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or
whether it will replace the previous function. See bind for
the return value."""
return self._bind(('bind', className), sequence, func, add, 0)
def unbind_class(self, className, sequence):
"""Unbind for a all widgets with bindtag CLASSNAME for event SEQUENCE
all functions."""
self.tk.call('bind', className , sequence, '')
def mainloop(self, n=0):
"""Call the mainloop of Tk."""
self.tk.mainloop(n)
def quit(self):
"""Quit the Tcl interpreter. All widgets will be destroyed."""
self.tk.quit()
def _getints(self, string):
"""Internal function."""
if string:
return tuple(map(getint, self.tk.splitlist(string)))
def _getdoubles(self, string):
"""Internal function."""
if string:
return tuple(map(getdouble, self.tk.splitlist(string)))
def _getboolean(self, string):
"""Internal function."""
if string:
return self.tk.getboolean(string)
def _displayof(self, displayof):
"""Internal function."""
if displayof:
return ('-displayof', displayof)
if displayof is None:
return ('-displayof', self._w)
return ()
def _options(self, cnf, kw = None):
"""Internal function."""
if kw:
cnf = _cnfmerge((cnf, kw))
else:
cnf = _cnfmerge(cnf)
res = ()
for k, v in cnf.items():
if v is not None:
if k[-1] == '_': k = k[:-1]
if hasattr(v, '__call__'):
v = self._register(v)
elif isinstance(v, (tuple, list)):
nv = []
for item in v:
if isinstance(item, int):
nv.append(str(item))
elif isinstance(item, str):
nv.append(('{%s}' if ' ' in item else '%s') % item)
else:
break
else:
v = ' '.join(nv)
res = res + ('-'+k, v)
return res
def nametowidget(self, name):
"""Return the Tkinter instance of a widget identified by
its Tcl name NAME."""
name = str(name).split('.')
w = self
if not name[0]:
w = w._root()
name = name[1:]
for n in name:
if not n:
break
w = w.children[n]
return w
_nametowidget = nametowidget
def _register(self, func, subst=None, needcleanup=1):
"""Return a newly created Tcl function. If this
function is called, the Python function FUNC will
be executed. An optional function SUBST can
be given which will be executed before FUNC."""
f = CallWrapper(func, subst, self).__call__
name = repr(id(f))
try:
func = func.__func__
except AttributeError:
pass
try:
name = name + func.__name__
except AttributeError:
pass
self.tk.createcommand(name, f)
if needcleanup:
if self._tclCommands is None:
self._tclCommands = []
self._tclCommands.append(name)
return name
register = _register
def _root(self):
"""Internal function."""
w = self
while w.master: w = w.master
return w
_subst_format = ('%#', '%b', '%f', '%h', '%k',
'%s', '%t', '%w', '%x', '%y',
'%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
_subst_format_str = " ".join(_subst_format)
def _substitute(self, *args):
"""Internal function."""
if len(args) != len(self._subst_format): return args
getboolean = self.tk.getboolean
getint = int
def getint_event(s):
"""Tk changed behavior in 8.4.2, returning "??" rather more often."""
try:
return int(s)
except ValueError:
return s
nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
# Missing: (a, c, d, m, o, v, B, R)
e = Event()
        # serial field: valid for all events
# number of button: ButtonPress and ButtonRelease events only
# height field: Configure, ConfigureRequest, Create,
# ResizeRequest, and Expose events only
# keycode field: KeyPress and KeyRelease events only
# time field: "valid for events that contain a time field"
# width field: Configure, ConfigureRequest, Create, ResizeRequest,
# and Expose events only
# x field: "valid for events that contain a x field"
# y field: "valid for events that contain a y field"
# keysym as decimal: KeyPress and KeyRelease events only
# x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
# KeyRelease,and Motion events
e.serial = getint(nsign)
e.num = getint_event(b)
try: e.focus = getboolean(f)
except TclError: pass
e.height = getint_event(h)
e.keycode = getint_event(k)
e.state = getint_event(s)
e.time = getint_event(t)
e.width = getint_event(w)
e.x = getint_event(x)
e.y = getint_event(y)
e.char = A
try: e.send_event = getboolean(E)
except TclError: pass
e.keysym = K
e.keysym_num = getint_event(N)
e.type = T
try:
e.widget = self._nametowidget(W)
except KeyError:
e.widget = W
e.x_root = getint_event(X)
e.y_root = getint_event(Y)
try:
e.delta = getint(D)
except ValueError:
e.delta = 0
return (e,)
def _report_exception(self):
"""Internal function."""
import sys
exc, val, tb = sys.exc_info()
root = self._root()
root.report_callback_exception(exc, val, tb)
def _configure(self, cmd, cnf, kw):
"""Internal function."""
if kw:
cnf = _cnfmerge((cnf, kw))
elif cnf:
cnf = _cnfmerge(cnf)
if cnf is None:
cnf = {}
for x in self.tk.split(
self.tk.call(_flatten((self._w, cmd)))):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if isinstance(cnf, str):
x = self.tk.split(
self.tk.call(_flatten((self._w, cmd, '-'+cnf))))
return (x[0][1:],) + x[1:]
self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
# These used to be defined in Widget:
def configure(self, cnf=None, **kw):
"""Configure resources of a widget.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method keys.
"""
return self._configure('configure', cnf, kw)
config = configure
def cget(self, key):
"""Return the resource value for a KEY given as string."""
return self.tk.call(self._w, 'cget', '-' + key)
__getitem__ = cget
def __setitem__(self, key, value):
self.configure({key: value})
def keys(self):
"""Return a list of all resource names of this widget."""
return map(lambda x: x[0][1:],
self.tk.split(self.tk.call(self._w, 'configure')))
def __str__(self):
"""Return the window path name of this widget."""
return self._w
# Pack methods that apply to the master
_noarg_ = ['_noarg_']
def pack_propagate(self, flag=_noarg_):
"""Set or get the status for propagation of geometry information.
A boolean argument specifies whether the geometry information
of the slaves will determine the size of this widget. If no argument
is given the current setting will be returned.
"""
if flag is Misc._noarg_:
return self._getboolean(self.tk.call(
'pack', 'propagate', self._w))
else:
self.tk.call('pack', 'propagate', self._w, flag)
propagate = pack_propagate
def pack_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call('pack', 'slaves', self._w)))
slaves = pack_slaves
# Place method that applies to the master
def place_slaves(self):
"""Return a list of all slaves of this widget
in its packing order."""
return map(self._nametowidget,
self.tk.splitlist(
self.tk.call(
'place', 'slaves', self._w)))
# Grid methods that apply to the master
def grid_bbox(self, column=None, row=None, col2=None, row2=None):
"""Return a tuple of integer coordinates for the bounding
box of this widget controlled by the geometry manager grid.
If COLUMN, ROW is given the bounding box applies from
the cell with row and column 0 to the specified
cell. If COL2 and ROW2 are given the bounding box
starts at that cell.
The returned integers specify the offset of the upper left
corner in the master widget and the width and height.
"""
args = ('grid', 'bbox', self._w)
if column is not None and row is not None:
args = args + (column, row)
if col2 is not None and row2 is not None:
args = args + (col2, row2)
return self._getints(self.tk.call(*args)) or None
bbox = grid_bbox
def _grid_configure(self, command, index, cnf, kw):
"""Internal function."""
if isinstance(cnf, str) and not kw:
if cnf[-1:] == '_':
cnf = cnf[:-1]
if cnf[:1] != '-':
cnf = '-'+cnf
options = (cnf,)
else:
options = self._options(cnf, kw)
if not options:
res = self.tk.call('grid',
command, self._w, index)
words = self.tk.splitlist(res)
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if not value:
value = None
elif '.' in value:
value = getdouble(value)
else:
value = getint(value)
dict[key] = value
return dict
res = self.tk.call(
('grid', command, self._w, index)
+ options)
if len(options) == 1:
if not res: return None
# In Tk 7.5, -width can be a float
if '.' in res: return getdouble(res)
return getint(res)
def grid_columnconfigure(self, index, cnf={}, **kw):
"""Configure column INDEX of a grid.
Valid resources are minsize (minimum size of the column),
weight (how much does additional space propagate to this column)
and pad (how much space to let additionally)."""
return self._grid_configure('columnconfigure', index, cnf, kw)
columnconfigure = grid_columnconfigure
def grid_location(self, x, y):
"""Return a tuple of column and row which identify the cell
at which the pixel at position X and Y inside the master
widget is located."""
return self._getints(
self.tk.call(
'grid', 'location', self._w, x, y)) or None
def grid_propagate(self, flag=_noarg_):
"""Set or get the status for propagation of geometry information.
A boolean argument specifies whether the geometry information
of the slaves will determine the size of this widget. If no argument
is given, the current setting will be returned.
"""
if flag is Misc._noarg_:
return self._getboolean(self.tk.call(
'grid', 'propagate', self._w))
else:
self.tk.call('grid', 'propagate', self._w, flag)
def grid_rowconfigure(self, index, cnf={}, **kw):
"""Configure row INDEX of a grid.
Valid resources are minsize (minimum size of the row),
weight (how much does additional space propagate to this row)
and pad (how much space to let additionally)."""
return self._grid_configure('rowconfigure', index, cnf, kw)
rowconfigure = grid_rowconfigure
def grid_size(self):
"""Return a tuple of the number of column and rows in the grid."""
return self._getints(
self.tk.call('grid', 'size', self._w)) or None
size = grid_size
def grid_slaves(self, row=None, column=None):
"""Return a list of all slaves of this widget
in its packing order."""
args = ()
if row is not None:
args = args + ('-row', row)
if column is not None:
args = args + ('-column', column)
return map(self._nametowidget,
self.tk.splitlist(self.tk.call(
('grid', 'slaves', self._w) + args)))
# Support for the "event" command, new in Tk 4.2.
# By Case Roole.
def event_add(self, virtual, *sequences):
"""Bind a virtual event VIRTUAL (of the form <<Name>>)
to an event SEQUENCE such that the virtual event is triggered
whenever SEQUENCE occurs."""
args = ('event', 'add', virtual) + sequences
self.tk.call(args)
def event_delete(self, virtual, *sequences):
"""Unbind a virtual event VIRTUAL from SEQUENCE."""
args = ('event', 'delete', virtual) + sequences
self.tk.call(args)
def event_generate(self, sequence, **kw):
"""Generate an event SEQUENCE. Additional
keyword arguments specify parameters of the event
(e.g. x, y, rootx, rooty)."""
args = ('event', 'generate', self._w, sequence)
for k, v in kw.items():
args = args + ('-%s' % k, str(v))
self.tk.call(args)
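# Illustrative usage sketch (not part of the original source): defining a
# virtual event, binding it, and generating it programmatically on a widget w.
#   w.event_add('<<MySave>>', '<Control-s>')
#   w.bind('<<MySave>>', lambda e: print('save requested'))
#   w.event_generate('<<MySave>>')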
def event_info(self, virtual=None):
"""Return a list of all virtual events or the information
about the SEQUENCE bound to the virtual event VIRTUAL."""
return self.tk.splitlist(
self.tk.call('event', 'info', virtual))
# Image related commands
def image_names(self):
"""Return a list of all existing image names."""
return self.tk.call('image', 'names')
def image_types(self):
"""Return a list of all available image types (e.g. phote bitmap)."""
return self.tk.call('image', 'types')
class CallWrapper:
"""Internal class. Stores function to call when some user
defined Tcl function is called e.g. after an event occurred."""
def __init__(self, func, subst, widget):
"""Store FUNC, SUBST and WIDGET as members."""
self.func = func
self.subst = subst
self.widget = widget
def __call__(self, *args):
"""Apply first function SUBST to arguments, than FUNC."""
try:
if self.subst:
args = self.subst(*args)
return self.func(*args)
except SystemExit as msg:
raise SystemExit(msg)
except:
self.widget._report_exception()
class Wm:
"""Provides functions for the communication with the window manager."""
def wm_aspect(self,
minNumer=None, minDenom=None,
maxNumer=None, maxDenom=None):
"""Instruct the window manager to set the aspect ratio (width/height)
of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
of the actual values if no argument is given."""
return self._getints(
self.tk.call('wm', 'aspect', self._w,
minNumer, minDenom,
maxNumer, maxDenom))
aspect = wm_aspect
def wm_attributes(self, *args):
"""This subcommand returns or sets platform specific attributes
The first form returns a list of the platform specific flags and
their values. The second form returns the value for the specific
option. The third form sets one or more of the values. The values
are as follows:
On Windows, -disabled gets or sets whether the window is in a
disabled state. -toolwindow gets or sets the style of the window
to toolwindow (as defined in the MSDN). -topmost gets or sets
whether this is a topmost window (displays above all other
windows).
On Macintosh, XXXXX
On Unix, there are currently no special attribute values.
"""
args = ('wm', 'attributes', self._w) + args
return self.tk.call(args)
attributes=wm_attributes
def wm_client(self, name=None):
"""Store NAME in WM_CLIENT_MACHINE property of this widget. Return
current value."""
return self.tk.call('wm', 'client', self._w, name)
client = wm_client
def wm_colormapwindows(self, *wlist):
"""Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
of this widget. This list contains windows whose colormaps differ from their
parents. Return current list of widgets if WLIST is empty."""
if len(wlist) > 1:
wlist = (wlist,) # Tk needs a list of windows here
args = ('wm', 'colormapwindows', self._w) + wlist
return map(self._nametowidget, self.tk.call(args))
colormapwindows = wm_colormapwindows
def wm_command(self, value=None):
"""Store VALUE in WM_COMMAND property. It is the command
which shall be used to invoke the application. Return current
command if VALUE is None."""
return self.tk.call('wm', 'command', self._w, value)
command = wm_command
def wm_deiconify(self):
"""Deiconify this widget. If it was never mapped it will not be mapped.
On Windows it will raise this widget and give it the focus."""
return self.tk.call('wm', 'deiconify', self._w)
deiconify = wm_deiconify
def wm_focusmodel(self, model=None):
"""Set focus model to MODEL. "active" means that this widget will claim
the focus itself, "passive" means that the window manager shall give
the focus. Return current focus model if MODEL is None."""
return self.tk.call('wm', 'focusmodel', self._w, model)
focusmodel = wm_focusmodel
def wm_frame(self):
"""Return identifier for decorative frame of this widget if present."""
return self.tk.call('wm', 'frame', self._w)
frame = wm_frame
def wm_geometry(self, newGeometry=None):
"""Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
current value if None is given."""
return self.tk.call('wm', 'geometry', self._w, newGeometry)
geometry = wm_geometry
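# Illustrative usage sketch (not part of the original source): the geometry
# string has the form "widthxheight+x+y"; calling geometry() with no argument
# queries the current value.
#   root = Tk()
#   root.geometry('400x300+100+100')
#   print(root.geometry())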
def wm_grid(self,
baseWidth=None, baseHeight=None,
widthInc=None, heightInc=None):
"""Instruct the window manager that this widget shall only be
resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
number of grid units requested in Tk_GeometryRequest."""
return self._getints(self.tk.call(
'wm', 'grid', self._w,
baseWidth, baseHeight, widthInc, heightInc))
grid = wm_grid
def wm_group(self, pathName=None):
"""Set the group leader widgets for related widgets to PATHNAME. Return
the group leader of this widget if None is given."""
return self.tk.call('wm', 'group', self._w, pathName)
group = wm_group
def wm_iconbitmap(self, bitmap=None, default=None):
"""Set bitmap for the iconified widget to BITMAP. Return
the bitmap if None is given.
Under Windows, the DEFAULT parameter can be used to set the icon
for the widget and any descendants that don't have an icon set
explicitly. DEFAULT can be the relative path to a .ico file
(example: root.iconbitmap(default='myicon.ico') ). See Tk
documentation for more information."""
if default:
return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
else:
return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
iconbitmap = wm_iconbitmap
def wm_iconify(self):
"""Display widget as icon."""
return self.tk.call('wm', 'iconify', self._w)
iconify = wm_iconify
def wm_iconmask(self, bitmap=None):
"""Set mask for the icon bitmap of this widget. Return the
mask if None is given."""
return self.tk.call('wm', 'iconmask', self._w, bitmap)
iconmask = wm_iconmask
def wm_iconname(self, newName=None):
"""Set the name of the icon for this widget. Return the name if
None is given."""
return self.tk.call('wm', 'iconname', self._w, newName)
iconname = wm_iconname
def wm_iconposition(self, x=None, y=None):
"""Set the position of the icon of this widget to X and Y. Return
a tuple of the current values of X and Y if None is given."""
return self._getints(self.tk.call(
'wm', 'iconposition', self._w, x, y))
iconposition = wm_iconposition
def wm_iconwindow(self, pathName=None):
"""Set widget PATHNAME to be displayed instead of icon. Return the current
value if None is given."""
return self.tk.call('wm', 'iconwindow', self._w, pathName)
iconwindow = wm_iconwindow
def wm_maxsize(self, width=None, height=None):
"""Set max WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'maxsize', self._w, width, height))
maxsize = wm_maxsize
def wm_minsize(self, width=None, height=None):
"""Set min WIDTH and HEIGHT for this widget. If the window is gridded
the values are given in grid units. Return the current values if None
is given."""
return self._getints(self.tk.call(
'wm', 'minsize', self._w, width, height))
minsize = wm_minsize
def wm_overrideredirect(self, boolean=None):
"""Instruct the window manager to ignore this widget
if BOOLEAN is given with 1. Return the current value if None
is given."""
return self._getboolean(self.tk.call(
'wm', 'overrideredirect', self._w, boolean))
overrideredirect = wm_overrideredirect
def wm_positionfrom(self, who=None):
"""Instruct the window manager that the position of this widget shall
be defined by the user if WHO is "user", and by its own policy if WHO is
"program"."""
return self.tk.call('wm', 'positionfrom', self._w, who)
positionfrom = wm_positionfrom
def wm_protocol(self, name=None, func=None):
"""Bind function FUNC to command NAME for this widget.
Return the function bound to NAME if None is given. NAME could be
e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
if hasattr(func, '__call__'):
command = self._register(func)
else:
command = func
return self.tk.call(
'wm', 'protocol', self._w, name, command)
protocol = wm_protocol
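# Illustrative usage sketch (not part of the original source): intercepting
# the window-manager close button via the WM_DELETE_WINDOW protocol.
#   root = Tk()
#   def on_close():
#       print('shutting down')
#       root.destroy()
#   root.protocol('WM_DELETE_WINDOW', on_close)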
def wm_resizable(self, width=None, height=None):
"""Instruct the window manager whether this width can be resized
in WIDTH or HEIGHT. Both values are boolean values."""
return self.tk.call('wm', 'resizable', self._w, width, height)
resizable = wm_resizable
def wm_sizefrom(self, who=None):
"""Instruct the window manager that the size of this widget shall
be defined by the user if WHO is "user", and by its own policy if WHO is
"program"."""
return self.tk.call('wm', 'sizefrom', self._w, who)
sizefrom = wm_sizefrom
def wm_state(self, newstate=None):
"""Query or set the state of this widget as one of normal, icon,
iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
return self.tk.call('wm', 'state', self._w, newstate)
state = wm_state
def wm_title(self, string=None):
"""Set the title of this widget."""
return self.tk.call('wm', 'title', self._w, string)
title = wm_title
def wm_transient(self, master=None):
"""Instruct the window manager that this widget is transient
with regard to widget MASTER."""
return self.tk.call('wm', 'transient', self._w, master)
transient = wm_transient
def wm_withdraw(self):
"""Withdraw this widget from the screen such that it is unmapped
and forgotten by the window manager. Re-draw it with wm_deiconify."""
return self.tk.call('wm', 'withdraw', self._w)
withdraw = wm_withdraw
class Tk(Misc, Wm):
"""Toplevel widget of Tk which represents mostly the main window
of an application. It has an associated Tcl interpreter."""
_w = '.'
def __init__(self, screenName=None, baseName=None, className='Tk',
useTk=1, sync=0, use=None):
"""Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
be created. BASENAME will be used for the identification of the profile file (see
readprofile).
It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
is the name of the widget class."""
self.master = None
self.children = {}
self._tkloaded = 0
# to avoid recursions in the getattr code in case of failure, we
# ensure that self.tk is always _something_.
self.tk = None
if baseName is None:
import sys, os
baseName = os.path.basename(sys.argv[0])
baseName, ext = os.path.splitext(baseName)
if ext not in ('.py', '.pyc', '.pyo'):
baseName = baseName + ext
interactive = 0
self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
if useTk:
self._loadtk()
self.readprofile(baseName, className)
def loadtk(self):
if not self._tkloaded:
self.tk.loadtk()
self._loadtk()
def _loadtk(self):
self._tkloaded = 1
global _default_root
# Version sanity checks
tk_version = self.tk.getvar('tk_version')
if tk_version != _tkinter.TK_VERSION:
raise RuntimeError("tk.h version (%s) doesn't match libtk.a version (%s)"
% (_tkinter.TK_VERSION, tk_version))
# Under unknown circumstances, tcl_version gets coerced to float
tcl_version = str(self.tk.getvar('tcl_version'))
if tcl_version != _tkinter.TCL_VERSION:
raise RuntimeError("tcl.h version (%s) doesn't match libtcl.a version (%s)" \
% (_tkinter.TCL_VERSION, tcl_version))
if TkVersion < 4.0:
raise RuntimeError("Tk 4.0 or higher is required; found Tk %s"
% str(TkVersion))
# Create and register the tkerror and exit commands
# We need to inline parts of _register here; _register
# would register differently-named commands.
if self._tclCommands is None:
self._tclCommands = []
self.tk.createcommand('tkerror', _tkerror)
self.tk.createcommand('exit', _exit)
self._tclCommands.append('tkerror')
self._tclCommands.append('exit')
if _support_default_root and not _default_root:
_default_root = self
self.protocol("WM_DELETE_WINDOW", self.destroy)
def destroy(self):
"""Destroy this and all descendants widgets. This will
end the application of this Tcl interpreter."""
for c in list(self.children.values()): c.destroy()
self.tk.call('destroy', self._w)
Misc.destroy(self)
global _default_root
if _support_default_root and _default_root is self:
_default_root = None
def readprofile(self, baseName, className):
"""Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
the Tcl Interpreter and calls exec on the contents of BASENAME.py and
CLASSNAME.py if such a file exists in the home directory."""
import os
if 'HOME' in os.environ: home = os.environ['HOME']
else: home = os.curdir
class_tcl = os.path.join(home, '.%s.tcl' % className)
class_py = os.path.join(home, '.%s.py' % className)
base_tcl = os.path.join(home, '.%s.tcl' % baseName)
base_py = os.path.join(home, '.%s.py' % baseName)
dir = {'self': self}
exec('from tkinter import *', dir)
if os.path.isfile(class_tcl):
self.tk.call('source', class_tcl)
if os.path.isfile(class_py):
exec(open(class_py).read(), dir)
if os.path.isfile(base_tcl):
self.tk.call('source', base_tcl)
if os.path.isfile(base_py):
exec(open(base_py).read(), dir)
def report_callback_exception(self, exc, val, tb):
"""Internal function. It reports exception on sys.stderr."""
import traceback, sys
sys.stderr.write("Exception in Tkinter callback\n")
sys.last_type = exc
sys.last_value = val
sys.last_traceback = tb
traceback.print_exception(exc, val, tb)
def __getattr__(self, attr):
"Delegate attribute access to the interpreter object"
return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_configure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
return Tk(screenName, baseName, className, useTk)
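# Illustrative usage sketch (not part of the original source): Tcl() creates
# an interpreter without initializing Tk, which is handy on display-less hosts.
#   tcl = Tcl()
#   print(tcl.eval('expr {6 * 7}'))   # -> '42'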
class Pack:
"""Geometry manager Pack.
Base class to use the methods pack_* in every widget."""
def pack_configure(self, cnf={}, **kw):
"""Pack a widget in the parent widget. Use as options:
after=widget - pack it after you have packed widget
anchor=NSEW (or subset) - position widget according to
given direction
before=widget - pack it before you will pack widget
expand=bool - expand widget if parent size grows
fill=NONE or X or Y or BOTH - fill widget if widget grows
in=master - use master to contain this widget
in_=master - see 'in' option description
ipadx=amount - add internal padding in x direction
ipady=amount - add internal padding in y direction
padx=amount - add padding in x direction
pady=amount - add padding in y direction
side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
"""
self.tk.call(
('pack', 'configure', self._w)
+ self._options(cnf, kw))
pack = configure = config = pack_configure
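# Illustrative usage sketch (not part of the original source): a typical
# pack() call on a child widget, assuming `root` is an existing Tk instance.
#   frame = Frame(root)
#   frame.pack(side=TOP, fill=BOTH, expand=True, padx=5, pady=5)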
def pack_forget(self):
"""Unmap this widget and do not use it for the packing order."""
self.tk.call('pack', 'forget', self._w)
forget = pack_forget
def pack_info(self):
"""Return information about the packing options
for this widget."""
words = self.tk.splitlist(
self.tk.call('pack', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = pack_info
propagate = pack_propagate = Misc.pack_propagate
slaves = pack_slaves = Misc.pack_slaves
class Place:
"""Geometry manager Place.
Base class to use the methods place_* in every widget."""
def place_configure(self, cnf={}, **kw):
"""Place a widget in the parent widget. Use as options:
in=master - master relative to which the widget is placed
in_=master - see 'in' option description
x=amount - locate anchor of this widget at position x of master
y=amount - locate anchor of this widget at position y of master
relx=amount - locate anchor of this widget between 0.0 and 1.0
relative to width of master (1.0 is right edge)
rely=amount - locate anchor of this widget between 0.0 and 1.0
relative to height of master (1.0 is bottom edge)
anchor=NSEW (or subset) - position anchor according to given direction
width=amount - width of this widget in pixel
height=amount - height of this widget in pixel
relwidth=amount - width of this widget between 0.0 and 1.0
relative to width of master (1.0 is the same width
as the master)
relheight=amount - height of this widget between 0.0 and 1.0
relative to height of master (1.0 is the same
height as the master)
bordermode="inside" or "outside" - whether to take border width of
master widget into account
"""
self.tk.call(
('place', 'configure', self._w)
+ self._options(cnf, kw))
place = configure = config = place_configure
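# Illustrative usage sketch (not part of the original source): centering a
# widget with place() using relative coordinates and an anchor.
#   button = Button(root, text='OK')
#   button.place(relx=0.5, rely=0.5, anchor=CENTER)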
def place_forget(self):
"""Unmap this widget."""
self.tk.call('place', 'forget', self._w)
forget = place_forget
def place_info(self):
"""Return information about the placing options
for this widget."""
words = self.tk.splitlist(
self.tk.call('place', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = place_info
slaves = place_slaves = Misc.place_slaves
class Grid:
"""Geometry manager Grid.
Base class to use the methods grid_* in every widget."""
# Thanks to Masazumi Yoshikawa ([email protected])
def grid_configure(self, cnf={}, **kw):
"""Position a widget in the parent widget in a grid. Use as options:
column=number - use cell identified with given column (starting with 0)
columnspan=number - this widget will span several columns
in=master - use master to contain this widget
in_=master - see 'in' option description
ipadx=amount - add internal padding in x direction
ipady=amount - add internal padding in y direction
padx=amount - add padding in x direction
pady=amount - add padding in y direction
row=number - use cell identified with given row (starting with 0)
rowspan=number - this widget will span several rows
sticky=NSEW - if cell is larger on which sides will this
widget stick to the cell boundary
"""
self.tk.call(
('grid', 'configure', self._w)
+ self._options(cnf, kw))
grid = configure = config = grid_configure
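# Illustrative usage sketch (not part of the original source): a two-column
# form laid out with grid(); the second column stretches with the window.
#   Label(root, text='Name:').grid(row=0, column=0, sticky=E)
#   Entry(root).grid(row=0, column=1, sticky=EW)
#   root.grid_columnconfigure(1, weight=1)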
bbox = grid_bbox = Misc.grid_bbox
columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure
def grid_forget(self):
"""Unmap this widget."""
self.tk.call('grid', 'forget', self._w)
forget = grid_forget
def grid_remove(self):
"""Unmap this widget but remember the grid options."""
self.tk.call('grid', 'remove', self._w)
def grid_info(self):
"""Return information about the options
for positioning this widget in a grid."""
words = self.tk.splitlist(
self.tk.call('grid', 'info', self._w))
dict = {}
for i in range(0, len(words), 2):
key = words[i][1:]
value = words[i+1]
if value[:1] == '.':
value = self._nametowidget(value)
dict[key] = value
return dict
info = grid_info
location = grid_location = Misc.grid_location
propagate = grid_propagate = Misc.grid_propagate
rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
size = grid_size = Misc.grid_size
slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
"""Internal class."""
def _setup(self, master, cnf):
"""Internal function. Sets up information about children."""
if _support_default_root:
global _default_root
if not master:
if not _default_root:
_default_root = Tk()
master = _default_root
self.master = master
self.tk = master.tk
name = None
if 'name' in cnf:
name = cnf['name']
del cnf['name']
if not name:
name = repr(id(self))
self._name = name
if master._w=='.':
self._w = '.' + name
else:
self._w = master._w + '.' + name
self.children = {}
if self._name in self.master.children:
self.master.children[self._name].destroy()
self.master.children[self._name] = self
def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
"""Construct a widget with the parent widget MASTER, a name WIDGETNAME
and appropriate options."""
if kw:
cnf = _cnfmerge((cnf, kw))
self.widgetName = widgetName
BaseWidget._setup(self, master, cnf)
if self._tclCommands is None:
self._tclCommands = []
classes = [(k, v) for k, v in cnf.items() if isinstance(k, type)]
for k, v in classes:
del cnf[k]
self.tk.call(
(widgetName, self._w) + extra + self._options(cnf))
for k, v in classes:
k.configure(self, v)
def destroy(self):
"""Destroy this and all descendants widgets."""
for c in list(self.children.values()): c.destroy()
self.tk.call('destroy', self._w)
if self._name in self.master.children:
del self.master.children[self._name]
Misc.destroy(self)
def _do(self, name, args=()):
# XXX Obsolete -- better use self.tk.call directly!
return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
"""Internal class.
Base class for a widget which can be positioned with the geometry managers
Pack, Place or Grid."""
pass
class Toplevel(BaseWidget, Wm):
"""Toplevel widget, e.g. for dialogs."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a toplevel widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, menu, relief, screen, takefocus,
use, visual, width."""
if kw:
cnf = _cnfmerge((cnf, kw))
extra = ()
for wmkey in ['screen', 'class_', 'class', 'visual',
'colormap']:
if wmkey in cnf:
val = cnf[wmkey]
# TBD: a hack needed because some keys
# are not valid as keyword arguments
if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
else: opt = '-'+wmkey
extra = extra + (opt, val)
del cnf[wmkey]
BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
root = self._root()
self.iconname(root.iconname())
self.title(root.title())
self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
"""Button widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a button widget with the parent MASTER.
STANDARD OPTIONS
activebackground, activeforeground, anchor,
background, bitmap, borderwidth, cursor,
disabledforeground, font, foreground
highlightbackground, highlightcolor,
highlightthickness, image, justify,
padx, pady, relief, repeatdelay,
repeatinterval, takefocus, text,
textvariable, underline, wraplength
WIDGET-SPECIFIC OPTIONS
command, compound, default, height,
overrelief, state, width
"""
Widget.__init__(self, master, 'button', cnf, kw)
def tkButtonEnter(self, *dummy):
self.tk.call('tkButtonEnter', self._w)
def tkButtonLeave(self, *dummy):
self.tk.call('tkButtonLeave', self._w)
def tkButtonDown(self, *dummy):
self.tk.call('tkButtonDown', self._w)
def tkButtonUp(self, *dummy):
self.tk.call('tkButtonUp', self._w)
def tkButtonInvoke(self, *dummy):
self.tk.call('tkButtonInvoke', self._w)
def flash(self):
"""Flash the button.
This is accomplished by redisplaying
the button several times, alternating between active and
normal colors. At the end of the flash the button is left
in the same normal/active state as when the command was
invoked. This command is ignored if the button's state is
disabled.
"""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Invoke the command associated with the button.
The return value is the return value from the command,
or an empty string if there is no command associated with
the button. This command is ignored if the button's state
is disabled.
"""
return self.tk.call(self._w, 'invoke')
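# Illustrative usage sketch (not part of the original source): a button wired
# to a callback; invoke() runs the same command programmatically.
#   btn = Button(root, text='Quit', command=root.destroy)
#   btn.pack()
#   # btn.invoke() would call root.destroy() as if the button were clicked.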
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
return 'end'
def AtInsert(*args):
s = 'insert'
for a in args:
if a: s = s + (' ' + a)
return s
def AtSelFirst():
return 'sel.first'
def AtSelLast():
return 'sel.last'
def At(x, y=None):
if y is None:
return '@%r' % (x,)
else:
return '@%r,%r' % (x, y)
class Canvas(Widget):
"""Canvas widget to display graphical elements like lines or text."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a canvas widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, closeenough,
confine, cursor, height, highlightbackground, highlightcolor,
highlightthickness, insertbackground, insertborderwidth,
insertofftime, insertontime, insertwidth, offset, relief,
scrollregion, selectbackground, selectborderwidth, selectforeground,
state, takefocus, width, xscrollcommand, xscrollincrement,
yscrollcommand, yscrollincrement."""
Widget.__init__(self, master, 'canvas', cnf, kw)
def addtag(self, *args):
"""Internal function."""
self.tk.call((self._w, 'addtag') + args)
def addtag_above(self, newtag, tagOrId):
"""Add tag NEWTAG to all items above TAGORID."""
self.addtag(newtag, 'above', tagOrId)
def addtag_all(self, newtag):
"""Add tag NEWTAG to all items."""
self.addtag(newtag, 'all')
def addtag_below(self, newtag, tagOrId):
"""Add tag NEWTAG to all items below TAGORID."""
self.addtag(newtag, 'below', tagOrId)
def addtag_closest(self, newtag, x, y, halo=None, start=None):
"""Add tag NEWTAG to item which is closest to pixel at X, Y.
If several match take the top-most.
All items closer than HALO are considered overlapping (all are
closest). If START is specified the next below this tag is taken."""
self.addtag(newtag, 'closest', x, y, halo, start)
def addtag_enclosed(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items in the rectangle defined
by X1,Y1,X2,Y2."""
self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
def addtag_overlapping(self, newtag, x1, y1, x2, y2):
"""Add tag NEWTAG to all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
def addtag_withtag(self, newtag, tagOrId):
"""Add tag NEWTAG to all items with TAGORID."""
self.addtag(newtag, 'withtag', tagOrId)
def bbox(self, *args):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses all items with tags specified as arguments."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def tag_unbind(self, tagOrId, sequence, funcid=None):
"""Unbind for all items with TAGORID for event SEQUENCE the
function identified with FUNCID."""
self.tk.call(self._w, 'bind', tagOrId, sequence, '')
if funcid:
self.deletecommand(funcid)
def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
"""Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or whether it will
replace the previous function. See bind for the return value."""
return self._bind((self._w, 'bind', tagOrId),
sequence, func, add)
def canvasx(self, screenx, gridspacing=None):
"""Return the canvas x coordinate of pixel position SCREENX rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasx', screenx, gridspacing))
def canvasy(self, screeny, gridspacing=None):
"""Return the canvas y coordinate of pixel position SCREENY rounded
to nearest multiple of GRIDSPACING units."""
return getdouble(self.tk.call(
self._w, 'canvasy', screeny, gridspacing))
def coords(self, *args):
"""Return a list of coordinates for the item given in ARGS."""
# XXX Should use _flatten on args
return map(getdouble,
self.tk.splitlist(
self.tk.call((self._w, 'coords') + args)))
def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
"""Internal function."""
args = _flatten(args)
cnf = args[-1]
if isinstance(cnf, (dict, tuple)):
args = args[:-1]
else:
cnf = {}
return getint(self.tk.call(
self._w, 'create', itemType,
*(args + self._options(cnf, kw))))
def create_arc(self, *args, **kw):
"""Create arc shaped region with coordinates x1,y1,x2,y2."""
return self._create('arc', args, kw)
def create_bitmap(self, *args, **kw):
"""Create bitmap with coordinates x1,y1."""
return self._create('bitmap', args, kw)
def create_image(self, *args, **kw):
"""Create image item with coordinates x1,y1."""
return self._create('image', args, kw)
def create_line(self, *args, **kw):
"""Create line with coordinates x1,y1,...,xn,yn."""
return self._create('line', args, kw)
def create_oval(self, *args, **kw):
"""Create oval with coordinates x1,y1,x2,y2."""
return self._create('oval', args, kw)
def create_polygon(self, *args, **kw):
"""Create polygon with coordinates x1,y1,...,xn,yn."""
return self._create('polygon', args, kw)
def create_rectangle(self, *args, **kw):
"""Create rectangle with coordinates x1,y1,x2,y2."""
return self._create('rectangle', args, kw)
def create_text(self, *args, **kw):
"""Create text with coordinates x1,y1."""
return self._create('text', args, kw)
def create_window(self, *args, **kw):
"""Create window with coordinates x1,y1,x2,y2."""
return self._create('window', args, kw)
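# Illustrative usage sketch (not part of the original source): creating canvas
# items; the returned ids and any tags can be passed to coords(),
# itemconfigure(), delete() and the other item methods below.
#   canvas = Canvas(root, width=200, height=100)
#   rect = canvas.create_rectangle(10, 10, 60, 60, fill='red', tags='shape')
#   canvas.create_text(100, 30, text='hello')
#   canvas.itemconfigure('shape', outline='blue')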
def dchars(self, *args):
"""Delete characters of text items identified by tag or id in ARGS (possibly
several times) from FIRST to LAST character (inclusive)."""
self.tk.call((self._w, 'dchars') + args)
def delete(self, *args):
"""Delete items identified by all tag or ids contained in ARGS."""
self.tk.call((self._w, 'delete') + args)
def dtag(self, *args):
"""Delete tag or id given as last arguments in ARGS from items
identified by first argument in ARGS."""
self.tk.call((self._w, 'dtag') + args)
def find(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'find') + args)) or ()
def find_above(self, tagOrId):
"""Return items above TAGORID."""
return self.find('above', tagOrId)
def find_all(self):
"""Return all items."""
return self.find('all')
def find_below(self, tagOrId):
"""Return all items below TAGORID."""
return self.find('below', tagOrId)
def find_closest(self, x, y, halo=None, start=None):
"""Return item which is closest to pixel at X, Y.
If several match take the top-most.
All items closer than HALO are considered overlapping (all are
closest). If START is specified the next below this tag is taken."""
return self.find('closest', x, y, halo, start)
def find_enclosed(self, x1, y1, x2, y2):
"""Return all items in rectangle defined
by X1,Y1,X2,Y2."""
return self.find('enclosed', x1, y1, x2, y2)
def find_overlapping(self, x1, y1, x2, y2):
"""Return all items which overlap the rectangle
defined by X1,Y1,X2,Y2."""
return self.find('overlapping', x1, y1, x2, y2)
def find_withtag(self, tagOrId):
"""Return all items with TAGORID."""
return self.find('withtag', tagOrId)
def focus(self, *args):
"""Set focus to the first item specified in ARGS."""
return self.tk.call((self._w, 'focus') + args)
def gettags(self, *args):
"""Return tags associated with the first item specified in ARGS."""
return self.tk.splitlist(
self.tk.call((self._w, 'gettags') + args))
def icursor(self, *args):
"""Set cursor at position POS in the item identified by TAGORID.
In ARGS TAGORID must be first."""
self.tk.call((self._w, 'icursor') + args)
def index(self, *args):
"""Return position of cursor as integer in item specified in ARGS."""
return getint(self.tk.call((self._w, 'index') + args))
def insert(self, *args):
"""Insert TEXT in item TAGORID at position POS. ARGS must
be TAGORID POS TEXT."""
self.tk.call((self._w, 'insert') + args)
def itemcget(self, tagOrId, option):
"""Return the resource value for an OPTION for item TAGORID."""
return self.tk.call(
(self._w, 'itemcget') + (tagOrId, '-'+option))
def itemconfigure(self, tagOrId, cnf=None, **kw):
"""Configure resources of an item TAGORID.
The values for resources are specified as keyword
arguments. To get an overview about
the allowed keyword arguments call the method without arguments.
"""
return self._configure(('itemconfigure', tagOrId), cnf, kw)
itemconfig = itemconfigure
# lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
# so the preferred name for them is tag_lower, tag_raise
# (similar to tag_bind, and similar to the Text widget);
# unfortunately can't delete the old ones yet (maybe in 1.6)
def tag_lower(self, *args):
"""Lower an item TAGORID given in ARGS
(optional below another item)."""
self.tk.call((self._w, 'lower') + args)
lower = tag_lower
def move(self, *args):
"""Move an item TAGORID given in ARGS."""
self.tk.call((self._w, 'move') + args)
def postscript(self, cnf={}, **kw):
"""Print the contents of the canvas to a postscript
file. Valid options: colormap, colormode, file, fontmap,
height, pageanchor, pageheight, pagewidth, pagex, pagey,
rotate, width, x, y."""
return self.tk.call((self._w, 'postscript') +
self._options(cnf, kw))
def tag_raise(self, *args):
"""Raise an item TAGORID given in ARGS
(optional above another item)."""
self.tk.call((self._w, 'raise') + args)
lift = tkraise = tag_raise
def scale(self, *args):
"""Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE."""
self.tk.call((self._w, 'scale') + args)
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y, gain=10):
"""Adjust the view of the canvas to GAIN times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
def select_adjust(self, tagOrId, index):
"""Adjust the end of the selection near the cursor of an item TAGORID to index."""
self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
def select_clear(self):
"""Clear the selection if it is in this widget."""
self.tk.call(self._w, 'select', 'clear')
def select_from(self, tagOrId, index):
"""Set the fixed end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'from', tagOrId, index)
def select_item(self):
"""Return the item which has the selection."""
return self.tk.call(self._w, 'select', 'item') or None
def select_to(self, tagOrId, index):
"""Set the variable end of a selection in item TAGORID to INDEX."""
self.tk.call(self._w, 'select', 'to', tagOrId, index)
def type(self, tagOrId):
"""Return the type of the item TAGORID."""
return self.tk.call(self._w, 'type', tagOrId) or None
def xview(self, *args):
"""Query and change horizontal position of the view."""
if not args:
return self._getdoubles(self.tk.call(self._w, 'xview'))
self.tk.call((self._w, 'xview') + args)
def xview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
total width of the canvas is off-screen to the left."""
self.tk.call(self._w, 'xview', 'moveto', fraction)
def xview_scroll(self, number, what):
"""Shift the x-view according to NUMBER which is measured in "units" or "pages" (WHAT)."""
self.tk.call(self._w, 'xview', 'scroll', number, what)
def yview(self, *args):
"""Query and change vertical position of the view."""
if not args:
return self._getdoubles(self.tk.call(self._w, 'yview'))
self.tk.call((self._w, 'yview') + args)
def yview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
total height of the canvas is off-screen to the top."""
self.tk.call(self._w, 'yview', 'moveto', fraction)
def yview_scroll(self, number, what):
"""Shift the y-view according to NUMBER which is measured in "units" or "pages" (WHAT)."""
self.tk.call(self._w, 'yview', 'scroll', number, what)
class Checkbutton(Widget):
"""Checkbutton widget which is either in on- or off-state."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a checkbutton widget with the parent MASTER.
Valid resource names: activebackground, activeforeground, anchor,
background, bd, bg, bitmap, borderwidth, command, cursor,
disabledforeground, fg, font, foreground, height,
highlightbackground, highlightcolor, highlightthickness, image,
indicatoron, justify, offvalue, onvalue, padx, pady, relief,
selectcolor, selectimage, state, takefocus, text, textvariable,
underline, variable, width, wraplength."""
Widget.__init__(self, master, 'checkbutton', cnf, kw)
def deselect(self):
"""Put the button in off-state."""
self.tk.call(self._w, 'deselect')
def flash(self):
"""Flash the button."""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Toggle the button and invoke a command if given as resource."""
return self.tk.call(self._w, 'invoke')
def select(self):
"""Put the button in on-state."""
self.tk.call(self._w, 'select')
def toggle(self):
"""Toggle the button."""
self.tk.call(self._w, 'toggle')
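# Illustrative usage sketch (not part of the original source): a checkbutton
# bound to an IntVar; select()/deselect()/toggle() update the variable.
#   var = IntVar()
#   cb = Checkbutton(root, text='Enabled', variable=var)
#   cb.pack()
#   cb.select()          # var.get() now returns 1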
class Entry(Widget):
"""Entry widget which allows to display simple text."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct an entry widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, cursor,
exportselection, fg, font, foreground, highlightbackground,
highlightcolor, highlightthickness, insertbackground,
insertborderwidth, insertofftime, insertontime, insertwidth,
invalidcommand, invcmd, justify, relief, selectbackground,
selectborderwidth, selectforeground, show, state, takefocus,
textvariable, validate, validatecommand, vcmd, width,
xscrollcommand."""
Widget.__init__(self, master, 'entry', cnf, kw)
def delete(self, first, last=None):
"""Delete text from FIRST to LAST (not included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Return the text."""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Insert cursor at INDEX."""
self.tk.call(self._w, 'icursor', index)
def index(self, index):
"""Return position of cursor."""
return getint(self.tk.call(
self._w, 'index', index))
def insert(self, index, string):
"""Insert STRING at INDEX."""
self.tk.call(self._w, 'insert', index, string)
def scan_mark(self, x):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x)
def scan_dragto(self, x):
"""Adjust the view of the canvas to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x)
def selection_adjust(self, index):
"""Adjust the end of the selection near the cursor to INDEX."""
self.tk.call(self._w, 'selection', 'adjust', index)
select_adjust = selection_adjust
def selection_clear(self):
"""Clear the selection if it is in this widget."""
self.tk.call(self._w, 'selection', 'clear')
select_clear = selection_clear
def selection_from(self, index):
"""Set the fixed end of a selection to INDEX."""
self.tk.call(self._w, 'selection', 'from', index)
select_from = selection_from
def selection_present(self):
"""Return whether the widget has the selection."""
return self.tk.getboolean(
self.tk.call(self._w, 'selection', 'present'))
select_present = selection_present
def selection_range(self, start, end):
"""Set the selection from START to END (not included)."""
self.tk.call(self._w, 'selection', 'range', start, end)
select_range = selection_range
def selection_to(self, index):
"""Set the variable end of a selection to INDEX."""
self.tk.call(self._w, 'selection', 'to', index)
select_to = selection_to
def xview(self, index):
"""Query and change horizontal position of the view."""
self.tk.call(self._w, 'xview', index)
def xview_moveto(self, fraction):
"""Adjust the view in the window so that FRACTION of the
total width of the entry is off-screen to the left."""
self.tk.call(self._w, 'xview', 'moveto', fraction)
def xview_scroll(self, number, what):
"""Shift the x-view according to NUMBER which is measured in "units" or "pages" (WHAT)."""
self.tk.call(self._w, 'xview', 'scroll', number, what)
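# Illustrative usage sketch (not part of the original source): an entry tied
# to a StringVar; insert(), delete() and get() manipulate the displayed text.
#   name = StringVar()
#   entry = Entry(root, textvariable=name, width=30)
#   entry.pack()
#   entry.insert(0, 'default text')
#   print(entry.get())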
class Frame(Widget):
"""Frame widget which may contain other widgets and can have a 3D border."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a frame widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, class,
colormap, container, cursor, height, highlightbackground,
highlightcolor, highlightthickness, relief, takefocus, visual, width."""
cnf = _cnfmerge((cnf, kw))
extra = ()
if 'class_' in cnf:
extra = ('-class', cnf['class_'])
del cnf['class_']
elif 'class' in cnf:
extra = ('-class', cnf['class'])
del cnf['class']
Widget.__init__(self, master, 'frame', cnf, {}, extra)
class Label(Widget):
"""Label widget which can display text and bitmaps."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a label widget with the parent MASTER.
STANDARD OPTIONS
activebackground, activeforeground, anchor,
background, bitmap, borderwidth, cursor,
disabledforeground, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, image, justify,
padx, pady, relief, takefocus, text,
textvariable, underline, wraplength
WIDGET-SPECIFIC OPTIONS
height, state, width
"""
Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget):
"""Listbox widget which can display a list of strings."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a listbox widget with the parent MASTER.
Valid resource names: background, bd, bg, borderwidth, cursor,
exportselection, fg, font, foreground, height, highlightbackground,
highlightcolor, highlightthickness, relief, selectbackground,
selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
width, xscrollcommand, yscrollcommand, listvariable."""
Widget.__init__(self, master, 'listbox', cnf, kw)
def activate(self, index):
"""Activate item identified by INDEX."""
self.tk.call(self._w, 'activate', index)
def bbox(self, *args):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
which encloses the item identified by index in ARGS."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def curselection(self):
"""Return list of indices of currently selected item."""
# XXX Ought to apply self._getints()...
return self.tk.splitlist(self.tk.call(
self._w, 'curselection'))
def delete(self, first, last=None):
"""Delete items from FIRST to LAST (not included)."""
self.tk.call(self._w, 'delete', first, last)
def get(self, first, last=None):
"""Get list of items from FIRST to LAST (not included)."""
if last:
return self.tk.splitlist(self.tk.call(
self._w, 'get', first, last))
else:
return self.tk.call(self._w, 'get', first)
def index(self, index):
"""Return index of item identified with INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def insert(self, index, *elements):
"""Insert ELEMENTS at INDEX."""
self.tk.call((self._w, 'insert', index) + elements)
def nearest(self, y):
"""Get index of item which is nearest to y coordinate Y."""
return getint(self.tk.call(
self._w, 'nearest', y))
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the listbox to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def see(self, index):
"""Scroll such that INDEX is visible."""
self.tk.call(self._w, 'see', index)
def selection_anchor(self, index):
"""Set the fixed end oft the selection to INDEX."""
self.tk.call(self._w, 'selection', 'anchor', index)
select_anchor = selection_anchor
def selection_clear(self, first, last=None):
"""Clear the selection from FIRST to LAST (not included)."""
self.tk.call(self._w,
'selection', 'clear', first, last)
select_clear = selection_clear
def selection_includes(self, index):
"""Return 1 if INDEX is part of the selection."""
return self.tk.getboolean(self.tk.call(
self._w, 'selection', 'includes', index))
select_includes = selection_includes
def selection_set(self, first, last=None):
"""Set the selection from FIRST to LAST (not included) without
changing the currently selected elements."""
self.tk.call(self._w, 'selection', 'set', first, last)
select_set = selection_set
def size(self):
"""Return the number of elements in the listbox."""
return getint(self.tk.call(self._w, 'size'))
def xview(self, *what):
"""Query and change horizontal position of the view."""
if not what:
return self._getdoubles(self.tk.call(self._w, 'xview'))
self.tk.call((self._w, 'xview') + what)
def xview_moveto(self, fraction):
"""Adjust the view in the window so that FRACTION of the
total width of the listbox is off-screen to the left."""
self.tk.call(self._w, 'xview', 'moveto', fraction)
def xview_scroll(self, number, what):
"""Shift the x-view according to NUMBER which is measured in "units" or "pages" (WHAT)."""
self.tk.call(self._w, 'xview', 'scroll', number, what)
def yview(self, *what):
"""Query and change vertical position of the view."""
if not what:
return self._getdoubles(self.tk.call(self._w, 'yview'))
self.tk.call((self._w, 'yview') + what)
def yview_moveto(self, fraction):
"""Adjust the view in the window so that FRACTION of the
total height of the listbox is off-screen to the top."""
self.tk.call(self._w, 'yview', 'moveto', fraction)
def yview_scroll(self, number, what):
"""Shift the y-view according to NUMBER which is measured in "units" or "pages" (WHAT)."""
self.tk.call(self._w, 'yview', 'scroll', number, what)
def itemcget(self, index, option):
"""Return the resource value for an ITEM and an OPTION."""
return self.tk.call(
(self._w, 'itemcget') + (index, '-'+option))
def itemconfigure(self, index, cnf=None, **kw):
"""Configure resources of an ITEM.
The values for resources are specified as keyword arguments.
To get an overview about the allowed keyword arguments
call the method without arguments.
Valid resource names: background, bg, foreground, fg,
selectbackground, selectforeground."""
return self._configure(('itemconfigure', index), cnf, kw)
itemconfig = itemconfigure
class Menu(Widget):
"""Menu widget which allows to display menu bars, pull-down menus and pop-up menus."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct menu widget with the parent MASTER.
Valid resource names: activebackground, activeborderwidth,
activeforeground, background, bd, bg, borderwidth, cursor,
disabledforeground, fg, font, foreground, postcommand, relief,
selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
Widget.__init__(self, master, 'menu', cnf, kw)
def tk_bindForTraversal(self):
pass # obsolete since Tk 4.0
def tk_mbPost(self):
self.tk.call('tk_mbPost', self._w)
def tk_mbUnpost(self):
self.tk.call('tk_mbUnpost')
def tk_traverseToMenu(self, char):
self.tk.call('tk_traverseToMenu', self._w, char)
def tk_traverseWithinMenu(self, char):
self.tk.call('tk_traverseWithinMenu', self._w, char)
def tk_getMenuButtons(self):
return self.tk.call('tk_getMenuButtons', self._w)
def tk_nextMenu(self, count):
self.tk.call('tk_nextMenu', count)
def tk_nextMenuEntry(self, count):
self.tk.call('tk_nextMenuEntry', count)
def tk_invokeMenu(self):
self.tk.call('tk_invokeMenu', self._w)
def tk_firstMenu(self):
self.tk.call('tk_firstMenu', self._w)
def tk_mbButtonDown(self):
self.tk.call('tk_mbButtonDown', self._w)
def tk_popup(self, x, y, entry=""):
"""Post the menu at position X,Y with entry ENTRY."""
self.tk.call('tk_popup', self._w, x, y, entry)
def activate(self, index):
"""Activate entry at INDEX."""
self.tk.call(self._w, 'activate', index)
def add(self, itemType, cnf={}, **kw):
"""Internal function."""
self.tk.call((self._w, 'add', itemType) +
self._options(cnf, kw))
def add_cascade(self, cnf={}, **kw):
"""Add hierarchical menu item."""
self.add('cascade', cnf or kw)
def add_checkbutton(self, cnf={}, **kw):
"""Add checkbutton menu item."""
self.add('checkbutton', cnf or kw)
def add_command(self, cnf={}, **kw):
"""Add command menu item."""
self.add('command', cnf or kw)
def add_radiobutton(self, cnf={}, **kw):
"""Addd radio menu item."""
self.add('radiobutton', cnf or kw)
def add_separator(self, cnf={}, **kw):
"""Add separator."""
self.add('separator', cnf or kw)
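# Illustrative usage sketch (not part of the original source): building a
# menubar with a cascading File menu.
#   menubar = Menu(root)
#   filemenu = Menu(menubar, tearoff=0)
#   filemenu.add_command(label='Open...', command=lambda: print('open'))
#   filemenu.add_separator()
#   filemenu.add_command(label='Exit', command=root.destroy)
#   menubar.add_cascade(label='File', menu=filemenu)
#   root.config(menu=menubar)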
def insert(self, index, itemType, cnf={}, **kw):
"""Internal function."""
self.tk.call((self._w, 'insert', index, itemType) +
self._options(cnf, kw))
def insert_cascade(self, index, cnf={}, **kw):
"""Add hierarchical menu item at INDEX."""
self.insert(index, 'cascade', cnf or kw)
def insert_checkbutton(self, index, cnf={}, **kw):
"""Add checkbutton menu item at INDEX."""
self.insert(index, 'checkbutton', cnf or kw)
def insert_command(self, index, cnf={}, **kw):
"""Add command menu item at INDEX."""
self.insert(index, 'command', cnf or kw)
def insert_radiobutton(self, index, cnf={}, **kw):
"""Addd radio menu item at INDEX."""
self.insert(index, 'radiobutton', cnf or kw)
def insert_separator(self, index, cnf={}, **kw):
"""Add separator at INDEX."""
self.insert(index, 'separator', cnf or kw)
def delete(self, index1, index2=None):
"""Delete menu items between INDEX1 and INDEX2 (included)."""
if index2 is None:
index2 = index1
num_index1, num_index2 = self.index(index1), self.index(index2)
if (num_index1 is None) or (num_index2 is None):
num_index1, num_index2 = 0, -1
for i in range(num_index1, num_index2 + 1):
if 'command' in self.entryconfig(i):
c = str(self.entrycget(i, 'command'))
if c:
self.deletecommand(c)
self.tk.call(self._w, 'delete', index1, index2)
def entrycget(self, index, option):
"""Return the resource value of an menu item for OPTION at INDEX."""
return self.tk.call(self._w, 'entrycget', index, '-' + option)
def entryconfigure(self, index, cnf=None, **kw):
"""Configure a menu item at INDEX."""
return self._configure(('entryconfigure', index), cnf, kw)
entryconfig = entryconfigure
def index(self, index):
"""Return the index of a menu item identified by INDEX."""
i = self.tk.call(self._w, 'index', index)
if i == 'none': return None
return getint(i)
def invoke(self, index):
"""Invoke a menu item identified by INDEX and execute
the associated command."""
return self.tk.call(self._w, 'invoke', index)
def post(self, x, y):
"""Display a menu at position X,Y."""
self.tk.call(self._w, 'post', x, y)
def type(self, index):
"""Return the type of the menu item at INDEX."""
return self.tk.call(self._w, 'type', index)
def unpost(self):
"""Unmap a menu."""
self.tk.call(self._w, 'unpost')
def yposition(self, index):
"""Return the y-position of the topmost pixel of the menu item at INDEX."""
return getint(self.tk.call(
self._w, 'yposition', index))
class Menubutton(Widget):
"""Menubutton widget, obsolete since Tk8.0."""
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
"""Message widget to display multiline text. Obsolete since Label does it too."""
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
"""Radiobutton widget which shows only one of several buttons in on-state."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a radiobutton widget with the parent MASTER.
Valid resource names: activebackground, activeforeground, anchor,
background, bd, bg, bitmap, borderwidth, command, cursor,
disabledforeground, fg, font, foreground, height,
highlightbackground, highlightcolor, highlightthickness, image,
indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
state, takefocus, text, textvariable, underline, value, variable,
width, wraplength."""
Widget.__init__(self, master, 'radiobutton', cnf, kw)
def deselect(self):
"""Put the button in off-state."""
self.tk.call(self._w, 'deselect')
def flash(self):
"""Flash the button."""
self.tk.call(self._w, 'flash')
def invoke(self):
"""Toggle the button and invoke a command if given as resource."""
return self.tk.call(self._w, 'invoke')
def select(self):
"""Put the button in on-state."""
self.tk.call(self._w, 'select')
class Scale(Widget):
"""Scale widget which can display a numerical scale."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a scale widget with the parent MASTER.
Valid resource names: activebackground, background, bigincrement, bd,
bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
highlightbackground, highlightcolor, highlightthickness, label,
length, orient, relief, repeatdelay, repeatinterval, resolution,
showvalue, sliderlength, sliderrelief, state, takefocus,
tickinterval, to, troughcolor, variable, width."""
Widget.__init__(self, master, 'scale', cnf, kw)
def get(self):
"""Get the current value as integer or float."""
value = self.tk.call(self._w, 'get')
try:
return getint(value)
except ValueError:
return getdouble(value)
def set(self, value):
"""Set the value to VALUE."""
self.tk.call(self._w, 'set', value)
def coords(self, value=None):
"""Return a tuple (X,Y) of the point along the centerline of the
trough that corresponds to VALUE or the current value if None is
given."""
return self._getints(self.tk.call(self._w, 'coords', value))
def identify(self, x, y):
"""Return where the point X,Y lies. Valid return values are "slider",
"though1" and "though2"."""
return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
"""Scrollbar widget which displays a slider at a certain position."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a scrollbar widget with the parent MASTER.
Valid resource names: activebackground, activerelief,
background, bd, bg, borderwidth, command, cursor,
elementborderwidth, highlightbackground,
highlightcolor, highlightthickness, jump, orient,
relief, repeatdelay, repeatinterval, takefocus,
troughcolor, width."""
Widget.__init__(self, master, 'scrollbar', cnf, kw)
def activate(self, index):
"""Display the element at INDEX with activebackground and activerelief.
INDEX can be "arrow1","slider" or "arrow2"."""
self.tk.call(self._w, 'activate', index)
def delta(self, deltax, deltay):
"""Return the fractional change of the scrollbar setting if it
would be moved by DELTAX or DELTAY pixels."""
return getdouble(
self.tk.call(self._w, 'delta', deltax, deltay))
def fraction(self, x, y):
"""Return the fractional value which corresponds to a slider
position of X,Y."""
return getdouble(self.tk.call(self._w, 'fraction', x, y))
def identify(self, x, y):
"""Return the element under position X,Y as one of
"arrow1","slider","arrow2" or ""."""
return self.tk.call(self._w, 'identify', x, y)
def get(self):
"""Return the current fractional values (upper and lower end)
of the slider position."""
return self._getdoubles(self.tk.call(self._w, 'get'))
def set(self, *args):
"""Set the fractional values of the slider position (upper and
lower ends as value between 0 and 1)."""
self.tk.call((self._w, 'set') + args)
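# Illustrative usage sketch (not part of the original source): the usual
# scrollbar/listbox pairing, where the listbox reports its view via set()
# and the scrollbar drives yview().
#   sb = Scrollbar(root, orient=VERTICAL)
#   lb = Listbox(root, yscrollcommand=sb.set)
#   sb.config(command=lb.yview)
#   sb.pack(side=RIGHT, fill=Y)
#   lb.pack(side=LEFT, fill=BOTH, expand=True)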
class Text(Widget):
"""Text widget which can display text in various forms."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a text widget with the parent MASTER.
STANDARD OPTIONS
background, borderwidth, cursor,
exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, padx, pady,
relief, selectbackground,
selectborderwidth, selectforeground,
setgrid, takefocus,
xscrollcommand, yscrollcommand,
WIDGET-SPECIFIC OPTIONS
autoseparators, height, maxundo,
spacing1, spacing2, spacing3,
state, tabs, undo, width, wrap,
"""
Widget.__init__(self, master, 'text', cnf, kw)
def bbox(self, *args):
"""Return a tuple of (x,y,width,height) which gives the bounding
box of the visible part of the character at the index in ARGS."""
return self._getints(
self.tk.call((self._w, 'bbox') + args)) or None
def tk_textSelectTo(self, index):
self.tk.call('tk_textSelectTo', self._w, index)
def tk_textBackspace(self):
self.tk.call('tk_textBackspace', self._w)
def tk_textIndexCloser(self, a, b, c):
self.tk.call('tk_textIndexCloser', self._w, a, b, c)
def tk_textResetAnchor(self, index):
self.tk.call('tk_textResetAnchor', self._w, index)
def compare(self, index1, op, index2):
"""Return whether between index INDEX1 and index INDEX2 the
relation OP is satisfied. OP is one of <, <=, ==, >=, >, or !=."""
return self.tk.getboolean(self.tk.call(
self._w, 'compare', index1, op, index2))
def debug(self, boolean=None):
"""Turn on the internal consistency checks of the B-Tree inside the text
widget according to BOOLEAN."""
return self.tk.getboolean(self.tk.call(
self._w, 'debug', boolean))
def delete(self, index1, index2=None):
"""Delete the characters between INDEX1 and INDEX2 (not included)."""
self.tk.call(self._w, 'delete', index1, index2)
def dlineinfo(self, index):
"""Return tuple (x,y,width,height,baseline) giving the bounding box
and baseline position of the visible part of the line containing
the character at INDEX."""
return self._getints(self.tk.call(self._w, 'dlineinfo', index))
def dump(self, index1, index2=None, command=None, **kw):
"""Return the contents of the widget between index1 and index2.
        The type of contents returned is filtered based on the keyword
parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
given and true, then the corresponding items are returned. The result
is a list of triples of the form (key, value, index). If none of the
keywords are true then 'all' is used by default.
If the 'command' argument is given, it is called once for each element
of the list of triples, with the values of each triple serving as the
arguments to the function. In this case the list is not returned."""
args = []
func_name = None
result = None
if not command:
# Never call the dump command without the -command flag, since the
# output could involve Tcl quoting and would be a pain to parse
# right. Instead just set the command to build a list of triples
# as if we had done the parsing.
result = []
def append_triple(key, value, index, result=result):
result.append((key, value, index))
command = append_triple
try:
if not isinstance(command, str):
func_name = command = self._register(command)
args += ["-command", command]
for key in kw:
if kw[key]: args.append("-" + key)
args.append(index1)
if index2:
args.append(index2)
self.tk.call(self._w, "dump", *args)
return result
finally:
if func_name:
self.deletecommand(func_name)
## new in tk8.4
def edit(self, *args):
"""Internal method
This method controls the undo mechanism and
the modified flag. The exact behavior of the
command depends on the option argument that
follows the edit argument. The following forms
of the command are currently supported:
edit_modified, edit_redo, edit_reset, edit_separator
and edit_undo
"""
return self.tk.call(self._w, 'edit', *args)
def edit_modified(self, arg=None):
"""Get or Set the modified flag
If arg is not specified, returns the modified
flag of the widget. The insert, delete, edit undo and
edit redo commands or the user can set or clear the
        modified flag. If arg is specified, sets the
modified flag of the widget to arg.
"""
return self.edit("modified", arg)
def edit_redo(self):
"""Redo the last undone edit
When the undo option is true, reapplies the last
undone edits provided no other edits were done since
then. Generates an error when the redo stack is empty.
Does nothing when the undo option is false.
"""
return self.edit("redo")
def edit_reset(self):
"""Clears the undo and redo stacks
"""
return self.edit("reset")
def edit_separator(self):
"""Inserts a separator (boundary) on the undo stack.
Does nothing when the undo option is false
"""
return self.edit("separator")
def edit_undo(self):
"""Undoes the last edit action
If the undo option is true. An edit action is defined
as all the insert and delete commands that are recorded
on the undo stack in between two separators. Generates
an error when the undo stack is empty. Does nothing
when the undo option is false
"""
return self.edit("undo")
def get(self, index1, index2=None):
"""Return the text from INDEX1 to INDEX2 (not included)."""
return self.tk.call(self._w, 'get', index1, index2)
# (Image commands are new in 8.0)
def image_cget(self, index, option):
"""Return the value of OPTION of an embedded image at INDEX."""
if option[:1] != "-":
option = "-" + option
if option[-1:] == "_":
option = option[:-1]
return self.tk.call(self._w, "image", "cget", index, option)
def image_configure(self, index, cnf=None, **kw):
"""Configure an embedded image at INDEX."""
return self._configure(('image', 'configure', index), cnf, kw)
def image_create(self, index, cnf={}, **kw):
"""Create an embedded image at INDEX."""
return self.tk.call(
self._w, "image", "create", index,
*self._options(cnf, kw))
def image_names(self):
"""Return all names of embedded images in this widget."""
return self.tk.call(self._w, "image", "names")
def index(self, index):
"""Return the index in the form line.char for INDEX."""
return str(self.tk.call(self._w, 'index', index))
def insert(self, index, chars, *args):
"""Insert CHARS before the characters at INDEX. An additional
tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
self.tk.call((self._w, 'insert', index, chars) + args)
def mark_gravity(self, markName, direction=None):
"""Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
Return the current value if None is given for DIRECTION."""
return self.tk.call(
(self._w, 'mark', 'gravity', markName, direction))
def mark_names(self):
"""Return all mark names."""
return self.tk.splitlist(self.tk.call(
self._w, 'mark', 'names'))
def mark_set(self, markName, index):
"""Set mark MARKNAME before the character at INDEX."""
self.tk.call(self._w, 'mark', 'set', markName, index)
def mark_unset(self, *markNames):
"""Delete all marks in MARKNAMES."""
self.tk.call((self._w, 'mark', 'unset') + markNames)
def mark_next(self, index):
"""Return the name of the next mark after INDEX."""
return self.tk.call(self._w, 'mark', 'next', index) or None
def mark_previous(self, index):
"""Return the name of the previous mark before INDEX."""
return self.tk.call(self._w, 'mark', 'previous', index) or None
def scan_mark(self, x, y):
"""Remember the current X, Y coordinates."""
self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the text to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
def search(self, pattern, index, stopindex=None,
forwards=None, backwards=None, exact=None,
regexp=None, nocase=None, count=None, elide=None):
"""Search PATTERN beginning from INDEX until STOPINDEX.
Return the index of the first character of a match or an empty string."""
args = [self._w, 'search']
if forwards: args.append('-forwards')
if backwards: args.append('-backwards')
if exact: args.append('-exact')
if regexp: args.append('-regexp')
if nocase: args.append('-nocase')
if elide: args.append('-elide')
if count: args.append('-count'); args.append(count)
        if pattern and pattern[0] == '-': args.append('--')
args.append(pattern)
args.append(index)
if stopindex: args.append(stopindex)
return self.tk.call(tuple(args))
def see(self, index):
"""Scroll such that the character at INDEX is visible."""
self.tk.call(self._w, 'see', index)
def tag_add(self, tagName, index1, *args):
"""Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
Additional pairs of indices may follow in ARGS."""
self.tk.call(
(self._w, 'tag', 'add', tagName, index1) + args)
def tag_unbind(self, tagName, sequence, funcid=None):
"""Unbind for all characters with TAGNAME for event SEQUENCE the
function identified with FUNCID."""
self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
if funcid:
self.deletecommand(funcid)
def tag_bind(self, tagName, sequence, func, add=None):
"""Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or whether it will
replace the previous function. See bind for the return value."""
return self._bind((self._w, 'tag', 'bind', tagName),
sequence, func, add)
def tag_cget(self, tagName, option):
"""Return the value of OPTION for tag TAGNAME."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'tag', 'cget', tagName, option)
def tag_configure(self, tagName, cnf=None, **kw):
"""Configure a tag TAGNAME."""
return self._configure(('tag', 'configure', tagName), cnf, kw)
tag_config = tag_configure
def tag_delete(self, *tagNames):
"""Delete all tags in TAGNAMES."""
self.tk.call((self._w, 'tag', 'delete') + tagNames)
def tag_lower(self, tagName, belowThis=None):
"""Change the priority of tag TAGNAME such that it is lower
than the priority of BELOWTHIS."""
self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)
def tag_names(self, index=None):
"""Return a list of all tag names."""
return self.tk.splitlist(
self.tk.call(self._w, 'tag', 'names', index))
def tag_nextrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched forward from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'nextrange', tagName, index1, index2))
def tag_prevrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched backwards from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'prevrange', tagName, index1, index2))
def tag_raise(self, tagName, aboveThis=None):
"""Change the priority of tag TAGNAME such that it is higher
than the priority of ABOVETHIS."""
self.tk.call(
self._w, 'tag', 'raise', tagName, aboveThis)
def tag_ranges(self, tagName):
"""Return a list of ranges of text which have tag TAGNAME."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'ranges', tagName))
def tag_remove(self, tagName, index1, index2=None):
"""Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
self.tk.call(
self._w, 'tag', 'remove', tagName, index1, index2)
def window_cget(self, index, option):
"""Return the value of OPTION of an embedded window at INDEX."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'window', 'cget', index, option)
def window_configure(self, index, cnf=None, **kw):
"""Configure an embedded window at INDEX."""
return self._configure(('window', 'configure', index), cnf, kw)
window_config = window_configure
def window_create(self, index, cnf={}, **kw):
"""Create a window at INDEX."""
self.tk.call(
(self._w, 'window', 'create', index)
+ self._options(cnf, kw))
def window_names(self):
"""Return all names of embedded windows in this widget."""
return self.tk.splitlist(
self.tk.call(self._w, 'window', 'names'))
def xview(self, *what):
"""Query and change horizontal position of the view."""
if not what:
return self._getdoubles(self.tk.call(self._w, 'xview'))
self.tk.call((self._w, 'xview') + what)
def xview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
        total width of the text is off-screen to the left."""
self.tk.call(self._w, 'xview', 'moveto', fraction)
def xview_scroll(self, number, what):
"""Shift the x-view according to NUMBER which is measured
in "units" or "pages" (WHAT)."""
self.tk.call(self._w, 'xview', 'scroll', number, what)
def yview(self, *what):
"""Query and change vertical position of the view."""
if not what:
return self._getdoubles(self.tk.call(self._w, 'yview'))
self.tk.call((self._w, 'yview') + what)
def yview_moveto(self, fraction):
"""Adjusts the view in the window so that FRACTION of the
        total height of the text is off-screen to the top."""
self.tk.call(self._w, 'yview', 'moveto', fraction)
def yview_scroll(self, number, what):
"""Shift the y-view according to NUMBER which is measured
in "units" or "pages" (WHAT)."""
self.tk.call(self._w, 'yview', 'scroll', number, what)
def yview_pickplace(self, *what):
"""Obsolete function, use see."""
self.tk.call((self._w, 'yview', '-pickplace') + what)
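
# Illustrative usage sketch of the Text operations documented above (insert,
# tags, marks and search).  The tag name and indices are illustrative
# assumptions chosen only for demonstration.
def _example_text_usage(master):
    """Minimal sketch: insert text, tag a range and search for a pattern."""
    text = Text(master, undo=True)
    text.insert('1.0', "Hello, Tkinter world\n")
    text.tag_configure('highlight', background='yellow')
    text.tag_add('highlight', '1.7', '1.14')          # tags the word "Tkinter"
    hit = text.search('world', '1.0', stopindex=END)  # index string or ''
    if hit:
        text.mark_set('found', hit)
    text.pack()
    return text
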
class _setit:
"""Internal class. It wraps the command in the widget OptionMenu."""
def __init__(self, var, value, callback=None):
self.__value = value
self.__var = var
self.__callback = callback
def __call__(self, *args):
self.__var.set(self.__value)
if self.__callback:
self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
"""OptionMenu which allows the user to select a value from a menu."""
def __init__(self, master, variable, value, *values, **kwargs):
"""Construct an optionmenu widget with the parent MASTER, with
the resource textvariable set to VARIABLE, the initially selected
value VALUE, the other menu values VALUES and an additional
keyword argument command."""
kw = {"borderwidth": 2, "textvariable": variable,
"indicatoron": 1, "relief": RAISED, "anchor": "c",
"highlightthickness": 2}
Widget.__init__(self, master, "menubutton", kw)
self.widgetName = 'tk_optionMenu'
menu = self.__menu = Menu(self, name="menu", tearoff=0)
self.menuname = menu._w
# 'command' is the only supported keyword
callback = kwargs.get('command')
if 'command' in kwargs:
del kwargs['command']
if kwargs:
            raise TclError('unknown option -' + next(iter(kwargs)))
menu.add_command(label=value,
command=_setit(variable, value, callback))
for v in values:
menu.add_command(label=v,
command=_setit(variable, v, callback))
self["menu"] = menu
def __getitem__(self, name):
if name == 'menu':
return self.__menu
return Widget.__getitem__(self, name)
def destroy(self):
"""Destroy this widget and the associated menu."""
Menubutton.destroy(self)
self.__menu = None
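
# Illustrative usage sketch of the OptionMenu/StringVar pairing and the
# optional 'command' callback.  The helper name, callback name and menu
# values are illustrative assumptions, not part of the Tk API.
def _example_optionmenu_usage(master):
    """Minimal sketch: an OptionMenu driven by a StringVar with a callback."""
    var = StringVar(master)
    var.set('red')                             # initially displayed value
    def _chosen(value):
        # _setit passes the selected value to the callback
        print("picked", value)
    menu = OptionMenu(master, var, 'red', 'green', 'blue', command=_chosen)
    menu.pack()
    return menu, var
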
class Image:
"""Base class for images."""
_last_id = 0
def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
self.name = None
if not master:
master = _default_root
if not master:
raise RuntimeError('Too early to create image')
self.tk = master.tk
if not name:
Image._last_id += 1
name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
# The following is needed for systems where id(x)
# can return a negative number, such as Linux/m68k:
if name[0] == '-': name = '_' + name[1:]
if kw and cnf: cnf = _cnfmerge((cnf, kw))
elif kw: cnf = kw
options = ()
for k, v in cnf.items():
if hasattr(v, '__call__'):
v = self._register(v)
options = options + ('-'+k, v)
self.tk.call(('image', 'create', imgtype, name,) + options)
self.name = name
def __str__(self): return self.name
def __del__(self):
if self.name:
try:
self.tk.call('image', 'delete', self.name)
except TclError:
# May happen if the root was destroyed
pass
def __setitem__(self, key, value):
self.tk.call(self.name, 'configure', '-'+key, value)
def __getitem__(self, key):
return self.tk.call(self.name, 'configure', '-'+key)
def configure(self, **kw):
"""Configure the image."""
res = ()
for k, v in _cnfmerge(kw).items():
if v is not None:
if k[-1] == '_': k = k[:-1]
if hasattr(v, '__call__'):
v = self._register(v)
res = res + ('-'+k, v)
self.tk.call((self.name, 'config') + res)
config = configure
def height(self):
"""Return the height of the image."""
return getint(
self.tk.call('image', 'height', self.name))
def type(self):
"""Return the type of the imgage, e.g. "photo" or "bitmap"."""
return self.tk.call('image', 'type', self.name)
def width(self):
"""Return the width of the image."""
return getint(
self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
"""Widget which can display colored images in GIF, PPM/PGM format."""
def __init__(self, name=None, cnf={}, master=None, **kw):
"""Create an image with NAME.
Valid resource names: data, format, file, gamma, height, palette,
width."""
Image.__init__(self, 'photo', name, cnf, master, **kw)
def blank(self):
"""Display a transparent image."""
self.tk.call(self.name, 'blank')
def cget(self, option):
"""Return the value of OPTION."""
return self.tk.call(self.name, 'cget', '-' + option)
# XXX config
def __getitem__(self, key):
return self.tk.call(self.name, 'cget', '-' + key)
# XXX copy -from, -to, ...?
def copy(self):
"""Return a new PhotoImage with the same image as this widget."""
destImage = PhotoImage()
self.tk.call(destImage, 'copy', self.name)
return destImage
def zoom(self,x,y=''):
"""Return a new PhotoImage with the same image as this widget
but zoom it with X and Y."""
destImage = PhotoImage()
if y=='': y=x
self.tk.call(destImage, 'copy', self.name, '-zoom',x,y)
return destImage
def subsample(self,x,y=''):
"""Return a new PhotoImage based on the same image as this widget
but use only every Xth or Yth pixel."""
destImage = PhotoImage()
if y=='': y=x
self.tk.call(destImage, 'copy', self.name, '-subsample',x,y)
return destImage
def get(self, x, y):
"""Return the color (red, green, blue) of the pixel at X,Y."""
return self.tk.call(self.name, 'get', x, y)
def put(self, data, to=None):
"""Put row formated colors to image starting from
position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
args = (self.name, 'put', data)
if to:
if to[0] == '-to':
to = to[1:]
args = args + ('-to',) + tuple(to)
self.tk.call(args)
# XXX read
def write(self, filename, format=None, from_coords=None):
"""Write image to file FILENAME in FORMAT starting from
position FROM_COORDS."""
args = (self.name, 'write', filename)
if format:
args = args + ('-format', format)
if from_coords:
args = args + ('-from',) + tuple(from_coords)
self.tk.call(args)
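
# Illustrative usage sketch: builds a tiny PhotoImage in memory with put(),
# reads a pixel back with get() and enlarges it with zoom().  The sizes and
# colors are arbitrary illustrative choices.
def _example_photoimage_usage(master):
    """Minimal sketch: create, fill, inspect and zoom a PhotoImage."""
    img = PhotoImage(master=master, width=2, height=1)
    img.put("{red blue}", to=(0, 0))   # one row of two pixels
    print(img.get(0, 0))               # pixel colour, e.g. "255 0 0"
    bigger = img.zoom(10)              # 10x enlargement in both directions
    Label(master, image=bigger).pack()
    # keep references to the images, otherwise they may be garbage collected
    return img, bigger
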
class BitmapImage(Image):
"""Widget which can display a bitmap."""
def __init__(self, name=None, cnf={}, master=None, **kw):
"""Create a bitmap with NAME.
Valid resource names: background, data, file, foreground, maskdata, maskfile."""
Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names(): return _default_root.tk.call('image', 'names')
def image_types(): return _default_root.tk.call('image', 'types')
class Spinbox(Widget):
"""spinbox widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a spinbox widget with the parent MASTER.
STANDARD OPTIONS
activebackground, background, borderwidth,
cursor, exportselection, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, insertbackground,
insertborderwidth, insertofftime,
insertontime, insertwidth, justify, relief,
repeatdelay, repeatinterval,
selectbackground, selectborderwidth
selectforeground, takefocus, textvariable
xscrollcommand.
WIDGET-SPECIFIC OPTIONS
buttonbackground, buttoncursor,
buttondownrelief, buttonuprelief,
command, disabledbackground,
disabledforeground, format, from,
invalidcommand, increment,
readonlybackground, state, to,
            validate, validatecommand, values,
width, wrap,
"""
Widget.__init__(self, master, 'spinbox', cnf, kw)
def bbox(self, index):
"""Return a tuple of X1,Y1,X2,Y2 coordinates for a
rectangle which encloses the character given by index.
The first two elements of the list give the x and y
coordinates of the upper-left corner of the screen
area covered by the character (in pixels relative
to the widget) and the last two elements give the
width and height of the character, in pixels. The
bounding box may refer to a region outside the
visible area of the window.
"""
return self.tk.call(self._w, 'bbox', index)
def delete(self, first, last=None):
"""Delete one or more elements of the spinbox.
First is the index of the first character to delete,
and last is the index of the character just after
the last one to delete. If last isn't specified it
defaults to first+1, i.e. a single character is
deleted. This command returns an empty string.
"""
return self.tk.call(self._w, 'delete', first, last)
def get(self):
"""Returns the spinbox's string"""
return self.tk.call(self._w, 'get')
def icursor(self, index):
"""Alter the position of the insertion cursor.
The insertion cursor will be displayed just before
the character given by index. Returns an empty string
"""
return self.tk.call(self._w, 'icursor', index)
def identify(self, x, y):
"""Returns the name of the widget at position x, y
Return value is one of: none, buttondown, buttonup, entry
"""
return self.tk.call(self._w, 'identify', x, y)
def index(self, index):
"""Returns the numerical index corresponding to index
"""
return self.tk.call(self._w, 'index', index)
def insert(self, index, s):
"""Insert string s at index
Returns an empty string.
"""
return self.tk.call(self._w, 'insert', index, s)
def invoke(self, element):
"""Causes the specified element to be invoked
The element could be buttondown or buttonup
triggering the action associated with it.
"""
return self.tk.call(self._w, 'invoke', element)
def scan(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'scan') + args)) or ()
def scan_mark(self, x):
"""Records x and the current view in the spinbox window;
used in conjunction with later scan dragto commands.
Typically this command is associated with a mouse button
press in the widget. It returns an empty string.
"""
return self.scan("mark", x)
def scan_dragto(self, x):
"""Compute the difference between the given x argument
and the x argument to the last scan mark command
It then adjusts the view left or right by 10 times the
difference in x-coordinates. This command is typically
associated with mouse motion events in the widget, to
produce the effect of dragging the spinbox at high speed
through the window. The return value is an empty string.
"""
return self.scan("dragto", x)
def selection(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'selection') + args)) or ()
def selection_adjust(self, index):
"""Locate the end of the selection nearest to the character
        given by index, then adjust that end of the selection to be at
        index (i.e. including but not going beyond index). The other
end of the selection is made the anchor point for future
select to commands. If the selection isn't currently in
the spinbox, then a new selection is created to include
the characters between index and the most recent selection
anchor point, inclusive. Returns an empty string.
"""
return self.selection("adjust", index)
def selection_clear(self):
"""Clear the selection
If the selection isn't in this widget then the
command has no effect. Returns an empty string.
"""
return self.selection("clear")
def selection_element(self, element=None):
"""Sets or gets the currently selected element.
If a spinbutton element is specified, it will be
displayed depressed
"""
return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
"""labelframe widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a labelframe widget with the parent MASTER.
STANDARD OPTIONS
borderwidth, cursor, font, foreground,
highlightbackground, highlightcolor,
highlightthickness, padx, pady, relief,
takefocus, text
WIDGET-SPECIFIC OPTIONS
background, class, colormap, container,
height, labelanchor, labelwidget,
visual, width
"""
Widget.__init__(self, master, 'labelframe', cnf, kw)
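
# Illustrative usage sketch: groups two Checkbuttons inside a titled
# LabelFrame.  The label text and padding values are illustrative assumptions.
def _example_labelframe_usage(master):
    """Minimal sketch: a titled frame grouping related controls."""
    frame = LabelFrame(master, text='Options', padx=5, pady=5)
    Checkbutton(frame, text='enable').pack(anchor=W)
    Checkbutton(frame, text='verbose').pack(anchor=W)
    frame.pack(padx=10, pady=10)
    return frame
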
########################################################################
class PanedWindow(Widget):
"""panedwindow widget."""
def __init__(self, master=None, cnf={}, **kw):
"""Construct a panedwindow widget with the parent MASTER.
STANDARD OPTIONS
background, borderwidth, cursor, height,
orient, relief, width
WIDGET-SPECIFIC OPTIONS
handlepad, handlesize, opaqueresize,
sashcursor, sashpad, sashrelief,
sashwidth, showhandle,
"""
Widget.__init__(self, master, 'panedwindow', cnf, kw)
def add(self, child, **kw):
"""Add a child widget to the panedwindow in a new pane.
The child argument is the name of the child widget
followed by pairs of arguments that specify how to
manage the windows. Options may have any of the values
accepted by the configure subcommand.
"""
self.tk.call((self._w, 'add', child) + self._options(kw))
def remove(self, child):
"""Remove the pane containing child from the panedwindow
All geometry management options for child will be forgotten.
"""
self.tk.call(self._w, 'forget', child)
forget=remove
def identify(self, x, y):
"""Identify the panedwindow component at point x, y
If the point is over a sash or a sash handle, the result
is a two element list containing the index of the sash or
handle, and a word indicating whether it is over a sash
or a handle, such as {0 sash} or {2 handle}. If the point
is over any other part of the panedwindow, the result is
an empty list.
"""
return self.tk.call(self._w, 'identify', x, y)
def proxy(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'proxy') + args)) or ()
def proxy_coord(self):
"""Return the x and y pair of the most recent proxy location
"""
return self.proxy("coord")
def proxy_forget(self):
"""Remove the proxy from the display.
"""
return self.proxy("forget")
def proxy_place(self, x, y):
"""Place the proxy at the given x and y coordinates.
"""
return self.proxy("place", x, y)
def sash(self, *args):
"""Internal function."""
return self._getints(
self.tk.call((self._w, 'sash') + args)) or ()
def sash_coord(self, index):
"""Return the current x and y pair for the sash given by index.
Index must be an integer between 0 and 1 less than the
number of panes in the panedwindow. The coordinates given are
those of the top left corner of the region containing the sash.
        The related Tcl subcommand "pathName sash dragto index x y"
        computes the difference between the given coordinates and the
        coordinates given to the last sash mark command for the given
        sash, and then moves that sash by the computed difference.
        """
return self.sash("coord", index)
def sash_mark(self, index):
"""Records x and y for the sash given by index;
Used in conjunction with later dragto commands to move the sash.
"""
return self.sash("mark", index)
def sash_place(self, index, x, y):
"""Place the sash given by index at the given coordinates
"""
return self.sash("place", index, x, y)
def panecget(self, child, option):
"""Query a management option for window.
Option may be any value allowed by the paneconfigure subcommand
"""
return self.tk.call(
(self._w, 'panecget') + (child, '-'+option))
def paneconfigure(self, tagOrId, cnf=None, **kw):
"""Query or modify the management options for window.
If no option is specified, returns a list describing all
of the available options for pathName. If option is
specified with no value, then the command returns a list
describing the one named option (this list will be identical
to the corresponding sublist of the value returned if no
option is specified). If one or more option-value pairs are
specified, then the command modifies the given widget
option(s) to have the given value(s); in this case the
command returns an empty string. The following options
are supported:
after window
Insert the window after the window specified. window
should be the name of a window already managed by pathName.
before window
Insert the window before the window specified. window
should be the name of a window already managed by pathName.
height size
Specify a height for the window. The height will be the
outer dimension of the window including its border, if
any. If size is an empty string, or if -height is not
specified, then the height requested internally by the
window will be used initially; the height may later be
adjusted by the movement of sashes in the panedwindow.
Size may be any value accepted by Tk_GetPixels.
minsize n
Specifies that the size of the window cannot be made
less than n. This constraint only affects the size of
the widget in the paned dimension -- the x dimension
for horizontal panedwindows, the y dimension for
vertical panedwindows. May be any value accepted by
Tk_GetPixels.
padx n
Specifies a non-negative value indicating how much
extra space to leave on each side of the window in
the X-direction. The value may have any of the forms
accepted by Tk_GetPixels.
pady n
Specifies a non-negative value indicating how much
extra space to leave on each side of the window in
the Y-direction. The value may have any of the forms
accepted by Tk_GetPixels.
sticky style
If a window's pane is larger than the requested
dimensions of the window, this option may be used
to position (or stretch) the window within its pane.
Style is a string that contains zero or more of the
characters n, s, e or w. The string can optionally
            contain spaces or commas, but they are ignored. Each
letter refers to a side (north, south, east, or west)
that the window will "stick" to. If both n and s
(or e and w) are specified, the window will be
stretched to fill the entire height (or width) of
its cavity.
width size
Specify a width for the window. The width will be
the outer dimension of the window including its
border, if any. If size is an empty string, or
if -width is not specified, then the width requested
internally by the window will be used initially; the
width may later be adjusted by the movement of sashes
in the panedwindow. Size may be any value accepted by
Tk_GetPixels.
"""
if cnf is None and not kw:
cnf = {}
for x in self.tk.split(
self.tk.call(self._w,
'paneconfigure', tagOrId)):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if isinstance(cnf, str) and not kw:
x = self.tk.split(self.tk.call(
self._w, 'paneconfigure', tagOrId, '-'+cnf))
return (x[0][1:],) + x[1:]
self.tk.call((self._w, 'paneconfigure', tagOrId) +
self._options(cnf, kw))
paneconfig = paneconfigure
def panes(self):
"""Returns an ordered list of the child panes."""
return self.tk.call(self._w, 'panes')
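
# Illustrative usage sketch: two panes added with add(); the minsize and
# sticky values are pane options as described in paneconfigure() above,
# chosen here only for demonstration.
def _example_panedwindow_usage(master):
    """Minimal sketch: a horizontal PanedWindow with two labelled panes."""
    pw = PanedWindow(master, orient=HORIZONTAL, showhandle=True)
    left = Label(pw, text='left pane')
    right = Label(pw, text='right pane')
    pw.add(left, minsize=80)           # pane cannot shrink below 80 pixels
    pw.add(right, sticky='nsew')       # stretch to fill its pane
    pw.pack(fill=BOTH, expand=1)
    return pw
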
######################################################################
# Extensions:
class Studbutton(Button):
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'studbutton', cnf, kw)
self.bind('<Any-Enter>', self.tkButtonEnter)
self.bind('<Any-Leave>', self.tkButtonLeave)
self.bind('<1>', self.tkButtonDown)
self.bind('<ButtonRelease-1>', self.tkButtonUp)
class Tributton(Button):
def __init__(self, master=None, cnf={}, **kw):
Widget.__init__(self, master, 'tributton', cnf, kw)
self.bind('<Any-Enter>', self.tkButtonEnter)
self.bind('<Any-Leave>', self.tkButtonLeave)
self.bind('<1>', self.tkButtonDown)
self.bind('<ButtonRelease-1>', self.tkButtonUp)
self['fg'] = self['bg']
self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
root = Tk()
text = "This is Tcl/Tk version %s" % TclVersion
if TclVersion >= 8.1:
text += "\nThis should be a cedilla: \xe7"
label = Label(root, text=text)
label.pack()
test = Button(root, text="Click me!",
command=lambda root=root: root.test.configure(
text="[%s]" % root.test['text']))
test.pack()
root.test = test
quit = Button(root, text="QUIT", command=root.destroy)
quit.pack()
# The following three commands are needed so the window pops
# up on top on Windows...
root.iconify()
root.update()
root.deiconify()
root.mainloop()
if __name__ == '__main__':
_test()
| MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.0/Lib/tkinter/__init__.py | Python | mit | 158,068 |