prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>version_info_updater.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/login/version_info_updater.h"
#include <vector>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/chromeos/chromeos_version.h"
#include "base/string_util.h"
#include "base/stringprintf.h"
#include "base/utf_string_conversions.h"
#include "chrome/browser/browser_process.h"
#include "chrome/browser/chromeos/settings/cros_settings.h"
#include "chrome/browser/chromeos/settings/cros_settings_names.h"
#include "chrome/browser/policy/browser_policy_connector.h"
#include "chrome/browser/policy/device_cloud_policy_manager_chromeos.h"
#include "chrome/common/chrome_notification_types.h"<|fim▁hole|>#include "grit/chromium_strings.h"
#include "grit/generated_resources.h"
#include "grit/theme_resources.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/base/resource/resource_bundle.h"
namespace chromeos {
namespace {
const char* kReportingFlags[] = {
chromeos::kReportDeviceVersionInfo,
chromeos::kReportDeviceActivityTimes,
chromeos::kReportDeviceBootMode,
chromeos::kReportDeviceLocation,
};
}
///////////////////////////////////////////////////////////////////////////////
// VersionInfoUpdater public:
VersionInfoUpdater::VersionInfoUpdater(Delegate* delegate)
: cros_settings_(chromeos::CrosSettings::Get()),
delegate_(delegate),
ALLOW_THIS_IN_INITIALIZER_LIST(weak_pointer_factory_(this)) {
}
VersionInfoUpdater::~VersionInfoUpdater() {
policy::DeviceCloudPolicyManagerChromeOS* policy_manager =
g_browser_process->browser_policy_connector()->
GetDeviceCloudPolicyManager();
if (policy_manager)
policy_manager->core()->store()->RemoveObserver(this);
for (unsigned int i = 0; i < arraysize(kReportingFlags); ++i)
cros_settings_->RemoveSettingsObserver(kReportingFlags[i], this);
}
void VersionInfoUpdater::StartUpdate(bool is_official_build) {
if (base::chromeos::IsRunningOnChromeOS()) {
version_loader_.GetVersion(
is_official_build ? VersionLoader::VERSION_SHORT_WITH_DATE
: VersionLoader::VERSION_FULL,
base::Bind(&VersionInfoUpdater::OnVersion,
weak_pointer_factory_.GetWeakPtr()),
&tracker_);
boot_times_loader_.GetBootTimes(
base::Bind(is_official_build ? &VersionInfoUpdater::OnBootTimesNoop
: &VersionInfoUpdater::OnBootTimes,
weak_pointer_factory_.GetWeakPtr()),
&tracker_);
} else {
UpdateVersionLabel();
}
policy::CloudPolicySubsystem* cloud_policy =
g_browser_process->browser_policy_connector()->
device_cloud_policy_subsystem();
if (cloud_policy) {
// Two-step reset: destroy the old ObserverRegistrar before constructing the
// new one, to avoid a DCHECK violation from adding an already-registered
// observer.
cloud_policy_registrar_.reset();
cloud_policy_registrar_.reset(
new policy::CloudPolicySubsystem::ObserverRegistrar(
cloud_policy, this));
// Ensure that we have up-to-date enterprise info in case enterprise policy
// is already fetched and has finished initialization.
UpdateEnterpriseInfo();
}
policy::DeviceCloudPolicyManagerChromeOS* policy_manager =
g_browser_process->browser_policy_connector()->
GetDeviceCloudPolicyManager();
if (policy_manager) {
policy_manager->core()->store()->AddObserver(this);
// Ensure that we have up-to-date enterprise info in case enterprise policy
// is already fetched and has finished initialization.
UpdateEnterpriseInfo();
}
// Watch for changes to the reporting flags.
for (unsigned int i = 0; i < arraysize(kReportingFlags); ++i)
cros_settings_->AddSettingsObserver(kReportingFlags[i], this);
}
void VersionInfoUpdater::UpdateVersionLabel() {
if (version_text_.empty())
return;
chrome::VersionInfo version_info;
std::string label_text = l10n_util::GetStringFUTF8(
IDS_LOGIN_VERSION_LABEL_FORMAT,
l10n_util::GetStringUTF16(IDS_PRODUCT_NAME),
UTF8ToUTF16(version_info.Version()),
UTF8ToUTF16(version_text_));
// Workaround for incorrect width calculation in old fonts.
// TODO(glotov): remove the following line when new fonts are used.
label_text += ' ';
if (delegate_)
delegate_->OnOSVersionLabelTextUpdated(label_text);
}
void VersionInfoUpdater::UpdateEnterpriseInfo() {
SetEnterpriseInfo(
g_browser_process->browser_policy_connector()->GetEnterpriseDomain());
}
void VersionInfoUpdater::SetEnterpriseInfo(const std::string& domain_name) {
if (domain_name != enterprise_domain_text_) {
enterprise_domain_text_ = domain_name;
UpdateVersionLabel();
// Update the notification about device status reporting.
if (delegate_) {
std::string enterprise_info;
if (!domain_name.empty()) {
enterprise_info = l10n_util::GetStringFUTF8(
IDS_DEVICE_OWNED_BY_NOTICE,
UTF8ToUTF16(domain_name));
delegate_->OnEnterpriseInfoUpdated(enterprise_info);
}
}
}
}
void VersionInfoUpdater::OnVersion(const std::string& version) {
version_text_ = version;
UpdateVersionLabel();
}
void VersionInfoUpdater::OnBootTimesNoop(
const BootTimesLoader::BootTimes& boot_times) {}
void VersionInfoUpdater::OnBootTimes(
const BootTimesLoader::BootTimes& boot_times) {
const char* kBootTimesNoChromeExec =
"Non-firmware boot took %.2f seconds (kernel %.2fs, system %.2fs)";
const char* kBootTimesChromeExec =
"Non-firmware boot took %.2f seconds "
"(kernel %.2fs, system %.2fs, chrome %.2fs)";
std::string boot_times_text;
if (boot_times.chrome > 0) {
boot_times_text =
base::StringPrintf(
kBootTimesChromeExec,
boot_times.total,
boot_times.pre_startup,
boot_times.system,
boot_times.chrome);
} else {
boot_times_text =
base::StringPrintf(
kBootTimesNoChromeExec,
boot_times.total,
boot_times.pre_startup,
boot_times.system);
}
// Use UTF8ToWide once this string is localized.
if (delegate_)
delegate_->OnBootTimesLabelTextUpdated(boot_times_text);
}
void VersionInfoUpdater::OnPolicyStateChanged(
policy::CloudPolicySubsystem::PolicySubsystemState state,
policy::CloudPolicySubsystem::ErrorDetails error_details) {
UpdateEnterpriseInfo();
}
void VersionInfoUpdater::OnStoreLoaded(policy::CloudPolicyStore* store) {
UpdateEnterpriseInfo();
}
void VersionInfoUpdater::OnStoreError(policy::CloudPolicyStore* store) {
UpdateEnterpriseInfo();
}
void VersionInfoUpdater::Observe(
int type,
const content::NotificationSource& source,
const content::NotificationDetails& details) {
if (type == chrome::NOTIFICATION_SYSTEM_SETTING_CHANGED)
UpdateEnterpriseInfo();
else
NOTREACHED();
}
} // namespace chromeos<|fim▁end|> | #include "chrome/common/chrome_version_info.h" |
<|file_name|>impostazioni.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';
import { NavController, NavParams } from 'ionic-angular';
import { TavoliPage } from '../tavoli/tavoli';
import { PiattiPage } from '../piatti/piatti';
import { MenuPage } from '../menu/menu';
@Component({
selector: 'page-impostazioni',
templateUrl: 'impostazioni.html'
})
export class ImpostazioniPage {
constructor(public navCtrl: NavController, public navParams: NavParams) {}
tavoli() {
this.navCtrl.push(TavoliPage);
}
piatti() {
this.navCtrl.push(PiattiPage, { modal: false });
}
menu() {
this.navCtrl.push(MenuPage);
}<|fim▁hole|><|fim▁end|> |
} |
<|file_name|>calculateTransform.js<|end_file_name|><|fim▁begin|>import { Transform } from './transform.js';
/**
* Calculate the transform for a Cornerstone enabled element
*
* @param {EnabledElement} enabledElement The Cornerstone Enabled Element<|fim▁hole|>
const transform = new Transform();
transform.translate(enabledElement.canvas.width / 2, enabledElement.canvas.height / 2);
// Apply the rotation before scaling for non square pixels
const angle = enabledElement.viewport.rotation;
if (angle !== 0) {
transform.rotate(angle * Math.PI / 180);
}
// Apply the scale
let widthScale = enabledElement.viewport.scale;
let heightScale = enabledElement.viewport.scale;
if (enabledElement.image.rowPixelSpacing < enabledElement.image.columnPixelSpacing) {
widthScale *= (enabledElement.image.columnPixelSpacing / enabledElement.image.rowPixelSpacing);
} else if (enabledElement.image.columnPixelSpacing < enabledElement.image.rowPixelSpacing) {
heightScale *= (enabledElement.image.rowPixelSpacing / enabledElement.image.columnPixelSpacing);
}
transform.scale(widthScale, heightScale);
// Unrotate so we can translate in unrotated coordinates
if (angle !== 0) {
transform.rotate(-angle * Math.PI / 180);
}
// Apply the pan offset
transform.translate(enabledElement.viewport.translation.x, enabledElement.viewport.translation.y);
// Rotate again so we can apply general scale
if (angle !== 0) {
transform.rotate(angle * Math.PI / 180);
}
if (scale !== undefined) {
// Apply the font scale
transform.scale(scale, scale);
}
// Apply Flip if required
if (enabledElement.viewport.hflip) {
transform.scale(-1, 1);
}
if (enabledElement.viewport.vflip) {
transform.scale(1, -1);
}
// Translate the origin back to the corner of the image so the event handlers can draw in image coordinate system
transform.translate(-enabledElement.image.width / 2, -enabledElement.image.height / 2);
return transform;
}<|fim▁end|> | * @param {Number} [scale] The viewport scale
* @return {Transform} The current transform
*/
export default function (enabledElement, scale) { |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Utilities and helper functions."""
import abc
import contextlib
import datetime
import functools
import hashlib
import inspect
import logging as py_logging
import os
import pyclbr
import random
import re
import shutil
import socket
import stat
import sys
import tempfile
import time
import types
from xml.dom import minidom
from xml.parsers import expat
from xml import sax
from xml.sax import expatreader
from xml.sax import saxutils
from os_brick.initiator import connector
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import encodeutils
from oslo_utils import importutils
from oslo_utils import strutils
from oslo_utils import timeutils
import retrying
import six
from cinder import exception
from cinder.i18n import _, _LE, _LW
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
ISO_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S"
PERFECT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f"
VALID_TRACE_FLAGS = {'method', 'api'}
TRACE_METHOD = False
TRACE_API = False
synchronized = lockutils.synchronized_with_prefix('cinder-')
def find_config(config_path):
"""Find a configuration file using the given hint.
:param config_path: Full or relative path to the config.
:returns: Full path of the config, if it exists.
:raises: `cinder.exception.ConfigNotFound`
"""
possible_locations = [
config_path,
os.path.join(CONF.state_path, "etc", "cinder", config_path),
os.path.join(CONF.state_path, "etc", config_path),
os.path.join(CONF.state_path, config_path),
"/etc/cinder/%s" % config_path,
]
for path in possible_locations:
if os.path.exists(path):
return os.path.abspath(path)
raise exception.ConfigNotFound(path=os.path.abspath(config_path))
def as_int(obj, quiet=True):
# Try "2" -> 2
try:
return int(obj)
except (ValueError, TypeError):
pass
# Try "2.5" -> 2
try:
return int(float(obj))
except (ValueError, TypeError):
pass
# Eck, not sure what this is then.
if not quiet:
raise TypeError(_("Can not translate %s to integer.") % (obj))
return obj
def is_int_like(val):
"""Check if a value looks like an int."""
try:
return str(int(val)) == str(val)
except Exception:
return False
def check_exclusive_options(**kwargs):
"""Checks that only one of the provided options is actually not-none.
Iterates over all the kwargs passed in and checks that at most one of them
is not None; if more than one is not None, an exception is raised naming
the offending arguments.
"""
if not kwargs:
return
pretty_keys = kwargs.pop("pretty_keys", True)
exclusive_options = {}
for (k, v) in kwargs.items():
if v is not None:
exclusive_options[k] = True
if len(exclusive_options) > 1:
# Change the format of the names from pythonic to
# something that is more readable.
#
# Ex: 'the_key' -> 'the key'
if pretty_keys:
names = [k.replace('_', ' ') for k in kwargs.keys()]
else:
names = kwargs.keys()
names = ", ".join(sorted(names))
msg = (_("May specify only one of %s") % (names))
raise exception.InvalidInput(reason=msg)
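# Illustrative (hypothetical kwargs): check_exclusive_options(snap=None,
# source='v1') passes, while passing both as non-None raises
# exception.InvalidInput ("May specify only one of ...").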
def execute(*cmd, **kwargs):
"""Convenience wrapper around oslo's execute() method."""
if 'run_as_root' in kwargs and 'root_helper' not in kwargs:
kwargs['root_helper'] = get_root_helper()
return processutils.execute(*cmd, **kwargs)
def check_ssh_injection(cmd_list):
ssh_injection_pattern = ['`', '$', '|', '||', ';', '&', '&&', '>', '>>',
'<']
# Check whether injection attacks exist
for arg in cmd_list:
arg = arg.strip()
# Check for matching quotes on the ends
is_quoted = re.match('^(?P<quote>[\'"])(?P<quoted>.*)(?P=quote)$', arg)
if is_quoted:
# Check for unescaped quotes within the quoted argument
quoted = is_quoted.group('quoted')
if quoted:
if (re.match('[\'"]', quoted) or
re.search('[^\\\\][\'"]', quoted)):
raise exception.SSHInjectionThreat(command=cmd_list)
else:
# We only allow spaces within quoted arguments, and that
# is the only special character allowed within quotes
if len(arg.split()) > 1:
raise exception.SSHInjectionThreat(command=cmd_list)
# Second, check whether danger character in command. So the shell
# special operator must be a single argument.
for c in ssh_injection_pattern:
if c not in arg:
continue
result = arg.find(c)
if not result == -1:
if result == 0 or not arg[result - 1] == '\\':
raise exception.SSHInjectionThreat(command=cmd_list)
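# Illustrative: check_ssh_injection(['ls', '-l']) returns silently, while
# check_ssh_injection(['ls', '; rm -rf /']) raises SSHInjectionThreat
# (unquoted argument containing spaces and shell operators).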
def create_channel(client, width, height):
"""Invoke an interactive shell session on server."""
channel = client.invoke_shell()
channel.resize_pty(width, height)
return channel
def cinderdir():
import cinder
return os.path.abspath(cinder.__file__).split('cinder/__init__.py')[0]
def last_completed_audit_period(unit=None):
"""This method gives you the most recently *completed* audit period.
arguments:
unit: string, one of 'hour', 'day', 'month', 'year'
Periods normally begin at the beginning (UTC) of the
period unit (So a 'day' period begins at midnight UTC,
a 'month' unit on the 1st, a 'year' on Jan, 1)
unit string may be appended with an optional offset
like so: 'day@18' This will begin the period at 18:00
UTC. 'month@15' starts a monthly period on the 15th,
and year@3 begins a yearly one on March 1st.
returns: 2 tuple of datetimes (begin, end)
The begin timestamp of this audit period is the same as the
end of the previous.
"""
if not unit:
unit = CONF.volume_usage_audit_period
offset = 0
if '@' in unit:
unit, offset = unit.split("@", 1)
offset = int(offset)
rightnow = timeutils.utcnow()
if unit not in ('month', 'day', 'year', 'hour'):
raise ValueError('Time period must be hour, day, month or year')
if unit == 'month':
if offset == 0:
offset = 1
end = datetime.datetime(day=offset,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
year = rightnow.year
if 1 >= rightnow.month:
year -= 1
month = 12 + (rightnow.month - 1)
else:
month = rightnow.month - 1
end = datetime.datetime(day=offset,
month=month,
year=year)
year = end.year
if 1 >= end.month:
year -= 1
month = 12 + (end.month - 1)
else:
month = end.month - 1
begin = datetime.datetime(day=offset, month=month, year=year)
elif unit == 'year':
if offset == 0:
offset = 1
end = datetime.datetime(day=1, month=offset, year=rightnow.year)
if end >= rightnow:
end = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 2)
else:
begin = datetime.datetime(day=1,
month=offset,
year=rightnow.year - 1)
elif unit == 'day':
end = datetime.datetime(hour=offset,
day=rightnow.day,
month=rightnow.month,
year=rightnow.year)
if end >= rightnow:
end = end - datetime.timedelta(days=1)
begin = end - datetime.timedelta(days=1)
elif unit == 'hour':
end = rightnow.replace(minute=offset, second=0, microsecond=0)
if end >= rightnow:
end = end - datetime.timedelta(hours=1)
begin = end - datetime.timedelta(hours=1)
return (begin, end)
def list_of_dicts_to_dict(seq, key):
"""Convert list of dicts to a indexted dict.
Takes a list of dicts, and converts it a nested dict
indexed by <key>
:param seq: list of dicts
:parm key: key in dicts to index by
example:
lst = [{'id': 1, ...}, {'id': 2, ...}...]
key = 'id'
returns {1:{'id': 1, ...}, 2:{'id':2, ...}
"""
return {d[key]: dict(d, index=d[key]) for (i, d) in enumerate(seq)}
class ProtectedExpatParser(expatreader.ExpatParser):
"""An expat parser which disables DTD's and entities by default."""
def __init__(self, forbid_dtd=True, forbid_entities=True,
*args, **kwargs):
# Python 2.x old style class
expatreader.ExpatParser.__init__(self, *args, **kwargs)
self.forbid_dtd = forbid_dtd
self.forbid_entities = forbid_entities
def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
raise ValueError("Inline DTD forbidden")
def entity_decl(self, entityName, is_parameter_entity, value, base,
systemId, publicId, notationName):
raise ValueError("<!ENTITY> forbidden")
def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
# expat 1.2
raise ValueError("<!ENTITY> forbidden")
def reset(self):
expatreader.ExpatParser.reset(self)
if self.forbid_dtd:
self._parser.StartDoctypeDeclHandler = self.start_doctype_decl
if self.forbid_entities:
self._parser.EntityDeclHandler = self.entity_decl
self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
def safe_minidom_parse_string(xml_string):
"""Parse an XML string using minidom safely.
"""
try:
return minidom.parseString(xml_string, parser=ProtectedExpatParser())
except sax.SAXParseException:
raise expat.ExpatError()
def xhtml_escape(value):
"""Escapes a string so it is valid within XML or XHTML."""
return saxutils.escape(value, {'"': '"', "'": '''})
def get_from_path(items, path):
"""Returns a list of items matching the specified path.
Takes an XPath-like expression e.g. prop1/prop2/prop3, and for each item
in items, looks up items[prop1][prop2][prop3]. Like XPath, if any of the
intermediate results are lists it will treat each list item individually.
A 'None' in items or any child expressions will be ignored, this function
will not throw because of None (anywhere) in items. The returned list
will contain no None values.
"""
if path is None:
raise exception.Error('Invalid mini_xpath')
(first_token, sep, remainder) = path.partition('/')
if first_token == '':
raise exception.Error('Invalid mini_xpath')
results = []
if items is None:
return results
if not isinstance(items, list):
# Wrap single objects in a list
items = [items]
for item in items:
if item is None:
continue
get_method = getattr(item, 'get', None)
if get_method is None:
continue
child = get_method(first_token)
if child is None:
continue
if isinstance(child, list):
# Flatten intermediate lists
for x in child:
results.append(x)
else:
results.append(child)
if not sep:
# No more tokens
return results
else:
return get_from_path(results, remainder)
def is_valid_boolstr(val):
"""Check if the provided string is a valid bool string or not."""
val = str(val).lower()
return (val == 'true' or val == 'false' or
val == 'yes' or val == 'no' or
val == 'y' or val == 'n' or
val == '1' or val == '0')
def is_none_string(val):
"""Check if a string represents a None value."""
if not isinstance(val, six.string_types):
return False
return val.lower() == 'none'
def monkey_patch():
"""Patches decorators for all functions in a specified module.
If CONF.monkey_patch is set to True,
this function patches a decorator
onto all functions in the specified modules.
You can set decorators for each module
using CONF.monkey_patch_modules.
The format is "Module path:Decorator function".
Example: 'cinder.api.ec2.cloud:' \
cinder.openstack.common.notifier.api.notify_decorator'
The parameters of the decorator are as follows.
(See cinder.openstack.common.notifier.api.notify_decorator)
:param name: name of the function
:param function: object of the function
"""
# If CONF.monkey_patch is not True, this function does nothing.
if not CONF.monkey_patch:
return
# Get list of modules and decorators
for module_and_decorator in CONF.monkey_patch_modules:
module, decorator_name = module_and_decorator.split(':')
# import decorator function
decorator = importutils.import_class(decorator_name)
__import__(module)
# Retrieve module information using pyclbr
module_data = pyclbr.readmodule_ex(module)
for key in module_data.keys():
# set the decorator for the class methods
if isinstance(module_data[key], pyclbr.Class):
clz = importutils.import_class("%s.%s" % (module, key))
for method, func in inspect.getmembers(clz, inspect.ismethod):
setattr(
clz, method,
decorator("%s.%s.%s" % (module, key, method), func))
# set the decorator for the function
if isinstance(module_data[key], pyclbr.Function):
func = importutils.import_class("%s.%s" % (module, key))
setattr(sys.modules[module], key,
decorator("%s.%s" % (module, key), func))
def make_dev_path(dev, partition=None, base='/dev'):
"""Return a path to a particular device.
>>> make_dev_path('xvdc')
/dev/xvdc
>>> make_dev_path('xvdc', 1)
/dev/xvdc1
"""
path = os.path.join(base, dev)
if partition:
path += str(partition)
return path
def sanitize_hostname(hostname):
"""Return a hostname which conforms to RFC-952 and RFC-1123 specs."""
if six.PY3:
hostname = hostname.encode('latin-1', 'ignore')
hostname = hostname.decode('latin-1')
else:
if isinstance(hostname, six.text_type):
hostname = hostname.encode('latin-1', 'ignore')
hostname = re.sub('[ _]', '-', hostname)
hostname = re.sub('[^\w.-]+', '', hostname)
hostname = hostname.lower()
hostname = hostname.strip('.-')
return hostname
def hash_file(file_like_object):
"""Generate a hash for the contents of a file."""
checksum = hashlib.sha1()
any(map(checksum.update, iter(lambda: file_like_object.read(32768), b'')))
return checksum.hexdigest()
def service_is_up(service):
"""Check whether a service is up based on last heartbeat."""
last_heartbeat = service['updated_at'] or service['created_at']
# Timestamps in DB are UTC.
elapsed = (timeutils.utcnow(with_timezone=True) -
last_heartbeat).total_seconds()
return abs(elapsed) <= CONF.service_down_time
def read_file_as_root(file_path):
"""Secure helper to read file as root."""
try:
out, _err = execute('cat', file_path, run_as_root=True)
return out
except processutils.ProcessExecutionError:
raise exception.FileNotFound(file_path=file_path)
@contextlib.contextmanager
def temporary_chown(path, owner_uid=None):
"""Temporarily chown a path.
:param owner_uid: UID of temporary owner (defaults to current user)<|fim▁hole|> """
if owner_uid is None:
owner_uid = os.getuid()
orig_uid = os.stat(path).st_uid
if orig_uid != owner_uid:
execute('chown', owner_uid, path, run_as_root=True)
try:
yield
finally:
if orig_uid != owner_uid:
execute('chown', orig_uid, path, run_as_root=True)
@contextlib.contextmanager
def tempdir(**kwargs):
tmpdir = tempfile.mkdtemp(**kwargs)
try:
yield tmpdir
finally:
try:
shutil.rmtree(tmpdir)
except OSError as e:
LOG.debug('Could not remove tmpdir: %s',
six.text_type(e))
def walk_class_hierarchy(clazz, encountered=None):
"""Walk class hierarchy, yielding most derived classes first."""
if not encountered:
encountered = []
for subclass in clazz.__subclasses__():
if subclass not in encountered:
encountered.append(subclass)
# drill down to leaves first
for subsubclass in walk_class_hierarchy(subclass, encountered):
yield subsubclass
yield subclass
def get_root_helper():
return 'sudo cinder-rootwrap %s' % CONF.rootwrap_config
def brick_get_connector_properties(multipath=False, enforce_multipath=False):
"""Wrapper to automatically set root_helper in brick calls.
:param multipath: A boolean indicating whether the connector can
support multipath.
:param enforce_multipath: If True, it raises exception when multipath=True
is specified but multipathd is not running.
If False, it falls back to multipath=False
when multipathd is not running.
"""
root_helper = get_root_helper()
return connector.get_connector_properties(root_helper,
CONF.my_ip,
multipath,
enforce_multipath)
def brick_get_connector(protocol, driver=None,
execute=processutils.execute,
use_multipath=False,
device_scan_attempts=3,
*args, **kwargs):
"""Wrapper to get a brick connector object.
This automatically populates the required protocol as well
as the root_helper needed to execute commands.
"""
root_helper = get_root_helper()
return connector.InitiatorConnector.factory(protocol, root_helper,
driver=driver,
execute=execute,
use_multipath=use_multipath,
device_scan_attempts=
device_scan_attempts,
*args, **kwargs)
def require_driver_initialized(driver):
"""Verifies if `driver` is initialized
If the driver is not initialized, an exception will be raised.
:param driver: The driver instance.
:raises: `exception.DriverNotInitialized`
"""
# we can't do anything if the driver didn't init
if not driver.initialized:
driver_name = driver.__class__.__name__
LOG.error(_LE("Volume driver %s not initialized"), driver_name)
raise exception.DriverNotInitialized()
def get_file_mode(path):
"""This primarily exists to make unit testing easier."""
return stat.S_IMODE(os.stat(path).st_mode)
def get_file_gid(path):
"""This primarily exists to make unit testing easier."""
return os.stat(path).st_gid
def get_file_size(path):
"""Returns the file size."""
return os.stat(path).st_size
def _get_disk_of_partition(devpath, st=None):
"""Gets a disk device path and status from partition path.
Returns a disk device path from a partition device path, and stat for
the device. If devpath is not a partition, devpath is returned as it is.
For example, '/dev/sda' is returned for '/dev/sda1', and '/dev/disk1' is
for '/dev/disk1p1' ('p' is prepended to the partition number if the disk
name ends with numbers).
"""
diskpath = re.sub('(?:(?<=\d)p)?\d+$', '', devpath)
if diskpath != devpath:
try:
st_disk = os.stat(diskpath)
if stat.S_ISBLK(st_disk.st_mode):
return (diskpath, st_disk)
except OSError:
pass
# devpath is not a partition
if st is None:
st = os.stat(devpath)
return (devpath, st)
def get_bool_param(param_string, params):
param = params.get(param_string, False)
if not is_valid_boolstr(param):
msg = _('Value %(param)s for %(param_string)s is not a '
'boolean.') % {'param': param, 'param_string': param_string}
raise exception.InvalidParameterValue(err=msg)
return strutils.bool_from_string(param, strict=True)
def get_blkdev_major_minor(path, lookup_for_file=True):
"""Get 'major:minor' number of block device.
Get a block device's 'major:minor' number, which is used to control
the I/O rate limit of the specified path.
If lookup_for_file is True and the path is a regular file, lookup a disk
device which the file lies on and returns the result for the device.
"""
st = os.stat(path)
if stat.S_ISBLK(st.st_mode):
path, st = _get_disk_of_partition(path, st)
return '%d:%d' % (os.major(st.st_rdev), os.minor(st.st_rdev))
elif stat.S_ISCHR(st.st_mode):
# No I/O ratelimit control is provided for character devices
return None
elif lookup_for_file:
# lookup the mounted disk which the file lies on
out, _err = execute('df', path)
devpath = out.split("\n")[1].split()[0]
if devpath[0] != '/':
# the file is on a network file system
return None
return get_blkdev_major_minor(devpath, False)
else:
msg = _("Unable to get a block device for file \'%s\'") % path
raise exception.Error(msg)
def check_string_length(value, name, min_length=0, max_length=None):
"""Check the length of specified string.
:param value: the value of the string
:param name: the name of the string
:param min_length: the min_length of the string
:param max_length: the max_length of the string
"""
if not isinstance(value, six.string_types):
msg = _("%s is not a string or unicode") % name
raise exception.InvalidInput(message=msg)
if len(value) < min_length:
msg = _("%(name)s has a minimum character requirement of "
"%(min_length)s.") % {'name': name, 'min_length': min_length}
raise exception.InvalidInput(message=msg)
if max_length and len(value) > max_length:
msg = _("%(name)s has more than %(max_length)s "
"characters.") % {'name': name, 'max_length': max_length}
raise exception.InvalidInput(message=msg)
_visible_admin_metadata_keys = ['readonly', 'attached_mode']
def add_visible_admin_metadata(volume):
"""Add user-visible admin metadata to regular metadata.
Extracts the admin metadata keys that are to be made visible to
non-administrators, and adds them to the regular metadata structure for the
passed-in volume.
"""
visible_admin_meta = {}
if volume.get('volume_admin_metadata'):
if isinstance(volume['volume_admin_metadata'], dict):
volume_admin_metadata = volume['volume_admin_metadata']
for key in volume_admin_metadata:
if key in _visible_admin_metadata_keys:
visible_admin_meta[key] = volume_admin_metadata[key]
else:
for item in volume['volume_admin_metadata']:
if item['key'] in _visible_admin_metadata_keys:
visible_admin_meta[item['key']] = item['value']
# avoid circular ref when volume is a Volume instance
elif (volume.get('admin_metadata') and
isinstance(volume.get('admin_metadata'), dict)):
for key in _visible_admin_metadata_keys:
if key in volume['admin_metadata'].keys():
visible_admin_meta[key] = volume['admin_metadata'][key]
if not visible_admin_meta:
return
# NOTE(zhiyan): merge visible administration metadata into volume
# metadata; administration metadata overwrites existing keys.
if volume.get('volume_metadata'):
orig_meta = list(volume.get('volume_metadata'))
for item in orig_meta:
if item['key'] in visible_admin_meta.keys():
item['value'] = visible_admin_meta.pop(item['key'])
for key, value in visible_admin_meta.items():
orig_meta.append({'key': key, 'value': value})
volume['volume_metadata'] = orig_meta
# avoid circular ref when vol is a Volume instance
elif (volume.get('metadata') and
isinstance(volume.get('metadata'), dict)):
volume['metadata'].update(visible_admin_meta)
else:
volume['metadata'] = visible_admin_meta
def remove_invalid_filter_options(context, filters,
allowed_search_options):
"""Remove search options that are not valid for non-admin API/context."""
if context.is_admin:
# Allow all options
return
# Otherwise, strip out all unknown options
unknown_options = [opt for opt in filters
if opt not in allowed_search_options]
bad_options = ", ".join(unknown_options)
LOG.debug("Removing options '%s' from query.", bad_options)
for opt in unknown_options:
del filters[opt]
def is_blk_device(dev):
try:
if stat.S_ISBLK(os.stat(dev).st_mode):
return True
return False
except Exception:
LOG.debug('Path %s not found in is_blk_device check', dev)
return False
def retry(exceptions, interval=1, retries=3, backoff_rate=2,
wait_random=False):
def _retry_on_exception(e):
return isinstance(e, exceptions)
def _backoff_sleep(previous_attempt_number, delay_since_first_attempt_ms):
exp = backoff_rate ** previous_attempt_number
wait_for = interval * exp
if wait_random:
random.seed()
wait_val = random.randrange(interval * 1000.0, wait_for * 1000.0)
else:
wait_val = wait_for * 1000.0
LOG.debug("Sleeping for %s seconds", (wait_val / 1000.0))
return wait_val
def _print_stop(previous_attempt_number, delay_since_first_attempt_ms):
delay_since_first_attempt = delay_since_first_attempt_ms / 1000.0
LOG.debug("Failed attempt %s", previous_attempt_number)
LOG.debug("Have been at this for %s seconds",
delay_since_first_attempt)
return previous_attempt_number == retries
if retries < 1:
raise ValueError('Retries must be greater than or '
'equal to 1 (received: %s). ' % retries)
def _decorator(f):
@six.wraps(f)
def _wrapper(*args, **kwargs):
r = retrying.Retrying(retry_on_exception=_retry_on_exception,
wait_func=_backoff_sleep,
stop_func=_print_stop)
return r.call(f, *args, **kwargs)
return _wrapper
return _decorator
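# Usage sketch for the retry decorator above (hypothetical caller):
#
#   @retry(processutils.ProcessExecutionError, interval=2, retries=5)
#   def flaky_call():
#       ...
#
# Each failed attempt sleeps roughly interval * backoff_rate ** attempt
# seconds (randomized when wait_random=True) before retrying.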
def convert_version_to_int(version):
try:
if isinstance(version, six.string_types):
version = convert_version_to_tuple(version)
if isinstance(version, tuple):
return six.moves.reduce(lambda x, y: (x * 1000) + y, version)
except Exception:
msg = _("Version %s is invalid.") % version
raise exception.CinderException(msg)
def convert_version_to_str(version_int):
version_numbers = []
factor = 1000
while version_int != 0:
version_number = version_int - (version_int // factor * factor)
version_numbers.insert(0, six.text_type(version_number))
version_int = version_int // factor
return '.'.join(map(str, version_numbers))
def convert_version_to_tuple(version_str):
return tuple(int(part) for part in version_str.split('.'))
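# Round-trip example: convert_version_to_int('1.2.3') == 1002003 and
# convert_version_to_str(1002003) == '1.2.3' (each part packed base-1000).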
def convert_str(text):
"""Convert to native string.
Convert bytes and Unicode strings to native strings:
* convert to bytes on Python 2:
encode Unicode using encodeutils.safe_encode()
* convert to Unicode on Python 3: decode bytes from UTF-8
"""
if six.PY2:
return encodeutils.safe_encode(text)
else:
if isinstance(text, bytes):
return text.decode('utf-8')
else:
return text
def trace_method(f):
"""Decorates a function if TRACE_METHOD is true."""
@functools.wraps(f)
def trace_method_logging_wrapper(*args, **kwargs):
if TRACE_METHOD:
return trace(f)(*args, **kwargs)
return f(*args, **kwargs)
return trace_method_logging_wrapper
def trace_api(f):
"""Decorates a function if TRACE_API is true."""
@functools.wraps(f)
def trace_api_logging_wrapper(*args, **kwargs):
if TRACE_API:
return trace(f)(*args, **kwargs)
return f(*args, **kwargs)
return trace_api_logging_wrapper
def trace(f):
"""Trace calls to the decorated function.
This decorator should always be defined as the outermost decorator so it
is defined last. This is important so it does not interfere
with other decorators.
Using this decorator on a function will cause its execution to be logged at
`DEBUG` level with arguments, return values, and exceptions.
:returns a function decorator
"""
func_name = f.__name__
@functools.wraps(f)
def trace_logging_wrapper(*args, **kwargs):
if len(args) > 0:
maybe_self = args[0]
else:
maybe_self = kwargs.get('self', None)
if maybe_self and hasattr(maybe_self, '__module__'):
logger = logging.getLogger(maybe_self.__module__)
else:
logger = LOG
# NOTE(ameade): Don't bother going any further if DEBUG log level
# is not enabled for the logger.
if not logger.isEnabledFor(py_logging.DEBUG):
return f(*args, **kwargs)
all_args = inspect.getcallargs(f, *args, **kwargs)
logger.debug('==> %(func)s: call %(all_args)r',
{'func': func_name, 'all_args': all_args})
start_time = time.time() * 1000
try:
result = f(*args, **kwargs)
except Exception as exc:
total_time = int(round(time.time() * 1000)) - start_time
logger.debug('<== %(func)s: exception (%(time)dms) %(exc)r',
{'func': func_name,
'time': total_time,
'exc': exc})
raise
total_time = int(round(time.time() * 1000)) - start_time
logger.debug('<== %(func)s: return (%(time)dms) %(result)r',
{'func': func_name,
'time': total_time,
'result': result})
return result
return trace_logging_wrapper
class TraceWrapperMetaclass(type):
"""Metaclass that wraps all methods of a class with trace_method.
This metaclass will cause every function inside of the class to be
decorated with the trace_method decorator.
To use the metaclass you define a class like so:
@six.add_metaclass(utils.TraceWrapperMetaclass)
class MyClass(object):
"""
def __new__(meta, classname, bases, classDict):
newClassDict = {}
for attributeName, attribute in classDict.items():
if isinstance(attribute, types.FunctionType):
# replace it with a wrapped version
attribute = functools.update_wrapper(trace_method(attribute),
attribute)
newClassDict[attributeName] = attribute
return type.__new__(meta, classname, bases, newClassDict)
class TraceWrapperWithABCMetaclass(abc.ABCMeta, TraceWrapperMetaclass):
"""Metaclass that wraps all methods of a class with trace."""
pass
def setup_tracing(trace_flags):
"""Set global variables for each trace flag.
Sets variables TRACE_METHOD and TRACE_API, which represent
whether to log method and api traces.
:param trace_flags: a list of strings
"""
global TRACE_METHOD
global TRACE_API
try:
trace_flags = [flag.strip() for flag in trace_flags]
except TypeError: # Handle when trace_flags is None or a test mock
trace_flags = []
for invalid_flag in (set(trace_flags) - VALID_TRACE_FLAGS):
LOG.warning(_LW('Invalid trace flag: %s'), invalid_flag)
TRACE_METHOD = 'method' in trace_flags
TRACE_API = 'api' in trace_flags
def resolve_hostname(hostname):
"""Resolves host name to IP address.
Resolves a host name (my.data.point.com) to an IP address (10.12.143.11).
This routine also works if the data passed in hostname is already an IP.
In this case, the same IP address will be returned.
:param hostname: Host name to resolve.
:return: IP Address for Host name.
"""
result = socket.getaddrinfo(hostname, None)[0]
(family, socktype, proto, canonname, sockaddr) = result
LOG.debug('Asked to resolve hostname %(host)s and got IP %(ip)s.',
{'host': hostname, 'ip': sockaddr[0]})
return sockaddr[0]<|fim▁end|> | |
<|file_name|>MS_ValidationError.java<|end_file_name|><|fim▁begin|>package lv.emes.libraries.utilities.validation;
/**
* Actions for error that occur in validation process.
*
* @author eMeS
* @version 1.2.
*/
public interface MS_ValidationError<T> {<|fim▁hole|>
/**
* Returns the message of this validation error, formed by the pre-defined
* message-forming action.
* @return a formatted message describing the essence of this validation error.
*/
String getMessage();
Integer getNumber();
T getObject();
/**
* @param object an object to validate.
* @return reference to validation error itself.
*/
MS_ValidationError withObject(T object);
}<|fim▁end|> |
MS_ValidationError withErrorMessageFormingAction(IFuncFormValidationErrorMessage action); |
<|file_name|>regions-outlives-nominal-type-struct-type.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that a nominal type (like `Foo<'a>`) outlives `'b` if its
// arguments (like `'a`) outlive `'b`.
//
// Rule OutlivesNominalType from RFC 1214.
<|fim▁hole|>
mod variant_struct_type {
struct Foo<T> {
x: T
}
struct Bar<'a,'b> {
f: &'a Foo<&'b i32> //~ ERROR reference has a longer lifetime
}
}
fn main() { }<|fim▁end|> | // compile-pass
#![feature(rustc_attrs)]
#![allow(dead_code)] |
<|file_name|>0002_auto__add_field_app_created_at.py<|end_file_name|><|fim▁begin|># encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):<|fim▁hole|>
def backwards(self, orm):
# Deleting field 'App.created_at'
db.delete_column('mobile_apps_app', 'created_at')
models = {
'core.level': {
'Meta': {'ordering': "['order']", 'object_name': 'Level'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '45'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'order': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'mobile_apps.app': {
'Meta': {'object_name': 'App'},
'content_areas': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'content_areas'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Level']"}),
'cost': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'levels': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'levels'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Level']"}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['mobile_apps.Type']"})
},
'mobile_apps.type': {
'Meta': {'object_name': 'Type'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['mobile_apps']<|fim▁end|> |
# Adding field 'App.created_at'
db.add_column('mobile_apps_app', 'created_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True), keep_default=False) |
<|file_name|>Dbase31TableFixture.cpp<|end_file_name|><|fim▁begin|>//
// Created by Chris Richards on 28/04/2016.
//<|fim▁hole|>class Dbase31TableFixture : public ::testing::Test {
protected:
virtual void SetUp() {
dbf_table_ = DbfTablePtr(new DbfTable("/Users/chrisr/Development/ClionProjects/dbf2csv/dbf_tests/fixtures/dbase_31.dbf"));
}
virtual void TearDown() {
dbf_table_->close();
}
public:
Dbase31TableFixture() : Test() {
}
virtual ~Dbase31TableFixture() {
}
DbfTablePtr dbf_table_;
};
TEST_F(Dbase31TableFixture, good_check) {
EXPECT_TRUE(dbf_table_->good());
}
TEST_F(Dbase31TableFixture, has_memo_check) {
EXPECT_FALSE(dbf_table_->has_memo_file());
}<|fim▁end|> |
#include <gtest/gtest.h>
#include <DbfTable.h>
|
<|file_name|>profile_repository.js<|end_file_name|><|fim▁begin|>import AbstractRepository from "./abstract_repository"
export default class ProfileRepository extends AbstractRepository {
constructor(db) {
super(db);
}<|fim▁hole|> tableName() {
return "profiles";
}
initialColumns() {
return [
// Note the specific ordering, this is critical for the sql
// queries to work.
["email", "VARCHAR(100)"],
["zip_code", "VARCHAR(20)"],
["race_of_bees", "TEXT"],
["full_name", "TEXT"]
];
};
migratedColumns() {
return [];
};
}<|fim▁end|> | |
<|file_name|>16.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export { Notebook16 as default } from "../../"; |
<|file_name|>optimizable_comprehension.py<|end_file_name|><|fim▁begin|>"""
OptimizableComprehension finds whether a comprehension can be optimized.
"""
from pythran.analyses.identifiers import Identifiers
from pythran.passmanager import NodeAnalysis
class OptimizableComprehension(NodeAnalysis):<|fim▁hole|> self.result = set()
super(OptimizableComprehension, self).__init__(Identifiers)
def check_comprehension(self, iters):
targets = {gen.target.id for gen in iters}
optimizable = True
for it in iters:
ids = self.gather(Identifiers, it)
optimizable &= all(((ident == it.target.id) |
(ident not in targets)) for ident in ids)
return optimizable
def visit_ListComp(self, node):
if (self.check_comprehension(node.generators)):
self.result.add(node)
def visit_GeneratorExp(self, node):
if (self.check_comprehension(node.generators)):
self.result.add(node)<|fim▁end|> | """Find whether a comprehension can be optimized."""
def __init__(self): |
<|file_name|>train_multi.py<|end_file_name|><|fim▁begin|>from __future__ import division
import argparse
import multiprocessing
import numpy as np
import PIL
import chainer
import chainer.functions as F
import chainer.links as L
from chainer.optimizer_hooks import WeightDecay
from chainer import serializers
from chainer import training
from chainer.training import extensions
import chainermn
from chainercv.chainer_experimental.datasets.sliceable import TransformDataset
from chainercv.chainer_experimental.training.extensions import make_shift
from chainercv.links.model.fpn.misc import scale_img
from chainercv import transforms
from chainercv.datasets import coco_instance_segmentation_label_names
from chainercv.datasets import COCOInstanceSegmentationDataset
from chainercv.links import MaskRCNNFPNResNet101
from chainercv.links import MaskRCNNFPNResNet50
from chainercv.datasets import coco_bbox_label_names
from chainercv.datasets import COCOBboxDataset
from chainercv.links import FasterRCNNFPNResNet101
from chainercv.links import FasterRCNNFPNResNet50
from chainercv.links.model.fpn import bbox_head_loss_post
from chainercv.links.model.fpn import bbox_head_loss_pre
from chainercv.links.model.fpn import mask_head_loss_post
from chainercv.links.model.fpn import mask_head_loss_pre
from chainercv.links.model.fpn import rpn_loss
# https://docs.chainer.org/en/stable/tips.html#my-training-process-gets-stuck-when-using-multiprocessiterator
try:
import cv2
cv2.setNumThreads(0)
except ImportError:
pass
class TrainChain(chainer.Chain):
def __init__(self, model):
super(TrainChain, self).__init__()
with self.init_scope():
self.model = model
def forward(self, imgs, bboxes, labels, masks=None):
B = len(imgs)
pad_size = np.array(
[im.shape[1:] for im in imgs]).max(axis=0)
pad_size = (
np.ceil(
pad_size / self.model.stride) * self.model.stride).astype(int)
x = np.zeros(
(len(imgs), 3, pad_size[0], pad_size[1]), dtype=np.float32)
for i, img in enumerate(imgs):
_, H, W = img.shape
x[i, :, :H, :W] = img
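# The batch is zero-padded to a common H/W rounded up to the FPN stride,
# so every pyramid level divides the input size evenly.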
x = self.xp.array(x)
bboxes = [self.xp.array(bbox) for bbox in bboxes]
labels = [self.xp.array(label) for label in labels]
sizes = [img.shape[1:] for img in imgs]
with chainer.using_config('train', False):
hs = self.model.extractor(x)
rpn_locs, rpn_confs = self.model.rpn(hs)
anchors = self.model.rpn.anchors(h.shape[2:] for h in hs)
rpn_loc_loss, rpn_conf_loss = rpn_loss(
rpn_locs, rpn_confs, anchors, sizes, bboxes)
rois, roi_indices = self.model.rpn.decode(
rpn_locs, rpn_confs, anchors, x.shape)
rois = self.xp.vstack([rois] + bboxes)
roi_indices = self.xp.hstack(
[roi_indices]
+ [self.xp.array((i,) * len(bbox))
for i, bbox in enumerate(bboxes)])
rois, roi_indices = self.model.bbox_head.distribute(rois, roi_indices)
rois, roi_indices, head_gt_locs, head_gt_labels = bbox_head_loss_pre(
rois, roi_indices, self.model.bbox_head.std, bboxes, labels)
head_locs, head_confs = self.model.bbox_head(hs, rois, roi_indices)
head_loc_loss, head_conf_loss = bbox_head_loss_post(
head_locs, head_confs,
roi_indices, head_gt_locs, head_gt_labels, B)
mask_loss = 0
if masks is not None:
# For reducing unnecessary CPU/GPU copy, `masks` is kept in CPU.
pad_masks = [
np.zeros(
(mask.shape[0], pad_size[0], pad_size[1]), dtype=np.bool)
for mask in masks]
for i, mask in enumerate(masks):
_, H, W = mask.shape
pad_masks[i][:, :H, :W] = mask
masks = pad_masks
mask_rois, mask_roi_indices, gt_segms, gt_mask_labels =\
mask_head_loss_pre(
rois, roi_indices, masks, bboxes,
head_gt_labels, self.model.mask_head.segm_size)
n_roi = sum([len(roi) for roi in mask_rois])
if n_roi > 0:
segms = self.model.mask_head(hs, mask_rois, mask_roi_indices)
mask_loss = mask_head_loss_post(
segms, mask_roi_indices, gt_segms, gt_mask_labels, B)
else:
# Compute dummy variables to complete the computational graph
mask_rois[0] = self.xp.array([[0, 0, 1, 1]], dtype=np.float32)
mask_roi_indices[0] = self.xp.array([0], dtype=np.int32)
segms = self.model.mask_head(hs, mask_rois, mask_roi_indices)
mask_loss = 0 * F.sum(segms)
loss = (rpn_loc_loss + rpn_conf_loss +
head_loc_loss + head_conf_loss + mask_loss)
chainer.reporter.report({
'loss': loss,
'loss/rpn/loc': rpn_loc_loss, 'loss/rpn/conf': rpn_conf_loss,
'loss/bbox_head/loc': head_loc_loss,
'loss/bbox_head/conf': head_conf_loss,
'loss/mask_head': mask_loss},
self)
return loss
class Transform(object):
def __init__(self, min_size, max_size, mean):
self.min_size = min_size
self.max_size = max_size
self.mean = mean
def __call__(self, in_data):
if len(in_data) == 4:
img, mask, label, bbox = in_data
else:
img, bbox, label = in_data
# Flipping
img, params = transforms.random_flip(
img, x_random=True, return_param=True)
x_flip = params['x_flip']
bbox = transforms.flip_bbox(
bbox, img.shape[1:], x_flip=x_flip)
# Scaling and mean subtraction
img, scale = scale_img(
img, self.min_size, self.max_size)
img -= self.mean
bbox = bbox * scale
if len(in_data) == 4:
mask = transforms.flip(mask, x_flip=x_flip)
mask = transforms.resize(
mask.astype(np.float32),
img.shape[1:],
interpolation=PIL.Image.NEAREST).astype(np.bool)
return img, bbox, label, mask
else:
return img, bbox, label
def converter(batch, device=None):
# do not send data to gpu (device is ignored)
return tuple(list(v) for v in zip(*batch))
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
'--model',
choices=('mask_rcnn_fpn_resnet50', 'mask_rcnn_fpn_resnet101',
'faster_rcnn_fpn_resnet50', 'faster_rcnn_fpn_resnet101'),
default='faster_rcnn_fpn_resnet50')
parser.add_argument('--batchsize', type=int, default=16)
parser.add_argument('--iteration', type=int, default=90000)
parser.add_argument('--step', type=int, nargs='*', default=[60000, 80000])
parser.add_argument('--out', default='result')
parser.add_argument('--resume')
args = parser.parse_args()
# https://docs.chainer.org/en/stable/chainermn/tutorial/tips_faqs.html#using-multiprocessiterator
if hasattr(multiprocessing, 'set_start_method'):
multiprocessing.set_start_method('forkserver')
p = multiprocessing.Process()
p.start()
p.join()
comm = chainermn.create_communicator('pure_nccl')
device = comm.intra_rank
if args.model == 'faster_rcnn_fpn_resnet50':
mode = 'bbox'
model = FasterRCNNFPNResNet50(
n_fg_class=len(coco_bbox_label_names),
pretrained_model='imagenet')
elif args.model == 'faster_rcnn_fpn_resnet101':
mode = 'bbox'
model = FasterRCNNFPNResNet101(
n_fg_class=len(coco_bbox_label_names),
pretrained_model='imagenet')
elif args.model == 'mask_rcnn_fpn_resnet50':
mode = 'instance_segmentation'
model = MaskRCNNFPNResNet50(
n_fg_class=len(coco_instance_segmentation_label_names),
pretrained_model='imagenet')
elif args.model == 'mask_rcnn_fpn_resnet101':
mode = 'instance_segmentation'
model = MaskRCNNFPNResNet101(
n_fg_class=len(coco_instance_segmentation_label_names),
pretrained_model='imagenet')
model.use_preset('evaluate')
train_chain = TrainChain(model)
chainer.cuda.get_device_from_id(device).use()
train_chain.to_gpu()
if mode == 'bbox':
train = TransformDataset(
COCOBboxDataset(year='2017', split='train'),
('img', 'bbox', 'label'),
Transform(800, 1333, model.extractor.mean))
elif mode == 'instance_segmentation':
train = TransformDataset(
COCOInstanceSegmentationDataset(split='train', return_bbox=True),
('img', 'bbox', 'label', 'mask'),
Transform(800, 1333, model.extractor.mean))
if comm.rank == 0:
indices = np.arange(len(train))
else:
indices = None
indices = chainermn.scatter_dataset(indices, comm, shuffle=True)
train = train.slice[indices]
train_iter = chainer.iterators.MultiprocessIterator(
train, args.batchsize // comm.size,
n_processes=args.batchsize // comm.size,
shared_mem=100 * 1000 * 1000 * 4)
optimizer = chainermn.create_multi_node_optimizer(
chainer.optimizers.MomentumSGD(), comm)
optimizer.setup(train_chain)
optimizer.add_hook(WeightDecay(0.0001))
model.extractor.base.conv1.disable_update()
model.extractor.base.res2.disable_update()
for link in model.links():
if isinstance(link, L.BatchNormalization):
link.disable_update()
n_iteration = args.iteration * 16 / args.batchsize
updater = training.updaters.StandardUpdater(
train_iter, optimizer, converter=converter, device=device)
trainer = training.Trainer(
updater, (n_iteration, 'iteration'), args.out)
@make_shift('lr')
def lr_schedule(trainer):
base_lr = 0.02 * args.batchsize / 16
warm_up_duration = 500
warm_up_rate = 1 / 3
iteration = trainer.updater.iteration
if iteration < warm_up_duration:
rate = warm_up_rate \
+ (1 - warm_up_rate) * iteration / warm_up_duration
else:
rate = 1
for step in args.step:
if iteration >= step * 16 / args.batchsize:
rate *= 0.1
return base_lr * rate
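# e.g. with --batchsize 16, the lr ramps from base_lr / 3 to base_lr (0.02)
# over the first 500 iterations, then drops 10x at iterations 60000 and 80000.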
trainer.extend(lr_schedule)
if comm.rank == 0:
log_interval = 10, 'iteration'
trainer.extend(extensions.LogReport(trigger=log_interval))
trainer.extend(extensions.observe_lr(), trigger=log_interval)
trainer.extend(extensions.PrintReport(
['epoch', 'iteration', 'lr', 'main/loss',
'main/loss/rpn/loc', 'main/loss/rpn/conf',
'main/loss/bbox_head/loc', 'main/loss/bbox_head/conf',
'main/loss/mask_head'
]),
trigger=log_interval)
trainer.extend(extensions.ProgressBar(update_interval=10))
trainer.extend(extensions.snapshot(), trigger=(10000, 'iteration'))
trainer.extend(
extensions.snapshot_object(
model, 'model_iter_{.updater.iteration}'),
trigger=(n_iteration, 'iteration'))
if args.resume:<|fim▁hole|>
if __name__ == '__main__':
main()<|fim▁end|> | serializers.load_npz(args.resume, trainer, strict=False)
trainer.run() |
<|file_name|>markov.py<|end_file_name|><|fim▁begin|>import wordtools
import random
from forms.form import Form
class MarkovForm(Form):
<|fim▁hole|> self.limiter=0
def validate(self,tweet):
cleaned = wordtools.clean(tweet)
if wordtools.validate(cleaned) and len(cleaned)>=2:
return cleaned
else:
return None
def save(self,a):
a.insert(0,"")
a.append("")
for i in range(0,len(a)-1):
if not a[i] in self.data:
self.data[a[i]]={}
if a[i+1] in self.data[a[i]]:
self.data[a[i]][a[i+1]]+=1
else:
self.data[a[i]][a[i+1]]=1
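# self.data is a nested dict of transition counts:
# data[word][next_word] == number of times next_word followed word;
# the "" sentinel marks both the start and the end of a tweet.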
def build(self):
self.limiter+=1
if self.limiter < 1000 or not self.limiter%300==0:
return None
s = ""
lastWord = ""
while True:
total = 0
for word in self.data[lastWord]:
total+=self.data[lastWord][word]
choice = random.randint(0,total-1)
total = 0
for word in self.data[lastWord]:
total+=self.data[lastWord][word]
if total>choice:
lastWord=word
s+=word+" "
break
if lastWord=="":
break
return s.lower()<|fim▁end|> | def __init__(self):
self.data={}
self.data[""]={} |
<|file_name|>mean.js<|end_file_name|><|fim▁begin|>export var meanDocs = {
name: 'mean',<|fim▁hole|> seealso: ['max', 'median', 'min', 'prod', 'std', 'sum', 'variance']
};<|fim▁end|> | category: 'Statistics',
syntax: ['mean(a, b, c, ...)', 'mean(A)', 'mean(A, dim)'],
description: 'Compute the arithmetic mean of a list of values.',
examples: ['mean(2, 3, 4, 1)', 'mean([2, 3, 4, 1])', 'mean([2, 5; 4, 3])', 'mean([2, 5; 4, 3], 1)', 'mean([2, 5; 4, 3], 2)', 'mean([1.0, 2.7, 3.2, 4.0])'], |
<|file_name|>de-BE.js<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
(function(global) {
global.ng = global.ng || {};
global.ng.common = global.ng.common || {};
global.ng.common.locales = global.ng.common.locales || {};
const u = undefined;
function plural(n) {
let i = Math.floor(Math.abs(n)), v = n.toString().replace(/^[^.]*\.?/, '').length;
if (i === 1 && v === 0) return 1;
return 5;
}
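// Sketch: plural(1) -> 1 (the "one" form); plural(2) or plural(1.5) -> 5 ("other").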
global.ng.common.locales['de-be'] = [
'de-BE',
[['AM', 'PM'], u, u],
u,
[
['S', 'M', 'D', 'M', 'D', 'F', 'S'], ['So.', 'Mo.', 'Di.', 'Mi.', 'Do.', 'Fr.', 'Sa.'],
['Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag'],
['So.', 'Mo.', 'Di.', 'Mi.', 'Do.', 'Fr.', 'Sa.']
],
[
['S', 'M', 'D', 'M', 'D', 'F', 'S'], ['So', 'Mo', 'Di', 'Mi', 'Do', 'Fr', 'Sa'],
['Sonntag', 'Montag', 'Dienstag', 'Mittwoch', 'Donnerstag', 'Freitag', 'Samstag'],
['So.', 'Mo.', 'Di.', 'Mi.', 'Do.', 'Fr.', 'Sa.']
],
[
['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
[
'Jan.', 'Feb.', 'März', 'Apr.', 'Mai', 'Juni', 'Juli', 'Aug.', 'Sept.', 'Okt.', 'Nov.',
'Dez.'
],
[
'Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September',
'Oktober', 'November', 'Dezember'
]
],
[
['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
['Jan', 'Feb', 'Mär', 'Apr', 'Mai', 'Jun', 'Jul', 'Aug', 'Sep', 'Okt', 'Nov', 'Dez'],
[
'Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September',
'Oktober', 'November', 'Dezember'
]
],
[['v. Chr.', 'n. Chr.'], u, u],
1,
[6, 0],
['dd.MM.yy', 'dd.MM.y', 'd. MMMM y', 'EEEE, d. MMMM y'],
['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'],
['{1}, {0}', u, '{1} \'um\' {0}', u],
[',', '.', ';', '%', '+', '-', 'E', '·', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0 %', '#,##0.00 ¤', '#E0'],
'€',
'Euro',
{
'ATS': ['öS'],
'AUD': ['AU$', '$'],
'BGM': ['BGK'],
'BGO': ['BGJ'],
'CUC': [u, 'Cub$'],
'DEM': ['DM'],
'FKP': [u, 'Fl£'],
'GNF': [u, 'F.G.'],
'KMF': [u, 'FC'],
'RON': [u, 'L'],
'RWF': [u, 'F.Rw'],
'SYP': [],
'THB': ['฿'],
'TWD': ['NT$'],
'XXX': [],
'ZMW': [u, 'K']
},
plural,
[
[
['Mitternacht', 'morgens', 'vorm.', 'mittags', 'nachm.', 'abends', 'nachts'], u,
['Mitternacht', 'morgens', 'vormittags', 'mittags', 'nachmittags', 'abends', 'nachts']
],
[<|fim▁hole|> ],
[
'00:00', ['05:00', '10:00'], ['10:00', '12:00'], ['12:00', '13:00'], ['13:00', '18:00'],
['18:00', '24:00'], ['00:00', '05:00']
]
]
];
})(typeof globalThis !== 'undefined' && globalThis || typeof global !== 'undefined' && global ||
typeof window !== 'undefined' && window);<|fim▁end|> | ['Mitternacht', 'Morgen', 'Vorm.', 'Mittag', 'Nachm.', 'Abend', 'Nacht'], u,
['Mitternacht', 'Morgen', 'Vormittag', 'Mittag', 'Nachmittag', 'Abend', 'Nacht'] |
<|file_name|>client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Internet Relay Chat (IRC) protocol client library.
This library is intended to encapsulate the IRC protocol in Python.
It provides an event-driven IRC client framework. It has
fairly thorough support for the basic IRC protocol, CTCP, and DCC chat.
To best understand how to make an IRC client, the reader more
or less must understand the IRC specifications. They are available
here: [IRC specifications].
The main features of the IRC client framework are:
* Abstraction of the IRC protocol.
* Handles multiple simultaneous IRC server connections.
* Handles server PONGing transparently.
* Messages to the IRC server are done by calling methods on an IRC
connection object.
* Messages from an IRC server triggers events, which can be caught
by event handlers.
* Reading from and writing to IRC server sockets are normally done
by an internal select() loop, but the select()ing may be done by
an external main loop.
* Functions can be registered to execute at specified times by the
event-loop.
* Decodes CTCP tagging correctly (hopefully); I haven't seen any
other IRC client implementation that handles the CTCP
specification subtleties.
* A kind of simple, single-server, object-oriented IRC client class
that dispatches events to instance methods is included.
Current limitations:
* Data is not written asynchronously to the server, i.e. the write()
may block if the TCP buffers are stuffed.
* DCC file transfers are not supported.
* RFCs 2810, 2811, 2812, and 2813 have not been considered.
Notes:
* connection.quit() only sends QUIT to the server.
* ERROR from the server triggers the error event and the disconnect event.
* dropping of the connection triggers the disconnect event.
.. [IRC specifications] http://www.irchelp.org/irchelp/rfc/
"""
import bisect
import re
import select
import socket
import time
import struct
import logging
import threading
import abc
import collections
import functools
import itertools
import contextlib
import warnings
import jaraco.functools
from jaraco.functools import Throttler
from jaraco.stream import buffer
from more_itertools import consume, always_iterable, repeatfunc
from . import connection
from . import events
from . import features
from . import ctcp
from . import message
from . import schedule
log = logging.getLogger(__name__)
class IRCError(Exception):
"An IRC exception"
class InvalidCharacters(ValueError):
"Invalid characters were encountered in the message"
class MessageTooLong(ValueError):
"Message is too long"
class Connection(metaclass=abc.ABCMeta):
"""
Base class for IRC connections.
"""
transmit_encoding = 'utf-8'
"encoding used for transmission"
@abc.abstractproperty
def socket(self):
"The socket for this connection"
def __init__(self, reactor):
self.reactor = reactor
def encode(self, msg):
"""Encode a message for transmission."""
return msg.encode(self.transmit_encoding)
class ServerConnectionError(IRCError):
pass
class ServerNotConnectedError(ServerConnectionError):
pass
class ServerConnection(Connection):
"""
An IRC server connection.
ServerConnection objects are instantiated by calling the server
method on a Reactor object.
"""
buffer_class = buffer.DecodingLineBuffer
socket = None
connected = False
def __init__(self, reactor):
super().__init__(reactor)
self.features = features.FeatureSet()
# save the method args to allow for easier reconnection.
@jaraco.functools.save_method_args
def connect(
self,
server,
port,
nickname,
password=None,
username=None,
ircname=None,
connect_factory=connection.Factory(),
):
"""Connect/reconnect to a server.
Arguments:
* server - Server name
* port - Port number
* nickname - The nickname
* password - Password (if any)
* username - The username
* ircname - The IRC name ("realname")
* server_address - The remote host/port of the server
* connect_factory - A callable that takes the server address and
returns a connection (with a socket interface)
This function can be called to reconnect a closed connection.
Returns the ServerConnection object.
"""
log.debug(
"connect(server=%r, port=%r, nickname=%r, ...)", server, port, nickname
)<|fim▁hole|> self.disconnect("Changing servers")
self.buffer = self.buffer_class()
self.handlers = {}
self.real_server_name = ""
self.real_nickname = nickname
self.server = server
self.port = port
self.server_address = (server, port)
self.nickname = nickname
self.username = username or nickname
self.ircname = ircname or nickname
self.password = password
self.connect_factory = connect_factory
try:
self.socket = self.connect_factory(self.server_address)
except socket.error as ex:
raise ServerConnectionError("Couldn't connect to socket: %s" % ex)
self.connected = True
self.reactor._on_connect(self.socket)
# Log on...
if self.password:
self.pass_(self.password)
self.nick(self.nickname)
self.user(self.username, self.ircname)
return self
def reconnect(self):
"""
Reconnect with the last arguments passed to self.connect()
"""
self.connect(*self._saved_connect.args, **self._saved_connect.kwargs)
def close(self):
"""Close the connection.
This method closes the connection permanently; after it has
been called, the object is unusable.
"""
# Without this thread lock, there is a window during which
# select() can find a closed socket, leading to an EBADF error.
with self.reactor.mutex:
self.disconnect("Closing object")
self.reactor._remove_connection(self)
def get_server_name(self):
"""Get the (real) server name.
This method returns the (real) server name, or, more
specifically, what the server calls itself.
"""
return self.real_server_name or ""
def get_nickname(self):
"""Get the (real) nick name.
This method returns the (real) nickname. The library keeps
track of nick changes, so it might not be the nick name that
was passed to the connect() method.
"""
return self.real_nickname
@contextlib.contextmanager
def as_nick(self, name):
"""
Set the nick for the duration of the context.
"""
orig = self.get_nickname()
self.nick(name)
try:
yield orig
finally:
self.nick(orig)
def process_data(self):
"read and process input from self.socket"
try:
reader = getattr(self.socket, 'read', self.socket.recv)
new_data = reader(2**14)
except socket.error:
# The server hung up.
self.disconnect("Connection reset by peer")
return
if not new_data:
# Read nothing: connection must be down.
self.disconnect("Connection reset by peer")
return
self.buffer.feed(new_data)
# process each non-empty line after logging all lines
for line in self.buffer:
log.debug("FROM SERVER: %s", line)
if not line:
continue
self._process_line(line)
def _process_line(self, line):
event = Event("all_raw_messages", self.get_server_name(), None, [line])
self._handle_event(event)
grp = _rfc_1459_command_regexp.match(line).group
source = NickMask.from_group(grp("prefix"))
command = self._command_from_group(grp("command"))
arguments = message.Arguments.from_group(grp('argument'))
tags = message.Tag.from_group(grp('tags'))
if source and not self.real_server_name:
self.real_server_name = source
if command == "nick":
if source.nick == self.real_nickname:
self.real_nickname = arguments[0]
elif command == "welcome":
# Record the nickname in case the client changed nick
# in a nicknameinuse callback.
self.real_nickname = arguments[0]
elif command == "featurelist":
self.features.load(arguments)
handler = (
self._handle_message
if command in ["privmsg", "notice"]
else self._handle_other
)
handler(arguments, command, source, tags)
def _handle_message(self, arguments, command, source, tags):
target, msg = arguments[:2]
messages = ctcp.dequote(msg)
if command == "privmsg":
if is_channel(target):
command = "pubmsg"
else:
if is_channel(target):
command = "pubnotice"
else:
command = "privnotice"
for m in messages:
if isinstance(m, tuple):
if command in ["privmsg", "pubmsg"]:
command = "ctcp"
else:
command = "ctcpreply"
m = list(m)
log.debug(
"command: %s, source: %s, target: %s, " "arguments: %s, tags: %s",
command,
source,
target,
m,
tags,
)
event = Event(command, source, target, m, tags)
self._handle_event(event)
if command == "ctcp" and m[0] == "ACTION":
event = Event("action", source, target, m[1:], tags)
self._handle_event(event)
else:
log.debug(
"command: %s, source: %s, target: %s, " "arguments: %s, tags: %s",
command,
source,
target,
[m],
tags,
)
event = Event(command, source, target, [m], tags)
self._handle_event(event)
def _handle_other(self, arguments, command, source, tags):
target = None
if command == "quit":
arguments = [arguments[0]]
elif command == "ping":
target = arguments[0]
else:
target = arguments[0] if arguments else None
arguments = arguments[1:]
if command == "mode":
if not is_channel(target):
command = "umode"
log.debug(
"command: %s, source: %s, target: %s, " "arguments: %s, tags: %s",
command,
source,
target,
arguments,
tags,
)
event = Event(command, source, target, arguments, tags)
self._handle_event(event)
@staticmethod
def _command_from_group(group):
command = group.lower()
# Translate numerics into more readable strings.
return events.numeric.get(command, command)
def _handle_event(self, event):
"""[Internal]"""
self.reactor._handle_event(self, event)
if event.type in self.handlers:
for fn in self.handlers[event.type]:
fn(self, event)
def is_connected(self):
"""Return connection status.
Returns true if connected, otherwise false.
"""
return self.connected
def add_global_handler(self, *args):
"""Add global handler.
See documentation for IRC.add_global_handler.
"""
self.reactor.add_global_handler(*args)
def remove_global_handler(self, *args):
"""Remove global handler.
See documentation for IRC.remove_global_handler.
"""
self.reactor.remove_global_handler(*args)
def action(self, target, action):
"""Send a CTCP ACTION command."""
self.ctcp("ACTION", target, action)
def admin(self, server=""):
"""Send an ADMIN command."""
self.send_items('ADMIN', server)
def cap(self, subcommand, *args):
"""
Send a CAP command according to `the spec
<http://ircv3.atheme.org/specification/capability-negotiation-3.1>`_.
Arguments:
subcommand -- LS, LIST, REQ, ACK, CLEAR, END
args -- capabilities, if required for given subcommand
Example:
.cap('LS')
.cap('REQ', 'multi-prefix', 'sasl')
.cap('END')
"""
cap_subcommands = set('LS LIST REQ ACK NAK CLEAR END'.split())
client_subcommands = set(cap_subcommands) - {'NAK'}
assert subcommand in client_subcommands, "invalid subcommand"
def _multi_parameter(args):
"""
According to the spec::
If more than one capability is named, the RFC1459 designated
sentinel (:) for a multi-parameter argument must be present.
It's not obvious where the sentinel should be present or if it
must be omitted for a single parameter, so follow convention and
only include the sentinel prefixed to the first parameter if more
than one parameter is present.
"""
if len(args) > 1:
return (':' + args[0],) + args[1:]
return args
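        # Illustrative: _multi_parameter(('multi-prefix', 'sasl')) returns
        # (':multi-prefix', 'sasl'), while a zero- or one-element tuple is
        # returned unchanged.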
self.send_items('CAP', subcommand, *_multi_parameter(args))
def ctcp(self, ctcptype, target, parameter=""):
"""Send a CTCP command."""
ctcptype = ctcptype.upper()
tmpl = "\001{ctcptype} {parameter}\001" if parameter else "\001{ctcptype}\001"
self.privmsg(target, tmpl.format(**vars()))
def ctcp_reply(self, target, parameter):
"""Send a CTCP REPLY command."""
self.notice(target, "\001%s\001" % parameter)
def disconnect(self, message=""):
"""Hang up the connection.
Arguments:
message -- Quit message.
"""
try:
del self.connected
except AttributeError:
return
self.quit(message)
try:
self.socket.shutdown(socket.SHUT_WR)
self.socket.close()
except socket.error:
pass
del self.socket
self._handle_event(Event("disconnect", self.server, "", [message]))
def globops(self, text):
"""Send a GLOBOPS command."""
self.send_items('GLOBOPS', ':' + text)
def info(self, server=""):
"""Send an INFO command."""
self.send_items('INFO', server)
def invite(self, nick, channel):
"""Send an INVITE command."""
self.send_items('INVITE', nick, channel)
def ison(self, nicks):
"""Send an ISON command.
Arguments:
nicks -- List of nicks.
"""
self.send_items('ISON', *tuple(nicks))
def join(self, channel, key=""):
"""Send a JOIN command."""
self.send_items('JOIN', channel, key)
def kick(self, channel, nick, comment=""):
"""Send a KICK command."""
self.send_items('KICK', channel, nick, comment and ':' + comment)
def links(self, remote_server="", server_mask=""):
"""Send a LINKS command."""
self.send_items('LINKS', remote_server, server_mask)
def list(self, channels=None, server=""):
"""Send a LIST command."""
self.send_items('LIST', ','.join(always_iterable(channels)), server)
def lusers(self, server=""):
"""Send a LUSERS command."""
self.send_items('LUSERS', server)
def mode(self, target, command):
"""Send a MODE command."""
self.send_items('MODE', target, command)
def motd(self, server=""):
"""Send an MOTD command."""
self.send_items('MOTD', server)
def names(self, channels=None):
"""Send a NAMES command."""
self.send_items('NAMES', ','.join(always_iterable(channels)))
def nick(self, newnick):
"""Send a NICK command."""
self.send_items('NICK', newnick)
def notice(self, target, text):
"""Send a NOTICE command."""
# Should limit len(text) here!
self.send_items('NOTICE', target, ':' + text)
def oper(self, nick, password):
"""Send an OPER command."""
self.send_items('OPER', nick, password)
def part(self, channels, message=""):
"""Send a PART command."""
self.send_items('PART', ','.join(always_iterable(channels)), message)
def pass_(self, password):
"""Send a PASS command."""
self.send_items('PASS', password)
def ping(self, target, target2=""):
"""Send a PING command."""
self.send_items('PING', target, target2)
def pong(self, target, target2=""):
"""Send a PONG command."""
self.send_items('PONG', target, target2)
def privmsg(self, target, text):
"""Send a PRIVMSG command."""
self.send_items('PRIVMSG', target, ':' + text)
def privmsg_many(self, targets, text):
"""Send a PRIVMSG command to multiple targets."""
target = ','.join(targets)
return self.privmsg(target, text)
def quit(self, message=""):
"""Send a QUIT command."""
# Note that many IRC servers don't use your QUIT message
# unless you've been connected for at least 5 minutes!
self.send_items('QUIT', message and ':' + message)
def _prep_message(self, string):
        # The string should not contain any carriage return or line feed
        # other than the CR LF appended here.
        if '\n' in string or '\r' in string:
            msg = "Carriage returns not allowed in privmsg(text)"
            raise InvalidCharacters(msg)
bytes = self.encode(string) + b'\r\n'
# According to the RFC http://tools.ietf.org/html/rfc2812#page-6,
# clients should not transmit more than 512 bytes.
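        # Note that the limit applies to the encoded bytes, including the
        # CR LF appended above, not to the character count of `string`.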
if len(bytes) > 512:
msg = "Messages limited to 512 bytes including CR/LF"
raise MessageTooLong(msg)
return bytes
def send_items(self, *items):
"""
Send all non-empty items, separated by spaces.
"""
self.send_raw(' '.join(filter(None, items)))
def send_raw(self, string):
"""Send raw string to the server.
The string will be padded with appropriate CR LF.
"""
if self.socket is None:
raise ServerNotConnectedError("Not connected.")
sender = getattr(self.socket, 'write', self.socket.send)
try:
sender(self._prep_message(string))
log.debug("TO SERVER: %s", string)
except socket.error:
# Ouch!
self.disconnect("Connection reset by peer.")
def squit(self, server, comment=""):
"""Send an SQUIT command."""
self.send_items('SQUIT', server, comment and ':' + comment)
def stats(self, statstype, server=""):
"""Send a STATS command."""
self.send_items('STATS', statstype, server)
def time(self, server=""):
"""Send a TIME command."""
self.send_items('TIME', server)
def topic(self, channel, new_topic=None):
"""Send a TOPIC command."""
self.send_items('TOPIC', channel, new_topic and ':' + new_topic)
def trace(self, target=""):
"""Send a TRACE command."""
self.send_items('TRACE', target)
def user(self, username, realname):
"""Send a USER command."""
cmd = 'USER {username} 0 * :{realname}'.format(**locals())
self.send_raw(cmd)
def userhost(self, nicks):
"""Send a USERHOST command."""
self.send_items('USERHOST', ",".join(nicks))
def users(self, server=""):
"""Send a USERS command."""
self.send_items('USERS', server)
def version(self, server=""):
"""Send a VERSION command."""
self.send_items('VERSION', server)
def wallops(self, text):
"""Send a WALLOPS command."""
self.send_items('WALLOPS', ':' + text)
def who(self, target="", op=""):
"""Send a WHO command."""
self.send_items('WHO', target, op and 'o')
def whois(self, targets):
"""Send a WHOIS command."""
self.send_items('WHOIS', ",".join(always_iterable(targets)))
def whowas(self, nick, max="", server=""):
"""Send a WHOWAS command."""
self.send_items('WHOWAS', nick, max, server)
def set_rate_limit(self, frequency):
"""
Set a `frequency` limit (messages per second) for this connection.
Any attempts to send faster than this rate will block.
"""
self.send_raw = Throttler(self.send_raw, frequency)
def set_keepalive(self, interval):
"""
Set a keepalive to occur every `interval` on this `ServerConnection`.
:param interval: `int` in seconds, or `datetime.timedelta`
"""
pinger = functools.partial(self.ping, 'keep-alive')
self.reactor.scheduler.execute_every(period=interval, func=pinger)
class PrioritizedHandler(collections.namedtuple('Base', ('priority', 'callback'))):
def __lt__(self, other):
"when sorting prioritized handlers, only use the priority"
return self.priority < other.priority
class Reactor:
"""
Processes events from one or more IRC server connections.
This class implements a reactor in the style of the `reactor pattern
<http://en.wikipedia.org/wiki/Reactor_pattern>`_.
When a Reactor object has been instantiated, it can be used to create
Connection objects that represent the IRC connections. The
responsibility of the reactor object is to provide an event-driven
framework for the connections and to keep the connections alive.
It runs a select loop to poll each connection's TCP socket and
hands over the sockets with incoming data for processing by the
corresponding connection.
The methods of most interest for an IRC client writer are server,
add_global_handler, remove_global_handler,
process_once, and process_forever.
    This is functionally an event-loop which can either use its own
internal polling loop, or tie into an external event-loop, by
having the external event-system periodically call `process_once`
on the instantiated reactor class. This will allow the reactor
to process any queued data and/or events.
Calling `process_forever` will hand off execution to the reactor's
internal event-loop, which will not return for the life of the
reactor.
Here is an example:
client = irc.client.Reactor()
server = client.server()
server.connect("irc.some.where", 6667, "my_nickname")
server.privmsg("a_nickname", "Hi there!")
client.process_forever()
This will connect to the IRC server irc.some.where on port 6667
using the nickname my_nickname and send the message "Hi there!"
to the nickname a_nickname.
The methods of this class are thread-safe; accesses to and modifications
of its internal lists of connections, handlers, and delayed commands
are guarded by a mutex.
"""
scheduler_class = schedule.DefaultScheduler
connection_class = ServerConnection
def __do_nothing(*args, **kwargs):
pass
def __init__(self, on_connect=__do_nothing, on_disconnect=__do_nothing):
"""Constructor for Reactor objects.
on_connect: optional callback invoked when a new connection
is made.
on_disconnect: optional callback invoked when a socket is
disconnected.
The arguments mainly exist to be able to use an external
main loop (for example Tkinter's or PyGTK's main app loop)
instead of calling the process_forever method.
An alternative is to just call ServerConnection.process_once()
once in a while.
"""
self._on_connect = on_connect
self._on_disconnect = on_disconnect
scheduler = self.scheduler_class()
assert isinstance(scheduler, schedule.IScheduler)
self.scheduler = scheduler
self.connections = []
self.handlers = {}
# Modifications to these shared lists and dict need to be thread-safe
self.mutex = threading.RLock()
self.add_global_handler("ping", _ping_ponger, -42)
def server(self):
"""Creates and returns a ServerConnection object."""
conn = self.connection_class(self)
with self.mutex:
self.connections.append(conn)
return conn
def process_data(self, sockets):
"""Called when there is more data to read on connection sockets.
Arguments:
sockets -- A list of socket objects.
See documentation for Reactor.__init__.
"""
with self.mutex:
log.log(logging.DEBUG - 2, "process_data()")
for sock, conn in itertools.product(sockets, self.connections):
if sock == conn.socket:
conn.process_data()
def process_timeout(self):
"""Called when a timeout notification is due.
See documentation for Reactor.__init__.
"""
with self.mutex:
self.scheduler.run_pending()
@property
def sockets(self):
with self.mutex:
return [
conn.socket
for conn in self.connections
if conn is not None and conn.socket is not None
]
def process_once(self, timeout=0):
"""Process data from connections once.
Arguments:
timeout -- How long the select() call should wait if no
data is available.
This method should be called periodically to check and process
incoming data, if there are any. If that seems boring, look
at the process_forever method.
"""
log.log(logging.DEBUG - 2, "process_once()")
sockets = self.sockets
if sockets:
in_, out, err = select.select(sockets, [], [], timeout)
self.process_data(in_)
else:
time.sleep(timeout)
self.process_timeout()
def process_forever(self, timeout=0.2):
"""Run an infinite loop, processing data from connections.
This method repeatedly calls process_once.
Arguments:
timeout -- Parameter to pass to process_once.
"""
# This loop should specifically *not* be mutex-locked.
# Otherwise no other thread would ever be able to change
# the shared state of a Reactor object running this function.
log.debug("process_forever(timeout=%s)", timeout)
one = functools.partial(self.process_once, timeout=timeout)
consume(repeatfunc(one))
def disconnect_all(self, message=""):
"""Disconnects all connections."""
with self.mutex:
for conn in self.connections:
conn.disconnect(message)
def add_global_handler(self, event, handler, priority=0):
"""Adds a global handler function for a specific event type.
Arguments:
event -- Event type (a string). Check the values of
numeric_events for possible event types.
handler -- Callback function taking 'connection' and 'event'
parameters.
priority -- A number (the lower number, the higher priority).
The handler function is called whenever the specified event is
triggered in any of the connections. See documentation for
the Event class.
The handler functions are called in priority order (lowest
number is highest priority). If a handler function returns
"NO MORE", no more handlers will be called.
"""
handler = PrioritizedHandler(priority, handler)
with self.mutex:
event_handlers = self.handlers.setdefault(event, [])
bisect.insort(event_handlers, handler)
def remove_global_handler(self, event, handler):
"""Removes a global handler function.
Arguments:
event -- Event type (a string).
handler -- Callback function.
Returns 1 on success, otherwise 0.
"""
with self.mutex:
if event not in self.handlers:
return 0
for h in self.handlers[event]:
if handler == h.callback:
self.handlers[event].remove(h)
return 1
def dcc(self, dcctype="chat"):
"""Creates and returns a DCCConnection object.
Arguments:
dcctype -- "chat" for DCC CHAT connections or "raw" for
DCC SEND (or other DCC types). If "chat",
                       incoming data will be split into newline-separated
chunks. If "raw", incoming data is not touched.
"""
with self.mutex:
conn = DCCConnection(self, dcctype)
self.connections.append(conn)
return conn
def _handle_event(self, connection, event):
"""
Handle an Event event incoming on ServerConnection connection.
"""
with self.mutex:
matching_handlers = sorted(
self.handlers.get("all_events", []) + self.handlers.get(event.type, [])
)
for handler in matching_handlers:
result = handler.callback(connection, event)
if result == "NO MORE":
return
def _remove_connection(self, connection):
"""[Internal]"""
with self.mutex:
self.connections.remove(connection)
self._on_disconnect(connection.socket)
_cmd_pat = (
"^(@(?P<tags>[^ ]*) )?(:(?P<prefix>[^ ]+) +)?"
"(?P<command>[^ ]+)( *(?P<argument> .+))?"
)
_rfc_1459_command_regexp = re.compile(_cmd_pat)
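# Illustrative parse: ":nick!user@host PRIVMSG #chan :hi" yields
# prefix='nick!user@host', command='PRIVMSG', argument=' #chan :hi'
# (tags is None unless the line starts with an IRCv3 '@tags ' section).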
class DCCConnectionError(IRCError):
pass
class DCCConnection(Connection):
"""
A DCC (Direct Client Connection).
DCCConnection objects are instantiated by calling the dcc
method on a Reactor object.
"""
socket = None
connected = False
passive = False
peeraddress = None
peerport = None
def __init__(self, reactor, dcctype):
super().__init__(reactor)
self.dcctype = dcctype
def connect(self, address, port):
"""Connect/reconnect to a DCC peer.
Arguments:
address -- Host/IP address of the peer.
port -- The port number to connect to.
Returns the DCCConnection object.
"""
self.peeraddress = socket.gethostbyname(address)
self.peerport = port
self.buffer = buffer.LineBuffer()
self.handlers = {}
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.socket.connect((self.peeraddress, self.peerport))
except socket.error as x:
raise DCCConnectionError("Couldn't connect to socket: %s" % x)
self.connected = True
self.reactor._on_connect(self.socket)
return self
def listen(self, addr=None):
"""Wait for a connection/reconnection from a DCC peer.
Returns the DCCConnection object.
The local IP address and port are available as
self.localaddress and self.localport. After connection from a
peer, the peer address and port are available as
self.peeraddress and self.peerport.
"""
self.buffer = buffer.LineBuffer()
self.handlers = {}
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.passive = True
default_addr = socket.gethostbyname(socket.gethostname()), 0
try:
self.socket.bind(addr or default_addr)
self.localaddress, self.localport = self.socket.getsockname()
self.socket.listen(10)
except socket.error as x:
raise DCCConnectionError("Couldn't bind socket: %s" % x)
return self
def disconnect(self, message=""):
"""Hang up the connection and close the object.
Arguments:
message -- Quit message.
"""
try:
del self.connected
except AttributeError:
return
try:
self.socket.shutdown(socket.SHUT_WR)
self.socket.close()
except socket.error:
pass
del self.socket
self.reactor._handle_event(
self, Event("dcc_disconnect", self.peeraddress, "", [message])
)
self.reactor._remove_connection(self)
def process_data(self):
"""[Internal]"""
if self.passive and not self.connected:
conn, (self.peeraddress, self.peerport) = self.socket.accept()
self.socket.close()
self.socket = conn
self.connected = True
log.debug("DCC connection from %s:%d", self.peeraddress, self.peerport)
self.reactor._handle_event(
self, Event("dcc_connect", self.peeraddress, None, None)
)
return
try:
new_data = self.socket.recv(2**14)
except socket.error:
# The server hung up.
self.disconnect("Connection reset by peer")
return
if not new_data:
# Read nothing: connection must be down.
self.disconnect("Connection reset by peer")
return
if self.dcctype == "chat":
self.buffer.feed(new_data)
chunks = list(self.buffer)
if len(self.buffer) > 2**14:
# Bad peer! Naughty peer!
log.info(
"Received >16k from a peer without a newline; " "disconnecting."
)
self.disconnect()
return
else:
chunks = [new_data]
command = "dccmsg"
prefix = self.peeraddress
target = None
for chunk in chunks:
log.debug("FROM PEER: %s", chunk)
arguments = [chunk]
log.debug(
"command: %s, source: %s, target: %s, arguments: %s",
command,
prefix,
target,
arguments,
)
event = Event(command, prefix, target, arguments)
self.reactor._handle_event(self, event)
def privmsg(self, text):
"""
Send text to DCC peer.
The text will be padded with a newline if it's a DCC CHAT session.
"""
if self.dcctype == 'chat':
text += '\n'
return self.send_bytes(self.encode(text))
def send_bytes(self, bytes):
"""
Send data to DCC peer.
"""
try:
self.socket.send(bytes)
log.debug("TO PEER: %r\n", bytes)
except socket.error:
self.disconnect("Connection reset by peer.")
class SimpleIRCClient:
"""A simple single-server IRC client class.
This is an example of an object-oriented wrapper of the IRC
framework. A real IRC client can be made by subclassing this
class and adding appropriate methods.
The method on_join will be called when a "join" event is created
    (which is done when the server sends a JOIN message/command),
on_privmsg will be called for "privmsg" events, and so on. The
handler methods get two arguments: the connection object (same as
self.connection) and the event object.
Functionally, any of the event names in `events.py` may be subscribed
to by prefixing them with `on_`, and creating a function of that
name in the child-class of `SimpleIRCClient`. When the event of
`event_name` is received, the appropriately named method will be
called (if it exists) by runtime class introspection.
    See `_dispatcher()`, which takes the event name, appends it to
    `on_`, and then attempts to look up the class member function by
name and call it.
    Instance attributes that can be used by subclasses:
reactor -- The Reactor instance.
connection -- The ServerConnection instance.
dcc_connections -- A list of DCCConnection instances.
"""
reactor_class = Reactor
def __init__(self):
self.reactor = self.reactor_class()
self.connection = self.reactor.server()
self.dcc_connections = []
self.reactor.add_global_handler("all_events", self._dispatcher, -10)
self.reactor.add_global_handler("dcc_disconnect", self._dcc_disconnect, -10)
def _dispatcher(self, connection, event):
"""
Dispatch events to on_<event.type> method, if present.
"""
log.debug("_dispatcher: %s", event.type)
def do_nothing(connection, event):
return None
method = getattr(self, "on_" + event.type, do_nothing)
method(connection, event)
def _dcc_disconnect(self, connection, event):
self.dcc_connections.remove(connection)
def connect(self, *args, **kwargs):
"""Connect using the underlying connection"""
self.connection.connect(*args, **kwargs)
def dcc(self, *args, **kwargs):
"""Create and associate a new DCCConnection object.
Use the returned object to listen for or connect to
a DCC peer.
"""
dcc = self.reactor.dcc(*args, **kwargs)
self.dcc_connections.append(dcc)
return dcc
def dcc_connect(self, address, port, dcctype="chat"):
"""Connect to a DCC peer.
Arguments:
address -- IP address of the peer.
port -- Port to connect to.
Returns a DCCConnection instance.
"""
warnings.warn("Use self.dcc(type).connect()", DeprecationWarning)
return self.dcc(dcctype).connect(address, port)
def dcc_listen(self, dcctype="chat"):
"""Listen for connections from a DCC peer.
Returns a DCCConnection instance.
"""
warnings.warn("Use self.dcc(type).listen()", DeprecationWarning)
return self.dcc(dcctype).listen()
def start(self):
"""Start the IRC client."""
self.reactor.process_forever()
class Event:
"""
An IRC event.
>>> print(Event('privmsg', '@somebody', '#channel'))
type: privmsg, source: @somebody, target: #channel, arguments: [], tags: []
"""
def __init__(self, type, source, target, arguments=None, tags=None):
"""
Initialize an Event.
Arguments:
type -- A string describing the event.
source -- The originator of the event (a nick mask or a server).
target -- The target of the event (a nick or a channel).
        arguments -- Any event-specific arguments.
        tags -- Any event-specific IRCv3 message tags.
"""
self.type = type
self.source = source
self.target = target
if arguments is None:
arguments = []
self.arguments = arguments
if tags is None:
tags = []
self.tags = tags
def __str__(self):
tmpl = (
"type: {type}, "
"source: {source}, "
"target: {target}, "
"arguments: {arguments}, "
"tags: {tags}"
)
return tmpl.format(**vars(self))
def is_channel(string):
"""Check if a string is a channel name.
Returns true if the argument is a channel name, otherwise false.
"""
return string and string[0] in "#&+!"
def ip_numstr_to_quad(num):
"""
Convert an IP number as an integer given in ASCII
representation to an IP address string.
>>> ip_numstr_to_quad('3232235521')
'192.168.0.1'
>>> ip_numstr_to_quad(3232235521)
'192.168.0.1'
"""
packed = struct.pack('>L', int(num))
bytes = struct.unpack('BBBB', packed)
return ".".join(map(str, bytes))
def ip_quad_to_numstr(quad):
"""
Convert an IP address string (e.g. '192.168.0.1') to an IP
number as a base-10 integer given in ASCII representation.
>>> ip_quad_to_numstr('192.168.0.1')
'3232235521'
"""
bytes = map(int, quad.split("."))
packed = struct.pack('BBBB', *bytes)
return str(struct.unpack('>L', packed)[0])
class NickMask(str):
"""
A nickmask (the source of an Event)
>>> nm = NickMask('[email protected]')
>>> nm.nick
'pinky'
>>> nm.host
'example.com'
>>> nm.user
'username'
>>> isinstance(nm, str)
True
>>> nm = NickMask('красный[email protected]')
>>> isinstance(nm.nick, str)
True
Some messages omit the userhost. In that case, None is returned.
>>> nm = NickMask('irc.server.net')
>>> nm.nick
'irc.server.net'
>>> nm.userhost
>>> nm.host
>>> nm.user
"""
@classmethod
def from_params(cls, nick, user, host):
return cls('{nick}!{user}@{host}'.format(**vars()))
@property
def nick(self):
nick, sep, userhost = self.partition("!")
return nick
@property
def userhost(self):
nick, sep, userhost = self.partition("!")
return userhost or None
@property
def host(self):
nick, sep, userhost = self.partition("!")
user, sep, host = userhost.partition('@')
return host or None
@property
def user(self):
nick, sep, userhost = self.partition("!")
user, sep, host = userhost.partition('@')
return user or None
@classmethod
def from_group(cls, group):
return cls(group) if group else None
def _ping_ponger(connection, event):
"A global handler for the 'ping' event"
connection.pong(event.target)<|fim▁end|> |
if self.connected: |
<|file_name|>RegexToNFA.java<|end_file_name|><|fim▁begin|>package ch.unibe.scg.regex;
import static java.util.Collections.singleton;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import ch.unibe.scg.regex.ParserProvider.Node;
import ch.unibe.scg.regex.ParserProvider.Node.Basic;
import ch.unibe.scg.regex.ParserProvider.Node.Group;
import ch.unibe.scg.regex.ParserProvider.Node.NonGreedyStar;
import ch.unibe.scg.regex.ParserProvider.Node.Optional;
import ch.unibe.scg.regex.ParserProvider.Node.Plus;
import ch.unibe.scg.regex.ParserProvider.Node.PositiveSet;
import ch.unibe.scg.regex.ParserProvider.Node.SetItem;
import ch.unibe.scg.regex.ParserProvider.Node.Simple;
import ch.unibe.scg.regex.ParserProvider.Node.Star;
import ch.unibe.scg.regex.ParserProvider.Node.Union;
import ch.unibe.scg.regex.TNFA.Builder;
import ch.unibe.scg.regex.Transition.Priority;
/**
* Not thread-safe! Use only from one thread at a time!
*
* @author nes
*/
class RegexToNFA {
final InputRangeCleanup inputRangeCleanup = new InputRangeCleanup();
TNFA convert(final Node node) {
Collection<InputRange> allInputRanges = new ArrayList<>();
allInputRanges.add(InputRange.ANY); // All regexes contain this implicitly.
findRanges(node, allInputRanges);
final Builder builder = Builder.make(allInputRanges);
builder.registerCaptureGroup(builder.captureGroupMaker.entireMatch);
final MiniAutomaton m =
makeInitialMiniAutomaton(builder, builder.captureGroupMaker.entireMatch);
final MiniAutomaton a = make(m, builder, node, builder.captureGroupMaker.entireMatch);
final State endTagger = builder.makeState();
builder.addEndTagTransition(a.finishing, endTagger, builder.captureGroupMaker.entireMatch,
Priority.NORMAL);
builder.setAsAccepting(endTagger);
return builder.build();
}
private void findRanges(Node n, Collection<InputRange> out) {
if (n instanceof Node.SetItem) {
out.add(((SetItem) n).inputRange);
}<|fim▁hole|> }
static class MiniAutomaton {
final Collection<State> finishing;
final Collection<State> initial;
MiniAutomaton(final Collection<State> initial, final Collection<State> finishing) {
if (initial.iterator().next() == null) {
assert false;
}
this.initial = initial;
this.finishing = finishing;
}
MiniAutomaton(final Collection<State> initial, final State finishing) {
this(initial, singleton(finishing));
}
@Override
public String toString() {
return "" + initial + " -> " + finishing;
}
}
MiniAutomaton make(final MiniAutomaton last, final Builder builder, final Node node,
CaptureGroup captureGroup) {
MiniAutomaton ret;
if (node instanceof Node.Any) {
ret = makeAny(last, builder);
} else if (node instanceof Node.Char) {
ret = makeChar(last, builder, (Node.Char) node);
} else if (node instanceof Node.Simple) {
ret = makeSimple(last, builder, (Node.Simple) node, captureGroup);
} else if (node instanceof Node.Optional) {
ret = makeOptional(last, builder, (Node.Optional) node, captureGroup);
} else if (node instanceof Node.NonGreedyStar) {
ret = makeNonGreedyStar(last, builder, (Node.NonGreedyStar) node, captureGroup);
} else if (node instanceof Node.Star) {
ret = makeStar(last, builder, (Star) node, captureGroup);
} else if (node instanceof Node.Plus) {
ret = makePlus(last, builder, (Node.Plus) node, captureGroup);
} else if (node instanceof Node.Group) {
ret = makeGroup(last, builder, (Node.Group) node, captureGroup);
} else if (node instanceof Node.Eos) {
ret = makeEos(last, builder);
} else if (node instanceof Node.PositiveSet) {
ret = makePositiveSet(last, builder, (Node.PositiveSet) node);
} else if (node instanceof Node.Union) {
ret = makeUnion(last, builder, (Node.Union) node, captureGroup);
} else {
throw new AssertionError("Unknown node type: " + node);
}
assert !ret.initial.contains(null);
assert !ret.finishing.contains(null);
return ret;
}
MiniAutomaton makeAny(final MiniAutomaton last, final Builder builder) {
final State a = builder.makeState();
builder.addUntaggedTransition(InputRange.ANY, last.finishing, a);
return new MiniAutomaton(last.finishing, a);
}
MiniAutomaton makeChar(final MiniAutomaton last, final Builder b, final Node.Char character) {
final State a = b.makeState();
final MiniAutomaton ret = new MiniAutomaton(last.finishing, a);
b.addUntaggedTransition(character.inputRange, ret.initial, a);
return ret;
}
MiniAutomaton makeEos(final MiniAutomaton last, final Builder builder) {
final State a = builder.makeState();
builder.addUntaggedTransition(InputRange.EOS, last.finishing, a);
return new MiniAutomaton(last.finishing, a);
}
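  // Capture groups are modeled with tagged epsilon transitions: a start tag
  // before the group body and an end tag after it let the TNFA simulation
  // record the group's match boundaries.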
MiniAutomaton makeGroup(final MiniAutomaton last, final Builder builder, final Group group,
CaptureGroup parentCaptureGroup) {
final CaptureGroup cg = builder.makeCaptureGroup(parentCaptureGroup);
builder.registerCaptureGroup(cg);
final State startGroup = builder.makeState();
builder.addStartTagTransition(last.finishing, startGroup, cg, Priority.NORMAL);
final MiniAutomaton startGroupAutomaton = new MiniAutomaton(singleton(startGroup), singleton(startGroup));
final MiniAutomaton body = make(startGroupAutomaton, builder, group.body, cg);
final State endTag = builder.makeState();
builder.addEndTagTransition(body.finishing, endTag, cg, Priority.NORMAL);
return new MiniAutomaton(last.finishing, endTag);
}
MiniAutomaton makeInitialMiniAutomaton(final Builder builder, CaptureGroup entireMatch) {
final State init = builder.makeInitialState();
final State startTagger = builder.makeState();
builder.addStartTagTransition(singleton(init), startTagger, entireMatch, Priority.NORMAL);
return new MiniAutomaton(singleton(init), singleton(startTagger));
}
MiniAutomaton makeOptional(final MiniAutomaton last, final Builder builder,
final Optional optional, CaptureGroup captureGroup) {
final MiniAutomaton ma = make(last, builder, optional.elementary, captureGroup);
final List<State> f = new ArrayList<>(last.finishing);
f.addAll(ma.finishing);
return new MiniAutomaton(last.finishing, f);
}
MiniAutomaton makePlus(final MiniAutomaton last, final Builder builder, final Plus plus,
CaptureGroup captureGroup) {
final MiniAutomaton inner = make(last, builder, plus.elementary, captureGroup);
Collection<State> out = singleton(builder.makeState());
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, out, Priority.LOW);
final MiniAutomaton ret = new MiniAutomaton(last.finishing, out);
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing,
inner.initial, Priority.NORMAL);
return ret;
}
MiniAutomaton makeUnion(MiniAutomaton last, Builder builder, Union union,
CaptureGroup captureGroup) {
MiniAutomaton left = make(last, builder, union.left, captureGroup);
MiniAutomaton right = make(last, builder, union.right, captureGroup);
Collection<State> out = singleton(builder.makeState());
builder.makeUntaggedEpsilonTransitionFromTo(left.finishing, out, Priority.NORMAL);
builder.makeUntaggedEpsilonTransitionFromTo(right.finishing, out, Priority.LOW);
return new MiniAutomaton(last.finishing, out);
}
MiniAutomaton makePositiveSet(final MiniAutomaton last, final Builder builder,
final PositiveSet set) {
final List<SetItem> is = set.items;
final SortedSet<InputRange> ranges = new TreeSet<>();
for (final SetItem i : is) {
ranges.add(i.inputRange);
}
final List<InputRange> rangesList = new ArrayList<>(ranges);
final List<InputRange> cleanedRanges = inputRangeCleanup.cleanUp(rangesList);
final State a = builder.makeState();
for (InputRange range : cleanedRanges) {
builder.addUntaggedTransition(range, last.finishing, a);
}
return new MiniAutomaton(last.finishing, a);
}
MiniAutomaton makeSimple(final MiniAutomaton last, final Builder b, final Simple simple,
CaptureGroup captureGroup) {
final List<? extends Basic> bs = simple.basics;
MiniAutomaton lm = last;
for (final Basic e : bs) {
lm = make(lm, b, e, captureGroup);
}
return new MiniAutomaton(last.finishing, lm.finishing);
}
MiniAutomaton makeNonGreedyStar(MiniAutomaton last, Builder builder, NonGreedyStar nonGreedyStar,
CaptureGroup captureGroup) {
// Make start state and connect.
State start = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL);
// Make inner machine.
MiniAutomaton innerLast = new MiniAutomaton(last.finishing, start);
final MiniAutomaton inner = make(innerLast, builder, nonGreedyStar.elementary, captureGroup);
// Connect inner machine back to start.
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.LOW);
// Make and connect `out` state.
State out = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.NORMAL);
return new MiniAutomaton(last.finishing, out);
}
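  // makeStar is the greedy twin of makeNonGreedyStar: the priorities on the
  // loop-back and exit epsilon transitions are swapped, so re-entering the
  // body (NORMAL) is preferred over leaving (LOW).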
MiniAutomaton makeStar(final MiniAutomaton last, final Builder builder, final Star star,
CaptureGroup captureGroup) {
// Make start state and connect.
State start = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(last.finishing, singleton(start), Priority.NORMAL);
// Make inner machine.
MiniAutomaton innerLast = new MiniAutomaton(singleton(start), start);
final MiniAutomaton inner = make(innerLast, builder, star.elementary, captureGroup);
// Connect inner machine back to start.
builder.makeUntaggedEpsilonTransitionFromTo(inner.finishing, singleton(start), Priority.NORMAL);
// Make and connect `out` state.
State out = builder.makeState();
builder.makeUntaggedEpsilonTransitionFromTo(singleton(start), singleton(out), Priority.LOW);
return new MiniAutomaton(last.finishing, out);
}
}<|fim▁end|> | for (Node c : n.getChildren()) {
findRanges(c, out);
} |
<|file_name|>providers_feature.ts<|end_file_name|><|fim▁begin|>/**
* @license<|fim▁hole|> * found in the LICENSE file at https://angular.io/license
*/
import {Provider} from '../../di/provider';
import {providersResolver} from '../di_setup';
import {DirectiveDef} from '../interfaces/definition';
/**
* This feature resolves the providers of a directive (or component),
 * and publishes them into the DI system, making them visible to others for injection.
*
* For example:
* class ComponentWithProviders {
* constructor(private greeter: GreeterDE) {}
*
* static ngComponentDef = defineComponent({
* type: ComponentWithProviders,
* selectors: [['component-with-providers']],
* factory: () => new ComponentWithProviders(directiveInject(GreeterDE as any)),
* consts: 1,
* vars: 1,
* template: function(fs: RenderFlags, ctx: ComponentWithProviders) {
* if (fs & RenderFlags.Create) {
* text(0);
* }
* if (fs & RenderFlags.Update) {
* textBinding(0, bind(ctx.greeter.greet()));
* }
* },
* features: [ProvidersFeature([GreeterDE])]
* });
* }
*
* @param definition
*/
export function ProvidersFeature<T>(providers: Provider[], viewProviders: Provider[] = []) {
return (definition: DirectiveDef<T>) => {
definition.providersResolver = (def: DirectiveDef<T>) =>
providersResolver(def, providers, viewProviders);
};
}<|fim▁end|> | * Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be |
<|file_name|>box.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% from data import Keyword, Method, to_rust_ident, to_camel_case%>
<% data.new_style_struct("Box",
inherited=False,
gecko_name="Display") %>
// We allow "display" to apply to placeholders because we need to make the
// placeholder pseudo-element an inline-block in the UA stylesheet in Gecko.
${helpers.predefined_type(
"display",
"Display",
"computed::Display::inline()",
initial_specified_value="specified::Display::inline()",
animation_value_type="discrete",
flags="APPLIES_TO_PLACEHOLDER",
spec="https://drafts.csswg.org/css-display/#propdef-display",
servo_restyle_damage="rebuild_and_reflow",
needs_context=product == "gecko"
)}
${helpers.single_keyword(
"-moz-top-layer",
"none top",
gecko_constant_prefix="NS_STYLE_TOP_LAYER",
gecko_ffi_name="mTopLayer",
products="gecko",
animation_value_type="none",
enabled_in="ua",
spec="Internal (not web-exposed)",
)}
${helpers.single_keyword(
"position",
"static absolute relative fixed sticky",
animation_value_type="discrete",
flags="CREATES_STACKING_CONTEXT ABSPOS_CB",
spec="https://drafts.csswg.org/css-position/#position-property",
servo_restyle_damage="rebuild_and_reflow",
)}
${helpers.predefined_type(
"float",
"Float",
"computed::Float::None",
initial_specified_value="specified::Float::None",
spec="https://drafts.csswg.org/css-box/#propdef-float",
animation_value_type="discrete",
needs_context=False,
flags="APPLIES_TO_FIRST_LETTER",
servo_restyle_damage="rebuild_and_reflow",
gecko_ffi_name="mFloat",
)}
${helpers.predefined_type(
"clear",
"Clear",
"computed::Clear::None",
animation_value_type="discrete",
needs_context=False,
gecko_ffi_name="mBreakType",
spec="https://drafts.csswg.org/css-box/#propdef-clear",
servo_restyle_damage="rebuild_and_reflow",
)}
${helpers.predefined_type(
"vertical-align",
"VerticalAlign",
"computed::VerticalAlign::baseline()",
animation_value_type="ComputedValue",
flags="APPLIES_TO_FIRST_LETTER APPLIES_TO_FIRST_LINE APPLIES_TO_PLACEHOLDER",
spec="https://www.w3.org/TR/CSS2/visudet.html#propdef-vertical-align",
servo_restyle_damage = "reflow",
)}
// CSS 2.1, Section 11 - Visual effects
${helpers.single_keyword("-servo-overflow-clip-box", "padding-box content-box",
products="servo", animation_value_type="none", enabled_in="ua",
spec="Internal, not web-exposed, \
may be standardized in the future (https://developer.mozilla.org/en-US/docs/Web/CSS/overflow-clip-box)")}
% for direction in ["inline", "block"]:
${helpers.predefined_type(
"overflow-clip-box-" + direction,
"OverflowClipBox",
"computed::OverflowClipBox::PaddingBox",
products="gecko",
enabled_in="ua",
needs_context=False,
flags="APPLIES_TO_PLACEHOLDER",
gecko_pref="layout.css.overflow-clip-box.enabled",
animation_value_type="discrete",
spec="Internal, may be standardized in the future: \
https://developer.mozilla.org/en-US/docs/Web/CSS/overflow-clip-box",
)}
% endfor
<%
overflow_custom_consts = { "-moz-hidden-unscrollable": "CLIP" }
%>
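// `custom_consts` maps the CSS keyword `-moz-hidden-unscrollable` to the
// constant suffix `CLIP`, so codegen emits NS_STYLE_OVERFLOW_CLIP rather
// than a name derived mechanically from the keyword.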
// FIXME(pcwalton, #2742): Implement scrolling for `scroll` and `auto`.
//
// We allow it to apply to placeholders for UA sheets, which set it !important.
${helpers.single_keyword(
"overflow-x",
"visible hidden scroll auto",
animation_value_type="discrete",
extra_gecko_values="-moz-hidden-unscrollable",
custom_consts=overflow_custom_consts,
gecko_constant_prefix="NS_STYLE_OVERFLOW",
flags="APPLIES_TO_PLACEHOLDER",
spec="https://drafts.csswg.org/css-overflow/#propdef-overflow-x",
servo_restyle_damage = "reflow",
)}
// FIXME(pcwalton, #2742): Implement scrolling for `scroll` and `auto`.
//
// We allow it to apply to placeholders for UA sheets, which set it !important.
<%helpers:longhand name="overflow-y" animation_value_type="discrete"
flags="APPLIES_TO_PLACEHOLDER",
spec="https://drafts.csswg.org/css-overflow/#propdef-overflow-y"
servo_restyle_damage = "reflow">
pub use super::overflow_x::{SpecifiedValue, parse, get_initial_value, computed_value};
</%helpers:longhand>
<% transition_extra_prefixes = "moz:layout.css.prefixes.transitions webkit" %>
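// The `moz:<pref>` form ties the -moz- prefixed alias to that Gecko pref,
// while a bare prefix like `webkit` adds its alias unconditionally.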
${helpers.predefined_type(
"transition-duration",
"Time",
"computed::Time::zero()",
initial_specified_value="specified::Time::zero()",
parse_method="parse_non_negative",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=transition_extra_prefixes,
spec="https://drafts.csswg.org/css-transitions/#propdef-transition-duration",
)}
${helpers.predefined_type(
"transition-timing-function",
"TimingFunction",
"computed::TimingFunction::ease()",
initial_specified_value="specified::TimingFunction::ease()",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=transition_extra_prefixes,
spec="https://drafts.csswg.org/css-transitions/#propdef-transition-timing-function",
)}
${helpers.predefined_type(
"transition-property",
"TransitionProperty",
"computed::TransitionProperty::all()",
initial_specified_value="specified::TransitionProperty::all()",
vector=True,
allow_empty="NotInitial",
need_index=True,
animation_value_type="none",
extra_prefixes=transition_extra_prefixes,
spec="https://drafts.csswg.org/css-transitions/#propdef-transition-property",
)}
${helpers.predefined_type(
"transition-delay",
"Time",
"computed::Time::zero()",
initial_specified_value="specified::Time::zero()",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=transition_extra_prefixes,
spec="https://drafts.csswg.org/css-transitions/#propdef-transition-delay",
)}
<% animation_extra_prefixes = "moz:layout.css.prefixes.animations webkit" %>
${helpers.predefined_type(
"animation-name",
"AnimationName",
"computed::AnimationName::none()",
initial_specified_value="specified::AnimationName::none()",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=animation_extra_prefixes,
allowed_in_keyframe_block=False,
spec="https://drafts.csswg.org/css-animations/#propdef-animation-name",
)}
${helpers.predefined_type(
"animation-duration",
"Time",
"computed::Time::zero()",
initial_specified_value="specified::Time::zero()",
parse_method="parse_non_negative",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=animation_extra_prefixes,
spec="https://drafts.csswg.org/css-transitions/#propdef-transition-duration",
)}
// animation-timing-function is the exception to the rule for allowed_in_keyframe_block:
// https://drafts.csswg.org/css-animations/#keyframes
${helpers.predefined_type(
"animation-timing-function",
"TimingFunction",
"computed::TimingFunction::ease()",
initial_specified_value="specified::TimingFunction::ease()",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=animation_extra_prefixes,
allowed_in_keyframe_block=True,
spec="https://drafts.csswg.org/css-transitions/#propdef-animation-timing-function",
)}
${helpers.predefined_type(
"animation-iteration-count",
"AnimationIterationCount",
"computed::AnimationIterationCount::one()",
initial_specified_value="specified::AnimationIterationCount::one()",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=animation_extra_prefixes,
allowed_in_keyframe_block=False,
spec="https://drafts.csswg.org/css-animations/#propdef-animation-iteration-count",
)}
<% animation_direction_custom_consts = { "alternate-reverse": "Alternate_reverse" } %>
${helpers.single_keyword(
"animation-direction",
"normal reverse alternate alternate-reverse",
need_index=True,
animation_value_type="none",
vector=True,
gecko_enum_prefix="PlaybackDirection",
custom_consts=animation_direction_custom_consts,
extra_prefixes=animation_extra_prefixes,
gecko_inexhaustive=True,
spec="https://drafts.csswg.org/css-animations/#propdef-animation-direction",
allowed_in_keyframe_block=False,
)}
${helpers.single_keyword(
"animation-play-state",
"running paused",
need_index=True,
animation_value_type="none",
vector=True,
extra_prefixes=animation_extra_prefixes,
gecko_enum_prefix="StyleAnimationPlayState",
spec="https://drafts.csswg.org/css-animations/#propdef-animation-play-state",
allowed_in_keyframe_block=False,
)}
${helpers.single_keyword(
"animation-fill-mode",
"none forwards backwards both",
need_index=True,
animation_value_type="none",
vector=True,
gecko_enum_prefix="FillMode",
extra_prefixes=animation_extra_prefixes,
gecko_inexhaustive=True,
spec="https://drafts.csswg.org/css-animations/#propdef-animation-fill-mode",
allowed_in_keyframe_block=False,
)}
${helpers.predefined_type(
"animation-delay",
"Time",
"computed::Time::zero()",
initial_specified_value="specified::Time::zero()",
vector=True,
need_index=True,
animation_value_type="none",
extra_prefixes=animation_extra_prefixes,
spec="https://drafts.csswg.org/css-animations/#propdef-animation-delay",
allowed_in_keyframe_block=False,
)}
% for axis in ["x", "y"]:
${helpers.predefined_type(
"scroll-snap-points-" + axis,
"ScrollSnapPoint",
"computed::ScrollSnapPoint::none()",
animation_value_type="discrete",
gecko_pref="layout.css.scroll-snap.enabled",
products="gecko",
spec="Nonstandard (https://www.w3.org/TR/2015/WD-css-snappoints-1-20150326/#scroll-snap-points)",
)}
% endfor
${helpers.predefined_type(
"scroll-snap-destination",
"Position",
"computed::Position::zero()",
products="gecko",
gecko_pref="layout.css.scroll-snap.enabled",
boxed=True,
spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/scroll-snap-destination)",
animation_value_type="discrete",
)}
${helpers.predefined_type(
"scroll-snap-coordinate",
"Position",
"computed::Position::zero()",
vector=True,
products="gecko",
gecko_pref="layout.css.scroll-snap.enabled",
spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/scroll-snap-destination)",
animation_value_type="discrete",
allow_empty="NotInitial",
)}
<% transform_extra_prefixes = "moz:layout.css.prefixes.transforms webkit" %>
${helpers.predefined_type(
"transform",
"Transform",
"generics::transform::Transform::none()",
extra_prefixes=transform_extra_prefixes,
animation_value_type="ComputedValue",
gecko_ffi_name="mSpecifiedTransform",
flags="CREATES_STACKING_CONTEXT FIXPOS_CB \
GETCS_NEEDS_LAYOUT_FLUSH CAN_ANIMATE_ON_COMPOSITOR",
spec="https://drafts.csswg.org/css-transforms/#propdef-transform",
servo_restyle_damage="reflow_out_of_flow",
)}
${helpers.predefined_type(
"rotate",
"Rotate",
"generics::transform::Rotate::None",
animation_value_type="ComputedValue",
boxed=True,
flags="CREATES_STACKING_CONTEXT FIXPOS_CB",
gecko_pref="layout.css.individual-transform.enabled",
spec="https://drafts.csswg.org/css-transforms-2/#individual-transforms",
servo_restyle_damage = "reflow_out_of_flow",
)}
${helpers.predefined_type(
"scale",
"Scale",
"generics::transform::Scale::None",
animation_value_type="ComputedValue",
boxed=True,
flags="CREATES_STACKING_CONTEXT FIXPOS_CB",
gecko_pref="layout.css.individual-transform.enabled",
spec="https://drafts.csswg.org/css-transforms-2/#individual-transforms",
servo_restyle_damage = "reflow_out_of_flow",
)}
${helpers.predefined_type(
"translate",
"Translate",
"generics::transform::Translate::None",
animation_value_type="ComputedValue",
boxed=True,
flags="CREATES_STACKING_CONTEXT FIXPOS_CB GETCS_NEEDS_LAYOUT_FLUSH",
gecko_pref="layout.css.individual-transform.enabled",
spec="https://drafts.csswg.org/css-transforms-2/#individual-transforms",
servo_restyle_damage="reflow_out_of_flow",
)}
// Motion Path Module Level 1
${helpers.predefined_type(
"offset-path",
"OffsetPath",
"computed::OffsetPath::none()",
products="gecko",
animation_value_type="ComputedValue",
gecko_pref="layout.css.motion-path.enabled",
flags="CREATES_STACKING_CONTEXT FIXPOS_CB",
spec="https://drafts.fxtf.org/motion-1/#offset-path-property",
)}
// CSSOM View Module
// https://www.w3.org/TR/cssom-view-1/
${helpers.single_keyword(
"scroll-behavior",
"auto smooth",
gecko_pref="layout.css.scroll-behavior.property-enabled",
products="gecko",
spec="https://drafts.csswg.org/cssom-view/#propdef-scroll-behavior",
animation_value_type="discrete",
)}
% for axis in ["x", "y"]:
${helpers.predefined_type(
"scroll-snap-type-" + axis,
"ScrollSnapType",
"computed::ScrollSnapType::None",
products="gecko",
needs_context=False,
gecko_pref="layout.css.scroll-snap.enabled",
spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/scroll-snap-type-x)",
animation_value_type="discrete",
)}
% endfor
% for axis in ["x", "y"]:
${helpers.predefined_type(
"overscroll-behavior-" + axis,
"OverscrollBehavior",
"computed::OverscrollBehavior::Auto",
products="gecko",
needs_context=False,
gecko_pref="layout.css.overscroll-behavior.enabled",
spec="https://wicg.github.io/overscroll-behavior/#overscroll-behavior-properties",
animation_value_type="discrete",
)}
% endfor
// Compositing and Blending Level 1
// http://www.w3.org/TR/compositing-1/
${helpers.single_keyword(
"isolation",
"auto isolate",
products="gecko",
gecko_pref="layout.css.isolation.enabled",
spec="https://drafts.fxtf.org/compositing/#isolation",
flags="CREATES_STACKING_CONTEXT",
animation_value_type="discrete",
)}
// TODO add support for logical values recto and verso
${helpers.single_keyword(
"page-break-after",
"auto always avoid left right",
products="gecko",
spec="https://drafts.csswg.org/css2/page.html#propdef-page-break-after",
animation_value_type="discrete",
)}
${helpers.single_keyword(
"page-break-before",
"auto always avoid left right",
products="gecko",
spec="https://drafts.csswg.org/css2/page.html#propdef-page-break-before",
animation_value_type="discrete",
)}
${helpers.single_keyword(
"page-break-inside",
"auto avoid",
products="gecko",
gecko_ffi_name="mBreakInside",
gecko_constant_prefix="NS_STYLE_PAGE_BREAK",
spec="https://drafts.csswg.org/css2/page.html#propdef-page-break-inside",
animation_value_type="discrete",
)}
// CSS Basic User Interface Module Level 3
// http://dev.w3.org/csswg/css-ui
//
// This is APPLIES_TO_PLACEHOLDER so we can override, in the UA sheet, the
// 'resize' property we'd inherit from textarea otherwise. Basically, just
// makes the UA rules easier to write.
${helpers.predefined_type(
"resize",
"Resize",
"computed::Resize::None",
products="gecko",
animation_value_type="discrete",
needs_context=False,
gecko_ffi_name="mResize",
flags="APPLIES_TO_PLACEHOLDER",
spec="https://drafts.csswg.org/css-ui/#propdef-resize",
)}
${helpers.predefined_type(
"perspective",
"Perspective",
"computed::Perspective::none()",
gecko_ffi_name="mChildPerspective",
spec="https://drafts.csswg.org/css-transforms/#perspective",
extra_prefixes=transform_extra_prefixes,
flags="CREATES_STACKING_CONTEXT FIXPOS_CB",
animation_value_type="AnimatedPerspective",
servo_restyle_damage = "reflow_out_of_flow",
)}
${helpers.predefined_type(
"perspective-origin",
"Position",
"computed::position::Position::center()",
boxed=True,
extra_prefixes=transform_extra_prefixes,
spec="https://drafts.csswg.org/css-transforms-2/#perspective-origin-property",
flags="GETCS_NEEDS_LAYOUT_FLUSH",
animation_value_type="ComputedValue",
servo_restyle_damage="reflow_out_of_flow"
)}
${helpers.single_keyword(
"backface-visibility",
"visible hidden",
spec="https://drafts.csswg.org/css-transforms/#backface-visibility-property",
extra_prefixes=transform_extra_prefixes,
animation_value_type="discrete",
)}
${helpers.single_keyword(
"transform-box",
"border-box fill-box view-box",
gecko_enum_prefix="StyleGeometryBox",
products="gecko",
gecko_pref="svg.transform-box.enabled",
spec="https://drafts.csswg.org/css-transforms/#transform-box",
gecko_inexhaustive="True",
animation_value_type="discrete",
)}
${helpers.predefined_type(
"transform-style",
"TransformStyle",
"computed::TransformStyle::" + ("Auto" if product == "servo" else "Flat"),
spec="https://drafts.csswg.org/css-transforms-2/#transform-style-property",
needs_context=False,
extra_prefixes=transform_extra_prefixes,
flags="CREATES_STACKING_CONTEXT FIXPOS_CB",
animation_value_type="discrete",
servo_restyle_damage = "reflow_out_of_flow",
)}
${helpers.predefined_type(
"transform-origin",
"TransformOrigin",
"computed::TransformOrigin::initial_value()",
animation_value_type="ComputedValue",
extra_prefixes=transform_extra_prefixes,
gecko_ffi_name="mTransformOrigin",
boxed=True,
flags="GETCS_NEEDS_LAYOUT_FLUSH",
spec="https://drafts.csswg.org/css-transforms/#transform-origin-property",
servo_restyle_damage="reflow_out_of_flow",
)}
${helpers.predefined_type(
"contain",
"Contain",
"specified::Contain::empty()",
animation_value_type="none",
products="gecko",
flags="CREATES_STACKING_CONTEXT FIXPOS_CB",
gecko_pref="layout.css.contain.enabled",
spec="https://drafts.csswg.org/css-contain/#contain-property",
)}
// Non-standard
${helpers.predefined_type(
"-moz-appearance",
"Appearance",
"computed::Appearance::None",
products="gecko",
alias="-webkit-appearance:layout.css.webkit-appearance.enabled",
spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/-moz-appearance)",
animation_value_type="discrete",
)}
${helpers.predefined_type(
"-moz-binding",
"url::UrlOrNone",
"computed::url::UrlOrNone::none()",
products="gecko",
animation_value_type="none",
gecko_ffi_name="mBinding",
spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/-moz-binding)",
)}
${helpers.single_keyword(
"-moz-orient",
"inline block horizontal vertical",
products="gecko",
gecko_ffi_name="mOrient",
gecko_enum_prefix="StyleOrient",
spec="Nonstandard (https://developer.mozilla.org/en-US/docs/Web/CSS/-moz-orient)",
animation_value_type="discrete",
)}<|fim▁hole|> "will-change",
"WillChange",
"computed::WillChange::auto()",
products="gecko",
animation_value_type="none",
spec="https://drafts.csswg.org/css-will-change/#will-change",
)}
${helpers.predefined_type(
"shape-image-threshold", "Opacity", "0.0",
products="gecko",
gecko_pref="layout.css.shape-outside.enabled",
animation_value_type="ComputedValue",
flags="APPLIES_TO_FIRST_LETTER",
spec="https://drafts.csswg.org/css-shapes/#shape-image-threshold-property",
)}
${helpers.predefined_type(
"shape-margin",
"NonNegativeLengthOrPercentage",
"computed::NonNegativeLengthOrPercentage::zero()",
products="gecko",
gecko_pref="layout.css.shape-outside.enabled",
animation_value_type="NonNegativeLengthOrPercentage",
flags="APPLIES_TO_FIRST_LETTER",
spec="https://drafts.csswg.org/css-shapes/#shape-margin-property",
)}
${helpers.predefined_type(
"shape-outside",
"basic_shape::FloatAreaShape",
"generics::basic_shape::ShapeSource::None",
products="gecko",
boxed=True,
gecko_pref="layout.css.shape-outside.enabled",
animation_value_type="ComputedValue",
flags="APPLIES_TO_FIRST_LETTER",
spec="https://drafts.csswg.org/css-shapes/#shape-outside-property",
)}
${helpers.predefined_type(
"touch-action",
"TouchAction",
"computed::TouchAction::auto()",
products="gecko",
gecko_pref="layout.css.touch_action.enabled",
animation_value_type="discrete",
spec="https://compat.spec.whatwg.org/#touch-action",
)}<|fim▁end|> |
${helpers.predefined_type( |
<|file_name|>mdquery.py<|end_file_name|><|fim▁begin|>"""
pybufrkit.mdquery
~~~~~~~~~~~~~~~~~
"""
from __future__ import absolute_import
from __future__ import print_function
import logging
from pybufrkit.errors import MetadataExprParsingError
__all__ = ['MetadataExprParser', 'MetadataQuerent', 'METADATA_QUERY_INDICATOR_CHAR']
log = logging.getLogger(__file__)
METADATA_QUERY_INDICATOR_CHAR = '%'
class MetadataExprParser(object):
def parse(self, metadata_expr):
"""
:param str metadata_expr: The metadata expression string to parse
:return: A 2-element tuple of section index and metadata name
:rtype: (int, str)
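        Illustrative examples (the metadata names are hypothetical):
        parse('%1.edition') returns (1, 'edition'), while
        parse('%master_table_number') returns (None, 'master_table_number').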
"""
metadata_expr = metadata_expr.strip()
if metadata_expr[0] != METADATA_QUERY_INDICATOR_CHAR:
raise MetadataExprParsingError('Metadata expression must start with "%"')
if '.' in metadata_expr:
section_index, metadata_name = metadata_expr[1:].split('.')
try:<|fim▁hole|>
else:
section_index = None
metadata_name = metadata_expr[1:]
return section_index, metadata_name
class MetadataQuerent(object):
"""
:param MetadataExprParser metadata_expr_parser: Parser for metadata expression
"""
def __init__(self, metadata_expr_parser):
self.metadata_expr_parser = metadata_expr_parser
def query(self, bufr_message, metadata_expr):
section_index, metadata_name = self.metadata_expr_parser.parse(metadata_expr)
sections = [s for s in bufr_message.sections
if s.get_metadata('index') == section_index or section_index is None]
for section in sections:
for parameter in section:
if parameter.name == metadata_name:
return parameter.value
return None<|fim▁end|> | section_index = int(section_index)
except ValueError:
raise MetadataExprParsingError('Invalid section index: {}'.format(section_index)) |
<|file_name|>source_util.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use codemap::{Pos, Span};
use codemap;
use ext::base::*;
use ext::base;
use ext::build::AstBuilder;
use parse::token;
use parse;
use print::pprust;
use ptr::P;
use util::small_vector::SmallVector;
use std::fs::File;
use std::io::prelude::*;
use std::path::{Path, PathBuf};
use std::rc::Rc;
// These macros all relate to the file system; they either return
// the column/row/filename of the expression, or they include
// a given file into the current one.
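//
// A sketch of typical call sites (illustrative only; the file names are
// hypothetical):
//
//     println!("expanded at {}:{}:{}", file!(), line!(), column!());
//     let readme = include_str!("README.md");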
/// line!(): expands to the current line number
pub fn expand_line(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "line!");
let topmost = cx.expansion_cause();
let loc = cx.codemap().lookup_char_pos(topmost.lo);
base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
}
<|fim▁hole|>
let topmost = cx.expansion_cause();
let loc = cx.codemap().lookup_char_pos(topmost.lo);
base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32))
}
/// file!(): expands to the current filename
/// The filemap (`loc.file`) contains a bunch more information we could spit
/// out if we wanted.
pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "file!");
let topmost = cx.expansion_cause();
let loc = cx.codemap().lookup_char_pos(topmost.lo);
let filename = token::intern_and_get_ident(&loc.file.name);
base::MacEager::expr(cx.expr_str(topmost, filename))
}
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let s = pprust::tts_to_string(tts);
base::MacEager::expr(cx.expr_str(sp,
token::intern_and_get_ident(&s[..])))
}
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "module_path!");
let string = cx.mod_path()
.iter()
.map(|x| token::get_ident(*x).to_string())
.collect::<Vec<String>>()
.join("::");
base::MacEager::expr(cx.expr_str(
sp,
token::intern_and_get_ident(&string[..])))
}
/// include! : parse the given file as an expr
/// This is generally a bad idea because it's going to behave
/// unhygienically.
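/// For example (illustrative; `four.rs` is a hypothetical file):
/// `let x: u32 = include!("four.rs");` splices the parsed contents of
/// `four.rs` in as an expression here.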
pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'cx> {
let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
Some(f) => f,
None => return DummyResult::expr(sp),
};
// The file will be added to the code map by the parser
let p =
parse::new_sub_parser_from_file(cx.parse_sess(),
cx.cfg(),
&res_rel_file(cx,
sp,
Path::new(&file)),
true,
None,
sp);
struct ExpandResult<'a> {
p: parse::parser::Parser<'a>,
}
impl<'a> base::MacResult for ExpandResult<'a> {
fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
Some(self.p.parse_expr())
}
fn make_items(mut self: Box<ExpandResult<'a>>)
-> Option<SmallVector<P<ast::Item>>> {
let mut ret = SmallVector::zero();
while self.p.token != token::Eof {
match self.p.parse_item() {
Some(item) => ret.push(item),
None => panic!(self.p.span_fatal(
self.p.span,
&format!("expected item, found `{}`",
self.p.this_token_to_string())
))
}
}
Some(ret)
}
}
Box::new(ExpandResult { p: p })
}
// include_str! : read the given file, insert it as a literal string expr
pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
Some(f) => f,
None => return DummyResult::expr(sp)
};
let file = res_rel_file(cx, sp, Path::new(&file));
let mut bytes = Vec::new();
match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
Ok(..) => {}
Err(e) => {
cx.span_err(sp,
&format!("couldn't read {}: {}",
file.display(),
e));
return DummyResult::expr(sp);
}
};
match String::from_utf8(bytes) {
Ok(src) => {
// Add this input file to the code map to make it available as
// dependency information
let filename = format!("{}", file.display());
let interned = token::intern_and_get_ident(&src[..]);
cx.codemap().new_filemap(filename, src);
base::MacEager::expr(cx.expr_str(sp, interned))
}
Err(_) => {
cx.span_err(sp,
&format!("{} wasn't a utf-8 file",
file.display()));
return DummyResult::expr(sp);
}
}
}
pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
Some(f) => f,
None => return DummyResult::expr(sp)
};
let file = res_rel_file(cx, sp, Path::new(&file));
let mut bytes = Vec::new();
match File::open(&file).and_then(|mut f| f.read_to_end(&mut bytes)) {
Err(e) => {
cx.span_err(sp,
&format!("couldn't read {}: {}", file.display(), e));
return DummyResult::expr(sp);
}
Ok(..) => {
// Add this input file to the code map to make it available as
            // dependency information, but don't enter its contents
let filename = format!("{}", file.display());
cx.codemap().new_filemap(filename, "".to_string());
base::MacEager::expr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes))))
}
}
}
// resolve a file-system path to an absolute file-system path (if it
// isn't already)
fn res_rel_file(cx: &mut ExtCtxt, sp: codemap::Span, arg: &Path) -> PathBuf {
// NB: relative paths are resolved relative to the compilation unit
if !arg.is_absolute() {
let mut cu = PathBuf::from(&cx.codemap().span_to_filename(sp));
cu.pop();
cu.push(arg);
cu
} else {
arg.to_path_buf()
}
}<|fim▁end|> | /* column!(): expands to the current column number */
pub fn expand_column(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> Box<base::MacResult+'static> {
base::check_zero_tts(cx, sp, tts, "column!"); |
<|file_name|>create-test-list.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Rules
for *.py files
* if the changed file is __init__.py, and there is a side-band test/ dir, then test the entire test/functional directory
the reason for this is that the init files are usually organizing collections
and those can affect many different apis if they break
* if the filename is test_*.py then include it
* if the filename is *.py, then check to see if it has an associated test_FILENAME file
and if so, include it in the test
* summarize all of the above so that a test_FILENAME that is a subpath of the first bullet
is not tested twice
for non-*.py files
* if the file is in a test/functional directory, test the whole directory
"""
import subprocess
import os
import shutil
import argparse
def cleanup_tox_directory():
if os.path.exists('.tox'):
shutil.rmtree('.tox')
def examine_python_rules(line):
fname, fext = os.path.splitext(line)
filename = os.path.basename(line)
dirname = os.path.dirname(line)
test_filename = 'test_' + filename
functional_test_file = '{0}/test/functional/{1}'.format(dirname, test_filename)
functional_test_dir = '{0}/test/functional/'.format(dirname)
if filename == '__init__.py' and os.path.exists(functional_test_dir):
return functional_test_dir
elif filename.startswith('test_') and filename.endswith('.py'):
return line
elif fext == '.py' and os.path.exists(functional_test_file):
return functional_test_file
elif 'test/functional' in line and filename == '__init__.py':
print(" * Skipping {0} because it is not a test file".format(line))
elif filename == '__init__.py' and not os.path.exists(functional_test_dir):<|fim▁hole|> print(" * {0} did not match any rules!".format(line))
def examine_non_python_rules(line):
if 'test/functional' in line:
return os.path.dirname(line)
def determine_files_to_test(product, commit):
results = []
build_all = [
'setup.py', 'f5/bigip/contexts.py', 'f5/bigip/mixins.py',
'f5/bigip/resource.py', 'f5sdk_plugins/fixtures.py',
'f5/bigip/__init__.py'
]
output_file = "pytest.{0}.jenkins.txt".format(product)
p1 = subprocess.Popen(
['git', '--no-pager', 'diff', '--name-only', 'origin/development', commit],
stdout=subprocess.PIPE,
)
p2 = subprocess.Popen(
        ['egrep', '-v', r'(^requirements\.|^setup.py)'],
stdin=p1.stdout,
stdout=subprocess.PIPE,
)
p3 = subprocess.Popen(
        ['egrep', r'(^f5\/{0}\/)'.format(product)],
stdin=p2.stdout,
stdout=subprocess.PIPE,
)
out, err = p3.communicate()
out = out.splitlines()
out = filter(None, out)
if not out:
return
for line in out:
fname, fext = os.path.splitext(line)
if not os.path.exists(line):
print "{0} was not found. Maybe this is a rename?".format(line)
continue
if line in build_all:
cleanup_tox_directory()
results.append('f5/{0}'.format(product))
elif fext == '.py':
result = examine_python_rules(line)
if result:
results.append(result)
else:
result = examine_non_python_rules(line)
if result:
results.append(result)
if results:
results = set(results)
results = compress_testable_files(results)
fh = open(output_file, 'w')
fh.writelines("%s\n" % l for l in results)
fh.close()
def compress_testable_files(files):
lines = sorted(files)
for idx, item in enumerate(lines):
file, ext = os.path.splitext(item)
if not ext and not file.endswith('/'):
item += '/'
tmp = [x for x in lines if item in x and item != x]
for _ in tmp:
lines.remove(_)
return lines
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-c','--commit', help='Git commit to check', required=True)
args = parser.parse_args()
for product in ['iworkflow', 'bigip', 'bigiq']:
determine_files_to_test(product, args.commit)<|fim▁end|> | print(" * {0} does not have a side-band test directory!".format(line))
else: |
<|file_name|>reducer.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> SWITCH,
} from "./actions";
import { IContainerModule } from "../types";
import { ITabState } from "./";
const initialState = {};
export default createReducer(initialState, {
[INITIALIZE](state: IContainerModule<ITabState>, action) {
const { id, currentTab } = action.payload;
if (!id || state[id]) {
return state;
}
const clonedState = clone(state);
clonedState[id] = {
currentTab,
};
return clonedState;
},
[SWITCH](state: IContainerModule<ITabState>, action) {
const { id, tabId } = action.payload;
if (!id || !state[id]) {
return state;
}
const clonedState = clone(state);
clonedState[id] = {
currentTab: tabId,
};
return clonedState;
},
});<|fim▁end|> | import { createReducer } from "redux-create-reducer";
import * as clone from "lodash/cloneDeep";
import {
INITIALIZE, |
<|file_name|>trace.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::collections::BTreeMap;
use serde::{Serialize, Serializer};
use serde::ser::SerializeStruct;
use ethcore::trace::{FlatTrace, LocalizedTrace as EthLocalizedTrace, trace, TraceError};
use ethcore::trace as et;
use ethcore::state_diff;
use ethcore::account_diff;
use ethcore::executed;
use ethcore::client::Executed;
use v1::types::{Bytes, H160, H256, U256};
#[derive(Debug, Serialize)]
/// A diff of some chunk of memory.
pub struct MemoryDiff {
/// Offset into memory the change begins.
pub off: usize,
/// The changed data.
pub data: Bytes,
}
impl From<et::MemoryDiff> for MemoryDiff {
fn from(c: et::MemoryDiff) -> Self {
MemoryDiff {
off: c.offset,
data: c.data.into(),
}
}
}
#[derive(Debug, Serialize)]
/// A diff of some storage value.
pub struct StorageDiff {
/// Which key in storage is changed.
pub key: U256,
/// What the value has been changed to.
pub val: U256,
}
impl From<et::StorageDiff> for StorageDiff {
fn from(c: et::StorageDiff) -> Self {
StorageDiff {
key: c.location.into(),
val: c.value.into(),
}
}
}
#[derive(Debug, Serialize)]
/// A record of an executed VM operation.
pub struct VMExecutedOperation {
/// The total gas used.
#[serde(rename="used")]
pub used: u64,
/// The stack item placed, if any.
pub push: Vec<U256>,
/// If altered, the memory delta.
#[serde(rename="mem")]
pub mem: Option<MemoryDiff>,
/// The altered storage value, if any.
#[serde(rename="store")]
pub store: Option<StorageDiff>,
}
impl From<et::VMExecutedOperation> for VMExecutedOperation {
fn from(c: et::VMExecutedOperation) -> Self {
VMExecutedOperation {
used: c.gas_used.low_u64(),
push: c.stack_push.into_iter().map(Into::into).collect(),
mem: c.mem_diff.map(Into::into),
store: c.store_diff.map(Into::into),
}
}
}
#[derive(Debug, Serialize)]
/// A record of the execution of a single VM operation.
pub struct VMOperation {
/// The program counter.
pub pc: usize,
/// The gas cost for this instruction.
pub cost: u64,
/// Information concerning the execution of the operation.
pub ex: Option<VMExecutedOperation>,
/// Subordinate trace of the CALL/CREATE if applicable.
#[serde(bound="VMTrace: Serialize")]
pub sub: Option<VMTrace>,
}
impl From<(et::VMOperation, Option<et::VMTrace>)> for VMOperation {
fn from(c: (et::VMOperation, Option<et::VMTrace>)) -> Self {
VMOperation {
pc: c.0.pc,
cost: c.0.gas_cost.low_u64(),
ex: c.0.executed.map(Into::into),
sub: c.1.map(Into::into),
}
}
}
#[derive(Debug, Serialize)]
/// A record of a full VM trace for a CALL/CREATE.
pub struct VMTrace {
/// The code to be executed.
pub code: Bytes,
/// The operations executed.
pub ops: Vec<VMOperation>,
}
impl From<et::VMTrace> for VMTrace {
fn from(c: et::VMTrace) -> Self {
let mut subs = c.subs.into_iter();
let mut next_sub = subs.next();
VMTrace {
code: c.code.into(),
ops: c.operations
.into_iter()
.enumerate()
.map(|(i, op)| (op, {
let have_sub = next_sub.is_some() && next_sub.as_ref().unwrap().parent_step == i;
if have_sub {
let r = next_sub.clone();
next_sub = subs.next();
r
} else { None }
}).into())
.collect(),
}
}
}
#[derive(Debug, Serialize)]
/// Aux type for Diff::Changed.
pub struct ChangedType<T> where T: Serialize {
from: T,
to: T,
}
#[derive(Debug, Serialize)]
/// Serde-friendly `Diff` shadow.
pub enum Diff<T> where T: Serialize {
#[serde(rename="=")]
Same,
#[serde(rename="+")]
Born(T),
#[serde(rename="-")]
Died(T),
#[serde(rename="*")]
Changed(ChangedType<T>),
}
impl<T, U> From<account_diff::Diff<T>> for Diff<U> where T: Eq + ::ethcore_ipc::BinaryConvertable, U: Serialize + From<T> {
fn from(c: account_diff::Diff<T>) -> Self {
match c {
account_diff::Diff::Same => Diff::Same,
account_diff::Diff::Born(t) => Diff::Born(t.into()),
account_diff::Diff::Died(t) => Diff::Died(t.into()),
account_diff::Diff::Changed(t, u) => Diff::Changed(ChangedType{from: t.into(), to: u.into()}),
}
}
}
#[derive(Debug, Serialize)]
/// Serde-friendly `AccountDiff` shadow.
pub struct AccountDiff {
pub balance: Diff<U256>,
pub nonce: Diff<U256>,
pub code: Diff<Bytes>,
pub storage: BTreeMap<H256, Diff<H256>>,
}
impl From<account_diff::AccountDiff> for AccountDiff {
fn from(c: account_diff::AccountDiff) -> Self {
AccountDiff {
balance: c.balance.into(),
nonce: c.nonce.into(),
code: c.code.into(),
storage: c.storage.into_iter().map(|(k, v)| (k.into(), v.into())).collect(),
}
}
}
#[derive(Debug)]
/// Serde-friendly `StateDiff` shadow.
pub struct StateDiff(BTreeMap<H160, AccountDiff>);
impl Serialize for StateDiff {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer {
Serialize::serialize(&self.0, serializer)
}
}
impl From<state_diff::StateDiff> for StateDiff {
fn from(c: state_diff::StateDiff) -> Self {
StateDiff(c.raw.into_iter().map(|(k, v)| (k.into(), v.into())).collect())
}
}
/// Create response
#[derive(Debug, Serialize)]
pub struct Create {
/// Sender
from: H160,
/// Value
value: U256,
/// Gas
gas: U256,
/// Initialization code
init: Bytes,
}
impl From<trace::Create> for Create {
fn from(c: trace::Create) -> Self {
Create {
from: c.from.into(),
value: c.value.into(),
gas: c.gas.into(),
init: Bytes::new(c.init),
}
}
}
/// Call type.
#[derive(Debug, Serialize)]
pub enum CallType {
/// None
#[serde(rename="none")]
None,
/// Call<|fim▁hole|> #[serde(rename="callcode")]
CallCode,
/// Delegate call
#[serde(rename="delegatecall")]
DelegateCall,
/// Static call
#[serde(rename="staticcall")]
StaticCall,
}
impl From<executed::CallType> for CallType {
fn from(c: executed::CallType) -> Self {
match c {
executed::CallType::None => CallType::None,
executed::CallType::Call => CallType::Call,
executed::CallType::CallCode => CallType::CallCode,
executed::CallType::DelegateCall => CallType::DelegateCall,
executed::CallType::StaticCall => CallType::StaticCall,
}
}
}
/// Call response
#[derive(Debug, Serialize)]
pub struct Call {
/// Sender
from: H160,
/// Recipient
to: H160,
/// Transfered Value
value: U256,
/// Gas
gas: U256,
/// Input data
input: Bytes,
/// The type of the call.
#[serde(rename="callType")]
call_type: CallType,
}
impl From<trace::Call> for Call {
fn from(c: trace::Call) -> Self {
Call {
from: c.from.into(),
to: c.to.into(),
value: c.value.into(),
gas: c.gas.into(),
input: c.input.into(),
call_type: c.call_type.into(),
}
}
}
/// Suicide
#[derive(Debug, Serialize)]
pub struct Suicide {
/// Address.
pub address: H160,
/// Refund address.
#[serde(rename="refundAddress")]
pub refund_address: H160,
/// Balance.
pub balance: U256,
}
impl From<trace::Suicide> for Suicide {
fn from(s: trace::Suicide) -> Self {
Suicide {
address: s.address.into(),
refund_address: s.refund_address.into(),
balance: s.balance.into(),
}
}
}
/// Action
#[derive(Debug)]
pub enum Action {
/// Call
Call(Call),
/// Create
Create(Create),
/// Suicide
Suicide(Suicide),
}
impl From<trace::Action> for Action {
fn from(c: trace::Action) -> Self {
match c {
trace::Action::Call(call) => Action::Call(call.into()),
trace::Action::Create(create) => Action::Create(create.into()),
trace::Action::Suicide(suicide) => Action::Suicide(suicide.into()),
}
}
}
/// Call Result
#[derive(Debug, Serialize)]
pub struct CallResult {
/// Gas used
#[serde(rename="gasUsed")]
gas_used: U256,
/// Output bytes
output: Bytes,
}
impl From<trace::CallResult> for CallResult {
fn from(c: trace::CallResult) -> Self {
CallResult {
gas_used: c.gas_used.into(),
output: c.output.into(),
}
}
}
/// Create Result
#[derive(Debug, Serialize)]
pub struct CreateResult {
/// Gas used
#[serde(rename="gasUsed")]
gas_used: U256,
/// Code
code: Bytes,
/// Assigned address
address: H160,
}
impl From<trace::CreateResult> for CreateResult {
fn from(c: trace::CreateResult) -> Self {
CreateResult {
gas_used: c.gas_used.into(),
code: c.code.into(),
address: c.address.into(),
}
}
}
/// Response
#[derive(Debug)]
pub enum Res {
/// Call
Call(CallResult),
/// Create
Create(CreateResult),
/// Call failure
FailedCall(TraceError),
/// Creation failure
FailedCreate(TraceError),
/// None
None,
}
impl From<trace::Res> for Res {
fn from(t: trace::Res) -> Self {
match t {
trace::Res::Call(call) => Res::Call(CallResult::from(call)),
trace::Res::Create(create) => Res::Create(CreateResult::from(create)),
trace::Res::FailedCall(error) => Res::FailedCall(error),
trace::Res::FailedCreate(error) => Res::FailedCreate(error),
trace::Res::None => Res::None,
}
}
}
/// Trace
#[derive(Debug)]
pub struct LocalizedTrace {
/// Action
action: Action,
/// Result
result: Res,
/// Trace address
trace_address: Vec<usize>,
/// Subtraces
subtraces: usize,
/// Transaction position
transaction_position: usize,
/// Transaction hash
transaction_hash: H256,
/// Block Number
block_number: u64,
/// Block Hash
block_hash: H256,
}
impl Serialize for LocalizedTrace {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
let mut struc = serializer.serialize_struct("LocalizedTrace", 9)?;
match self.action {
Action::Call(ref call) => {
struc.serialize_field("type", "call")?;
struc.serialize_field("action", call)?;
},
Action::Create(ref create) => {
struc.serialize_field("type", "create")?;
struc.serialize_field("action", create)?;
},
Action::Suicide(ref suicide) => {
struc.serialize_field("type", "suicide")?;
struc.serialize_field("action", suicide)?;
},
}
match self.result {
Res::Call(ref call) => struc.serialize_field("result", call)?,
Res::Create(ref create) => struc.serialize_field("result", create)?,
Res::FailedCall(ref error) => struc.serialize_field("error", &error.to_string())?,
Res::FailedCreate(ref error) => struc.serialize_field("error", &error.to_string())?,
Res::None => struc.serialize_field("result", &None as &Option<u8>)?,
}
struc.serialize_field("traceAddress", &self.trace_address)?;
struc.serialize_field("subtraces", &self.subtraces)?;
struc.serialize_field("transactionPosition", &self.transaction_position)?;
struc.serialize_field("transactionHash", &self.transaction_hash)?;
struc.serialize_field("blockNumber", &self.block_number)?;
struc.serialize_field("blockHash", &self.block_hash)?;
struc.end()
}
}
impl From<EthLocalizedTrace> for LocalizedTrace {
fn from(t: EthLocalizedTrace) -> Self {
LocalizedTrace {
action: t.action.into(),
result: t.result.into(),
trace_address: t.trace_address.into_iter().map(Into::into).collect(),
subtraces: t.subtraces.into(),
transaction_position: t.transaction_number.into(),
transaction_hash: t.transaction_hash.into(),
block_number: t.block_number.into(),
block_hash: t.block_hash.into(),
}
}
}
/// Trace
#[derive(Debug)]
pub struct Trace {
/// Trace address
trace_address: Vec<usize>,
/// Subtraces
subtraces: usize,
/// Action
action: Action,
/// Result
result: Res,
}
impl Serialize for Trace {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
let mut struc = serializer.serialize_struct("Trace", 4)?;
match self.action {
Action::Call(ref call) => {
struc.serialize_field("type", "call")?;
struc.serialize_field("action", call)?;
},
Action::Create(ref create) => {
struc.serialize_field("type", "create")?;
struc.serialize_field("action", create)?;
},
Action::Suicide(ref suicide) => {
struc.serialize_field("type", "suicide")?;
struc.serialize_field("action", suicide)?;
},
}
match self.result {
Res::Call(ref call) => struc.serialize_field("result", call)?,
Res::Create(ref create) => struc.serialize_field("result", create)?,
Res::FailedCall(ref error) => struc.serialize_field("error", &error.to_string())?,
Res::FailedCreate(ref error) => struc.serialize_field("error", &error.to_string())?,
Res::None => struc.serialize_field("result", &None as &Option<u8>)?,
}
struc.serialize_field("traceAddress", &self.trace_address)?;
struc.serialize_field("subtraces", &self.subtraces)?;
struc.end()
}
}
impl From<FlatTrace> for Trace {
fn from(t: FlatTrace) -> Self {
Trace {
trace_address: t.trace_address.into_iter().map(Into::into).collect(),
subtraces: t.subtraces.into(),
action: t.action.into(),
result: t.result.into(),
}
}
}
#[derive(Debug, Serialize)]
/// The aggregated results of tracing a transaction's execution.
pub struct TraceResults {
/// The output of the call/create
pub output: Bytes,
/// The transaction trace.
pub trace: Vec<Trace>,
    /// The virtual machine execution trace.
#[serde(rename="vmTrace")]
pub vm_trace: Option<VMTrace>,
    /// The state diff produced by the transaction.
#[serde(rename="stateDiff")]
pub state_diff: Option<StateDiff>,
}
impl From<Executed> for TraceResults {
fn from(t: Executed) -> Self {
TraceResults {
output: t.output.into(),
trace: t.trace.into_iter().map(Into::into).collect(),
vm_trace: t.vm_trace.map(Into::into),
state_diff: t.state_diff.map(Into::into),
}
}
}
#[cfg(test)]
mod tests {
use serde_json;
use std::collections::BTreeMap;
use v1::types::Bytes;
use ethcore::trace::TraceError;
use super::*;
#[test]
fn should_serialize_trace_results() {
let r = TraceResults {
output: vec![0x60].into(),
trace: vec![],
vm_trace: None,
state_diff: None,
};
let serialized = serde_json::to_string(&r).unwrap();
assert_eq!(serialized, r#"{"output":"0x60","trace":[],"vmTrace":null,"stateDiff":null}"#);
}
#[test]
fn test_trace_call_serialize() {
let t = LocalizedTrace {
action: Action::Call(Call {
from: 4.into(),
to: 5.into(),
value: 6.into(),
gas: 7.into(),
input: Bytes::new(vec![0x12, 0x34]),
call_type: CallType::Call,
}),
result: Res::Call(CallResult {
gas_used: 8.into(),
output: vec![0x56, 0x78].into(),
}),
trace_address: vec![10],
subtraces: 1,
transaction_position: 11,
transaction_hash: 12.into(),
block_number: 13,
block_hash: 14.into(),
};
let serialized = serde_json::to_string(&t).unwrap();
assert_eq!(serialized, r#"{"type":"call","action":{"from":"0x0000000000000000000000000000000000000004","to":"0x0000000000000000000000000000000000000005","value":"0x6","gas":"0x7","input":"0x1234","callType":"call"},"result":{"gasUsed":"0x8","output":"0x5678"},"traceAddress":[10],"subtraces":1,"transactionPosition":11,"transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":13,"blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
}
#[test]
fn test_trace_failed_call_serialize() {
let t = LocalizedTrace {
action: Action::Call(Call {
from: 4.into(),
to: 5.into(),
value: 6.into(),
gas: 7.into(),
input: Bytes::new(vec![0x12, 0x34]),
call_type: CallType::Call,
}),
result: Res::FailedCall(TraceError::OutOfGas),
trace_address: vec![10],
subtraces: 1,
transaction_position: 11,
transaction_hash: 12.into(),
block_number: 13,
block_hash: 14.into(),
};
let serialized = serde_json::to_string(&t).unwrap();
assert_eq!(serialized, r#"{"type":"call","action":{"from":"0x0000000000000000000000000000000000000004","to":"0x0000000000000000000000000000000000000005","value":"0x6","gas":"0x7","input":"0x1234","callType":"call"},"error":"Out of gas","traceAddress":[10],"subtraces":1,"transactionPosition":11,"transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":13,"blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
}
#[test]
fn test_trace_create_serialize() {
let t = LocalizedTrace {
action: Action::Create(Create {
from: 4.into(),
value: 6.into(),
gas: 7.into(),
init: Bytes::new(vec![0x12, 0x34]),
}),
result: Res::Create(CreateResult {
gas_used: 8.into(),
code: vec![0x56, 0x78].into(),
address: 0xff.into(),
}),
trace_address: vec![10],
subtraces: 1,
transaction_position: 11,
transaction_hash: 12.into(),
block_number: 13,
block_hash: 14.into(),
};
let serialized = serde_json::to_string(&t).unwrap();
assert_eq!(serialized, r#"{"type":"create","action":{"from":"0x0000000000000000000000000000000000000004","value":"0x6","gas":"0x7","init":"0x1234"},"result":{"gasUsed":"0x8","code":"0x5678","address":"0x00000000000000000000000000000000000000ff"},"traceAddress":[10],"subtraces":1,"transactionPosition":11,"transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":13,"blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
}
#[test]
fn test_trace_failed_create_serialize() {
let t = LocalizedTrace {
action: Action::Create(Create {
from: 4.into(),
value: 6.into(),
gas: 7.into(),
init: Bytes::new(vec![0x12, 0x34]),
}),
result: Res::FailedCreate(TraceError::OutOfGas),
trace_address: vec![10],
subtraces: 1,
transaction_position: 11,
transaction_hash: 12.into(),
block_number: 13,
block_hash: 14.into(),
};
let serialized = serde_json::to_string(&t).unwrap();
assert_eq!(serialized, r#"{"type":"create","action":{"from":"0x0000000000000000000000000000000000000004","value":"0x6","gas":"0x7","init":"0x1234"},"error":"Out of gas","traceAddress":[10],"subtraces":1,"transactionPosition":11,"transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":13,"blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
}
#[test]
fn test_trace_suicide_serialize() {
let t = LocalizedTrace {
action: Action::Suicide(Suicide {
address: 4.into(),
refund_address: 6.into(),
balance: 7.into(),
}),
result: Res::None,
trace_address: vec![10],
subtraces: 1,
transaction_position: 11,
transaction_hash: 12.into(),
block_number: 13,
block_hash: 14.into(),
};
let serialized = serde_json::to_string(&t).unwrap();
assert_eq!(serialized, r#"{"type":"suicide","action":{"address":"0x0000000000000000000000000000000000000004","refundAddress":"0x0000000000000000000000000000000000000006","balance":"0x7"},"result":null,"traceAddress":[10],"subtraces":1,"transactionPosition":11,"transactionHash":"0x000000000000000000000000000000000000000000000000000000000000000c","blockNumber":13,"blockHash":"0x000000000000000000000000000000000000000000000000000000000000000e"}"#);
}
#[test]
fn test_vmtrace_serialize() {
let t = VMTrace {
code: vec![0, 1, 2, 3].into(),
ops: vec![
VMOperation {
pc: 0,
cost: 10,
ex: None,
sub: None,
},
VMOperation {
pc: 1,
cost: 11,
ex: Some(VMExecutedOperation {
used: 10,
push: vec![69.into()],
mem: None,
store: None,
}),
sub: Some(VMTrace {
code: vec![0].into(),
ops: vec![
VMOperation {
pc: 0,
cost: 0,
ex: Some(VMExecutedOperation {
used: 10,
push: vec![42.into()].into(),
mem: Some(MemoryDiff {off: 42, data: vec![1, 2, 3].into()}),
store: Some(StorageDiff {key: 69.into(), val: 42.into()}),
}),
sub: None,
}
]
}),
}
]
};
let serialized = serde_json::to_string(&t).unwrap();
assert_eq!(serialized, r#"{"code":"0x00010203","ops":[{"pc":0,"cost":10,"ex":null,"sub":null},{"pc":1,"cost":11,"ex":{"used":10,"push":["0x45"],"mem":null,"store":null},"sub":{"code":"0x00","ops":[{"pc":0,"cost":0,"ex":{"used":10,"push":["0x2a"],"mem":{"off":42,"data":"0x010203"},"store":{"key":"0x45","val":"0x2a"}},"sub":null}]}}]}"#);
}
#[test]
fn test_statediff_serialize() {
let t = StateDiff(map![
42.into() => AccountDiff {
balance: Diff::Same,
nonce: Diff::Born(1.into()),
code: Diff::Same,
storage: map![
42.into() => Diff::Same
]
},
69.into() => AccountDiff {
balance: Diff::Same,
nonce: Diff::Changed(ChangedType { from: 1.into(), to: 0.into() }),
code: Diff::Died(vec![96].into()),
storage: map![],
}
]);
let serialized = serde_json::to_string(&t).unwrap();
assert_eq!(serialized, r#"{"0x000000000000000000000000000000000000002a":{"balance":"=","nonce":{"+":"0x1"},"code":"=","storage":{"0x000000000000000000000000000000000000000000000000000000000000002a":"="}},"0x0000000000000000000000000000000000000045":{"balance":"=","nonce":{"*":{"from":"0x1","to":"0x0"}},"code":{"-":"0x60"},"storage":{}}}"#);
}
}<|fim▁end|> | #[serde(rename="call")]
Call,
/// Call code |
<|file_name|>estimator.py<|end_file_name|><|fim▁begin|>import numpy as np
import regreg.api as rr
from selection.randomized.glm import pairs_bootstrap_glm, bootstrap_cov
from selection.randomized.query import query
from selection.randomized.randomization import split
import functools
def pairs_bootstrap_glm(glm_loss,
active,
beta_full=None,
inactive=None,
scaling=1.,
solve_args={'min_its':50, 'tol':1.e-10}):
"""
pairs bootstrap of (beta_hat_active, -grad_inactive(beta_hat_active))
"""
X, Y = glm_loss.data
if beta_full is None:
beta_active = restricted_Mest(glm_loss, active, solve_args=solve_args)
beta_full = np.zeros(glm_loss.shape)
beta_full[active] = beta_active
else:
beta_active = beta_full[active]
X_active = X[:,active]
nactive = active.sum()
ntotal = nactive
if inactive is not None:
X_inactive = X[:,inactive]
ntotal += inactive.sum()
_bootW = np.diag(glm_loss.saturated_loss.hessian(X_active.dot(beta_active)))
_bootQ = X_active.T.dot(_bootW.dot(X_active))
_bootQinv = np.linalg.inv(_bootQ)
if inactive is not None:
_bootC = X_inactive.T.dot(_bootW.dot(X_active))
_bootI = _bootC.dot(_bootQinv)
else:
_bootI = None
nactive = active.sum()
if inactive is not None:
X_full = np.hstack([X_active,X_inactive])
beta_overall = np.zeros(X_full.shape[1])
beta_overall[:nactive] = beta_active
else:
X_full = X_active
beta_overall = beta_active
_boot_mu = lambda X_full, beta_overall: glm_loss.saturated_loss.mean_function(X_full.dot(beta_overall))
if ntotal > nactive:
observed = np.hstack([beta_active, -glm_loss.smooth_objective(beta_full, 'grad')[inactive]])
else:
observed = beta_active
# scaling is a lipschitz constant for a gradient squared
_sqrt_scaling = np.sqrt(scaling)
def _boot_score(X_full, Y, ntotal, _bootQinv, _bootI, nactive, _sqrt_scaling, beta_overall, indices):
X_star = X_full[indices]
Y_star = Y[indices]
score = X_star.T.dot(Y_star - _boot_mu(X_star, beta_overall))
result = np.zeros(ntotal)
result[:nactive] = _bootQinv.dot(score[:nactive])
if ntotal > nactive:
result[nactive:] = score[nactive:] - _bootI.dot(score[:nactive])
result[:nactive] *= _sqrt_scaling
result[nactive:] /= _sqrt_scaling
return result
observed[:nactive] *= _sqrt_scaling
observed[nactive:] /= _sqrt_scaling
return functools.partial(_boot_score, X_full, Y, ntotal, _bootQinv, _bootI, nactive, _sqrt_scaling, beta_overall), observed
def pairs_bootstrap_score(glm_loss,
active,
beta_active=None,
solve_args={'min_its':50, 'tol':1.e-10}):
"""
pairs bootstrap of (beta_hat_active, -grad_inactive(beta_hat_active))
"""
X, Y = glm_loss.data
if beta_active is None:
beta_active = restricted_Mest(glm_loss, active, solve_args=solve_args)
X_active = X[:,active]
_bootW = np.diag(glm_loss.saturated_loss.hessian(X_active.dot(beta_active)))
_boot_mu = lambda X_active, beta_active: glm_loss.saturated_loss.mean_function(X_active.dot(beta_active))
def _boot_score(X, Y, active, beta_active, indices):
X_star = X[indices]
Y_star = Y[indices]
score = -X_star.T.dot(Y_star - _boot_mu(X_star[:,active], beta_active))
return score
return functools.partial(_boot_score, X, Y, active, beta_active)
def set_alpha_matrix(glm_loss,
active,
beta_full=None,
inactive=None,
scaling=1.,
solve_args={'min_its': 50, 'tol': 1.e-10}):
X, Y = glm_loss.data
if beta_full is None:
beta_active = restricted_Mest(glm_loss, active, solve_args=solve_args)
beta_full = np.zeros(glm_loss.shape)
beta_full[active] = beta_active
else:
beta_active = beta_full[active]
X_active = X[:,active]
nactive = active.sum()
ntotal = nactive
if inactive is not None:
X_inactive = X[:,inactive]
ntotal += inactive.sum()
_W = np.diag(glm_loss.saturated_loss.hessian(X_active.dot(beta_active)))
_Q = X_active.T.dot(_W.dot(X_active))
_Qinv = np.linalg.inv(_Q)
nactive = active.sum()
if inactive is not None:
X_full = np.hstack([X_active, X_inactive])
beta_overall = np.zeros(X_full.shape[1])
beta_overall[:nactive] = beta_active
else:
X_full = X_active
beta_overall = beta_active
obs_residuals = Y - glm_loss.saturated_loss.mean_function(X_full.dot(beta_overall))
return np.dot(np.dot(_Qinv, X_active.T), np.diag(obs_residuals))
class M_estimator(query):
def __init__(self, loss, epsilon, penalty, randomization, solve_args={'min_its':50, 'tol':1.e-10}):
"""
        Randomized M-estimator with a group-lasso penalty.

        Minimizes the randomized objective

            loss(beta) + penalty(beta) + (epsilon / 2) * ||beta||_2^2 - omega^T beta

        where omega is drawn from `randomization`. Once `solve` is called,
        computes $\bar{\beta}_E$, the restricted M-estimator
        (i.e. subject to the constraint $\beta_{-E}=0$).
        Parameters:
        -----------
        loss: rr.smooth_atom
            Loss to be randomized and penalized.
        epsilon: float
            Coefficient of the ridge term in the randomized program.
        penalty: rr.group_lasso
            Group-lasso penalty determining the selection event.
        randomization: randomization
            Randomization instance used to perturb the loss.
        solve_args: dict
            Arguments to be passed to regreg solver.
        Notes:
        ------
        `solve` sets self._beta_unpenalized, which is used in the covariance
        matrix calculation, and computes the Hessian of the loss at the
        restricted M-estimator.
"""
query.__init__(self, randomization)
(self.loss,
self.epsilon,
self.penalty,
self.randomization,
self.solve_args) = (loss,
epsilon,
penalty,
randomization,
solve_args)
# Methods needed for subclassing a query
def solve(self, scaling=1, solve_args={'min_its':20, 'tol':1.e-10}):
self.randomize()
(loss,
randomized_loss,
epsilon,
penalty,
randomization,
solve_args) = (self.loss,
self.randomized_loss,
self.epsilon,
self.penalty,
self.randomization,
self.solve_args)
# initial solution
problem = rr.simple_problem(randomized_loss, penalty)
self.initial_soln = problem.solve(**solve_args)
# find the active groups and their direction vectors
# as well as unpenalized groups
groups = np.unique(penalty.groups)
active_groups = np.zeros(len(groups), np.bool)
unpenalized_groups = np.zeros(len(groups), np.bool)
active_directions = []
active = np.zeros(loss.shape, np.bool)
unpenalized = np.zeros(loss.shape, np.bool)
initial_scalings = []
for i, g in enumerate(groups):
group = penalty.groups == g
active_groups[i] = (np.linalg.norm(self.initial_soln[group]) > 1.e-6 * penalty.weights[g]) and (penalty.weights[g] > 0)
unpenalized_groups[i] = (penalty.weights[g] == 0)
if active_groups[i]:
active[group] = True
z = np.zeros(active.shape, np.float)
z[group] = self.initial_soln[group] / np.linalg.norm(self.initial_soln[group])
active_directions.append(z)
initial_scalings.append(np.linalg.norm(self.initial_soln[group]))
if unpenalized_groups[i]:
unpenalized[group] = True
# solve the restricted problem
self._overall = active + unpenalized
self._inactive = ~self._overall
self._unpenalized = unpenalized
self._active_directions = np.array(active_directions).T
self._active_groups = np.array(active_groups, np.bool)
self._unpenalized_groups = np.array(unpenalized_groups, np.bool)
self.selection_variable = {'groups':self._active_groups,
'variables':self._overall,
'directions':self._active_directions}
# initial state for opt variables
initial_subgrad = -(self.randomized_loss.smooth_objective(self.initial_soln, 'grad') +
self.randomized_loss.quadratic.objective(self.initial_soln, 'grad'))
# the quadratic of a smooth_atom is not included in computing the smooth_objective
initial_subgrad = initial_subgrad[self._inactive]
initial_unpenalized = self.initial_soln[self._unpenalized]
self.observed_opt_state = np.concatenate([initial_scalings,
initial_unpenalized,
initial_subgrad], axis=0)
# set the _solved bit
self._solved = True
# Now setup the pieces for linear decomposition
(loss,
epsilon,
penalty,
initial_soln,
overall,
inactive,
unpenalized,
active_groups,
active_directions) = (self.loss,
self.epsilon,
self.penalty,
self.initial_soln,
self._overall,
self._inactive,
self._unpenalized,
self._active_groups,
self._active_directions)
# scaling should be chosen to be Lipschitz constant for gradient of Gaussian part
# we are implicitly assuming that
# loss is a pairs model
_sqrt_scaling = np.sqrt(scaling)
_beta_unpenalized = restricted_Mest(loss, overall, solve_args=solve_args)
beta_full = np.zeros(overall.shape)
beta_full[overall] = _beta_unpenalized
_hessian = loss.hessian(beta_full)
self._beta_full = beta_full
# observed state for score
self.observed_score_state = np.hstack([_beta_unpenalized * _sqrt_scaling,
-loss.smooth_objective(beta_full, 'grad')[inactive] / _sqrt_scaling])
# form linear part
self.num_opt_var = p = loss.shape[0] # shorthand for p
# (\bar{\beta}_{E \cup U}, N_{-E}, c_E, \beta_U, z_{-E})
# E for active
# U for unpenalized
# -E for inactive
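        # The first two blocks are score variables (the restricted M-estimator
        # and the inactive score); the last three are optimization variables
        # (active scalings, unpenalized coefficients, inactive subgradient).
        # The slices constructed below index into these concatenations.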
_opt_linear_term = np.zeros((p, self._active_groups.sum() + unpenalized.sum() + inactive.sum()))
_score_linear_term = np.zeros((p, p))
# \bar{\beta}_{E \cup U} piece -- the unpenalized M estimator
Mest_slice = slice(0, overall.sum())
_Mest_hessian = _hessian[:,overall]
_score_linear_term[:,Mest_slice] = -_Mest_hessian / _sqrt_scaling
# N_{-(E \cup U)} piece -- inactive coordinates of score of M estimator at unpenalized solution
null_idx = range(overall.sum(), p)
inactive_idx = np.nonzero(inactive)[0]
for _i, _n in zip(inactive_idx, null_idx):
_score_linear_term[_i,_n] = -_sqrt_scaling
# c_E piece
scaling_slice = slice(0, active_groups.sum())
if len(active_directions)==0:
_opt_hessian=0
else:
_opt_hessian = (_hessian + epsilon * np.identity(p)).dot(active_directions)
_opt_linear_term[:,scaling_slice] = _opt_hessian / _sqrt_scaling
self.observed_opt_state[scaling_slice] *= _sqrt_scaling
# beta_U piece
unpenalized_slice = slice(active_groups.sum(), active_groups.sum() + unpenalized.sum())
unpenalized_directions = np.identity(p)[:,unpenalized]
if unpenalized.sum():
_opt_linear_term[:,unpenalized_slice] = (_hessian + epsilon * np.identity(p)).dot(unpenalized_directions) / _sqrt_scaling
self.observed_opt_state[unpenalized_slice] *= _sqrt_scaling
# subgrad piece
subgrad_idx = range(active_groups.sum() + unpenalized.sum(), active_groups.sum() + inactive.sum() + unpenalized.sum())
subgrad_slice = slice(active_groups.sum() + unpenalized.sum(), active_groups.sum() + inactive.sum() + unpenalized.sum())
for _i, _s in zip(inactive_idx, subgrad_idx):
_opt_linear_term[_i,_s] = _sqrt_scaling
self.observed_opt_state[subgrad_slice] /= _sqrt_scaling
# form affine part
_opt_affine_term = np.zeros(p)
idx = 0
groups = np.unique(penalty.groups)
for i, g in enumerate(groups):
if active_groups[i]:
group = penalty.groups == g
_opt_affine_term[group] = active_directions[:,idx][group] * penalty.weights[g]
idx += 1
# two transforms that encode score and optimization
# variable roles
self.opt_transform = (_opt_linear_term, _opt_affine_term)
self.score_transform = (_score_linear_term, np.zeros(_score_linear_term.shape[0]))
# later, we will modify `score_transform`
# in `linear_decomposition`
# now store everything needed for the projections
# the projection acts only on the optimization
# variables
self.scaling_slice = scaling_slice
# weights are scaled here because the linear terms scales them by scaling
new_groups = penalty.groups[inactive]
new_weights = dict([(g, penalty.weights[g] / _sqrt_scaling) for g in penalty.weights.keys() if g in np.unique(new_groups)])
# we form a dual group lasso object
# to do the projection
self.group_lasso_dual = rr.group_lasso_dual(new_groups, weights=new_weights, bound=1.)
self.subgrad_slice = subgrad_slice
self._setup = True
def setup_sampler(self, scaling=1, solve_args={'min_its':20, 'tol':1.e-10}):
pass
def projection(self, opt_state):
"""
Full projection for Langevin.
The state here will be only the state of the optimization variables.
"""
if not self._setup:
raise ValueError('setup_sampler should be called before using this function')
if ('subgradient' not in self.selection_variable and
            'scaling' not in self.selection_variable): # have not conditioned on anything else
new_state = opt_state.copy() # not really necessary to copy
new_state[self.scaling_slice] = np.maximum(opt_state[self.scaling_slice], 0)
new_state[self.subgrad_slice] = self.group_lasso_dual.bound_prox(opt_state[self.subgrad_slice])
elif ('subgradient' not in self.selection_variable and
'scaling' in self.selection_variable): # conditioned on the initial scalings
# only the subgradient in opt_state
new_state = self.group_lasso_dual.bound_prox(opt_state)
elif ('subgradient' in self.selection_variable and
'scaling' not in self.selection_variable): # conditioned on the subgradient
# only the scaling in opt_state
new_state = np.maximum(opt_state, 0)
else:
new_state = opt_state
return new_state
# optional things to condition on
def condition_on_subgradient(self):
"""
Maybe we should allow subgradients of only some variables...
"""
if not self._setup:
raise ValueError('setup_sampler should be called before using this function')
opt_linear, opt_offset = self.opt_transform
new_offset = opt_linear[:,self.subgrad_slice].dot(self.observed_opt_state[self.subgrad_slice]) + opt_offset
new_linear = opt_linear[:,self.scaling_slice]
self.opt_transform = (new_linear, new_offset)
# for group LASSO this should not induce a bigger jacobian as
# the subgradients are in the interior of a ball
self.selection_variable['subgradient'] = self.observed_opt_state[self.subgrad_slice]
# reset variables
self.observed_opt_state = self.observed_opt_state[self.scaling_slice]
self.scaling_slice = slice(None, None, None)
self.subgrad_slice = np.zeros(new_linear.shape[1], np.bool)
self.num_opt_var = new_linear.shape[1]
def condition_on_scalings(self):
"""
        Maybe we should allow conditioning on scalings of only some variables...
"""
if not self._setup:
raise ValueError('setup_sampler should be called before using this function')
opt_linear, opt_offset = self.opt_transform
new_offset = opt_linear[:,self.scaling_slice].dot(self.observed_opt_state[self.scaling_slice]) + opt_offset
new_linear = opt_linear[:,self.subgrad_slice]
self.opt_transform = (new_linear, new_offset)
# for group LASSO this will induce a bigger jacobian
self.selection_variable['scalings'] = self.observed_opt_state[self.scaling_slice]
# reset slices
self.observed_opt_state = self.observed_opt_state[self.subgrad_slice]
self.subgrad_slice = slice(None, None, None)
self.scaling_slice = np.zeros(new_linear.shape[1], np.bool)
self.num_opt_var = new_linear.shape[1]
def restricted_Mest(Mest_loss, active, solve_args={'min_its':50, 'tol':1.e-10}):
X, Y = Mest_loss.data
if Mest_loss._is_transform:
raise NotImplementedError('to fit restricted model, X must be an ndarray or scipy.sparse; general transforms not implemented')
X_restricted = X[:,active]
loss_restricted = rr.affine_smooth(Mest_loss.saturated_loss, X_restricted)
beta_E = loss_restricted.solve(**solve_args)
return beta_E
class M_estimator_split(M_estimator):
def __init__(self, loss, epsilon, subsample_size, penalty, solve_args={'min_its':50, 'tol':1.e-10}):
total_size = loss.saturated_loss.shape[0]
self.randomization = split(loss.shape, subsample_size, total_size)
M_estimator.__init__(self,loss, epsilon, penalty, self.randomization, solve_args=solve_args)
total_size = loss.saturated_loss.shape[0]
if subsample_size > total_size:
raise ValueError('subsample size must be smaller than total sample size')
self.total_size, self.subsample_size = total_size, subsample_size
def setup_sampler(self, scaling=1., solve_args={'min_its': 50, 'tol': 1.e-10}, B=2000):
M_estimator.setup_sampler(self,
scaling=scaling,
solve_args=solve_args)
# now we need to estimate covariance of
# loss.grad(\beta_E^*) - 1/pi * randomized_loss.grad(\beta_E^*)
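        # where pi = m / n is the subsampling fraction; the covariance is
        # estimated below from B bootstrap replicates of subsample_diff.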
m, n, p = self.subsample_size, self.total_size, self.loss.shape[0] # shorthand
#from .glm import pairs_bootstrap_score
bootstrap_score = pairs_bootstrap_score(self.loss,
self._overall,
beta_active=self._beta_full[self._overall],
solve_args=solve_args)
# find unpenalized MLE on subsample
newq, oldq = rr.identity_quadratic(0, 0, 0, 0), self.randomized_loss.quadratic
self.randomized_loss.quadratic = newq
beta_active_subsample = restricted_Mest(self.randomized_loss,
self._overall)
bootstrap_score_split = pairs_bootstrap_score(self.loss,
self._overall,
beta_active=beta_active_subsample,
solve_args=solve_args)
self.randomized_loss.quadratic = oldq
inv_frac = n / m
def subsample_diff(m, n, indices):
subsample = np.random.choice(indices, size=m, replace=False)
full_score = bootstrap_score(indices) # a sum of n terms
randomized_score = bootstrap_score_split(subsample) # a sum of m terms
return full_score - randomized_score * inv_frac
first_moment = np.zeros(p)
second_moment = np.zeros((p, p))
_n = np.arange(n)
for _ in range(B):
indices = np.random.choice(_n, size=n, replace=True)
randomized_score = subsample_diff(m, n, indices)
first_moment += randomized_score
second_moment += np.multiply.outer(randomized_score, randomized_score)
first_moment /= B
second_moment /= B
cov = second_moment - np.multiply.outer(first_moment,
first_moment)
self.randomization.set_covariance(cov)
return bootstrap_score, cov
class M_estimator_approx(M_estimator):
def __init__(self, loss, epsilon, penalty, randomization, randomizer, estimation):
M_estimator.__init__(self, loss, epsilon, penalty, randomization)
self.randomizer = randomizer
self.estimation = estimation
def solve_approx(self):
self.solve()
(_opt_linear_term, _opt_affine_term) = self.opt_transform
self._opt_linear_term = np.concatenate((_opt_linear_term[self._overall, :], _opt_linear_term[~self._overall, :]), 0)
self._opt_affine_term = np.concatenate((_opt_affine_term[self._overall], _opt_affine_term[~self._overall]), 0)
self.opt_transform = (self._opt_linear_term, self._opt_affine_term)
(_score_linear_term, _) = self.score_transform
self._score_linear_term = np.concatenate((_score_linear_term[self._overall, :], _score_linear_term[~self._overall, :]), 0)
self.score_transform = (self._score_linear_term, np.zeros(self._score_linear_term.shape[0]))
self.feasible_point = np.append(self.observed_score_state, np.abs(self.initial_soln[self._overall]))
lagrange = self.penalty._weight_array
self.inactive_lagrange = lagrange[~self._overall]
X, _ = self.loss.data
n, p = X.shape
self.p = p
nactive = self._overall.sum()
self.nactive = nactive
self.target_observed = self.observed_score_state[:self.nactive]
if self.estimation == 'parametric':
score_cov = np.zeros((p,p))
inv_X_active = np.linalg.inv(X[:, self._overall].T.dot(X[:, self._overall]))
projection_X_active = X[:, self._overall].dot(inv_X_active).dot(X[:, self._overall].T)
score_cov[:self.nactive, :self.nactive] = inv_X_active
score_cov[self.nactive:, self.nactive:] = X[:,~self._overall].T.dot(np.identity(n)- projection_X_active).dot(X[:,~self._overall])
elif self.estimation == 'bootstrap':
bootstrap_score = pairs_bootstrap_glm(self.loss,
self._overall,
beta_full=self._beta_full,
inactive=~self._overall)[0]
score_cov = bootstrap_cov(lambda: np.random.choice(n, size=(n,), replace=True), bootstrap_score)
self.score_cov = score_cov
self.target_cov = score_cov[:nactive, :nactive]
self.score_cov_inv = np.linalg.inv(self.score_cov)
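# Cache the affine pieces: B maps the optimization variables and A maps the
# score; their active/inactive blocks are presumably consumed by the
# approximate-inference routines (not shown in this excerpt).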
self.B = self._opt_linear_term
self.A = self._score_linear_term
self.B_active = self.B[:nactive, :nactive]
self.B_inactive = self.B[nactive:, :nactive]
self.A_active = self._score_linear_term[:nactive, :]
self.A_inactive = self._score_linear_term[nactive:, :]
self.offset_active = self._opt_affine_term[:nactive]
class M_estimator_approx_carved(M_estimator_split):
def __init__(self, loss, epsilon, subsample_size, penalty, estimation):
M_estimator_split.__init__(self,loss, epsilon, subsample_size, penalty, solve_args={'min_its':50, 'tol':1.e-10})
self.estimation = estimation
def solve_approx(self):
self.solve()
self.nactive = self._overall.sum()
X, _ = self.loss.data
n, p = X.shape
self.p = p
self.target_observed = self.observed_score_state[:self.nactive]
self.feasible_point = np.concatenate([self.observed_score_state, np.fabs(self.observed_opt_state[:self.nactive]),
self.observed_opt_state[self.nactive:]], axis = 0)<|fim▁hole|>
(_opt_linear_term, _opt_affine_term) = self.opt_transform
self._opt_linear_term = np.concatenate(
(_opt_linear_term[self._overall, :], _opt_linear_term[~self._overall, :]), 0)
self._opt_affine_term = np.concatenate((_opt_affine_term[self._overall], _opt_affine_term[~self._overall]), 0)
self.opt_transform = (self._opt_linear_term, self._opt_affine_term)
(_score_linear_term, _) = self.score_transform
self._score_linear_term = np.concatenate(
(_score_linear_term[self._overall, :], _score_linear_term[~self._overall, :]), 0)
self.score_transform = (self._score_linear_term, np.zeros(self._score_linear_term.shape[0]))
lagrange = self.penalty._weight_array
#print("True or false", np.all(lagrange[0]-np.fabs(self.feasible_point[p+self.nactive:]))>0)
#print("True or false", np.all(self.feasible_point[p:][:self.nactive]) > 0)
self.inactive_lagrange = lagrange[~self._overall]
self.bootstrap_score, self.randomization_cov = self.setup_sampler()
if self.estimation == 'parametric':
score_cov = np.zeros((p,p))
inv_X_active = np.linalg.inv(X[:, self._overall].T.dot(X[:, self._overall]))
projection_X_active = X[:, self._overall].dot(inv_X_active).dot(X[:, self._overall].T)
score_cov[:self.nactive, :self.nactive] = inv_X_active
score_cov[self.nactive:, self.nactive:] = X[:,~self._overall].T.dot(np.identity(n)- projection_X_active).dot(X[:,~self._overall])
elif self.estimation == 'bootstrap':
score_cov = bootstrap_cov(lambda: np.random.choice(n, size=(n,), replace=True), self.bootstrap_score)
self.score_cov = score_cov
self.score_cov_inv = np.linalg.inv(self.score_cov)
class M_estimator_approx_logistic(M_estimator):
def __init__(self, loss, epsilon, penalty, randomization, randomizer, estimation):
M_estimator.__init__(self, loss, epsilon, penalty, randomization)
self.randomizer = randomizer
self.estimation = estimation
def solve_approx(self):
self.solve()
(_opt_linear_term, _opt_affine_term) = self.opt_transform
self._opt_linear_term = np.concatenate((_opt_linear_term[self._overall, :], _opt_linear_term[~self._overall, :]), 0)
self._opt_affine_term = np.concatenate((_opt_affine_term[self._overall], _opt_affine_term[~self._overall]), 0)
self.opt_transform = (self._opt_linear_term, self._opt_affine_term)
(_score_linear_term, _) = self.score_transform
self._score_linear_term = np.concatenate((_score_linear_term[self._overall, :], _score_linear_term[~self._overall, :]), 0)
self.score_transform = (self._score_linear_term, np.zeros(self._score_linear_term.shape[0]))
self.feasible_point = np.append(self.observed_score_state, np.abs(self.initial_soln[self._overall]))
lagrange = self.penalty._weight_array
self.inactive_lagrange = lagrange[~self._overall]
X, _ = self.loss.data
n, p = X.shape
self.p = p
nactive = self._overall.sum()
self.nactive = nactive
self.target_observed = self.observed_score_state[:self.nactive]
if self.estimation == 'parametric':
score_cov = np.zeros((p,p))
vec = np.exp(X[:, self._overall].dot(self.target_observed))
#vec = np.exp(np.zeros(n))
pi = np.true_divide(vec, np.power(1. + vec, 2))
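# pi is the logistic variance mu * (1 - mu) with mu = vec / (1 + vec),
# i.e. the GLM weights for the observed information.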
weights = np.diag(pi)
Q_active = X[:, self._overall].T.dot(weights).dot(X[:, self._overall])
Q_active_inv = np.linalg.inv(Q_active)
P_inactive = X[:, ~self._overall].T.dot(np.identity(n)-weights.dot(X[:, self._overall].dot(Q_active_inv)
.dot(X[:, self._overall].T)))
score_cov[:self.nactive, :self.nactive] = Q_active_inv
score_cov[self.nactive:, self.nactive:] = P_inactive.dot(weights).dot(P_inactive.T)
elif self.estimation == 'bootstrap':
bootstrap_score = pairs_bootstrap_glm(self.loss,
self._overall,
beta_full=self._beta_full,
inactive=~self._overall)[0]
score_cov = bootstrap_cov(lambda: np.random.choice(n, size=(n,), replace=True), bootstrap_score)
self.score_cov = score_cov
self.target_cov = score_cov[:nactive, :nactive]
self.score_cov_inv = np.linalg.inv(self.score_cov)
self.B = self._opt_linear_term
self.A = self._score_linear_term
self.B_active = self.B[:nactive, :nactive]
self.B_inactive = self.B[nactive:, :nactive]
self.A_active = self._score_linear_term[:nactive, :]
self.A_inactive = self._score_linear_term[nactive:, :]
self.offset_active = self._opt_affine_term[:nactive]<|fim▁end|> | |
<|file_name|>communication_config.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import api, models, fields, _
from openerp.exceptions import ValidationError
import logging
logger = logging.getLogger(__name__)
class CommunicationConfig(models.Model):
""" This class allows to configure if and how we will inform the
sponsor when a given event occurs. """
_name = 'partner.communication.config'
_description = 'Communication Configuration'
##########################################################################
# FIELDS #
##########################################################################
name = fields.Char(
required=True, help='Rule name')
send_mode = fields.Selection('_get_send_mode', required=True)
send_mode_pref_field = fields.Char(
'Partner preference field',
help='Name of the field in res.partner in which to find the '
'delivery preference'
)
print_if_not_email = fields.Boolean(
help="Should we print the communication if the sponsor don't have "
"an e-mail address"
)
email_template_id = fields.Many2one(
'email.template', 'Email template')
report_id = fields.Many2one(
'ir.actions.report.xml', 'Letter template')
from_employee_id = fields.Many2one(
'hr.employee', 'Communication From',
help='The sponsor will receive the communication from this employee'
)
##########################################################################
# FIELDS METHODS #
##########################################################################
@api.one
@api.constrains('send_mode_pref_field')
def _validate_config(self):
""" Test if the config is valid. """
valid = True
if self.send_mode_pref_field:
valid = hasattr(self.env['res.partner'], self.send_mode_pref_field)
if not valid:
raise ValidationError(
"Following field does not exist in res.partner: %s." %
self.send_mode_pref_field
)
def _get_send_mode(self):
send_modes = self.get_delivery_preferences()
send_modes.append(
('partner_preference', _('Partner specific'))
)
return send_modes
##########################################################################
# PUBLIC METHODS #
##########################################################################
@api.model
def get_delivery_preferences(self):
return [
('none', _("Don't inform sponsor")),
('auto_digital', _('Send e-mail automatically')),
('digital', _('Prepare e-mail (sent manually)')),
('auto_physical', _('Print letter automatically')),
('physical', _('Prepare report (print manually)')),
]
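# The 'auto_' prefix marks modes that are sent/printed without manual review;
# get_inform_mode() below strips it to derive the (send_mode, auto_mode) pair.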
def get_inform_mode(self, partner):<|fim▁hole|> communication (digital, physical or False).
:param partner: res.partner record
:returns: send_mode (auto/digital/False), auto_mode (True/False)
"""
self.ensure_one()
if self.send_mode != 'partner_preference':
send_mode = self.send_mode
else:
send_mode = getattr(
partner, self.send_mode_pref_field, False) or 'none'
# falling back to 'none' keeps a missing preference from crashing the 'in' test
auto_mode = 'auto' in send_mode
send_mode = send_mode.replace('auto_', '')
if send_mode == 'none':
send_mode = False
if send_mode == 'digital' and not partner.email:
if self.print_if_not_email:
send_mode = 'physical'
else:
send_mode = False
return send_mode, auto_mode<|fim▁end|> | """ Returns how the partner should be informed for the given |
<|file_name|>header.py<|end_file_name|><|fim▁begin|>import bpy
# -----------------------------------------------------------------------------
# Draw UI: a function to be appended to the 3D View header
# -----------------------------------------------------------------------------
def ui_3D(self, context):
layout = self.layout
row = layout.row(align=True)
row.operator("view.grid_control", text='', icon='GRID')
icon = 'CURSOR'
row.operator("object.center_pivot_mesh_obj", text='', icon=icon)
icon = 'SMOOTH'
row.operator("object.smooth_shading", text='', icon=icon)
row = layout.row(align=True)
icon = 'FORCE_TEXTURE'
row.operator("unwrap.uv_checker", text='', icon=icon)
icon = 'EDITMODE_HLT'
row.operator("object.retopo_shading", text='', icon=icon)
# -----------------------------------------------------------------------------
# Draw UI: a function to be appended to the UV/Image Editor header
# -----------------------------------------------------------------------------
def ui_UV(self, context):
layout = self.layout<|fim▁hole|> row.operator("unwrap.reset_cursor", text='', icon=icon)
icon = 'FORCE_TEXTURE'
row.operator("unwrap.uv_checker", text='', icon=icon)
def register():
bpy.types.VIEW3D_HT_header.append(ui_3D)
bpy.types.IMAGE_HT_header.append(ui_UV)
def unregister():
bpy.types.VIEW3D_HT_header.remove(ui_3D)
bpy.types.IMAGE_HT_header.remove(ui_UV)<|fim▁end|> | row = layout.row(align=True)
icon = 'CURSOR' |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various generic utility functions and classes.
Utilities are mainly for internal usage, but external libraries and tools
may find some of them useful. Utilities are generally stable, but absolute
backwards compatibility between major versions is not guaranteed.
All utilities are exposed via the :mod:`robot.utils` package, and should be
used either like::
from robot import utils
assert utils.Matcher('H?llo').match('Hillo')
or::<|fim▁hole|>
assert Matcher('H?llo').match('Hillo')
"""
from .argumentparser import ArgumentParser, cmdline2list
from .application import Application
from .compress import compress_text
from .connectioncache import ConnectionCache
from .dotdict import DotDict, OrderedDict
from .encoding import (decode_output, encode_output,
decode_from_system, encode_to_system)
from .error import (get_error_message, get_error_details, ErrorDetails)
from .escaping import escape, unescape, split_from_equals
from .etreewrapper import ET, ETSource
from .frange import frange
from .markuputils import html_format, html_escape, xml_escape, attribute_escape
from .markupwriters import HtmlWriter, XmlWriter, NullMarkupWriter
from .importer import Importer
from .match import eq, Matcher, MultiMatcher
from .misc import (getdoc, isatty, plural_or_not, printable_name, py2to3,
roundup, seq2str, seq2str2)
from .normalizing import lower, normalize, NormalizedDict
from .platform import (IRONPYTHON, JYTHON, PY2, PY3, PYTHON, UNIXY, WINDOWS,
RERAISED_EXCEPTIONS)
from .recommendations import RecommendationFinder
from .robotenv import get_env_var, set_env_var, del_env_var, get_env_vars
from .robotinspect import is_java_init, is_java_method
from .robotio import binary_file_writer, file_writer
from .robotpath import abspath, find_file, get_link_path, normpath
from .robottime import (elapsed_time_to_string, format_time, get_elapsed_time,
get_time, get_timestamp, secs_to_timestamp,
secs_to_timestr, timestamp_to_secs, timestr_to_secs,
parse_time)
from .robottypes import (is_bytes, is_dict_like, is_falsy, is_integer,
is_list_like, is_number, is_string, is_truthy,
is_unicode, long, type_name, unicode, StringIO)
from .setter import setter, SetterAwareType
from .sortable import Sortable
from .text import (cut_long_message, format_assign_message,
pad_console_length, get_console_length, split_tags_from_doc,
split_args_from_name_or_path)
from .unic import prepr, unic
from .utf8reader import Utf8Reader
# Used by the old SeleniumLibrary until version 2.9.2.
# https://github.com/robotframework/SeleniumLibrary/issues/261
# TODO: Remove in RF 3.0.
def html_attr_escape(attr):
"""Deprecated!! Use attribute_escape instead."""
return attribute_escape(attr)<|fim▁end|> |
from robot.utils import Matcher |
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack import *
class Casacore(CMakePackage):
"""A suite of c++ libraries for radio astronomy data processing."""
homepage = "https://github.com/casacore/casacore"
url = "https://github.com/casacore/casacore/archive/v2.4.1.tar.gz"
maintainers = ['mpokorny']
version('3.4.0', sha256='31f02ad2e26f29bab4a47a2a69e049d7bc511084a0b8263360e6157356f92ae1')
version('3.3.0', sha256='3a714644b908ef6e81489b792cc9b80f6d8267a275e15d38a42a6a5137d39d3d')
version('3.2.0', sha256='ae5d3786cb6dfdd7ebc5eecc0c724ff02bbf6929720bc23be43a027978e79a5f')
version('3.1.2', sha256='ac94f4246412eb45d503f1019cabe2bb04e3861e1f3254b832d9b1164ea5f281')
version('3.1.1', sha256='85d2b17d856592fb206b17e0a344a29330650a4269c80b87f8abb3eaf3dadad4')
version('3.1.0', sha256='a6adf2d77ad0d6f32995b1e297fd88d31ded9c3e0bb8f28966d7b35a969f7897')
version('3.0.0', sha256='6f0e68fd77b5c96299f7583a03a53a90980ec347bff9dfb4c0abb0e2933e6bcb')
version('2.4.1', sha256='58eccc875053b2c6fe44fe53b6463030ef169597ec29926936f18d27b5087d63')
depends_on('[email protected]:', type='build')
variant('openmp', default=False, description='Build OpenMP support')
variant('shared', default=True, description='Build shared libraries')
variant('readline', default=True, description='Build readline support')
# see note below about the reason for disabling the "sofa" variant
# variant('sofa', default=False, description='Build SOFA support')
variant('adios2', default=False, description='Build ADIOS2 support')
variant('fftpack', default=False, description='Build FFTPack')
variant('hdf5', default=False, description='Build HDF5 support')
variant('python', default=False, description='Build python support')
# Force dependency on readline in v3.2 and earlier. Although the
# presence of readline is tested in CMakeLists.txt, and casacore
# can be built without it, there's no way to control that
# dependency at build time; since many systems come with readline,
# it's better to explicitly depend on it here always.
depends_on('readline', when='@:3.2.0')
depends_on('readline', when='+readline')
depends_on('flex', type='build')
depends_on('bison', type='build')
depends_on('blas')
depends_on('lapack')
depends_on('cfitsio')
depends_on('[email protected]:+cfitsio')
depends_on('[email protected]: precision=float,double', when='@3.4.0:')
depends_on('[email protected]: precision=float,double', when='~fftpack')
# SOFA dependency suffers the same problem in CMakeLists.txt as readline;
# force a dependency when building unit tests
depends_on('sofa-c', type='test')
depends_on('hdf5', when='+hdf5')
depends_on('adios2+mpi', when='+adios2')
depends_on('mpi', when='+adios2')
depends_on('[email protected]:', when='+python')
depends_on('boost+python', when='+python')
depends_on('py-numpy', when='+python')
def cmake_args(self):
args = []
spec = self.spec
args.append(self.define_from_variant('ENABLE_SHARED', 'shared'))
args.append(self.define_from_variant('USE_OPENMP', 'openmp'))
args.append(self.define_from_variant('USE_READLINE', 'readline'))
args.append(self.define_from_variant('USE_HDF5', 'hdf5'))
args.append(self.define_from_variant('USE_ADIOS2', 'adios2'))
args.append(self.define_from_variant('USE_MPI', 'adios2'))
if spec.satisfies('+adios2'):
args.append(self.define('ENABLE_TABLELOCKING', False))
# fftw3 is required by casacore starting with v3.4.0, but the
# old fftpack is still available. For v3.4.0 and later, we
# always require FFTW3 dependency with the optional addition
# of FFTPack. In older casacore versions, only one of FFTW3 or
# FFTPack can be selected.
if spec.satisfies('@3.4.0:'):
if spec.satisfies('+fftpack'):<|fim▁hole|> args.append('-DBUILD_FFTPACK_DEPRECATED=YES')
args.append(self.define('USE_FFTW3', True))
else:
args.append(self.define('USE_FFTW3', spec.satisfies('~fftpack')))
# Python2 and Python3 binding
if spec.satisfies('~python'):
args.extend(['-DBUILD_PYTHON=NO', '-DBUILD_PYTHON3=NO'])
elif spec.satisfies('^[email protected]:'):
args.extend(['-DBUILD_PYTHON=NO', '-DBUILD_PYTHON3=YES'])
else:
args.extend(['-DBUILD_PYTHON=YES', '-DBUILD_PYTHON3=NO'])
args.append('-DBUILD_TESTING=OFF')
return args
def patch(self):
# Rely on CMake ability to find hdf5, available since CMake 3.7.X
os.remove('cmake/FindHDF5.cmake')<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import requests
from django.conf import settings
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_http_methods
@csrf_exempt
@require_http_methods(["POST"])
def post_service_request(request):
payload = request.POST.copy()
outgoing = payload.dict()
if outgoing.get("internal_feedback", False):
if "internal_feedback" in outgoing:
del outgoing["internal_feedback"]
api_key = settings.OPEN311["INTERNAL_FEEDBACK_API_KEY"]
else:
api_key = settings.OPEN311["API_KEY"]
outgoing["api_key"] = api_key
url = settings.OPEN311["URL_BASE"]
session = requests.Session()
# Modify parameters for request in case of City of Turku
if "smbackend_turku" in settings.INSTALLED_APPS:
outgoing.pop("service_request_type")
outgoing.pop("can_be_published")
outgoing["address_string"] = "null"
outgoing["service_code"] = settings.OPEN311["SERVICE_CODE"]
<|fim▁hole|> return HttpResponseBadRequest()
return HttpResponse(r.content, content_type="application/json")<|fim▁end|> | r = session.post(url, data=outgoing)
if r.status_code != 200: |
<|file_name|>zip_longest.rs<|end_file_name|><|fim▁begin|>use std::cmp::Ordering::{Equal, Greater, Less};
use super::size_hint;
use std::iter::Fuse;
use crate::either_or_both::EitherOrBoth;
// ZipLongest originally written by SimonSapin,
// and dedicated to itertools https://github.com/rust-lang/rust/pull/19283
/// An iterator which iterates two other iterators simultaneously
///
/// This iterator is *fused*.
///
/// See [`.zip_longest()`](../trait.Itertools.html#method.zip_longest) for more information.
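///
/// A usage sketch (values are illustrative; assumes the public `itertools` API):
///
/// ```ignore
/// use itertools::{EitherOrBoth, Itertools};
/// let pairs: Vec<_> = (0..2).zip_longest(0..3).collect();
/// assert_eq!(pairs[2], EitherOrBoth::Right(2));
/// ```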
#[derive(Clone, Debug)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct ZipLongest<T, U> {
a: Fuse<T>,
b: Fuse<U>,
}
/// Create a new `ZipLongest` iterator.
pub fn zip_longest<T, U>(a: T, b: U) -> ZipLongest<T, U>
where T: Iterator,
U: Iterator
{
ZipLongest {
a: a.fuse(),
b: b.fuse(),
}
}
impl<T, U> Iterator for ZipLongest<T, U>
where T: Iterator,
U: Iterator<|fim▁hole|> #[inline]
fn next(&mut self) -> Option<Self::Item> {
match (self.a.next(), self.b.next()) {
(None, None) => None,
(Some(a), None) => Some(EitherOrBoth::Left(a)),
(None, Some(b)) => Some(EitherOrBoth::Right(b)),
(Some(a), Some(b)) => Some(EitherOrBoth::Both(a, b)),
}
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
size_hint::max(self.a.size_hint(), self.b.size_hint())
}
}
impl<T, U> DoubleEndedIterator for ZipLongest<T, U>
where T: DoubleEndedIterator + ExactSizeIterator,
U: DoubleEndedIterator + ExactSizeIterator
{
#[inline]
fn next_back(&mut self) -> Option<Self::Item> {
match self.a.len().cmp(&self.b.len()) {
Equal => match (self.a.next_back(), self.b.next_back()) {
(None, None) => None,
(Some(a), Some(b)) => Some(EitherOrBoth::Both(a, b)),
// These can only happen if .len() is inconsistent with .next_back()
(Some(a), None) => Some(EitherOrBoth::Left(a)),
(None, Some(b)) => Some(EitherOrBoth::Right(b)),
},
Greater => self.a.next_back().map(EitherOrBoth::Left),
Less => self.b.next_back().map(EitherOrBoth::Right),
}
}
}
impl<T, U> ExactSizeIterator for ZipLongest<T, U>
where T: ExactSizeIterator,
U: ExactSizeIterator
{}<|fim▁end|> | {
type Item = EitherOrBoth<T::Item, U::Item>;
|
<|file_name|>declaration.js<|end_file_name|><|fim▁begin|>'use strict';
exports.__esModule = true;
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _warnOnce = require('./warn-once');<|fim▁hole|>var _warnOnce2 = _interopRequireDefault(_warnOnce);
var _node = require('./node');
var _node2 = _interopRequireDefault(_node);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var Declaration = function (_Node) {
_inherits(Declaration, _Node);
function Declaration(defaults) {
_classCallCheck(this, Declaration);
var _this = _possibleConstructorReturn(this, _Node.call(this, defaults));
_this.type = 'decl';
return _this;
}
/* istanbul ignore next */
_createClass(Declaration, [{
key: '_value',
get: function get() {
(0, _warnOnce2.default)('Node#_value was deprecated. Use Node#raws.value');
return this.raws.value;
}
/* istanbul ignore next */
,
set: function set(val) {
(0, _warnOnce2.default)('Node#_value was deprecated. Use Node#raws.value');
this.raws.value = val;
}
/* istanbul ignore next */
}, {
key: '_important',
get: function get() {
(0, _warnOnce2.default)('Node#_important was deprecated. Use Node#raws.important');
return this.raws.important;
}
/* istanbul ignore next */
,
set: function set(val) {
(0, _warnOnce2.default)('Node#_important was deprecated. Use Node#raws.important');
this.raws.important = val;
}
}]);
return Declaration;
}(_node2.default);
exports.default = Declaration;
module.exports = exports['default'];<|fim▁end|> | |
<|file_name|>RevealingModuleVariant.js<|end_file_name|><|fim▁begin|>var AppGlobal = {
exposePrivateVariablesForTesting: true
};
var testableObject = function (exposePrivateVariablesForTesting) {
var _privateVar = "can't see this";<|fim▁hole|>
function _privateFunction() {
console.log("Executed Private");
}
function _exposedFunction() {
console.log("Exposed Function");
}
var returnValue = {
ExposedFunction: _exposedFunction,
ExposedVariable: _publicVar
};
if (exposePrivateVariablesForTesting) {
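// NOTE: $.extend assumes jQuery is available on the page; in modern code
// Object.assign(returnValue, {...}) would work without the dependency.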
$.extend(returnValue, {
PrivateVar: _privateVar,
PrivateFunction: _privateFunction
});
}
return returnValue;
}(AppGlobal.exposePrivateVariablesForTesting);
testableObject.ExposedFunction();
console.log(testableObject.ExposedVariable);
testableObject.PrivateFunction();
console.log(testableObject.PrivateVar);<|fim▁end|> | var _publicVar = "we see this fine"; |
<|file_name|>jpeg.py<|end_file_name|><|fim▁begin|># This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
# The source was jpeg.ksy from here - https://github.com/kaitai-io/kaitai_struct_formats/blob/24e2d00048b8084ceec30a187a79cb87a79a48ba/image/jpeg.ksy
import array
import struct
import zlib
from enum import Enum
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from .exif import Exif
class Jpeg(KaitaiStruct):
class ComponentId(Enum):
y = 1
cb = 2
cr = 3
i = 4
q = 5
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.segments = []
while not self._io.is_eof():
self.segments.append(self._root.Segment(self._io, self, self._root))
class Segment(KaitaiStruct):
class MarkerEnum(Enum):
tem = 1
sof0 = 192
sof1 = 193
sof2 = 194
sof3 = 195
dht = 196
sof5 = 197
sof6 = 198
sof7 = 199
soi = 216
eoi = 217
sos = 218
dqt = 219
dnl = 220
dri = 221
dhp = 222
app0 = 224
app1 = 225
app2 = 226
app3 = 227
app4 = 228
app5 = 229
app6 = 230
app7 = 231
app8 = 232
app9 = 233
app10 = 234
app11 = 235
app12 = 236
app13 = 237
app14 = 238
app15 = 239
com = 254
def __init__(self, _io, _parent=None, _root=None):
self._io = _io<|fim▁hole|> if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
self.length = self._io.read_u2be()
if ((self.marker != self._root.Segment.MarkerEnum.soi) and (self.marker != self._root.Segment.MarkerEnum.eoi)) :
_on = self.marker
if _on == self._root.Segment.MarkerEnum.sos:
self._raw_data = self._io.read_bytes((self.length - 2))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentSos(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.app1:
self._raw_data = self._io.read_bytes((self.length - 2))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentApp1(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.sof0:
self._raw_data = self._io.read_bytes((self.length - 2))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentSof0(io, self, self._root)
elif _on == self._root.Segment.MarkerEnum.app0:
self._raw_data = self._io.read_bytes((self.length - 2))
io = KaitaiStream(BytesIO(self._raw_data))
self.data = self._root.SegmentApp0(io, self, self._root)
else:
self.data = self._io.read_bytes((self.length - 2))
if self.marker == self._root.Segment.MarkerEnum.sos:
self.image_data = self._io.read_bytes_full()
class SegmentSos(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.num_components = self._io.read_u1()
self.components = [None] * (self.num_components)
for i in range(self.num_components):
self.components[i] = self._root.SegmentSos.Component(self._io, self, self._root)
self.start_spectral_selection = self._io.read_u1()
self.end_spectral = self._io.read_u1()
self.appr_bit_pos = self._io.read_u1()
class Component(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.id = self._root.ComponentId(self._io.read_u1())
self.huffman_table = self._io.read_u1()
class SegmentApp1(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.magic = self._io.read_strz("ASCII", 0, False, True, True)
_on = self.magic
if _on == u"Exif":
self.body = self._root.ExifInJpeg(self._io, self, self._root)
class SegmentSof0(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.bits_per_sample = self._io.read_u1()
self.image_height = self._io.read_u2be()
self.image_width = self._io.read_u2be()
self.num_components = self._io.read_u1()
self.components = [None] * (self.num_components)
for i in range(self.num_components):
self.components[i] = self._root.SegmentSof0.Component(self._io, self, self._root)
class Component(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.id = self._root.ComponentId(self._io.read_u1())
self.sampling_factors = self._io.read_u1()
self.quantization_table_id = self._io.read_u1()
@property
def sampling_x(self):
if hasattr(self, '_m_sampling_x'):
return self._m_sampling_x if hasattr(self, '_m_sampling_x') else None
self._m_sampling_x = ((self.sampling_factors & 240) >> 4)
return self._m_sampling_x if hasattr(self, '_m_sampling_x') else None
@property
def sampling_y(self):
if hasattr(self, '_m_sampling_y'):
return self._m_sampling_y if hasattr(self, '_m_sampling_y') else None
self._m_sampling_y = (self.sampling_factors & 15)
return self._m_sampling_y if hasattr(self, '_m_sampling_y') else None
class ExifInJpeg(KaitaiStruct):
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.extra_zero = self._io.ensure_fixed_contents(struct.pack('1b', 0))
self._raw_data = self._io.read_bytes_full()
io = KaitaiStream(BytesIO(self._raw_data))
self.data = Exif(io)
class SegmentApp0(KaitaiStruct):
class DensityUnit(Enum):
no_units = 0
pixels_per_inch = 1
pixels_per_cm = 2
def __init__(self, _io, _parent=None, _root=None):
self._io = _io
self._parent = _parent
self._root = _root if _root else self
self.magic = self._io.read_str_byte_limit(5, "ASCII")
self.version_major = self._io.read_u1()
self.version_minor = self._io.read_u1()
self.density_units = self._root.SegmentApp0.DensityUnit(self._io.read_u1())
self.density_x = self._io.read_u2be()
self.density_y = self._io.read_u2be()
self.thumbnail_x = self._io.read_u1()
self.thumbnail_y = self._io.read_u1()
self.thumbnail = self._io.read_bytes(((self.thumbnail_x * self.thumbnail_y) * 3))<|fim▁end|> | self._parent = _parent
self._root = _root if _root else self
self.magic = self._io.ensure_fixed_contents(struct.pack('1b', -1))
self.marker = self._root.Segment.MarkerEnum(self._io.read_u1()) |
<|file_name|>VideoWizard.py<|end_file_name|><|fim▁begin|>from boxbranding import getBoxType, getMachineName, getMachineBuild, getBrandOEM, getMachineBrand
from Screens.Wizard import WizardSummary
from Screens.WizardLanguage import WizardLanguage
from Screens.Rc import Rc
from Components.AVSwitch import iAVSwitch
from Screens.Screen import Screen
from Components.Pixmap import Pixmap
from Components.config import config, ConfigBoolean, configfile
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_ACTIVE_SKIN
from Tools.HardwareInfo import HardwareInfo
config.misc.showtestcard = ConfigBoolean(default = False)
boxtype = getBoxType()
has_rca = False
has_dvi = False
if boxtype in ('formuler3', 'enibox', 'mago', 'x2plus', 'sf3038', 'sf108', 'twinboxlcd', 'atemio6000', 'atemio6100', 'atemio6200', 'mbminiplus', 'vp7358ci', 'enibox', 'gbquad', 'gbquadplus', 'et5x00', 'et6000', 'et7000', 'et7500', 'et8500', 'classm', 'axodin', 'axodinc', 'genius', 'evo', 'galaxym6', 'geniuse3hd', 'evoe3hd', 'axase3', 'axase3c', 'starsatlx', 'mixosf7', 'mixoslumi', 'tmnano', 'azboxme', 'azboxminime', 'optimussos1', 'optimussos2', 'gb800seplus', 'gb800ueplus', 'gbultrase', 'gbultraue', 'sezam1000hd', 'ixussone', 'ixusszero', 'enfinity', 'marvel1', 'bre2ze', 'force1', 'force1plus', 'worldvisionf1', 'optimussos1plus', 'optimussos2plus', 'optimussos3plus', 'formuler1', 'tmnano2super', 'vusolose', 'vuzero', 'tyrant') or getMachineBrand() == 'Zgemma':
has_rca = True
if boxtype in ('dm8000', 'dm800'):
has_dvi = True
class VideoWizardSummary(WizardSummary):
skin = (
"""<screen name="VideoWizardSummary" position="0,0" size="132,64" id="1">
<widget name="text" position="6,4" size="120,40" font="Regular;12" transparent="1" />
<widget source="parent.list" render="Label" position="6,40" size="120,21" font="Regular;14">
<convert type="StringListSelection" />
</widget>
<!--widget name="pic" pixmap="%s" position="6,22" zPosition="10" size="64,64" transparent="1" alphatest="on"/-->
</screen>""",
"""<screen name="VideoWizardSummary" position="0,0" size="96,64" id="2">
<widget name="text" position="0,4" size="96,40" font="Regular;12" transparent="1" />
<widget source="parent.list" render="Label" position="0,40" size="96,21" font="Regular;14">
<convert type="StringListSelection" />
</widget>
<!--widget name="pic" pixmap="%s" position="0,22" zPosition="10" size="64,64" transparent="1" alphatest="on"/-->
</screen>""")
#% (resolveFilename(SCOPE_PLUGINS, "SystemPlugins/Videomode/lcd_Scart.png"))
def __init__(self, session, parent):
WizardSummary.__init__(self, session, parent)
#self["pic"] = Pixmap()
def setLCDPicCallback(self):
self.parent.setLCDTextCallback(self.setText)
def setLCDPic(self, file):
self["pic"].instance.setPixmapFromFile(file)
class VideoWizard(WizardLanguage, Rc):
skin = """
<screen position="fill" title="Welcome..." flags="wfNoBorder" >
<panel name="WizardMarginsTemplate"/>
<panel name="WizardPictureLangTemplate"/>
<panel name="RemoteControlTemplate"/>
<panel position="left" size="10,*" />
<panel position="right" size="10,*" />
<panel position="fill">
<widget name="text" position="top" size="*,270" font="Regular;23" valign="center" />
<panel position="fill">
<panel position="left" size="150,*">
<widget name="portpic" position="top" zPosition="10" size="150,150" transparent="1" alphatest="on"/>
</panel>
<panel position="fill" layout="stack">
<widget source="list" render="Listbox" position="fill" scrollbarMode="showOnDemand" >
<convert type="StringList" />
</widget>
<!--<widget name="config" position="fill" zPosition="1" scrollbarMode="showOnDemand" />-->
</panel>
</panel>
</panel>
</screen>"""
def __init__(self, session):
# FIXME anyone knows how to use relative paths from the plugin's directory?
self.xmlfile = resolveFilename(SCOPE_SKIN, "videowizard.xml")
self.hw = iAVSwitch
WizardLanguage.__init__(self, session, showSteps = False, showStepSlider = False)
Rc.__init__(self)
self["wizard"] = Pixmap()
self["portpic"] = Pixmap()
Screen.setTitle(self, _("Welcome..."))
self.port = None
self.mode = None
self.rate = None
def createSummary(self):
return VideoWizardSummary
def markDone(self):
self.hw.saveMode(self.port, self.mode, self.rate)
config.misc.videowizardenabled.value = 0
config.misc.videowizardenabled.save()
configfile.save()
def listInputChannels(self):
hw_type = HardwareInfo().get_device_name()
has_hdmi = HardwareInfo().has_hdmi()
list = []
for port in self.hw.getPortList():
if self.hw.isPortUsed(port):
descr = port
if descr == 'HDMI' and has_dvi:
descr = 'DVI'
if descr == 'Scart' and has_rca:
descr = 'RCA'
if port != "DVI-PC":
list.append((descr,port))
list.sort(key = lambda x: x[0])
print "listInputChannels:", list
return list
def inputSelectionMade(self, index):
print "inputSelectionMade:", index
self.port = index
self.inputSelect(index)
def inputSelectionMoved(self):
hw_type = HardwareInfo().get_device_name()
has_hdmi = HardwareInfo().has_hdmi()
print "input selection moved:", self.selection
self.inputSelect(self.selection)
if self["portpic"].instance is not None:
picname = self.selection
if picname == 'HDMI' and has_dvi:
picname = "DVI"
if picname == 'Scart' and has_rca:
picname = "RCA"
self["portpic"].instance.setPixmapFromFile(resolveFilename(SCOPE_ACTIVE_SKIN, "icons/" + picname + ".png"))
def inputSelect(self, port):
print "inputSelect:", port
modeList = self.hw.getModeList(self.selection)
print "modeList:", modeList
self.port = port
if len(modeList) > 0:
ratesList = self.listRates(modeList[0][0])
self.hw.setMode(port = port, mode = modeList[0][0], rate = ratesList[0][0])<|fim▁hole|>
def listModes(self):
list = []
print "modes for port", self.port
for mode in self.hw.getModeList(self.port):
#if mode[0] != "PC":
list.append((mode[0], mode[0]))
print "modeslist:", list
list.sort()
return list
def modeSelectionMade(self, index):
print "modeSelectionMade:", index
self.mode = index
self.modeSelect(index)
def modeSelectionMoved(self):
print "mode selection moved:", self.selection
self.modeSelect(self.selection)
def modeSelect(self, mode):
ratesList = self.listRates(mode)
print "ratesList:", ratesList
if self.port == "HDMI" and mode in ("720p", "1080i", "1080p"):
self.rate = "multi"
self.hw.setMode(port = self.port, mode = mode, rate = "multi")
else:
self.hw.setMode(port = self.port, mode = mode, rate = ratesList[0][0])
def listRates(self, querymode = None):
if querymode is None:
querymode = self.mode
list = []
print "modes for port", self.port, "and mode", querymode
for mode in self.hw.getModeList(self.port):
print mode
if mode[0] == querymode:
for rate in mode[1]:
if self.port == "DVI-PC":
print "rate:", rate
if rate == "640x480":
list.insert(0, (rate, rate))
continue
list.append((rate, rate))
return list
def rateSelectionMade(self, index):
print "rateSelectionMade:", index
self.rate = index
self.rateSelect(index)
def rateSelectionMoved(self):
print "rate selection moved:", self.selection
self.rateSelect(self.selection)
def rateSelect(self, rate):
self.hw.setMode(port = self.port, mode = self.mode, rate = rate)
def showTestCard(self, selection = None):
if selection is None:
selection = self.selection
print "set config.misc.showtestcard to", {'yes': True, 'no': False}[selection]
if selection == "yes":
config.misc.showtestcard.value = True
else:
config.misc.showtestcard.value = False
def keyNumberGlobal(self, number):
if number in (1,2,3):
if number == 1:
self.hw.saveMode("HDMI", "720p", "multi")
elif number == 2:
self.hw.saveMode("HDMI", "1080i", "multi")
elif number == 3:
self.hw.saveMode("Scart", "Multi", "multi")
self.hw.setConfiguredMode()
self.close()
WizardLanguage.keyNumberGlobal(self, number)<|fim▁end|> | |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Django settings for test_project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
TIME_ZONE = 'Etc/UTC'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 't^4dt#fkxftpborp@%lg*#h2wj%vizl)#pkkt$&0f7b87rbu6y'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'test_project.urls'
WSGI_APPLICATION = 'test_project.wsgi.application'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# 'django.contrib.admin',
'djcelery',
'django_nose',
'useful', # Import the app to run tests
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
TEMPLATE_CONTEXT_PROCESSORS = (
'useful.context_processors.settings',
)
BROKER_BACKEND = 'memory'
CELERY_ALWAYS_EAGER = True
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'cleanup': {
'task': 'useful.tasks.call_management_command',
'schedule': timedelta(seconds=10),
'args': ('validate', ),<|fim▁hole|> },
}<|fim▁end|> | |
<|file_name|>add_delivery.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
__author__ = 'Rolf Jagerman'
from PySide import QtGui
import os
from authentication import AuthenticationListener, AuthenticationClient
from loadui import loadUi
from config import UI_DIRECTORY
from drawers import Drawers
from users import User
from locations import Location
class AddDelivery(QtGui.QFrame, AuthenticationListener):
"""
The add delivery form that enables a user to place a delivery in the robot
"""
def __init__(self, content):
super(AddDelivery, self).__init__()
self.content = content
loadUi(os.path.join(UI_DIRECTORY, 'add_delivery.ui'), self)
self.cancel_button.clicked.connect(self.cancel)
self.save_button.clicked.connect(self.place)
self.deliveries = {}
self.sender = None
self.drawer_id = '1'
for location in Location.get_locations():
self.location_combobox.addItem(location.name)
AuthenticationClient.add_listener(self)
def show(self, *args, **kwargs):
super(AddDelivery, self).show()
self.prepare_open_drawer()
while self.recipient_combobox.count() > 0:
self.recipient_combobox.removeItem(0)
for user in User.get_users():
if user.id != self.sender.id:
self.recipient_combobox.addItem(user.name)
# Reset the combobox fields when this form is shown
while self.drawer_combobox.count() > 0:
self.drawer_combobox.removeItem(0)
for drawer in Drawers.available_drawers():
self.drawer_combobox.addItem(drawer)
self.recipient_combobox.setCurrentIndex(0)
self.location_combobox.setCurrentIndex(0)
self.drawer_combobox.setCurrentIndex(0)
def prepare_open_drawer(self):
self.content.components['open_drawer'].save = lambda : self.save()
self.content.components['open_drawer'].back = lambda : self.back()
self.content.components['open_drawer'].success_message = 'Have you succesfully placed your delivery?'
self.content.components['open_drawer'].instruction_message = 'place'
def on_login(self, user):
self.sender = user
def on_login_failure(self, user):
self.sender = None
def on_logout(self, user):
self.sender = None<|fim▁hole|> def place(self):
self.drawer_id = self.drawer_combobox.currentText()
recipient_text = self.recipient_combobox.currentText()
for user in User.get_users():
if user.name == recipient_text:
self.recipient_id = user.id
location_text = self.location_combobox.currentText()
for location in Location.get_locations():
if location.name == location_text:
self.location_id = location.id
self.content.components['open_drawer'].drawer_id = self.drawer_combobox.currentText()
self.content.activate(self.content.components['open_drawer'])
def cancel(self):
self.content.activate(self.content.components['welcome'])
def back(self):
self.content.activate(self.content.components['add_delivery'])
def save(self):
Drawers.add_delivery(self.drawer_id, self.recipient_id, self.location_id)
self.content.activate(self.content.components['welcome'])<|fim▁end|> | |
<|file_name|>codeforces-options.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | window.codeforcesOptions = [];
window.codeforcesOptions.subscribeServerUrl = "http://pubsub.codeforces.com:85/sub"; |
<|file_name|>check.py<|end_file_name|><|fim▁begin|>'''
YARN Cluster Metrics
--------------------
yarn.metrics.appsSubmitted The number of submitted apps
yarn.metrics.appsCompleted The number of completed apps
yarn.metrics.appsPending The number of pending apps
yarn.metrics.appsRunning The number of running apps
yarn.metrics.appsFailed The number of failed apps
yarn.metrics.appsKilled The number of killed apps
yarn.metrics.reservedMB The size of reserved memory
yarn.metrics.availableMB The amount of available memory
yarn.metrics.allocatedMB The amount of allocated memory
yarn.metrics.totalMB The amount of total memory
yarn.metrics.reservedVirtualCores The number of reserved virtual cores
yarn.metrics.availableVirtualCores The number of available virtual cores
yarn.metrics.allocatedVirtualCores The number of allocated virtual cores
yarn.metrics.totalVirtualCores The total number of virtual cores
yarn.metrics.containersAllocated The number of containers allocated
yarn.metrics.containersReserved The number of containers reserved
yarn.metrics.containersPending The number of containers pending
yarn.metrics.totalNodes The total number of nodes
yarn.metrics.activeNodes The number of active nodes
yarn.metrics.lostNodes The number of lost nodes
yarn.metrics.unhealthyNodes The number of unhealthy nodes
yarn.metrics.decommissionedNodes The number of decommissioned nodes
yarn.metrics.rebootedNodes The number of rebooted nodes
YARN App Metrics
----------------
yarn.app.progress The progress of the application as a percent
yarn.app.startedTime The time in which application started (in ms since epoch)
yarn.app.finishedTime The time in which the application finished (in ms since epoch)
yarn.app.elapsedTime The elapsed time since the application started (in ms)
yarn.app.allocatedMB The sum of memory in MB allocated to the applications running containers
yarn.app.allocatedVCores The sum of virtual cores allocated to the applications running containers
yarn.app.runningContainers The number of containers currently running for the application
yarn.app.memorySeconds The amount of memory the application has allocated (megabyte-seconds)
yarn.app.vcoreSeconds The amount of CPU resources the application has allocated (virtual core-seconds)
YARN Node Metrics
-----------------
yarn.node.lastHealthUpdate The last time the node reported its health (in ms since epoch)
yarn.node.usedMemoryMB The total amount of memory currently used on the node (in MB)
yarn.node.availMemoryMB The total amount of memory currently available on the node (in MB)
yarn.node.usedVirtualCores The total number of vCores currently used on the node
yarn.node.availableVirtualCores The total number of vCores available on the node
yarn.node.numContainers The total number of containers currently running on the node
YARN Capacity Scheduler Metrics
-----------------
yarn.queue.root.maxCapacity The configured maximum queue capacity in percentage for root queue
yarn.queue.root.usedCapacity The used queue capacity in percentage for root queue
yarn.queue.root.capacity The configured queue capacity in percentage for root queue
yarn.queue.numPendingApplications The number of pending applications in this queue
yarn.queue.userAMResourceLimit.memory The maximum memory resources a user can use for Application Masters (in MB)
yarn.queue.userAMResourceLimit.vCores The maximum vCpus a user can use for Application Masters
yarn.queue.absoluteCapacity The absolute capacity percentage this queue can use of entire cluster
yarn.queue.userLimitFactor The minimum user limit percent set in the configuration
yarn.queue.userLimit The user limit factor set in the configuration
yarn.queue.numApplications The number of applications currently in the queue
yarn.queue.usedAMResource.memory The memory resources used for Application Masters (in MB)
yarn.queue.usedAMResource.vCores The vCpus used for Application Masters
yarn.queue.absoluteUsedCapacity The absolute used capacity percentage this queue is using of the entire cluster
yarn.queue.resourcesUsed.memory The total memory resources this queue is using (in MB)
yarn.queue.resourcesUsed.vCores The total vCpus this queue is using
yarn.queue.AMResourceLimit.vCores The maximum vCpus this queue can use for Application Masters
yarn.queue.AMResourceLimit.memory The maximum memory resources this queue can use for Application Masters (in MB)
yarn.queue.capacity The configured queue capacity in percentage relative to its parent queue
yarn.queue.numActiveApplications The number of active applications in this queue
yarn.queue.absoluteMaxCapacity The absolute maximum capacity percentage this queue can use of the entire cluster
yarn.queue.usedCapacity The used queue capacity in percentage
yarn.queue.numContainers The number of containers being used
yarn.queue.maxCapacity The configured maximum queue capacity in percentage relative to its parent queue
yarn.queue.maxApplications The maximum number of applications this queue can have
yarn.queue.maxApplicationsPerUser The maximum number of active applications per user this queue can have
'''
# stdlib
from urlparse import urljoin, urlsplit, urlunsplit
# 3rd party
from requests.exceptions import Timeout, HTTPError, InvalidURL, ConnectionError
import requests
# Project
from checks import AgentCheck
from config import _is_affirmative
# Default settings
DEFAULT_RM_URI = 'http://localhost:8088'
DEFAULT_TIMEOUT = 5
DEFAULT_CLUSTER_NAME = 'default_cluster'
DEFAULT_COLLECT_APP_METRICS = True
MAX_DETAILED_QUEUES = 100
# Path to retrieve cluster metrics
YARN_CLUSTER_METRICS_PATH = '/ws/v1/cluster/metrics'
# Path to retrieve YARN APPS
YARN_APPS_PATH = '/ws/v1/cluster/apps'
# Path to retrieve node statistics
YARN_NODES_PATH = '/ws/v1/cluster/nodes'
# Path to retrieve queue statistics
YARN_SCHEDULER_PATH = '/ws/v1/cluster/scheduler'
# Metric types
GAUGE = 'gauge'
INCREMENT = 'increment'
# Name of the service check
SERVICE_CHECK_NAME = 'yarn.can_connect'
# Application states to collect
YARN_APPLICATION_STATES = 'RUNNING'
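# The ResourceManager REST API accepts a comma-separated list here,
# e.g. 'RUNNING,ACCEPTED' (see the Hadoop RM REST docs).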
# Cluster metrics identifier
YARN_CLUSTER_METRICS_ELEMENT = 'clusterMetrics'
# Cluster metrics for YARN
YARN_CLUSTER_METRICS = {
'appsSubmitted': ('yarn.metrics.apps_submitted', GAUGE),
'appsCompleted': ('yarn.metrics.apps_completed', GAUGE),
'appsPending': ('yarn.metrics.apps_pending', GAUGE),
'appsRunning': ('yarn.metrics.apps_running', GAUGE),
'appsFailed': ('yarn.metrics.apps_failed', GAUGE),
'appsKilled': ('yarn.metrics.apps_killed', GAUGE),<|fim▁hole|> 'availableMB': ('yarn.metrics.available_mb', GAUGE),
'allocatedMB': ('yarn.metrics.allocated_mb', GAUGE),
'totalMB': ('yarn.metrics.total_mb', GAUGE),
'reservedVirtualCores': ('yarn.metrics.reserved_virtual_cores', GAUGE),
'availableVirtualCores': ('yarn.metrics.available_virtual_cores', GAUGE),
'allocatedVirtualCores': ('yarn.metrics.allocated_virtual_cores', GAUGE),
'totalVirtualCores': ('yarn.metrics.total_virtual_cores', GAUGE),
'containersAllocated': ('yarn.metrics.containers_allocated', GAUGE),
'containersReserved': ('yarn.metrics.containers_reserved', GAUGE),
'containersPending': ('yarn.metrics.containers_pending', GAUGE),
'totalNodes': ('yarn.metrics.total_nodes', GAUGE),
'activeNodes': ('yarn.metrics.active_nodes', GAUGE),
'lostNodes': ('yarn.metrics.lost_nodes', GAUGE),
'unhealthyNodes': ('yarn.metrics.unhealthy_nodes', GAUGE),
'decommissionedNodes': ('yarn.metrics.decommissioned_nodes', GAUGE),
'rebootedNodes': ('yarn.metrics.rebooted_nodes', GAUGE),
}
# Application metrics for YARN
YARN_APP_METRICS = {
'progress': ('yarn.apps.progress', INCREMENT),
'startedTime': ('yarn.apps.started_time', INCREMENT),
'finishedTime': ('yarn.apps.finished_time', INCREMENT),
'elapsedTime': ('yarn.apps.elapsed_time', INCREMENT),
'allocatedMB': ('yarn.apps.allocated_mb', INCREMENT),
'allocatedVCores': ('yarn.apps.allocated_vcores', INCREMENT),
'runningContainers': ('yarn.apps.running_containers', INCREMENT),
'memorySeconds': ('yarn.apps.memory_seconds', INCREMENT),
'vcoreSeconds': ('yarn.apps.vcore_seconds', INCREMENT),
}
# Node metrics for YARN
YARN_NODE_METRICS = {
'lastHealthUpdate': ('yarn.node.last_health_update', GAUGE),
'usedMemoryMB': ('yarn.node.used_memory_mb', GAUGE),
'availMemoryMB': ('yarn.node.avail_memory_mb', GAUGE),
'usedVirtualCores': ('yarn.node.used_virtual_cores', GAUGE),
'availableVirtualCores': ('yarn.node.available_virtual_cores', GAUGE),
'numContainers': ('yarn.node.num_containers', GAUGE),
}
# Root queue metrics for YARN
YARN_ROOT_QUEUE_METRICS = {
'maxCapacity': ('yarn.queue.root.max_capacity', GAUGE),
'usedCapacity': ('yarn.queue.root.used_capacity', GAUGE),
'capacity': ('yarn.queue.root.capacity', GAUGE)
}
# Queue metrics for YARN
YARN_QUEUE_METRICS = {
'numPendingApplications': ('yarn.queue.num_pending_applications', GAUGE),
'userAMResourceLimit.memory': ('yarn.queue.user_am_resource_limit.memory', GAUGE),
'userAMResourceLimit.vCores': ('yarn.queue.user_am_resource_limit.vcores', GAUGE),
'absoluteCapacity': ('yarn.queue.absolute_capacity', GAUGE),
'userLimitFactor': ('yarn.queue.user_limit_factor', GAUGE),
'userLimit': ('yarn.queue.user_limit', GAUGE),
'numApplications': ('yarn.queue.num_applications', GAUGE),
'usedAMResource.memory': ('yarn.queue.used_am_resource.memory', GAUGE),
'usedAMResource.vCores': ('yarn.queue.used_am_resource.vcores', GAUGE),
'absoluteUsedCapacity': ('yarn.queue.absolute_used_capacity', GAUGE),
'resourcesUsed.memory': ('yarn.queue.resources_used.memory', GAUGE),
'resourcesUsed.vCores': ('yarn.queue.resources_used.vcores', GAUGE),
'AMResourceLimit.vCores': ('yarn.queue.am_resource_limit.vcores', GAUGE),
'AMResourceLimit.memory': ('yarn.queue.am_resource_limit.memory', GAUGE),
'capacity': ('yarn.queue.capacity', GAUGE),
'numActiveApplications': ('yarn.queue.num_active_applications', GAUGE),
'absoluteMaxCapacity': ('yarn.queue.absolute_max_capacity', GAUGE),
'usedCapacity' : ('yarn.queue.used_capacity', GAUGE),
'numContainers': ('yarn.queue.num_containers', GAUGE),
'maxCapacity': ('yarn.queue.max_capacity', GAUGE),
'maxApplications': ('yarn.queue.max_applications', GAUGE),
'maxApplicationsPerUser': ('yarn.queue.max_applications_per_user', GAUGE)
}
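# Note: dotted keys such as 'resourcesUsed.memory' name nested fields of the
# scheduler JSON; the helper emitting these metrics is expected to traverse
# them (an assumption -- _set_yarn_metrics_from_json's body lies outside this excerpt).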
class YarnCheck(AgentCheck):
'''
Extract statistics from YARN's ResourceManger REST API
'''
_ALLOWED_APPLICATION_TAGS = [
'applicationTags',
'applicationType',
'name',
'queue',
'user'
]
def check(self, instance):
# Get properties from conf file
rm_address = instance.get('resourcemanager_uri', DEFAULT_RM_URI)
app_tags = instance.get('application_tags', {})
queue_blacklist = instance.get('queue_blacklist', [])
if type(app_tags) is not dict:
self.log.error('application_tags is incorrect: %s is not a dictionary', app_tags)
app_tags = {}
filtered_app_tags = {}
for dd_prefix, yarn_key in app_tags.iteritems():
if yarn_key in self._ALLOWED_APPLICATION_TAGS:
filtered_app_tags[dd_prefix] = yarn_key
app_tags = filtered_app_tags
# Collected by default
app_tags['app_name'] = 'name'
# Get additional tags from the conf file
tags = instance.get('tags', [])
if tags is None:
tags = []
else:
tags = list(set(tags))
# Get the cluster name from the conf file
cluster_name = instance.get('cluster_name')
if cluster_name is None:
self.warning("The cluster_name must be specified in the instance configuration, defaulting to '%s'" % (DEFAULT_CUSTER_NAME))
cluster_name = DEFAULT_CUSTER_NAME
tags.append('cluster_name:%s' % cluster_name)
# Get metrics from the Resource Manager
self._yarn_cluster_metrics(rm_address, tags)
if _is_affirmative(instance.get('collect_app_metrics', DEFAULT_COLLECT_APP_METRICS)):
self._yarn_app_metrics(rm_address, app_tags, tags)
self._yarn_node_metrics(rm_address, tags)
self._yarn_scheduler_metrics(rm_address, tags, queue_blacklist)
def _yarn_cluster_metrics(self, rm_address, addl_tags):
'''
Get metrics related to YARN cluster
'''
metrics_json = self._rest_request_to_json(rm_address, YARN_CLUSTER_METRICS_PATH)
if metrics_json:
yarn_metrics = metrics_json[YARN_CLUSTER_METRICS_ELEMENT]
if yarn_metrics is not None:
self._set_yarn_metrics_from_json(addl_tags, yarn_metrics, YARN_CLUSTER_METRICS)
def _yarn_app_metrics(self, rm_address, app_tags, addl_tags):
'''
Get metrics for running applications
'''
metrics_json = self._rest_request_to_json(
rm_address,
YARN_APPS_PATH,
states=YARN_APPLICATION_STATES
)
if (metrics_json and metrics_json['apps'] is not None and
metrics_json['apps']['app'] is not None):
for app_json in metrics_json['apps']['app']:
tags = []
for dd_tag, yarn_key in app_tags.iteritems():
try:
val = app_json[yarn_key]
if val:
tags.append("{tag}:{value}".format(
tag=dd_tag, value=val
))
except KeyError:
self.log.error("Invalid value %s for application_tag", yarn_key)
tags.extend(addl_tags)
self._set_yarn_metrics_from_json(tags, app_json, YARN_APP_METRICS)
def _yarn_node_metrics(self, rm_address, addl_tags):
'''
Get metrics related to YARN nodes
'''
metrics_json = self._rest_request_to_json(rm_address, YARN_NODES_PATH)
if (metrics_json and metrics_json['nodes'] is not None and
metrics_json['nodes']['node'] is not None):
for node_json in metrics_json['nodes']['node']:
node_id = node_json['id']
tags = ['node_id:%s' % str(node_id)]
tags.extend(addl_tags)
self._set_yarn_metrics_from_json(tags, node_json, YARN_NODE_METRICS)
def _yarn_scheduler_metrics(self, rm_address, addl_tags, queue_blacklist):
'''
Get metrics from YARN scheduler
'''
metrics_json = self._rest_request_to_json(rm_address, YARN_SCHEDULER_PATH)
try:
metrics_json = metrics_json['scheduler']['schedulerInfo']
if metrics_json['type'] == 'capacityScheduler':
self._yarn_capacity_scheduler_metrics(metrics_json, addl_tags, queue_blacklist)
except KeyError:
pass
def _yarn_capacity_scheduler_metrics(self, metrics_json, addl_tags, queue_blacklist):
'''
        Get metrics from YARN scheduler if its type is capacityScheduler
'''
tags = ['queue_name:%s' % metrics_json['queueName']]
tags.extend(addl_tags)
self._set_yarn_metrics_from_json(tags, metrics_json, YARN_ROOT_QUEUE_METRICS)
if metrics_json['queues'] is not None and metrics_json['queues']['queue'] is not None:
queues_count = 0
for queue_json in metrics_json['queues']['queue']:
queue_name = queue_json['queueName']
if queue_name in queue_blacklist:
                    self.log.debug('Queue "%s" is blacklisted. Ignoring it', queue_name)
continue
queues_count += 1
if queues_count > MAX_DETAILED_QUEUES:
self.warning("Found more than 100 queues, will only send metrics on first 100 queues. " +
" Please filter the queues with the check's `queue_blacklist` parameter")
break
tags = ['queue_name:%s' % str(queue_name)]
tags.extend(addl_tags)
self._set_yarn_metrics_from_json(tags, queue_json, YARN_QUEUE_METRICS)
def _set_yarn_metrics_from_json(self, tags, metrics_json, yarn_metrics):
'''
Parse the JSON response and set the metrics
'''
for dict_path, metric in yarn_metrics.iteritems():
metric_name, metric_type = metric
metric_value = self._get_value_from_json(dict_path, metrics_json)
if metric_value is not None:
self._set_metric(metric_name,
metric_type,
metric_value,
tags)
def _get_value_from_json(self, dict_path, metrics_json):
'''
Get a value from a dictionary under N keys, represented as str("key1.key2...key{n}")
'''
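        # Illustrative example (values are made up): with
        # metrics_json = {'resourcesUsed': {'memory': 2048}}, the dict_path
        # 'resourcesUsed.memory' returns 2048, while a key missing anywhere
        # along the path (e.g. 'resourcesUsed.vCores' here) returns None.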
for key in dict_path.split('.'):
if key in metrics_json:
metrics_json = metrics_json.get(key)
else:
return None
return metrics_json
def _set_metric(self, metric_name, metric_type, value, tags=None, device_name=None):
'''
Set a metric
'''
if metric_type == GAUGE:
self.gauge(metric_name, value, tags=tags, device_name=device_name)
elif metric_type == INCREMENT:
self.increment(metric_name, value, tags=tags, device_name=device_name)
else:
self.log.error('Metric type "%s" unknown', metric_type)
def _rest_request_to_json(self, address, object_path, *args, **kwargs):
'''
Query the given URL and return the JSON response
'''
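        # Illustrative example (host and path are hypothetical): calling
        #   self._rest_request_to_json('http://localhost:8088', 'ws/v1/cluster/apps', states='RUNNING')
        # issues GET http://localhost:8088/ws/v1/cluster/apps?states=RUNNING
        # and returns the decoded JSON body.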
response_json = None
service_check_tags = ['url:%s' % self._get_url_base(address)]
url = address
if object_path:
url = self._join_url_dir(url, object_path)
# Add args to the url
if args:
for directory in args:
url = self._join_url_dir(url, directory)
        self.log.debug('Attempting to connect to "%s"', url)
# Add kwargs as arguments
if kwargs:
query = '&'.join(['{0}={1}'.format(key, value) for key, value in kwargs.iteritems()])
url = urljoin(url, '?' + query)
try:
response = requests.get(url, timeout=self.default_integration_http_timeout)
response.raise_for_status()
response_json = response.json()
except Timeout as e:
self.service_check(SERVICE_CHECK_NAME,
AgentCheck.CRITICAL,
tags=service_check_tags,
message="Request timeout: {0}, {1}".format(url, e))
raise
except (HTTPError,
InvalidURL,
ConnectionError) as e:
self.service_check(SERVICE_CHECK_NAME,
AgentCheck.CRITICAL,
tags=service_check_tags,
message="Request failed: {0}, {1}".format(url, e))
raise
except ValueError as e:
self.service_check(SERVICE_CHECK_NAME,
AgentCheck.CRITICAL,
tags=service_check_tags,
message=str(e))
raise
else:
self.service_check(SERVICE_CHECK_NAME,
AgentCheck.OK,
tags=service_check_tags,
message='Connection to %s was successful' % url)
return response_json
def _join_url_dir(self, url, *args):
'''
Join a URL with multiple directories
'''
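        # Illustrative example: _join_url_dir('http://rm:8088/', '/ws/v1/cluster')
        # returns 'http://rm:8088/ws/v1/cluster'; leading and trailing slashes
        # on each segment are normalized before joining.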
for path in args:
url = url.rstrip('/') + '/'
url = urljoin(url, path.lstrip('/'))
return url
def _get_url_base(self, url):
'''
Return the base of a URL
'''
s = urlsplit(url)
return urlunsplit([s.scheme, s.netloc, '', '', ''])<|fim▁end|> | 'reservedMB': ('yarn.metrics.reserved_mb', GAUGE), |
<|file_name|>GeneralProcessorComplexArithmetic.cpp<|end_file_name|><|fim▁begin|>#include "GeneralProcessor.h"
namespace engine {
////==--------------------------------------------------------------------====//
// ECKERT PROCESSOR / ADD
// [ description ]
// ADD function for 2 complex numbers
// (a + ib) + (c + id) = (a + c) + i(b + d)
// [ Update ]
// Jan 25, 2016
//====--------------------------------------------------------------------==////
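// [ illustrative check, not program output ]
//	(1 + 2i) + (3 + 4i) = 4 + 6i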
SpElement GeneralProcessor::addComplex(const SpElement &p_ey, const SpElement &p_ex) {
SpElement p_etemp;
bool y_cplx = p_ey->isType(Element::COMPLEX);
bool x_cplx = p_ex->isType(Element::COMPLEX);
if (y_cplx && x_cplx) {
//== COMPLEX + COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// real = a + c
p_real = addScalar(a, c);
// imag = b + d
p_imag = addScalar(b, d);
// optimize complex
setComplex(p_etemp, p_real, p_imag);
}
else if (y_cplx) {
//== COMPLEX + ANY ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = p_ex;
// real = a + c
p_real = addScalar(a, c);
// imag = b
p_imag = b->clone();
// optimize complex
setComplex(p_etemp, p_real, p_imag);
}
else if (x_cplx) {
//== ANY + COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = p_ey;
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// real = a + c
p_real = addScalar(a, c);
// imag = d<|fim▁hole|> p_imag = d->clone();
setComplex(p_etemp, p_real, p_imag);
}
else {
//== Unexpected ==//
throw BadArgument("BAD_TYPE", __FUNCTION__);
}
return p_etemp;
}
////==--------------------------------------------------------------------====//
// ECKERT PROCESSOR / SUBTRACT
// [ description ]
// Subtract function for 2 complex numbers
// (a + ib) - (c + id) = (a - c) + i(b - d)
// [ Update ]
// Jan 25, 2016
//====--------------------------------------------------------------------==////
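// [ illustrative check, not program output ]
//	(1 + 2i) - (3 + 4i) = -2 - 2i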
SpElement GeneralProcessor::subComplex(const SpElement &p_ey, const SpElement &p_ex) {
SpElement p_etemp;
bool y_cplx = p_ey->isType(Element::COMPLEX);
bool x_cplx = p_ex->isType(Element::COMPLEX);
if (y_cplx && x_cplx) {
		//== COMPLEX - COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// real = a - c
p_real = subScalar(a, c);
// imag = b - d
p_imag = subScalar(b, d);
// optimize complex
setComplex(p_etemp, p_real, p_imag);
}
else if (y_cplx) {
		//== COMPLEX - ANY ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = p_ex;
// real = a - c
p_real = subScalar(a, c);
// imag = b
p_imag = b->clone();
// optimize complex
setComplex(p_etemp, p_real, p_imag);
}
else if (x_cplx) {
		//== ANY - COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = p_ey;
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// real = a - c
p_real = subScalar(a, c);
// imag = -d
p_imag = neg(d);
setComplex(p_etemp, p_real, p_imag);
}
else {
//== Unexpected ==//
throw BadArgument("BAD_TYPE", __FUNCTION__);
}
return p_etemp;
}
////==--------------------------------------------------------------------====//
// ECKERT PROCESSOR / MULTIPLY
// [ description ]
// MULTIPLY function for 2 complex numbers
// (a + ib) * (c + id) = (ac - bd) + i(ad + bc)
// [ Update ]
// Jan 28, 2016
//====--------------------------------------------------------------------==////
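// [ illustrative check, not program output ]
//	(1 + 2i) * (3 + 4i) = (1*3 - 2*4) + i(1*4 + 2*3) = -5 + 10i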
SpElement GeneralProcessor::mulComplex(const SpElement &p_ey, const SpElement &p_ex) {
SpElement p_etemp;
bool y_cplx = p_ey->isType(Element::COMPLEX);
bool x_cplx = p_ex->isType(Element::COMPLEX);
if (y_cplx && x_cplx) {
//== COMPLEX * COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// real = (ac - bd)
p_real = subScalar(
mulScalar(a, c), mulScalar(b, d)
);
// imag = (ad + bc)
p_imag = addScalar(
mulScalar(a, d), mulScalar(b, c)
);
setComplex(p_etemp, p_real, p_imag);
}
else if (y_cplx) {
//== COMPLEX * ANY ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = p_ex;
// real = ac
p_real = mulScalar(a, c);
// imag = bc
p_imag = mulScalar(b, c);
setComplex(p_etemp, p_real, p_imag);
}
else if (x_cplx) {
//== ANY * COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = p_ey;
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// real = ac
p_real = mulScalar(a, c);
// imag = ad
p_imag = mulScalar(a, d);
setComplex(p_etemp, p_real, p_imag);
}
else {
//== Unexpected ==//
throw BadArgument("BAD_TYPE", __FUNCTION__);
}
return p_etemp;
}
////==--------------------------------------------------------------------====//
// ECKERT PROCESSOR / DIVIDE
// [ description ]
// DIVIDE function for 2 complex numbers
// (a + ib) / (c + id) = ((ac + bd) / sq) + i((bc - ad) / sq)
// sq = c^2 + d^2
// [ Update ]
// Jan 28, 2016
//====--------------------------------------------------------------------==////
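// [ illustrative check, not program output ]
//	(1 + 2i) / (3 + 4i): sq = 3^2 + 4^2 = 25,
//	real = (1*3 + 2*4) / 25 = 11/25, imag = (2*3 - 1*4) / 25 = 2/25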
SpElement GeneralProcessor::divComplex(const SpElement &p_ey, const SpElement &p_ex) {
// check division by zero
if (isZero(p_ex)) {
throw InvalidValue("DIV_ZERO", __FUNCTION__);
}
else if (isZero(p_ey)) {
return p_ey;
}
SpElement p_etemp;
bool y_cplx = p_ey->isType(Element::COMPLEX);
bool x_cplx = p_ex->isType(Element::COMPLEX);
if (y_cplx && x_cplx) {
//== COMPLEX / COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// sq = c^2 + d^2
p_etemp = addScalar(
mulScalar(c, c), mulScalar(d, d)
);
// real = (ac + bd) / sq
p_real = addScalar(
mulScalar(a, c), mulScalar(b, d)
);
p_real = divScalar(p_real, p_etemp);
// imag = (bc - ad) / sq
p_imag = subScalar(
mulScalar(b, c), mulScalar(a, d)
);
p_imag = divScalar(p_imag, p_etemp);
setComplex(p_etemp, p_real, p_imag);
}
else if (y_cplx) {
//== COMPLEX / ANY ==//
SpElement p_real;
SpElement p_imag;
auto a = GET_COMPLEX_RE(p_ey);
auto b = GET_COMPLEX_IM(p_ey);
auto c = p_ex;
// real = a/c
p_real = divScalar(a, c);
// imag = b/c
p_imag = divScalar(b, c);
setComplex(p_etemp, p_real, p_imag);
}
else if (x_cplx) {
//== ANY / COMPLEX ==//
SpElement p_real;
SpElement p_imag;
auto a = p_ey;
auto c = GET_COMPLEX_RE(p_ex);
auto d = GET_COMPLEX_IM(p_ex);
// sq = c^2 + d^2
p_etemp = addScalar(
mulScalar(c, c), mulScalar(d, d)
);
// real = ac / sq
p_real = divScalar(
mulScalar(a, c), p_etemp
);
// imag = -ad / sq
p_imag = neg(
divScalar(mulScalar(a, d), p_etemp)
);
// optimize complex
setComplex(p_etemp, p_real, p_imag);
}
else {
//== Unexpected ==//
throw BadArgument("BAD_TYPE", __FUNCTION__);
}
return p_etemp;
}
} // namespace engine<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
"""
models.py
Created by Darcy Liu on 2012-03-03.
Copyright (c) 2012 Close To U. All rights reserved.
"""
from django.db import models
from django.contrib.auth.models import User
# class Setting(models.Model):
# sid = models.AutoField(primary_key=True)
# option = models.CharField(unique=True,max_length=128,verbose_name='Option')
# value = models.CharField(max_length=256,verbose_name='Value')
class Minisite(models.Model):
key = models.AutoField(primary_key=True)
name = models.CharField(max_length=256,verbose_name='name')
slug = models.CharField(unique=True,max_length=128,verbose_name='slug')
meta = models.TextField(blank=True, verbose_name='meta')
description = models.TextField(blank=True, verbose_name='description')<|fim▁hole|> result = self.name
return unicode(result)
class Page(models.Model):
key = models.AutoField(primary_key=True)
name = models.CharField(max_length=256,verbose_name='name')
slug = models.CharField(max_length=128,verbose_name='slug')
    # type: insite or standalone
Mode_Choices = (
('0', 'insite'),
        ('1', 'standalone'),
)
    mode = models.CharField(verbose_name='mode',max_length=1,default=0,choices=Mode_Choices)
#content-type
mime = models.CharField(max_length=64,default='text/html;charset=utf-8',verbose_name='mime')
#format
Format_Choices = (
('0', 'txt'),
('1', 'html'),
('2', 'markdown'),
('3', 'textile'),
)
format = models.CharField(verbose_name='format',max_length=1,default=0,choices=Format_Choices)
text = models.TextField(blank=True, verbose_name='content')
script = models.TextField(blank=True, verbose_name='script')
style = models.TextField(blank=True, verbose_name='style')
text_html = models.TextField(blank=True, verbose_name='html')
minisite = models.ForeignKey(Minisite,verbose_name='minisite')
author = models.ForeignKey(User,verbose_name='author')
created = models.DateTimeField(auto_now_add=True,verbose_name='created')
updated = models.DateTimeField(auto_now=True,verbose_name='updated')
def __unicode__(self):
result = self.name
return unicode(result)
class Meta:
unique_together = (('slug', 'minisite'),)<|fim▁end|> | author = models.ForeignKey(User,verbose_name='author')
created = models.DateTimeField(auto_now_add=True,verbose_name='created')
updated = models.DateTimeField(auto_now=True,verbose_name='updated')
def __unicode__(self): |
<|file_name|>OrStringFilterColumn.java<|end_file_name|><|fim▁begin|>package helpers.db.filter.column;
import org.apache.commons.lang.StringUtils;
import java.util.ArrayList;
import java.util.List;
/**
* Filter on a disjunction of string matches.
* Instead of filtering on "column ~= filter", filters on (columns[0] ~= filter or ... or columns[n - 1] ~= filter).
*
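 * For example (illustrative), with columns {"firstname", "lastname"} the generated
 * predicate is "(P(firstname) or P(lastname))", where P(c) stands for the predicate
 * that StringFilterColumn builds for column c against the same filter value.
 *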
* @author jtremeaux
*/
public class OrStringFilterColumn extends StringFilterColumn {
private String[] columns;
public OrStringFilterColumn(String column, String filter, String... columns) {
super(column, filter);
this.columns = columns;
}
@Override
public String getPredicate() {
List<String> predicates = new ArrayList<>();
for (String c : columns) {
StringFilterColumn f = new StringFilterColumn(c, filter) {<|fim▁hole|> }
};
predicates.add(f.getPredicate());
}
return "(" + StringUtils.join(predicates, " or ") + ")";
}
}<|fim▁end|> | @Override
public String getParamName() {
return "filtercolumn_" + OrStringFilterColumn.this.hashCode(); |
<|file_name|>native.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var React = require('react');
var React__default = _interopDefault(React);
var mobx = require('mobx');
var reactNative = require('react-native');
function _typeof(obj) {
if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
_typeof = function (obj) {
return typeof obj;
};
} else {
_typeof = function (obj) {
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
};
}
return _typeof(obj);
}
function _classCallCheck(instance, Constructor) {
if (!(instance instanceof Constructor)) {
throw new TypeError("Cannot call a class as a function");
}
}
function _defineProperties(target, props) {
for (var i = 0; i < props.length; i++) {
var descriptor = props[i];
descriptor.enumerable = descriptor.enumerable || false;
descriptor.configurable = true;
if ("value" in descriptor) descriptor.writable = true;
Object.defineProperty(target, descriptor.key, descriptor);
}
}
function _createClass(Constructor, protoProps, staticProps) {
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
if (staticProps) _defineProperties(Constructor, staticProps);
return Constructor;
}
function _defineProperty(obj, key, value) {
if (key in obj) {
Object.defineProperty(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
function _inherits(subClass, superClass) {
if (typeof superClass !== "function" && superClass !== null) {
throw new TypeError("Super expression must either be null or a function");
}
subClass.prototype = Object.create(superClass && superClass.prototype, {
constructor: {
value: subClass,
writable: true,
configurable: true
}
});
if (superClass) _setPrototypeOf(subClass, superClass);
}
function _getPrototypeOf(o) {
_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
return o.__proto__ || Object.getPrototypeOf(o);
};
return _getPrototypeOf(o);
}
function _setPrototypeOf(o, p) {
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
o.__proto__ = p;
return o;
};
return _setPrototypeOf(o, p);
}
function _assertThisInitialized(self) {
if (self === void 0) {
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
}
return self;
}
function _possibleConstructorReturn(self, call) {
if (call && (typeof call === "object" || typeof call === "function")) {
return call;
}
return _assertThisInitialized(self);
}
// These functions can be stubbed out in specific environments
function unwrapExports (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x.default : x;
}
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
var reactIs_production_min = createCommonjsModule(function (module, exports) {
Object.defineProperty(exports,"__esModule",{value:!0});
var b="function"===typeof Symbol&&Symbol.for,c=b?Symbol.for("react.element"):60103,d=b?Symbol.for("react.portal"):60106,e=b?Symbol.for("react.fragment"):60107,f=b?Symbol.for("react.strict_mode"):60108,g=b?Symbol.for("react.profiler"):60114,h=b?Symbol.for("react.provider"):60109,k=b?Symbol.for("react.context"):60110,l=b?Symbol.for("react.async_mode"):60111,m=b?Symbol.for("react.forward_ref"):60112,n=b?Symbol.for("react.placeholder"):60113;
function q(a){if("object"===typeof a&&null!==a){var p=a.$$typeof;switch(p){case c:switch(a=a.type,a){case l:case e:case g:case f:return a;default:switch(a=a&&a.$$typeof,a){case k:case m:case h:return a;default:return p}}case d:return p}}}exports.typeOf=q;exports.AsyncMode=l;exports.ContextConsumer=k;exports.ContextProvider=h;exports.Element=c;exports.ForwardRef=m;exports.Fragment=e;exports.Profiler=g;exports.Portal=d;exports.StrictMode=f;
exports.isValidElementType=function(a){return "string"===typeof a||"function"===typeof a||a===e||a===l||a===g||a===f||a===n||"object"===typeof a&&null!==a&&("function"===typeof a.then||a.$$typeof===h||a.$$typeof===k||a.$$typeof===m)};exports.isAsyncMode=function(a){return q(a)===l};exports.isContextConsumer=function(a){return q(a)===k};exports.isContextProvider=function(a){return q(a)===h};exports.isElement=function(a){return "object"===typeof a&&null!==a&&a.$$typeof===c};
exports.isForwardRef=function(a){return q(a)===m};exports.isFragment=function(a){return q(a)===e};exports.isProfiler=function(a){return q(a)===g};exports.isPortal=function(a){return q(a)===d};exports.isStrictMode=function(a){return q(a)===f};
});
unwrapExports(reactIs_production_min);
var reactIs_production_min_1 = reactIs_production_min.typeOf;
var reactIs_production_min_2 = reactIs_production_min.AsyncMode;
var reactIs_production_min_3 = reactIs_production_min.ContextConsumer;
var reactIs_production_min_4 = reactIs_production_min.ContextProvider;
var reactIs_production_min_5 = reactIs_production_min.Element;
var reactIs_production_min_6 = reactIs_production_min.ForwardRef;
var reactIs_production_min_7 = reactIs_production_min.Fragment;
var reactIs_production_min_8 = reactIs_production_min.Profiler;
var reactIs_production_min_9 = reactIs_production_min.Portal;
var reactIs_production_min_10 = reactIs_production_min.StrictMode;
var reactIs_production_min_11 = reactIs_production_min.isValidElementType;
var reactIs_production_min_12 = reactIs_production_min.isAsyncMode;
var reactIs_production_min_13 = reactIs_production_min.isContextConsumer;
var reactIs_production_min_14 = reactIs_production_min.isContextProvider;
var reactIs_production_min_15 = reactIs_production_min.isElement;
var reactIs_production_min_16 = reactIs_production_min.isForwardRef;
var reactIs_production_min_17 = reactIs_production_min.isFragment;
var reactIs_production_min_18 = reactIs_production_min.isProfiler;
var reactIs_production_min_19 = reactIs_production_min.isPortal;
var reactIs_production_min_20 = reactIs_production_min.isStrictMode;
var reactIs = createCommonjsModule(function (module) {
{
module.exports = reactIs_production_min;
}
});
var _ReactIs$ForwardRef;
function _defineProperty$1(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
/**
* Copyright 2015, Yahoo! Inc.
* Copyrights licensed under the New BSD License. See the accompanying LICENSE file for terms.
*/
var REACT_STATICS = {
childContextTypes: true,
contextTypes: true,
defaultProps: true,
displayName: true,
getDefaultProps: true,
getDerivedStateFromProps: true,
mixins: true,
propTypes: true,
type: true
};
var KNOWN_STATICS = {
name: true,
length: true,
prototype: true,
caller: true,
callee: true,
arguments: true,
arity: true
};
var TYPE_STATICS = _defineProperty$1({}, reactIs.ForwardRef, (_ReactIs$ForwardRef = {}, _defineProperty$1(_ReactIs$ForwardRef, '$$typeof', true), _defineProperty$1(_ReactIs$ForwardRef, 'render', true), _ReactIs$ForwardRef));
var defineProperty = Object.defineProperty;
var getOwnPropertyNames = Object.getOwnPropertyNames;
var getOwnPropertySymbols = Object.getOwnPropertySymbols;
var getOwnPropertyDescriptor = Object.getOwnPropertyDescriptor;
var getPrototypeOf = Object.getPrototypeOf;
var objectPrototype = Object.prototype;
function hoistNonReactStatics(targetComponent, sourceComponent, blacklist) {
if (typeof sourceComponent !== 'string') {
// don't hoist over string (html) components
if (objectPrototype) {
var inheritedComponent = getPrototypeOf(sourceComponent);
if (inheritedComponent && inheritedComponent !== objectPrototype) {
hoistNonReactStatics(targetComponent, inheritedComponent, blacklist);
}
}
var keys = getOwnPropertyNames(sourceComponent);
if (getOwnPropertySymbols) {
keys = keys.concat(getOwnPropertySymbols(sourceComponent));
}
var targetStatics = TYPE_STATICS[targetComponent['$$typeof']] || REACT_STATICS;
var sourceStatics = TYPE_STATICS[sourceComponent['$$typeof']] || REACT_STATICS;
for (var i = 0; i < keys.length; ++i) {
var key = keys[i];
if (!KNOWN_STATICS[key] && !(blacklist && blacklist[key]) && !(sourceStatics && sourceStatics[key]) && !(targetStatics && targetStatics[key])) {
var descriptor = getOwnPropertyDescriptor(sourceComponent, key);
try {
// Avoid failures from read-only properties
defineProperty(targetComponent, key, descriptor);
} catch (e) {}
}
}
return targetComponent;
}
return targetComponent;
}
var hoistNonReactStatics_cjs = hoistNonReactStatics;
var EventEmitter =
/*#__PURE__*/
function () {
function EventEmitter() {
_classCallCheck(this, EventEmitter);
this.listeners = [];
}
_createClass(EventEmitter, [{
key: "on",
value: function on(cb) {
var _this = this;
this.listeners.push(cb);
return function () {
var index = _this.listeners.indexOf(cb);
if (index !== -1) _this.listeners.splice(index, 1);
};
}
}, {
key: "emit",
value: function emit(data) {
this.listeners.forEach(function (fn) {
return fn(data);
});
}
}]);
return EventEmitter;
}();
function createChainableTypeChecker(validate) {
function checkType(isRequired, props, propName, componentName, location, propFullName) {
for (var _len = arguments.length, rest = new Array(_len > 6 ? _len - 6 : 0), _key = 6; _key < _len; _key++) {
rest[_key - 6] = arguments[_key];
}
return mobx.untracked(function () {
componentName = componentName || "<<anonymous>>";
propFullName = propFullName || propName;
if (props[propName] == null) {
if (isRequired) {
var actual = props[propName] === null ? "null" : "undefined";
return new Error("The " + location + " `" + propFullName + "` is marked as required " + "in `" + componentName + "`, but its value is `" + actual + "`.");
}
return null;
} else {
return validate.apply(void 0, [props, propName, componentName, location, propFullName].concat(rest));
}
});
}
var chainedCheckType = checkType.bind(null, false);
chainedCheckType.isRequired = checkType.bind(null, true);
return chainedCheckType;
}
// Copied from React.PropTypes
function isSymbol(propType, propValue) {
// Native Symbol.
if (propType === "symbol") {
return true;
} // 19.4.3.5 Symbol.prototype[@@toStringTag] === 'Symbol'
if (propValue["@@toStringTag"] === "Symbol") {
return true;
} // Fallback for non-spec compliant Symbols which are polyfilled.
if (typeof Symbol === "function" && propValue instanceof Symbol) {
return true;
}
return false;
} // Copied from React.PropTypes
function getPropType(propValue) {
var propType = _typeof(propValue);
if (Array.isArray(propValue)) {
return "array";
}
if (propValue instanceof RegExp) {
// Old webkits (at least until Android 4.0) return 'function' rather than
// 'object' for typeof a RegExp. We'll normalize this here so that /bla/
// passes PropTypes.object.
return "object";
}
if (isSymbol(propType, propValue)) {
return "symbol";
}
return propType;
}
// This handles more types than `getPropType`. Only used for error messages.
// Copied from React.PropTypes
function getPreciseType(propValue) {
var propType = getPropType(propValue);
if (propType === "object") {
if (propValue instanceof Date) {
return "date";
} else if (propValue instanceof RegExp) {
return "regexp";
}
}
return propType;
}
function createObservableTypeCheckerCreator(allowNativeType, mobxType) {
return createChainableTypeChecker(function (props, propName, componentName, location, propFullName) {
return mobx.untracked(function () {
if (allowNativeType) {
if (getPropType(props[propName]) === mobxType.toLowerCase()) return null;
}
var mobxChecker;
switch (mobxType) {
case "Array":
mobxChecker = mobx.isObservableArray;
break;
case "Object":
mobxChecker = mobx.isObservableObject;
break;
case "Map":
mobxChecker = mobx.isObservableMap;
break;
default:
throw new Error("Unexpected mobxType: ".concat(mobxType));
}
var propValue = props[propName];
if (!mobxChecker(propValue)) {
var preciseType = getPreciseType(propValue);
var nativeTypeExpectationMessage = allowNativeType ? " or javascript `" + mobxType.toLowerCase() + "`" : "";
return new Error("Invalid prop `" + propFullName + "` of type `" + preciseType + "` supplied to" + " `" + componentName + "`, expected `mobx.Observable" + mobxType + "`" + nativeTypeExpectationMessage + ".");
}
return null;
});
});
}
function createObservableArrayOfTypeChecker(allowNativeType, typeChecker) {
return createChainableTypeChecker(function (props, propName, componentName, location, propFullName) {
for (var _len2 = arguments.length, rest = new Array(_len2 > 5 ? _len2 - 5 : 0), _key2 = 5; _key2 < _len2; _key2++) {
rest[_key2 - 5] = arguments[_key2];
}
return mobx.untracked(function () {
if (typeof typeChecker !== "function") {
return new Error("Property `" + propFullName + "` of component `" + componentName + "` has " + "invalid PropType notation.");
}
var error = createObservableTypeCheckerCreator(allowNativeType, "Array")(props, propName, componentName);
if (error instanceof Error) return error;
var propValue = props[propName];
for (var i = 0; i < propValue.length; i++) {
error = typeChecker.apply(void 0, [propValue, i, componentName, location, propFullName + "[" + i + "]"].concat(rest));
if (error instanceof Error) return error;
}
return null;
});
});
}
var observableArray = createObservableTypeCheckerCreator(false, "Array");
var observableArrayOf = createObservableArrayOfTypeChecker.bind(null, false);
var observableMap = createObservableTypeCheckerCreator(false, "Map");
var observableObject = createObservableTypeCheckerCreator(false, "Object");
var arrayOrObservableArray = createObservableTypeCheckerCreator(true, "Array");
var arrayOrObservableArrayOf = createObservableArrayOfTypeChecker.bind(null, true);
var objectOrObservableObject = createObservableTypeCheckerCreator(true, "Object");
var propTypes = /*#__PURE__*/Object.freeze({
observableArray: observableArray,
observableArrayOf: observableArrayOf,
observableMap: observableMap,
observableObject: observableObject,
arrayOrObservableArray: arrayOrObservableArray,
arrayOrObservableArrayOf: arrayOrObservableArrayOf,
objectOrObservableObject: objectOrObservableObject
});
function isStateless(component) {
// `function() {}` has prototype, but `() => {}` doesn't
// `() => {}` via Babel has prototype too.
return !(component.prototype && component.prototype.render);
}
var symbolId = 0;
function newSymbol(name) {
if (typeof Symbol === "function") {
return Symbol(name);
}
var symbol = "__$mobx-react ".concat(name, " (").concat(symbolId, ")");
symbolId++;
return symbol;
}
var mobxMixins = newSymbol("patchMixins");
var mobxPatchedDefinition = newSymbol("patchedDefinition");
function getMixins(target, methodName) {
var mixins = target[mobxMixins] = target[mobxMixins] || {};
var methodMixins = mixins[methodName] = mixins[methodName] || {};
methodMixins.locks = methodMixins.locks || 0;
methodMixins.methods = methodMixins.methods || [];
return methodMixins;
}
function wrapper(realMethod, mixins) {
var _this = this;
for (var _len = arguments.length, args = new Array(_len > 2 ? _len - 2 : 0), _key = 2; _key < _len; _key++) {
args[_key - 2] = arguments[_key];
}
// locks are used to ensure that mixins are invoked only once per invocation, even on recursive calls
mixins.locks++;
try {
var retVal;
if (realMethod !== undefined && realMethod !== null) {
retVal = realMethod.apply(this, args);
}
return retVal;
} finally {
mixins.locks--;
if (mixins.locks === 0) {
mixins.methods.forEach(function (mx) {
mx.apply(_this, args);
});
}
}
}
function wrapFunction(realMethod, mixins) {
var fn = function fn() {
for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
args[_key2] = arguments[_key2];
}
wrapper.call.apply(wrapper, [this, realMethod, mixins].concat(args));
};
return fn;
}
function patch(target, methodName) {
var mixins = getMixins(target, methodName);
for (var _len3 = arguments.length, mixinMethods = new Array(_len3 > 2 ? _len3 - 2 : 0), _key3 = 2; _key3 < _len3; _key3++) {
mixinMethods[_key3 - 2] = arguments[_key3];
}
for (var _i = 0; _i < mixinMethods.length; _i++) {
var mixinMethod = mixinMethods[_i];
if (mixins.methods.indexOf(mixinMethod) < 0) {
mixins.methods.push(mixinMethod);
}
}
var oldDefinition = Object.getOwnPropertyDescriptor(target, methodName);
if (oldDefinition && oldDefinition[mobxPatchedDefinition]) {
// already patched definition, do not repatch
return;
}
var originalMethod = target[methodName];
var newDefinition = createDefinition(target, methodName, oldDefinition ? oldDefinition.enumerable : undefined, mixins, originalMethod);
Object.defineProperty(target, methodName, newDefinition);
}
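// Illustrative example (hypothetical component and mixin, not part of this bundle):
//   patch(SomeComponent.prototype, "componentWillUnmount", cleanup)
// makes `cleanup` run after any user-defined componentWillUnmount, exactly once
// per invocation, even when instances or subclasses override the method.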
function createDefinition(target, methodName, enumerable, mixins, originalMethod) {
var _ref;
var wrappedFunc = wrapFunction(originalMethod, mixins);
return _ref = {}, _defineProperty(_ref, mobxPatchedDefinition, true), _defineProperty(_ref, "get", function get() {
return wrappedFunc;
}), _defineProperty(_ref, "set", function set(value) {
if (this === target) {
wrappedFunc = wrapFunction(value, mixins);
} else {
// when it is an instance of the prototype/a child prototype patch that particular case again separately
// since we need to store separate values depending on wether it is the actual instance, the prototype, etc
// e.g. the method for super might not be the same as the method for the prototype which might be not the same
// as the method for the instance
var newDefinition = createDefinition(this, methodName, enumerable, mixins, value);
Object.defineProperty(this, methodName, newDefinition);
}
}), _defineProperty(_ref, "configurable", true), _defineProperty(_ref, "enumerable", enumerable), _ref;
}
var injectorContextTypes = {
mobxStores: objectOrObservableObject
};
Object.seal(injectorContextTypes);
var proxiedInjectorProps = {
contextTypes: {
get: function get() {
return injectorContextTypes;
},
set: function set(_) {
console.warn("Mobx Injector: you are trying to attach `contextTypes` on an component decorated with `inject` (or `observer`) HOC. Please specify the contextTypes on the wrapped component instead. It is accessible through the `wrappedComponent`");
},
configurable: true,
enumerable: false
},
isMobxInjector: {
value: true,
writable: true,
configurable: true,
enumerable: true
}
/**
* Store Injection
*/
};
function createStoreInjector(grabStoresFn, component, injectNames) {
var displayName = "inject-" + (component.displayName || component.name || component.constructor && component.constructor.name || "Unknown");
if (injectNames) displayName += "-with-" + injectNames;
var Injector =
/*#__PURE__*/
function (_Component) {
_inherits(Injector, _Component);
function Injector() {
var _getPrototypeOf2;
var _this;
_classCallCheck(this, Injector);
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
_this = _possibleConstructorReturn(this, (_getPrototypeOf2 = _getPrototypeOf(Injector)).call.apply(_getPrototypeOf2, [this].concat(args)));
_this.storeRef = function (instance) {
_this.wrappedInstance = instance;
};
return _this;
}
_createClass(Injector, [{
key: "render",
value: function render() {
// Optimization: it might be more efficient to apply the mapper function *outside* the render method
// (if the mapper is a function), that could avoid expensive(?) re-rendering of the injector component
// See this test: 'using a custom injector is not too reactive' in inject.js
var newProps = {};
for (var key in this.props) {
if (this.props.hasOwnProperty(key)) {
newProps[key] = this.props[key];
}
}
var additionalProps = grabStoresFn(this.context.mobxStores || {}, newProps, this.context) || {};
for (var _key2 in additionalProps) {
newProps[_key2] = additionalProps[_key2];
}
if (!isStateless(component)) {
newProps.ref = this.storeRef;
}
return React.createElement(component, newProps);
}
}]);
return Injector;
    }(React.Component);
    // Static fields from component should be visible on the generated Injector
Injector.displayName = displayName;
hoistNonReactStatics_cjs(Injector, component);
Injector.wrappedComponent = component;
Object.defineProperties(Injector, proxiedInjectorProps);
return Injector;
}
function grabStoresByName(storeNames) {
return function (baseStores, nextProps) {
storeNames.forEach(function (storeName) {
            // prefer props over stores
            if (storeName in nextProps) return;
if (!(storeName in baseStores)) throw new Error("MobX injector: Store '" + storeName + "' is not available! Make sure it is provided by some Provider");
nextProps[storeName] = baseStores[storeName];
});
return nextProps;
};
}
/**
* higher order component that injects stores to a child.
* takes either a varargs list of strings, which are stores read from the context,
* or a function that manually maps the available stores from the context to props:
* storesToProps(mobxStores, props, context) => newProps
*/
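// Illustrative usage (store and component names are hypothetical, not part of this bundle):
//   inject("userStore")(observer(UserBadge))
//   inject(function (stores, props) { return { user: stores.userStore.current }; })(UserBadge)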
function inject()
/* fn(stores, nextProps) or ...storeNames */
{
var grabStoresFn;
if (typeof arguments[0] === "function") {
grabStoresFn = arguments[0];
return function (componentClass) {
var injected = createStoreInjector(grabStoresFn, componentClass);
            injected.isMobxInjector = false; // suppress warning
// mark the Injector as observer, to make it react to expressions in `grabStoresFn`,
// see #111
injected = observer(injected);
injected.isMobxInjector = true; // restore warning
return injected;
};
} else {
var storeNames = [];
for (var i = 0; i < arguments.length; i++) {
storeNames[i] = arguments[i];
}
grabStoresFn = grabStoresByName(storeNames);
return function (componentClass) {
return createStoreInjector(grabStoresFn, componentClass, storeNames.join("-"));
};
}
}
var mobxAdminProperty = mobx.$mobx || "$mobx";
var mobxIsUnmounted = newSymbol("isUnmounted");
/**
* dev tool support
*/
var isDevtoolsEnabled = false;
var isUsingStaticRendering = false;
var warnedAboutObserverInjectDeprecation = false;
// WeakMap<Node, Object>
var componentByNodeRegistry = typeof WeakMap !== "undefined" ? new WeakMap() : undefined;
var renderReporter = new EventEmitter();
var skipRenderKey = newSymbol("skipRender");
var isForcingUpdateKey = newSymbol("isForcingUpdate");
/**
* Helper to set `prop` to `this` as non-enumerable (hidden prop)
* @param target
* @param prop
* @param value
*/
function setHiddenProp(target, prop, value) {
if (!Object.hasOwnProperty.call(target, prop)) {
Object.defineProperty(target, prop, {
enumerable: false,
configurable: true,
writable: true,
value: value
});
} else {
target[prop] = value;
}
}
function findDOMNode$1(component) {
return null;
}
function reportRendering(component) {
var node = findDOMNode$1(component);
if (node && componentByNodeRegistry) componentByNodeRegistry.set(node, component);
renderReporter.emit({
event: "render",
renderTime: component.__$mobRenderEnd - component.__$mobRenderStart,
totalTime: Date.now() - component.__$mobRenderStart,
component: component,
node: node
});
}
function trackComponents() {
if (typeof WeakMap === "undefined") throw new Error("[mobx-react] tracking components is not supported in this browser.");
if (!isDevtoolsEnabled) isDevtoolsEnabled = true;
}
function useStaticRendering(useStaticRendering) {
isUsingStaticRendering = useStaticRendering;
}
/**
* Errors reporter
*/
var errorsReporter = new EventEmitter();
/**
* Utilities
*/
function patch$1(target, funcName) {
patch(target, funcName, reactiveMixin[funcName]);
}
function shallowEqual(objA, objB) {
    // From: https://github.com/facebook/fbjs/blob/c69904a511b900266935168223063dd8772dfc40/packages/fbjs/src/core/shallowEqual.js
if (is(objA, objB)) return true;
if (_typeof(objA) !== "object" || objA === null || _typeof(objB) !== "object" || objB === null) {
return false;
}
var keysA = Object.keys(objA);
var keysB = Object.keys(objB);
if (keysA.length !== keysB.length) return false;
for (var i = 0; i < keysA.length; i++) {
if (!hasOwnProperty.call(objB, keysA[i]) || !is(objA[keysA[i]], objB[keysA[i]])) {
return false;
}
}
return true;
}
function is(x, y) {
// From: https://github.com/facebook/fbjs/blob/c69904a511b900266935168223063dd8772dfc40/packages/fbjs/src/core/shallowEqual.js
if (x === y) {
return x !== 0 || 1 / x === 1 / y;
} else {
return x !== x && y !== y;
}
}
function makeComponentReactive(render) {
var _this2 = this;
if (isUsingStaticRendering === true) return render.call(this);
function reactiveRender() {
var _this = this;
isRenderingPending = false;
var exception = undefined;
var rendering = undefined;
reaction.track(function () {
if (isDevtoolsEnabled) {
_this.__$mobRenderStart = Date.now();
}
try {
rendering = mobx._allowStateChanges(false, baseRender);
} catch (e) {
exception = e;
}
if (isDevtoolsEnabled) {
_this.__$mobRenderEnd = Date.now();
}
});
if (exception) {
errorsReporter.emit(exception);
throw exception;
}
return rendering;
    }
    // Generate a friendly name for debugging
    var initialName = this.displayName || this.name || this.constructor && (this.constructor.displayName || this.constructor.name) || "<component>";
var rootNodeID = this._reactInternalInstance && this._reactInternalInstance._rootNodeID || this._reactInternalInstance && this._reactInternalInstance._debugID || this._reactInternalFiber && this._reactInternalFiber._debugID;
/**
* If props are shallowly modified, react will render anyway,
* so atom.reportChanged() should not result in yet another re-render
*/
setHiddenProp(this, skipRenderKey, false);
/**
* forceUpdate will re-assign this.props. We don't want that to cause a loop,
* so detect these changes
*/
    setHiddenProp(this, isForcingUpdateKey, false);
    // wire up the reactive render
    var baseRender = render.bind(this);
var isRenderingPending = false;
var reaction = new mobx.Reaction("".concat(initialName, "#").concat(rootNodeID, ".render()"), function () {
if (!isRenderingPending) {
// N.B. Getting here *before mounting* means that a component constructor has side effects (see the relevant test in misc.js)
            // This is unidiomatic React usage, but React will correctly warn about it, so we continue as usual
// See #85 / Pull #44
isRenderingPending = true;
if (typeof _this2.componentWillReact === "function") _this2.componentWillReact(); // TODO: wrap in action?
if (_this2[mobxIsUnmounted] !== true) {
// If we are unmounted at this point, componentWillReact() had a side effect causing the component to unmounted
// TODO: remove this check? Then react will properly warn about the fact that this should not happen? See #73
                // However, people also claim this might happen during unit tests...
var hasError = true;
try {
setHiddenProp(_this2, isForcingUpdateKey, true);
if (!_this2[skipRenderKey]) React.Component.prototype.forceUpdate.call(_this2);
hasError = false;
} finally {
setHiddenProp(_this2, isForcingUpdateKey, false);
if (hasError) reaction.dispose();
}
}
}
});
reaction.reactComponent = this;
reactiveRender[mobxAdminProperty] = reaction;
this.render = reactiveRender;
return reactiveRender.call(this);
}
/**
* ReactiveMixin
*/
var reactiveMixin = {
componentWillUnmount: function componentWillUnmount() {
if (isUsingStaticRendering === true) return;
this.render[mobxAdminProperty] && this.render[mobxAdminProperty].dispose();
this[mobxIsUnmounted] = true;
if (isDevtoolsEnabled) {
var node = findDOMNode$1(this);
if (node && componentByNodeRegistry) {
componentByNodeRegistry.delete(node);
}
renderReporter.emit({
event: "destroy",
component: this,
node: node
});
}
},
componentDidMount: function componentDidMount() {
if (isDevtoolsEnabled) {
reportRendering(this);
}
},
componentDidUpdate: function componentDidUpdate() {
if (isDevtoolsEnabled) {
reportRendering(this);
}
},
shouldComponentUpdate: function shouldComponentUpdate(nextProps, nextState) {
if (isUsingStaticRendering) {
console.warn("[mobx-react] It seems that a re-rendering of a React component is triggered while in static (server-side) mode. Please make sure components are rendered only once server-side.");
        }
        // update on any state changes (as is the default)
        if (this.state !== nextState) {
            return true;
        }
        // update if props are shallowly not equal, inspired by PureRenderMixin
// we could return just 'false' here, and avoid the `skipRender` checks etc
// however, it is nicer if lifecycle events are triggered like usually,
// so we return true here if props are shallowly modified.
return !shallowEqual(this.props, nextProps);
}
};
function makeObservableProp(target, propName) {
var valueHolderKey = newSymbol("reactProp_".concat(propName, "_valueHolder"));
var atomHolderKey = newSymbol("reactProp_".concat(propName, "_atomHolder"));
function getAtom() {
if (!this[atomHolderKey]) {
setHiddenProp(this, atomHolderKey, mobx.createAtom("reactive " + propName));
}
return this[atomHolderKey];
}
Object.defineProperty(target, propName, {
configurable: true,
enumerable: true,
get: function get() {
getAtom.call(this).reportObserved();<|fim▁hole|> return this[valueHolderKey];
},
set: function set(v) {
if (!this[isForcingUpdateKey] && !shallowEqual(this[valueHolderKey], v)) {
setHiddenProp(this, valueHolderKey, v);
setHiddenProp(this, skipRenderKey, true);
getAtom.call(this).reportChanged();
setHiddenProp(this, skipRenderKey, false);
} else {
setHiddenProp(this, valueHolderKey, v);
}
}
});
}
/**
* Observer function / decorator
*/
function observer(arg1, arg2) {
if (typeof arg1 === "string") {
throw new Error("Store names should be provided as array");
}
if (Array.isArray(arg1)) {
// TODO: remove in next major
// component needs stores
if (!warnedAboutObserverInjectDeprecation) {
warnedAboutObserverInjectDeprecation = true;
console.warn('Mobx observer: Using observer to inject stores is deprecated since 4.0. Use `@inject("store1", "store2") @observer ComponentClass` or `inject("store1", "store2")(observer(componentClass))` instead of `@observer(["store1", "store2"]) ComponentClass`');
}
if (!arg2) {
// invoked as decorator
return function (componentClass) {
return observer(arg1, componentClass);
};
} else {
return inject.apply(null, arg1)(observer(arg2));
}
}
var componentClass = arg1;
if (componentClass.isMobxInjector === true) {
console.warn("Mobx observer: You are trying to use 'observer' on a component that already has 'inject'. Please apply 'observer' before applying 'inject'");
}
if (componentClass.__proto__ === React.PureComponent) {
console.warn("Mobx observer: You are using 'observer' on React.PureComponent. These two achieve two opposite goals and should not be used together");
} // Stateless function component:
// If it is function but doesn't seem to be a react class constructor,
// wrap it to a react class automatically
if (typeof componentClass === "function" && (!componentClass.prototype || !componentClass.prototype.render) && !componentClass.isReactClass && !React.Component.isPrototypeOf(componentClass)) {
var _class, _temp;
var observerComponent = observer((_temp = _class =
/*#__PURE__*/
function (_Component) {
_inherits(_class, _Component);
function _class() {
_classCallCheck(this, _class);
return _possibleConstructorReturn(this, _getPrototypeOf(_class).apply(this, arguments));
}
_createClass(_class, [{
key: "render",
value: function render() {
return componentClass.call(this, this.props, this.context);
}
}]);
return _class;
}(React.Component), _class.displayName = componentClass.displayName || componentClass.name, _class.contextTypes = componentClass.contextTypes, _class.propTypes = componentClass.propTypes, _class.defaultProps = componentClass.defaultProps, _temp));
hoistNonReactStatics_cjs(observerComponent, componentClass);
return observerComponent;
}
if (!componentClass) {
throw new Error("Please pass a valid component to 'observer'");
}
var target = componentClass.prototype || componentClass;
mixinLifecycleEvents(target);
componentClass.isMobXReactObserver = true;
makeObservableProp(target, "props");
makeObservableProp(target, "state");
var baseRender = target.render;
target.render = function () {
return makeComponentReactive.call(this, baseRender);
};
return componentClass;
}
function mixinLifecycleEvents(target) {
["componentDidMount", "componentWillUnmount", "componentDidUpdate"].forEach(function (funcName) {
patch$1(target, funcName);
});
if (!target.shouldComponentUpdate) {
target.shouldComponentUpdate = reactiveMixin.shouldComponentUpdate;
} else {
if (target.shouldComponentUpdate !== reactiveMixin.shouldComponentUpdate) {
// TODO: make throw in next major
console.warn("Use `shouldComponentUpdate` in an `observer` based component breaks the behavior of `observer` and might lead to unexpected results. Manually implementing `sCU` should not be needed when using mobx-react.");
}
}
}
var Observer = observer(function (_ref) {
var children = _ref.children,
observerInject = _ref.inject,
render = _ref.render;
var component = children || render;
if (typeof component === "undefined") {
return null;
}
if (!observerInject) {
return component();
    }
    // TODO: remove in next major
console.warn("<Observer inject=.../> is no longer supported. Please use inject on the enclosing component instead");
var InjectComponent = inject(observerInject)(component);
return React__default.createElement(InjectComponent, null);
});
Observer.displayName = "Observer";
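// Illustrative usage (store name is hypothetical, not part of this bundle):
//   <Observer>{function () { return <Text>{counterStore.count}</Text>; }}</Observer>
// The children (or render) function re-runs whenever the observables it
// dereferences change.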
var ObserverPropsCheck = function ObserverPropsCheck(props, key, componentName, location, propFullName) {
var extraKey = key === "children" ? "render" : "children";
if (typeof props[key] === "function" && typeof props[extraKey] === "function") {
return new Error("Invalid prop,do not use children and render in the same time in`" + componentName);
}
if (typeof props[key] === "function" || typeof props[extraKey] === "function") {
return;
}
return new Error("Invalid prop `" + propFullName + "` of type `" + _typeof(props[key]) + "` supplied to" + " `" + componentName + "`, expected `function`.");
};
Observer.propTypes = {
render: ObserverPropsCheck,
children: ObserverPropsCheck
};
/**
* Copyright (c) 2013-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
function componentWillMount() {
// Call this.constructor.gDSFP to support sub-classes.
var state = this.constructor.getDerivedStateFromProps(this.props, this.state);
if (state !== null && state !== undefined) {
this.setState(state);
}
}
function componentWillReceiveProps(nextProps) {
// Call this.constructor.gDSFP to support sub-classes.
// Use the setState() updater to ensure state isn't stale in certain edge cases.
function updater(prevState) {
var state = this.constructor.getDerivedStateFromProps(nextProps, prevState);
return state !== null && state !== undefined ? state : null;
}
// Binding "this" is important for shallow renderer support.
this.setState(updater.bind(this));
}
function componentWillUpdate(nextProps, nextState) {
try {
var prevProps = this.props;
var prevState = this.state;
this.props = nextProps;
this.state = nextState;
this.__reactInternalSnapshotFlag = true;
this.__reactInternalSnapshot = this.getSnapshotBeforeUpdate(
prevProps,
prevState
);
} finally {
this.props = prevProps;
this.state = prevState;
}
}
// React may warn about cWM/cWRP/cWU methods being deprecated.
// Add a flag to suppress these warnings for this special case.
componentWillMount.__suppressDeprecationWarning = true;
componentWillReceiveProps.__suppressDeprecationWarning = true;
componentWillUpdate.__suppressDeprecationWarning = true;
function polyfill(Component) {
var prototype = Component.prototype;
if (!prototype || !prototype.isReactComponent) {
throw new Error('Can only polyfill class components');
}
if (
typeof Component.getDerivedStateFromProps !== 'function' &&
typeof prototype.getSnapshotBeforeUpdate !== 'function'
) {
return Component;
}
// If new component APIs are defined, "unsafe" lifecycles won't be called.
// Error if any of these lifecycles are present,
// Because they would work differently between older and newer (16.3+) versions of React.
var foundWillMountName = null;
var foundWillReceivePropsName = null;
var foundWillUpdateName = null;
if (typeof prototype.componentWillMount === 'function') {
foundWillMountName = 'componentWillMount';
} else if (typeof prototype.UNSAFE_componentWillMount === 'function') {
foundWillMountName = 'UNSAFE_componentWillMount';
}
if (typeof prototype.componentWillReceiveProps === 'function') {
foundWillReceivePropsName = 'componentWillReceiveProps';
} else if (typeof prototype.UNSAFE_componentWillReceiveProps === 'function') {
foundWillReceivePropsName = 'UNSAFE_componentWillReceiveProps';
}
if (typeof prototype.componentWillUpdate === 'function') {
foundWillUpdateName = 'componentWillUpdate';
} else if (typeof prototype.UNSAFE_componentWillUpdate === 'function') {
foundWillUpdateName = 'UNSAFE_componentWillUpdate';
}
if (
foundWillMountName !== null ||
foundWillReceivePropsName !== null ||
foundWillUpdateName !== null
) {
var componentName = Component.displayName || Component.name;
var newApiName =
typeof Component.getDerivedStateFromProps === 'function'
? 'getDerivedStateFromProps()'
: 'getSnapshotBeforeUpdate()';
throw Error(
'Unsafe legacy lifecycles will not be called for components using new component APIs.\n\n' +
componentName +
' uses ' +
newApiName +
' but also contains the following legacy lifecycles:' +
(foundWillMountName !== null ? '\n ' + foundWillMountName : '') +
(foundWillReceivePropsName !== null
? '\n ' + foundWillReceivePropsName
: '') +
(foundWillUpdateName !== null ? '\n ' + foundWillUpdateName : '') +
'\n\nThe above lifecycles should be removed. Learn more about this warning here:\n' +
'https://fb.me/react-async-component-lifecycle-hooks'
);
}
// React <= 16.2 does not support static getDerivedStateFromProps.
// As a workaround, use cWM and cWRP to invoke the new static lifecycle.
// Newer versions of React will ignore these lifecycles if gDSFP exists.
if (typeof Component.getDerivedStateFromProps === 'function') {
prototype.componentWillMount = componentWillMount;
prototype.componentWillReceiveProps = componentWillReceiveProps;
}
// React <= 16.2 does not support getSnapshotBeforeUpdate.
// As a workaround, use cWU to invoke the new lifecycle.
// Newer versions of React will ignore that lifecycle if gSBU exists.
if (typeof prototype.getSnapshotBeforeUpdate === 'function') {
if (typeof prototype.componentDidUpdate !== 'function') {
throw new Error(
'Cannot polyfill getSnapshotBeforeUpdate() for components that do not define componentDidUpdate() on the prototype'
);
}
prototype.componentWillUpdate = componentWillUpdate;
var componentDidUpdate = prototype.componentDidUpdate;
prototype.componentDidUpdate = function componentDidUpdatePolyfill(
prevProps,
prevState,
maybeSnapshot
) {
// 16.3+ will not execute our will-update method;
// It will pass a snapshot value to did-update though.
// Older versions will require our polyfilled will-update value.
// We need to handle both cases, but can't just check for the presence of "maybeSnapshot",
// Because for <= 15.x versions this might be a "prevContext" object.
// We also can't just check "__reactInternalSnapshot",
// Because get-snapshot might return a falsy value.
// So check for the explicit __reactInternalSnapshotFlag flag to determine behavior.
var snapshot = this.__reactInternalSnapshotFlag
? this.__reactInternalSnapshot
: maybeSnapshot;
componentDidUpdate.call(this, prevProps, prevState, snapshot);
};
}
return Component;
}
var specialReactKeys = {
children: true,
key: true,
ref: true
};
var Provider =
/*#__PURE__*/
function (_Component) {
_inherits(Provider, _Component);
function Provider(props, context) {
var _this;
_classCallCheck(this, Provider);
_this = _possibleConstructorReturn(this, _getPrototypeOf(Provider).call(this, props, context));
_this.state = {};
copyStores(props, _this.state);
return _this;
}
_createClass(Provider, [{
key: "render",
value: function render() {
return React.Children.only(this.props.children);
}
}, {
key: "getChildContext",
value: function getChildContext() {
var stores = {}; // inherit stores
copyStores(this.context.mobxStores, stores); // add own stores
copyStores(this.props, stores);
return {
mobxStores: stores
};
}
}], [{
key: "getDerivedStateFromProps",
value: function getDerivedStateFromProps(nextProps, prevState) {
if (!nextProps) return null;
            if (!prevState) return nextProps;
            // Maybe this warning is too aggressive?
            if (Object.keys(nextProps).filter(validStoreName).length !== Object.keys(prevState).filter(validStoreName).length) console.warn("MobX Provider: The set of provided stores has changed. Please avoid changing stores as the change might not propagate to all children");
if (!nextProps.suppressChangedStoreWarning) for (var key in nextProps) {
if (validStoreName(key) && prevState[key] !== nextProps[key]) console.warn("MobX Provider: Provided store '" + key + "' has changed. Please avoid replacing stores as the change might not propagate to all children");
}
return nextProps;
}
}]);
return Provider;
}(React.Component);
Provider.contextTypes = {
mobxStores: objectOrObservableObject
};
Provider.childContextTypes = {
mobxStores: objectOrObservableObject.isRequired
};
function copyStores(from, to) {
if (!from) return;
for (var key in from) {
if (validStoreName(key)) to[key] = from[key];
}
}
function validStoreName(key) {
return !specialReactKeys[key] && key !== "suppressChangedStoreWarning";
} // TODO: kill in next major
polyfill(Provider);
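// Illustrative usage (a sketch; the store names are hypothetical):
//
//   <Provider todoStore={todoStore} userStore={userStore}>
//     <App />
//   </Provider>
//
// Children then receive the stores through the mobxStores child context
// built in getChildContext() above.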
var storeKey = newSymbol("disposeOnUnmount");
function runDisposersOnWillUnmount() {
var _this = this;
if (!this[storeKey]) {
// when disposeOnUnmount is only set to some instances of a component it will still patch the prototype
return;
}
this[storeKey].forEach(function (propKeyOrFunction) {
var prop = typeof propKeyOrFunction === "string" ? _this[propKeyOrFunction] : propKeyOrFunction;
if (prop !== undefined && prop !== null) {
if (typeof prop !== "function") {
throw new Error("[mobx-react] disposeOnUnmount only works on functions such as disposers returned by reactions, autorun, etc.");
}
prop();
}
});
this[storeKey] = [];
}
function disposeOnUnmount(target, propertyKeyOrFunction) {
if (Array.isArray(propertyKeyOrFunction)) {
return propertyKeyOrFunction.map(function (fn) {
return disposeOnUnmount(target, fn);
});
}
  if (!(target instanceof React.Component)) {
throw new Error("[mobx-react] disposeOnUnmount only works on class based React components.");
}
if (typeof propertyKeyOrFunction !== "string" && typeof propertyKeyOrFunction !== "function") {
throw new Error("[mobx-react] disposeOnUnmount only works if the parameter is either a property key or a function.");
} // add property key / function we want run (disposed) to the store
var componentWasAlreadyModified = !!target[storeKey];
var store = target[storeKey] || (target[storeKey] = []);
store.push(propertyKeyOrFunction); // tweak the component class componentWillUnmount if not done already
if (!componentWasAlreadyModified) {
patch(target, "componentWillUnmount", runDisposersOnWillUnmount);
} // return the disposer as is if invoked as a non decorator
if (typeof propertyKeyOrFunction !== "string") {
return propertyKeyOrFunction;
}
}
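// Illustrative usage (a sketch; `autorun` is a mobx disposer source, as the
// error message above suggests):
//
//   class Ticker extends React.Component {
//     componentDidMount() {
//       disposeOnUnmount(this, autorun(() => console.log(this.props.value)));
//     }
//     render() { return null; }
//   }
//
// The registered disposer is invoked by the patched componentWillUnmount.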
if (!React.Component) throw new Error("mobx-react requires React to be available");
if (!mobx.spy) throw new Error("mobx-react requires mobx to be available");
if (typeof reactNative.unstable_batchedUpdates === "function") mobx.configure({
reactionScheduler: reactNative.unstable_batchedUpdates
});
var onError = function onError(fn) {
return errorsReporter.on(fn);
};
if ((typeof __MOBX_DEVTOOLS_GLOBAL_HOOK__ === "undefined" ? "undefined" : _typeof(__MOBX_DEVTOOLS_GLOBAL_HOOK__)) === "object") {
var mobx$1 = {
spy: mobx.spy,
extras: {
getDebugName: mobx.getDebugName
}
};
var mobxReact = {
renderReporter: renderReporter,
componentByNodeRegistry: componentByNodeRegistry,
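    // the next key keeps the historical misspelling as a backwards-compatible alias: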
componentByNodeRegistery: componentByNodeRegistry,
trackComponents: trackComponents
};
__MOBX_DEVTOOLS_GLOBAL_HOOK__.injectMobxReact(mobxReact, mobx$1);
}
exports.propTypes = propTypes;
exports.PropTypes = propTypes;
exports.onError = onError;
exports.observer = observer;
exports.Observer = Observer;
exports.renderReporter = renderReporter;
exports.componentByNodeRegistery = componentByNodeRegistry;
exports.componentByNodeRegistry = componentByNodeRegistry;
exports.trackComponents = trackComponents;
exports.useStaticRendering = useStaticRendering;
exports.Provider = Provider;
exports.inject = inject;
exports.disposeOnUnmount = disposeOnUnmount;<|fim▁end|> | |
<|file_name|>election.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package memorytopo
import (
"path"
log "github.com/golang/glog"
"golang.org/x/net/context"
"github.com/youtube/vitess/go/vt/topo"
)
// NewMasterParticipation is part of the topo.Server interface
func (c *Conn) NewMasterParticipation(name, id string) (topo.MasterParticipation, error) {
c.factory.mu.Lock()
defer c.factory.mu.Unlock()
// Make sure the global path exists.
electionPath := path.Join(electionsPath, name)
if n := c.factory.getOrCreatePath(c.cell, electionPath); n == nil {
return nil, topo.ErrNoNode
}
return &cMasterParticipation{
c: c,
name: name,
id: id,
stop: make(chan struct{}),
done: make(chan struct{}),
}, nil
}
// cMasterParticipation implements topo.MasterParticipation.
//
// We use a directory (in the global election path, named after the
// election) with ephemeral files in it that contain the id. The oldest
// revision wins the election.
type cMasterParticipation struct {
// c is our memorytopo connection
c *Conn
// name is the name of this MasterParticipation
name string
// id is the process's current id.
id string
// stop is a channel closed when Stop is called.
stop chan struct{}
// done is a channel closed when we're done processing the Stop
done chan struct{}
}
// WaitForMastership is part of the topo.MasterParticipation interface.
func (mp *cMasterParticipation) WaitForMastership() (context.Context, error) {
// If Stop was already called, mp.done is closed, so we are interrupted.
select {
case <-mp.done:
return nil, topo.ErrInterrupted
default:
}
electionPath := path.Join(electionsPath, mp.name)
var ld topo.LockDescriptor
// We use a cancelable context here. If stop is closed,
// we just cancel that context.
lockCtx, lockCancel := context.WithCancel(context.Background())
go func() {
select {
case <-mp.stop:
if ld != nil {
if err := ld.Unlock(context.Background()); err != nil {
log.Errorf("failed to unlock LockDescriptor %v: %v", electionPath, err)
}
}
lockCancel()
close(mp.done)
}
}()
// Try to get the mastership, by getting a lock.
var err error
ld, err = mp.c.Lock(lockCtx, electionPath, mp.id)
if err != nil {
// It can be that we were interrupted.
return nil, err
}
// We got the lock. Return the lockContext. If Stop() is called,
// it will cancel the lockCtx, and cancel the returned context.<|fim▁hole|>}
// Stop is part of the topo.MasterParticipation interface
func (mp *cMasterParticipation) Stop() {
close(mp.stop)
<-mp.done
}
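// Illustrative lifecycle (a sketch, not part of the original file; the
// election name and id are made up):
//
//	mp, err := conn.NewMasterParticipation("vtctld", "cell1-0001")
//	ctx, err := mp.WaitForMastership() // blocks until the lock is acquired
//	// ... act as master until ctx.Done() fires ...
//	mp.Stop() // releases the lock and closes mp.done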
// GetCurrentMasterID is part of the topo.MasterParticipation interface
func (mp *cMasterParticipation) GetCurrentMasterID(ctx context.Context) (string, error) {
electionPath := path.Join(electionsPath, mp.name)
mp.c.factory.mu.Lock()
defer mp.c.factory.mu.Unlock()
n := mp.c.factory.nodeByPath(mp.c.cell, electionPath)
if n == nil {
return "", nil
}
return n.lockContents, nil
}<|fim▁end|> | return lockCtx, nil |
<|file_name|>push_doc.py<|end_file_name|><|fim▁begin|>'''
'''
from __future__ import absolute_import, print_function
from ...exceptions import ProtocolError
from ..message import Message
from . import register
import logging
log = logging.getLogger(__name__)
@register
class push_doc_1(Message):
'''
'''
msgtype = 'PUSH-DOC'
revision = 1
def __init__(self, header, metadata, content):
super(push_doc_1, self).__init__(header, metadata, content)<|fim▁hole|>
'''
header = cls.create_header()
content = { 'doc' : document.to_json() }
msg = cls(header, metadata, content)
return msg
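	# Illustrative use (a sketch following this file's own API):
	#   msg = push_doc_1.create(document)    # wraps document.to_json()
	#   msg.push_to_document(other_doc)      # replays it into another doc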
def push_to_document(self, doc):
if 'doc' not in self.content:
raise ProtocolError("No doc in PUSH-DOC")
doc.replace_with_json(self.content['doc'])<|fim▁end|> |
@classmethod
def create(cls, document, **metadata):
''' |
<|file_name|>header.rs<|end_file_name|><|fim▁begin|>use byteorder::{BigEndian, WriteBytesExt};
use errors::{be_u16, ParseError};
use nom::IResult;
use std::convert::From;
use std::io;
use std::io::Write;
/// Query operation type
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum Opcode {
/// Standard query
Query,
/// Inverse query
InverseQuery,
/// Status request
Status,
/// Placeholder for values unknown to this library.
Unknown {
/// The unrecognized opcode.
value: u8,
},
}
/// Response status codes
#[derive(Debug,Clone,Copy,PartialEq)]
pub enum Rcode {
/// No error condition.
NoError,
/// The name server was unable to interpret the query.
FormatError,
/// There was a problem with the name server.
ServerFailure,
/// (Authoritative server only) - signifies the domain name does not exist.
NameError,
/// The requested query is not implemented.
NotImplemented,
/// The query was refused for policy reasons.
Refused,
/// Placeholder for values unknown to this library.
Unknown {
/// The unrecognized response code.
value: u8,
},
}
/// Header for resource record queries and responses
#[derive(Debug,Clone,Copy,PartialEq)]
pub struct Header {
/// A 16 bit identifier assigned by the program.
pub id: u16,
/// Specifies whether this message is a query (`false`) or response (`true`).
pub qr: bool,
/// The type of query
pub opcode: Opcode,
/// Whether the response is authoritative
pub authoritative: bool,
/// Whether the response is truncated
pub truncated: bool,
/// Whether recursion is desired
pub recursion_desired: bool,
/// Whether recursion is available
pub recursion_available: bool,
/// The response code
pub rcode: Rcode,
/// The number of entries in the question section.
pub question_count: u16,
/// The number of entries in the resource records section.
pub answer_count: u16,
/// The number of entries in the authority records section.
pub ns_count: u16,
/// The number of entries in the additional records section.
pub additional_count: u16,
}
impl Header {<|fim▁hole|> pub fn query(id: u16, opcode: Opcode, recursion_desired: bool, questions: u16) -> Header {
Header {
id: id,
qr: false,
opcode: opcode,
authoritative: false,
truncated: false,
recursion_desired: recursion_desired,
recursion_available: false,
rcode: Rcode::NoError,
question_count: questions,
answer_count: 0,
ns_count: 0,
additional_count: 0,
}
}
/// Create a `Header` for a response
pub fn response(query: Header, recursion_available: bool) -> Header {
Header {
id: query.id,
qr: true,
opcode: query.opcode,
authoritative: false,
truncated: false,
recursion_desired: query.recursion_desired,
recursion_available: recursion_available,
rcode: Rcode::NoError,
question_count: query.question_count,
answer_count: 0,
ns_count: 0,
additional_count: 0,
}
}
fn flags_to_u16(&self) -> u16 {
let opcode: u8 = self.opcode.into();
let rcode: u8 = self.rcode.into();
let mut res = (rcode as u16) | ((opcode as u16) << 11);
if self.qr {
res |= 0b1000_0000_0000_0000;
}
if self.authoritative {
res |= 0b0000_0100_0000_0000;
}
if self.truncated {
res |= 0b0000_0010_0000_0000;
}
if self.recursion_desired {
res |= 0b0000_0001_0000_0000;
}
if self.recursion_available {
res |= 0b0000_0000_1000_0000;
}
res
}
}
pub fn write_header(header: &Header, writer: &mut Write) -> io::Result<()> {
writer.write_u16::<BigEndian>(header.id)?;
writer.write_u16::<BigEndian>(header.flags_to_u16())?;
writer.write_u16::<BigEndian>(header.question_count)?;
writer.write_u16::<BigEndian>(header.answer_count)?;
writer.write_u16::<BigEndian>(header.ns_count)?;
writer.write_u16::<BigEndian>(header.additional_count)?;
Ok(())
}
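// Illustrative round-trip (a sketch, mirroring the tests below):
//
//     let h = Header::query(1, Opcode::Query, true, 1);
//     let mut buf = Vec::new();
//     write_header(&h, &mut buf).unwrap();             // 12 bytes, big-endian
//     assert_eq!(parse_header(&buf), IResult::Done(&b""[..], h));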
// 1 1 1 1 1 1
// 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | ID |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | QDCOUNT |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | ANCOUNT |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | NSCOUNT |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
// | ARCOUNT |
// +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
impl From<u8> for Opcode {
fn from(bits: u8) -> Opcode {
match bits {
0 => Opcode::Query,
1 => Opcode::InverseQuery,
2 => Opcode::Status,
x => Opcode::Unknown { value: x },
}
}
}
impl From<Opcode> for u8 {
fn from(opcode: Opcode) -> u8 {
match opcode {
Opcode::Query => 0,
Opcode::InverseQuery => 1,
Opcode::Status => 2,
Opcode::Unknown { value: x } => x,
}
}
}
impl From<u8> for Rcode {
fn from(bits: u8) -> Rcode {
match bits {
0 => Rcode::NoError,
1 => Rcode::FormatError,
2 => Rcode::ServerFailure,
3 => Rcode::NameError,
4 => Rcode::NotImplemented,
5 => Rcode::Refused,
x => Rcode::Unknown { value: x },
}
}
}
impl From<Rcode> for u8 {
fn from(rcode: Rcode) -> u8 {
match rcode {
Rcode::NoError => 0,
Rcode::FormatError => 1,
Rcode::ServerFailure => 2,
Rcode::NameError => 3,
Rcode::NotImplemented => 4,
Rcode::Refused => 5,
Rcode::Unknown { value: x } => x,
}
}
}
pub fn parse_header(i: &[u8]) -> IResult<&[u8], Header, ParseError> {
let (i, id) = try_parse!(i, be_u16);
let (i, flags) = try_parse!(i, be_u16);
let (i, qdcount) = try_parse!(i, be_u16);
let (i, ancount) = try_parse!(i, be_u16);
let (i, nscount) = try_parse!(i, be_u16);
let (i, arcount) = try_parse!(i, be_u16);
let header = Header {
id: id,
qr: (flags & 0b1000_0000_0000_0000) != 0,
opcode: Opcode::from(((flags & 0b0111_1000_0000_0000) >> 11) as u8),
authoritative: (flags & 0b0000_0100_0000_0000) != 0,
truncated: (flags & 0b0000_0010_0000_0000) != 0,
recursion_desired: (flags & 0b0000_0001_0000_0000) != 0,
recursion_available: (flags & 0b0000_0000_1000_0000) != 0,
rcode: Rcode::from((flags & 0b0000_0000_0000_1111) as u8),
question_count: qdcount,
answer_count: ancount,
ns_count: nscount,
additional_count: arcount,
};
IResult::Done(i, header)
}
#[cfg(test)]
mod tests {
use super::*;
use nom::IResult::Done;
fn query_1() -> Header {
Header::query(2, Opcode::Query, true, 1)
}
fn response_1() -> Header {
let mut h = Header::response(query_1(), true);
h.answer_count = 1;
h
}
#[test]
fn parse_query_1_header() {
let data = include_bytes!("../assets/captures/dns_1_query.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], query_1()));
}
#[test]
fn parse_response_1_header() {
let data = include_bytes!("../assets/captures/dns_1_response.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], response_1()));
}
fn query_2() -> Header {
Header::query(3, Opcode::Query, true, 1)
}
fn response_2() -> Header {
let mut h = Header::response(query_2(), true);
h.answer_count = 1;
h
}
#[test]
fn parse_query_2_header() {
let data = include_bytes!("../assets/captures/dns_2_query.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], query_2()));
}
#[test]
fn parse_response_2_header() {
let data = include_bytes!("../assets/captures/dns_2_response.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], response_2()));
}
fn query_3() -> Header {
Header::query(0xda64, Opcode::Query, true, 1)
}
fn response_3() -> Header {
let mut h = Header::response(query_3(), true);
h.answer_count = 2;
h.ns_count = 1;
h
}
#[test]
fn parse_query_3_header() {
let data = include_bytes!("../assets/captures/dns_3_query.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], query_3()));
}
#[test]
fn parse_response_3_header() {
let data = include_bytes!("../assets/captures/dns_3_response.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], response_3()));
}
fn query_4() -> Header {
let mut h = Header::query(0x60ff, Opcode::Query, true, 1);
h.additional_count = 1;
h
}
fn response_4() -> Header {
let mut h = Header::response(query_4(), true);
h.answer_count = 13;
h.additional_count = 1;
h
}
#[test]
fn parse_query_4_header() {
let data = include_bytes!("../assets/captures/dns_4_query.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], query_4()));
}
#[test]
fn parse_response_4_header() {
let data = include_bytes!("../assets/captures/dns_4_response.bin");
assert_eq!(parse_header(&data[0..12]), Done(&b""[..], response_4()));
}
}<|fim▁end|> | /// Create a `Header` for a query |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from django.core.exceptions import ImproperlyConfigured
from django import VERSION
<|fim▁hole|>
from .core import AppCache<|fim▁end|> | if VERSION[0] < 1 or (VERSION[0] == 1 and VERSION[1] < 7):
raise ImproperlyConfigured("""
The app_config package requires Django 1.7 or higher.
""") |
<|file_name|>MasnaviDataProvider.java<|end_file_name|><|fim▁begin|>package com.nouribygi.masnavi.database;
import android.app.SearchManager;
import android.content.ContentProvider;
import android.content.ContentResolver;
import android.content.ContentValues;<|fim▁hole|>
import com.nouribygi.masnavi.util.MasnaviSettings;
public class MasnaviDataProvider extends ContentProvider {
public static String AUTHORITY =
"com.nouribygi.masnavi.database.MasnaviDataProvider";
public static final Uri
SEARCH_URI = Uri.parse("content://" + AUTHORITY + "/masnavi/search");
public static final String VERSES_MIME_TYPE =
ContentResolver.CURSOR_DIR_BASE_TYPE +
"/vnd.com.nouribygi.masnavi";
public static final String AYAH_MIME_TYPE =
ContentResolver.CURSOR_ITEM_BASE_TYPE +
"/vnd.com.nouribygi.masnavi";
// UriMatcher stuff
private static final int SEARCH_VERSES = 0;
private static final int GET_VERSE = 1;
private static final int SEARCH_SUGGEST = 2;
private static final UriMatcher sURIMatcher = buildUriMatcher();
private DatabaseHandler mDatabase = null;
private static UriMatcher buildUriMatcher() {
UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
matcher.addURI(AUTHORITY, "masnavi/search", SEARCH_VERSES);
matcher.addURI(AUTHORITY, "masnavi/search/*", SEARCH_VERSES);
matcher.addURI(AUTHORITY, "masnavi/search/*/*", SEARCH_VERSES);
matcher.addURI(AUTHORITY, "masnavi/verse/#/#", GET_VERSE);
matcher.addURI(AUTHORITY, "masnavi/verse/*/#/#", GET_VERSE);
matcher.addURI(AUTHORITY, SearchManager.SUGGEST_URI_PATH_QUERY,
SEARCH_SUGGEST);
matcher.addURI(AUTHORITY, SearchManager.SUGGEST_URI_PATH_QUERY + "/*",
SEARCH_SUGGEST);
return matcher;
}
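	// Illustrative matches (AUTHORITY as defined above):
	//   content://<AUTHORITY>/masnavi/search/rumi  -> SEARCH_VERSES
	//   content://<AUTHORITY>/masnavi/verse/1/42   -> GET_VERSE
	//   .../search_suggest_query/...               -> SEARCH_SUGGEST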
@Override
public boolean onCreate() {
mDatabase = DatabaseHandler.getInstance(getContext());
return true;
}
@Override
public Cursor query(Uri uri, String[] projection, String selection,
String[] selectionArgs, String sortOrder) {
String query = "";
		if (selectionArgs != null && selectionArgs.length >= 1)
query = selectionArgs[0];
int bookCode = MasnaviSettings.getSelectedBook(getContext());
return mDatabase.search(query, bookCode);
}
@Override
public String getType(Uri uri) {
switch (sURIMatcher.match(uri)) {
case SEARCH_VERSES:
return VERSES_MIME_TYPE;
case GET_VERSE:
return AYAH_MIME_TYPE;
case SEARCH_SUGGEST:
return SearchManager.SUGGEST_MIME_TYPE;
default:
throw new IllegalArgumentException("Unknown URL " + uri);
}
}
@Override
public Uri insert(Uri uri, ContentValues values) {
throw new UnsupportedOperationException();
}
@Override
public int update(Uri uri, ContentValues values, String selection,
String[] selectionArgs) {
throw new UnsupportedOperationException();
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
throw new UnsupportedOperationException();
}
}<|fim▁end|> | import android.content.UriMatcher;
import android.database.Cursor;
import android.net.Uri; |
<|file_name|>hw3_2a.py<|end_file_name|><|fim▁begin|>import sympy
x1, x2 = sympy.symbols('x1 x2')
f = 100*(x2 - x1**2)**2 + (1-x1)**2
df_dx1 = sympy.diff(f,x1)
df_dx2 = sympy.diff(f,x2)
H = sympy.hessian(f, (x1, x2))
xs = sympy.solve([df_dx1, df_dx2], [x1, x2])
H_xs = H.subs([(x1,xs[0][0]), (x2,xs[0][1])])<|fim▁hole|> if i.evalf() <= 0:
count += 1
if count == 0:
print 'Local minima'
elif count == len(lambda_xs.keys()):
	print 'Local maxima'
else:
print 'Saddle point'<|fim▁end|> | lambda_xs = H_xs.eigenvals()
count = 0
for i in lambda_xs.keys(): |
<|file_name|>7503.js<|end_file_name|><|fim▁begin|>{
it("returns a key", () => {
var nativeEvent = new KeyboardEvent("keypress", {
key: "f"
});
expect(getEventKey(nativeEvent)).toBe("f");<|fim▁hole|>}<|fim▁end|> | }); |
<|file_name|>ThemeManagers.py<|end_file_name|><|fim▁begin|># encoding: utf-8
"""
IMPORTANT - COLOUR SUPPORT IS CURRENTLY EXTREMELY EXPERIMENTAL. THE API MAY CHANGE, AND NO DEFAULT
WIDGETS CURRENTLY TAKE ADVANTAGE OF THEME SUPPORT AT ALL.
"""
import curses
from . import global_options
def disable_color():
global_options.DISABLE_ALL_COLORS = True
def enable_color():
global_options.DISABLE_ALL_COLORS = False
class ThemeManager(object):
_colors_to_define = (
# DO NOT DEFINE THIS COLOR - THINGS BREAK
#('WHITE_BLACK', DO_NOT_DO_THIS, DO_NOT_DO_THIS),
('BLACK_WHITE', curses.COLOR_BLACK, curses.COLOR_WHITE),
#('BLACK_ON_DEFAULT', curses.COLOR_BLACK, -1),
#('WHITE_ON_DEFAULT', curses.COLOR_WHITE, -1),
('BLUE_BLACK', curses.COLOR_BLUE, curses.COLOR_BLACK),
('CYAN_BLACK', curses.COLOR_CYAN, curses.COLOR_BLACK),
('GREEN_BLACK', curses.COLOR_GREEN, curses.COLOR_BLACK),
('MAGENTA_BLACK', curses.COLOR_MAGENTA, curses.COLOR_BLACK),
('RED_BLACK', curses.COLOR_RED, curses.COLOR_BLACK),
('YELLOW_BLACK', curses.COLOR_YELLOW, curses.COLOR_BLACK),
('BLACK_RED', curses.COLOR_BLACK, curses.COLOR_RED),
('BLACK_GREEN', curses.COLOR_BLACK, curses.COLOR_GREEN),
('BLACK_YELLOW', curses.COLOR_BLACK, curses.COLOR_YELLOW),
('BLUE_WHITE', curses.COLOR_BLUE, curses.COLOR_WHITE),
('CYAN_WHITE', curses.COLOR_CYAN, curses.COLOR_WHITE),
('GREEN_WHITE', curses.COLOR_GREEN, curses.COLOR_WHITE),
('MAGENTA_WHITE', curses.COLOR_MAGENTA, curses.COLOR_WHITE),
('RED_WHITE', curses.COLOR_RED, curses.COLOR_WHITE),
('YELLOW_WHITE', curses.COLOR_YELLOW, curses.COLOR_WHITE),
)
default_colors = {
'DEFAULT' : 'WHITE_BLACK',
'FORMDEFAULT' : 'WHITE_BLACK',
'NO_EDIT' : 'BLUE_BLACK',
'STANDOUT' : 'CYAN_BLACK',
'CURSOR' : 'WHITE_BLACK',
'LABEL' : 'GREEN_BLACK',
'LABELBOLD' : 'WHITE_BLACK',
'CONTROL' : 'YELLOW_BLACK',
'IMPORTANT' : 'GREEN_BLACK',
'SAFE' : 'GREEN_BLACK',
'WARNING' : 'YELLOW_BLACK',
'DANGER' : 'RED_BLACK',
'CRITICAL' : 'BLACK_RED',
'GOOD' : 'GREEN_BLACK',
'GOODHL' : 'GREEN_BLACK',
'VERYGOOD' : 'BLACK_GREEN',
'CAUTION' : 'YELLOW_BLACK',
'CAUTIONHL' : 'BLACK_YELLOW',
}
def __init__(self):
#curses.use_default_colors()
self._defined_pairs = {}
self._names = {}
try:
self._max_pairs = curses.COLOR_PAIRS - 1
do_color = True
except AttributeError:
# curses.start_color has failed or has not been called
do_color = False
# Disable all color use across the application
disable_color()
if do_color and curses.has_colors():
self.initialize_pairs()
self.initialize_names()
<|fim▁hole|> if not curses.has_colors() or global_options.DISABLE_ALL_COLORS:
return False
if request == 'DEFAULT':
request = caller.color
# Locate the requested color pair. Default to default if not found.
try:
pair = self._defined_pairs[self._names[request]]
except:
pair = self._defined_pairs[self._names['DEFAULT']]
# now make the actual attribute
color_attribute = curses.color_pair(pair[0])
return color_attribute
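	# Illustrative call from a widget (a sketch; the code above assumes the
	# caller has a `color` attribute when request == 'DEFAULT'):
	#   attr = theme_manager.find_pair(widget, 'WARNING')
	#   window.addstr(0, 0, 'careful', attr)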
def set_default(self, caller):
return False
def initialize_pairs(self):
# White on Black is fixed as color_pair 0
self._defined_pairs['WHITE_BLACK'] = (0, curses.COLOR_WHITE, curses.COLOR_BLACK)
for cp in self.__class__._colors_to_define:
if cp[0] == 'WHITE_BLACK':
# silently protect the user from breaking things.
continue
self.initalize_pair(cp[0], cp[1], cp[2])
def initialize_names(self):
self._names.update(self.__class__.default_colors)
def initalize_pair(self, name, fg, bg):
#Initialize a color_pair for the required color and return the number.
#Raise an exception if this is not possible.
if (len(list(self._defined_pairs.keys())) + 1) == self._max_pairs:
raise Exception("Too many colors")
_this_pair_number = len(list(self._defined_pairs.keys())) + 1
curses.init_pair(_this_pair_number, fg, bg)
self._defined_pairs[name] = (_this_pair_number, fg, bg)
return _this_pair_number
def get_pair_number(self, name):
return self._defined_pairs[name][0]<|fim▁end|> | def find_pair(self, caller, request='DEFAULT'): |
<|file_name|>server.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import falcon
import json
class QuoteResource:
def on_get(self, req, resp):
"""Handles GET requests"""
quote = {
'quote': 'I\'ve always been more interested in the future than in the past.',
'author': 'Grace Hopper'
}
resp.body = json.dumps(quote)
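# To try this resource (a sketch; any WSGI server works):
#   $ gunicorn server:api
#   $ curl http://localhost:8000/quote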
api = falcon.API()
api.add_route('/quote', QuoteResource())<|fim▁end|> | |
<|file_name|>motion.rs<|end_file_name|><|fim▁begin|>use super::*;
impl Editor {
	/// Convert an instruction to a motion (new coordinate). Returns `None` if the given
	/// instruction is either invalid or has no movement.
///
/// A motion is a namespace (i.e. non mode-specific set of commands), which represents
/// movements. These are useful for commands which takes a motion as post-parameter, such as d.
/// d deletes the text given by the motion following. Other commands can make use of motions,
/// using this method.<|fim▁hole|> match cmd.key {
Char('h') => Some(self.left(n.d())),
Char('l') => Some(self.right(n.d())),
Char('j') => Some(self.down(n.d())),
Char('k') => Some(self.up(n.d())),
Char('g') => Some((0, n.or(1) - 1)),
Char('G') => Some((0, self.text.len() - 1)),
Char('L') => Some(self.ln_end()),
Char('H') => Some((0, self.y())),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(o)
} else {
None
}
},
Char(c) => {
self.status_bar.msg = format!("Motion not defined: '{}'", c);
self.redraw_status_bar();
None
},
_ => {
self.status_bar.msg = format!("Motion not defined");
None
},
}
}
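	// Example (illustrative): an instruction with count 3 and key 'j', i.e.
	// "3j", reaches the `Char('j')` arm above and yields `self.down(3)` --
	// the coordinate three lines below the current one.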
	/// Like to_motion(), but not bounded to the text. It therefore returns an isize, which in
	/// some cases is a position out of bounds. This is useful when commands want to measure
	/// the relative movement rather than the resulting position.
pub fn to_motion_unbounded(&mut self, Inst(n, cmd): Inst) -> Option<(isize, isize)> {
use super::Key::*;
match cmd.key {
Char('h') => Some(self.left_unbounded(n.d())),
Char('l') => Some(self.right_unbounded(n.d())),
Char('j') => Some(self.down_unbounded(n.d())),
Char('k') => Some(self.up_unbounded(n.d())),
Char('g') => Some((0, n.or(1) as isize - 1)),
Char('G') => Some((0, self.text.len() as isize - 1)),
Char('L') => Some(to_signed_pos(self.ln_end())),
Char('H') => Some((0, self.y() as isize)),
Char('t') => {
let ch = self.get_char();
if let Some(o) = self.next_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
Char('f') => {
let ch = self.get_char();
if let Some(o) = self.previous_ocur(ch, n.d()) {
Some(to_signed_pos(o))
} else {
None
}
},
_ => None,
}
}
}<|fim▁end|> | pub fn to_motion(&mut self, Inst(n, cmd): Inst) -> Option<(usize, usize)> {
use super::Key::*; |
<|file_name|>test-user-agent.js<|end_file_name|><|fim▁begin|>var test = require('tape');
var url = require('url');
var curli = require('../');
var testServer = require('./server.js');
var buildUAString = require('../lib/util').buildUAString;
test('Default user agent being set', function(t) {
var server = testServer.createServer();
var ua = buildUAString();
server.listen(0, function() {
var port = server.address().port;
var host = '//localhost:' + port;
var href = 'http:' + host + '/';
server.on('/', function(req, res) {<|fim▁hole|> res.end();
});
curli(href, function(err, headers) {
t.ok(headers, 'Headers sent');
t.error(err, 'Shouldn\'t error');
server.close();
t.end();
});
});
});
test('Custom user agent', function(t) {
var server = testServer.createServer();
server.listen(0, function() {
var port = server.address().port;
var host = '//localhost:' + port;
var href = 'http:' + host + '/';
var options = url.parse(href);
options.headers = {
'User-Agent': 'Node'
};
var ua = options.headers['User-Agent'];
server.on('/', function(req, res) {
t.equal(req.headers['user-agent'], ua, 'Custom user agent set to "' + ua + '"');
res.writeHead(200);
res.end();
});
curli(options, function(err, headers) {
t.ok(headers, 'Headers sent');
t.error(err, 'Shouldn\'t error');
server.close();
t.end();
});
});
});
test('Custom user agent, funky header', function(t) {
var server = testServer.createServer();
server.listen(0, function() {
var port = server.address().port;
var host = '//localhost:' + port;
var href = 'http:' + host + '/';
var options = url.parse(href);
options.headers = {
'UsER-AgeNt': 'kNode'
};
var ua = options.headers['UsER-AgeNt'];
server.on('/', function(req, res) {
t.equal(req.headers['user-agent'], ua, 'Custom user agent set to "' + ua + '"');
res.writeHead(200);
res.end();
});
curli(options, function(err, headers) {
t.ok(headers, 'Headers sent');
t.error(err, 'Shouldn\'t error');
server.close();
t.end();
});
});
});<|fim▁end|> | t.equal(req.headers['user-agent'], ua, 'Default user agent set to "' + ua + '"');
res.writeHead(200); |
<|file_name|>ui.py<|end_file_name|><|fim▁begin|># coding: utf-8
import http.server
import socketserver<|fim▁hole|>
httpd = socketserver.TCPServer(("0.0.0.0", PORT), Handler)
print("serving at port", PORT)
httpd.serve_forever()<|fim▁end|> |
PORT = 8000
Handler = http.server.SimpleHTTPRequestHandler |
<|file_name|>schema.js<|end_file_name|><|fim▁begin|>export class Schema {
constructor(){
}
<|fim▁hole|> let errorList = errors.reduce((errorListTemp, error, index)=>{
return `${errorListTemp}
${index+1}. ${error}`
}, '')
let errorMessage = `
Schema Error:
Path: ${pathName}
Errors: ${errorList}
`
return errorMessage;
}
};<|fim▁end|> | createErrorMessage(schemaErrorObj, root){
let {errors, path} = schemaErrorObj;
let pathName = path.replace('$root', root);
|
<|file_name|>bluebrain_data_io.py<|end_file_name|><|fim▁begin|># coding: utf-8
import os
import urllib
import numpy as np
import pickle
from Experiment import Experiment
ROOT_PATH = './full_dataset/article_4_data/grouped_ephys'
ZIPFILE_PATH = './full_dataset/article_4_data'
EXPM_PATH = './results/experiments/'
URL = 'http://microcircuits.epfl.ch/data/released_data/'
if not os.path.exists(EXPM_PATH):
os.makedirs(EXPM_PATH)
if not os.path.exists(ROOT_PATH):
print('It seems that the directory of the raw data does not exist. It is expected to be at: ' + ROOT_PATH)
if not os.path.exists(ZIPFILE_PATH):
print('It seems that the directory with the zip files does not exist. It is expected to be at: ' + ZIPFILE_PATH)
# ==============================================================================
# General io function
# ==============================================================================
def download_info_from_url(url):
"""
Download content from url and return it.
"""
r = urllib.request.urlopen(url)
data = r.read()
data = data.decode(encoding='UTF-8')
return data
def get_genetic_cell_infos(filepath):
"""
Downloads genetic information from cells in the directory at filepath.
"""
filelist = os.listdir(filepath)
raw_names = [name[0:-4] for name in filelist]
cell_names = []
for name in raw_names:<|fim▁hole|> for cell in cell_names:
url_complete = URL + cell + '.txt'
try:
infos[cell] = download_info_from_url(url_complete)
except Exception:
			continue
return infos
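# Example (illustrative): fetch the metadata for every cell found next to the
# zip files and peek at one entry:
#   infos = get_genetic_cell_infos(ZIPFILE_PATH)
#   print(list(infos)[0])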
def save_filtered_cell_infos(filtername, criterion1='SOM:1', criterion2='PV:0', criterion3='VIP:0'):
"""
Gets genetic information from all cells in ZIPFILE_PATH directory, filters them by the given
criterions and saves the filtered list with pickle.
"""
infos = get_genetic_cell_infos(ZIPFILE_PATH)
desired_cells = {}
for cell in infos.keys():
if criterion1 in infos[cell] and criterion2 in infos[cell] and criterion3 in infos[cell]:
desired_cells[cell] = infos[cell]
with open(filtername + '_infos.pkl', 'wb') as f:
pickle.dump(desired_cells, f)
def save_all_cell_infos(filepath):
"""
Saves genetic information from all cells in ZIPFILE_PATH directory in one list with pickle.
"""
infos = get_genetic_cell_infos(filepath)
with open('cell_infos_full.pkl', 'wb') as f:
pickle.dump(infos, f)
def open_filtered_cell_info_list(filtername):
"""
Opens the list that was saved with save_filtered_cell_infos with the given filtername.
"""
with open(filtername + '_infos.pkl', 'rb') as f:
filtered_list = pickle.load(f)
return filtered_list
def create_experiments_from_list(cells, cell_type, verbose=True):
"""
Creates Experiment objects for cells in cells, adds all existing traces and saves them.
Params:
- cells: List with cell names or dictionairy where the keys are the cell names.
"""
if type(cells) is dict:
cell_names = list(cells.keys())
else:
cell_names = cells
ncells = len(cell_names)
for i in range(ncells):
PATH = os.path.join(ROOT_PATH, cell_names[i])
animal_files = sorted(os.listdir(PATH))
ntraces = int(len(animal_files) / 2)
current_exp = Experiment('Cell_' + cell_names[i] + '_single_traces', cell_type=cell_type)
exp_merged_traces = Experiment('Cell_' + cell_names[i] + '_merged_idrest_traces', cell_type=cell_type)
nincluded_idrest_traces = 0
for j in np.arange(ntraces):
# files end with 'recordingType_recordingNumber.ibw'
file_split = str.split(animal_files[j][0:-4], '_')
file_identifier = file_split[-2] + '_' + file_split[-1] + '.ibw'
current_recording_type = file_split[-2]
# find indeces of matching files in folder (current file always comes first because it's always Ch0)
file_idc = [i for i, elem in enumerate(animal_files) if file_identifier in elem]
current_file = animal_files[file_idc[0]]
voltage_file = animal_files[file_idc[1]]
current_exp.add_trainingset_trace(os.path.join(PATH, voltage_file), 10 ** -3,
os.path.join(PATH, current_file), 10 ** -12, FILETYPE='Igor',
verbose=verbose)
tr = current_exp.trainingset_traces[j]
tr.recording_type = current_recording_type
tr.estimate_input_amp()
if current_recording_type == 'IDRest':
exp_merged_traces.add_trainingset_trace(os.path.join(PATH, voltage_file), 10 ** -3,
os.path.join(PATH, current_file), 10 ** -12, FILETYPE='Igor',
verbose=verbose)
				tr = exp_merged_traces.trainingset_traces[nincluded_idrest_traces]  # index the merged experiment, not the single-trace one
tr.recording_type = current_recording_type
tr.estimate_input_amp()
nincluded_idrest_traces += 1
		if len(exp_merged_traces.trainingset_traces) >= 3:
exp_merged_traces.mergeTrainingTraces()
exp_merged_traces.save(os.path.join(EXPM_PATH), verbose=verbose)
current_exp.save(os.path.join(EXPM_PATH), verbose=verbose)
def load_merged_traces_experiments_from_list(cells, verbose=True):
"""
Load experiments where IDRest traces have been merged.
This function will try to load an experiment with merged IDRest traces for all cells
in the list and just skip the ones for which it is not found. If no experiments were
found, None is returned.
Params:
- cells: List with cell names or dictionairy where the keys are the cell names.
See also:
load_single_traces_experiments_from_list()
"""
if type(cells) is dict:
cell_names = list(cells.keys())
else:
cell_names = cells
expms = []
for i in range(len(cell_names)):
current_expm_name = 'Experiment_Cell_' + cell_names[i] + '_merged_idrest_traces.pkl'
current_expm_path = os.path.join(EXPM_PATH, current_expm_name)
try:
current_expm = Experiment.load(current_expm_path, verbose=verbose)
expms.append(current_expm)
except:
pass
if not len(expms) == 0:
return expms
else:
return None
def load_single_traces_experiments_from_list(cells, verbose=True):
"""
Load experiments where traces have been added separately.
Params:
- cells: List with cell names or dictionairy where the keys are the cell names.
See also:
load_merged_traces_experiments_from_list()
"""
if type(cells) is dict:
cell_names = list(cells.keys())
else:
cell_names = cells
expms = []
for i in range(len(cell_names)):
current_expm_name = 'Experiment_Cell_' + cell_names[i] + '_single_traces.pkl'
current_expm_path = os.path.join(EXPM_PATH, current_expm_name)
try:
current_expm = Experiment.load(current_expm_path, verbose=verbose)
expms.append(current_expm)
except:
pass
if not len(expms) == 0:
return expms
else:
return None
# ==============================================================================
# From here on it's interneuron-specific functions
# ==============================================================================
def create_interneuron_specific_experiments(verbose=True):
"""
Filters cell infos for SOM, PV and VIP neurons, loads them and creates
Experiment objects.
"""
# create and save filtered info lists for SOM, PV and VIP neurons
save_filtered_cell_infos('som_cells', criterion1='SOM:1', criterion2='PV:0', criterion3='VIP:0')
save_filtered_cell_infos('pv_cells', criterion1='SOM:0', criterion2='PV:1', criterion3='VIP:0')
save_filtered_cell_infos('vip_cells', criterion1='SOM:0', criterion2='PV:0', criterion3='VIP:1')
# get saved lists
som_dict = open_filtered_cell_info_list('som_cells')
vip_dict = open_filtered_cell_info_list('vip_cells')
pv_dict = open_filtered_cell_info_list('pv_cells')
# create experiment objects
create_experiments_from_list(vip_dict, cell_type='vip', verbose=verbose)
create_experiments_from_list(som_dict, cell_type='som', verbose=verbose)
create_experiments_from_list(pv_dict, cell_type='pv', verbose=verbose)
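# Typical workflow (illustrative): build the Experiment files once, then load
# them per cell type for fitting:
#   create_interneuron_specific_experiments()
#   som_expms = get_som_expms(merged=True)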
def get_som_expms(merged=False, verbose=True):
som_dict = open_filtered_cell_info_list('som_cells')
if merged:
return load_merged_traces_experiments_from_list(som_dict, verbose=verbose)
else:
return load_single_traces_experiments_from_list(som_dict, verbose=verbose)
def get_pv_expms(merged=False, verbose=True):
pv_dict = open_filtered_cell_info_list('pv_cells')
if merged:
return load_merged_traces_experiments_from_list(pv_dict, verbose=verbose)
else:
return load_single_traces_experiments_from_list(pv_dict, verbose=verbose)
def get_vip_expms(merged=False, verbose=True):
vip_dict = open_filtered_cell_info_list('vip_cells')
if merged:
return load_merged_traces_experiments_from_list(vip_dict, verbose=verbose)
else:
return load_single_traces_experiments_from_list(vip_dict, verbose=verbose)<|fim▁end|> | # if name.rfind('ET') == -1:
cell_names.append(name)
infos = {} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""CactusBot."""
<|fim▁hole|>__all__ = ["__version__", "run"]<|fim▁end|> | from .cactus import run, __version__
|
<|file_name|>AKPeakingParametricEqualizerFilterDSPKernel.hpp<|end_file_name|><|fim▁begin|>//
// AKPeakingParametricEqualizerFilterDSPKernel.hpp
// AudioKit
//
// Created by Aurelius Prochazka, revision history on Github.
// Copyright (c) 2016 Aurelius Prochazka. All rights reserved.
//
#ifndef AKPeakingParametricEqualizerFilterDSPKernel_hpp
#define AKPeakingParametricEqualizerFilterDSPKernel_hpp
#import "DSPKernel.hpp"
#import "ParameterRamper.hpp"
#import <AudioKit/AudioKit-Swift.h>
extern "C" {
#include "soundpipe.h"
}
enum {
centerFrequencyAddress = 0,
gainAddress = 1,
qAddress = 2
};
class AKPeakingParametricEqualizerFilterDSPKernel : public DSPKernel {
public:
// MARK: Member Functions
AKPeakingParametricEqualizerFilterDSPKernel() {}
void init(int channelCount, double inSampleRate) {
channels = channelCount;
sampleRate = float(inSampleRate);
sp_create(&sp);
sp->sr = sampleRate;
sp->nchan = channels;
sp_pareq_create(&pareq);
sp_pareq_init(sp, pareq);
pareq->fc = 1000;
pareq->v = 1.0;
pareq->q = 0.707;
pareq->mode = 0;
}
void start() {
started = true;
}
void stop() {
started = false;
}
void destroy() {
sp_pareq_destroy(&pareq);<|fim▁hole|>
void reset() {
resetted = true;
}
void setCenterFrequency(float fc) {
centerFrequency = fc;
centerFrequencyRamper.setImmediate(fc);
}
void setGain(float v) {
gain = v;
gainRamper.setImmediate(v);
}
void setQ(float q) {
		this->q = q; // assign to the member; the parameter shadows it
qRamper.setImmediate(q);
}
void setParameter(AUParameterAddress address, AUValue value) {
switch (address) {
case centerFrequencyAddress:
centerFrequencyRamper.setUIValue(clamp(value, (float)12.0, (float)20000.0));
break;
case gainAddress:
gainRamper.setUIValue(clamp(value, (float)0.0, (float)10.0));
break;
case qAddress:
qRamper.setUIValue(clamp(value, (float)0.0, (float)2.0));
break;
}
}
AUValue getParameter(AUParameterAddress address) {
switch (address) {
case centerFrequencyAddress:
return centerFrequencyRamper.getUIValue();
case gainAddress:
return gainRamper.getUIValue();
case qAddress:
return qRamper.getUIValue();
default: return 0.0f;
}
}
void startRamp(AUParameterAddress address, AUValue value, AUAudioFrameCount duration) override {
switch (address) {
case centerFrequencyAddress:
centerFrequencyRamper.startRamp(clamp(value, (float)12.0, (float)20000.0), duration);
break;
case gainAddress:
gainRamper.startRamp(clamp(value, (float)0.0, (float)10.0), duration);
break;
case qAddress:
qRamper.startRamp(clamp(value, (float)0.0, (float)2.0), duration);
break;
}
}
void setBuffers(AudioBufferList *inBufferList, AudioBufferList *outBufferList) {
inBufferListPtr = inBufferList;
outBufferListPtr = outBufferList;
}
void process(AUAudioFrameCount frameCount, AUAudioFrameCount bufferOffset) override {
for (int frameIndex = 0; frameIndex < frameCount; ++frameIndex) {
int frameOffset = int(frameIndex + bufferOffset);
centerFrequency = centerFrequencyRamper.getAndStep();
pareq->fc = (float)centerFrequency;
gain = gainRamper.getAndStep();
pareq->v = (float)gain;
q = qRamper.getAndStep();
pareq->q = (float)q;
for (int channel = 0; channel < channels; ++channel) {
float *in = (float *)inBufferListPtr->mBuffers[channel].mData + frameOffset;
float *out = (float *)outBufferListPtr->mBuffers[channel].mData + frameOffset;
if (started) {
sp_pareq_compute(sp, pareq, in, out);
} else {
*out = *in;
}
}
}
}
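	// Illustrative host-side update (a sketch): ramp the center frequency
	// toward 2 kHz over 4410 samples (0.1 s at 44.1 kHz):
	//   kernel.startRamp(centerFrequencyAddress, 2000.0f, 4410);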
// MARK: Member Variables
private:
int channels = AKSettings.numberOfChannels;
float sampleRate = AKSettings.sampleRate;
AudioBufferList *inBufferListPtr = nullptr;
AudioBufferList *outBufferListPtr = nullptr;
sp_data *sp;
sp_pareq *pareq;
float centerFrequency = 1000;
float gain = 1.0;
float q = 0.707;
public:
bool started = true;
bool resetted = false;
ParameterRamper centerFrequencyRamper = 1000;
ParameterRamper gainRamper = 1.0;
ParameterRamper qRamper = 0.707;
};
#endif /* AKPeakingParametricEqualizerFilterDSPKernel_hpp */<|fim▁end|> | sp_destroy(&sp);
} |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Simple time handling.
//!
//! # Usage
//!
//! This crate is [on crates.io](https://crates.io/crates/time) and can be
//! used by adding `time` to the dependencies in your project's `Cargo.toml`.
//!
//! ```toml
//! [dependencies]
//! time = "0.1"
//! ```
//!
//! And this in your crate root:
//!
//! ```rust
//! extern crate time;
//! ```
//!
//! This crate uses the same syntax for format strings as the [strftime()]
//! (http://man7.org/linux/man-pages/man3/strftime.3.html) function from the C
//! standard library.
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/time/")]
#![allow(trivial_numeric_casts)]
#![cfg_attr(test, deny(warnings))]
#[cfg(unix)] extern crate libc;
#[cfg(windows)] extern crate kernel32;
#[cfg(windows)] extern crate winapi;
#[cfg(feature = "rustc-serialize")] extern crate rustc_serialize;
#[cfg(test)] #[macro_use] extern crate log;
#[cfg(all(windows, test))] extern crate advapi32;
use std::cmp::Ordering;
use std::error::Error;
use std::fmt;
use std::ops::{Add, Sub};
pub use duration::Duration;
use self::ParseError::{InvalidDay, InvalidDayOfMonth, InvalidDayOfWeek,
InvalidDayOfYear, InvalidFormatSpecifier, InvalidHour,
InvalidMinute, InvalidMonth, InvalidSecond, InvalidTime,
InvalidYear, InvalidZoneOffset, InvalidSecondsSinceEpoch,
MissingFormatConverter, UnexpectedCharacter};
pub use parse::strptime;
mod display;
mod duration;
mod parse;
mod sys;
static NSEC_PER_SEC: i32 = 1_000_000_000;
/// A record specifying a time value in seconds and nanoseconds, where
/// nanoseconds represent the offset from the given second.
///
/// For example a timespec of 1.2 seconds after the beginning of the epoch would
/// be represented as {sec: 1, nsec: 200000000}.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
#[cfg_attr(feature = "rustc-serialize", derive(RustcEncodable, RustcDecodable))]
pub struct Timespec { pub sec: i64, pub nsec: i32 }
/*
* Timespec assumes that pre-epoch Timespecs have negative sec and positive
* nsec fields. Darwin's and Linux's struct timespec functions handle pre-
* epoch timestamps using a "two steps back, one step forward" representation,
* though the man pages do not actually document this. For example, the time
* -1.2 seconds before the epoch is represented by `Timespec { sec: -2_i64,
* nsec: 800_000_000 }`.
*/
impl Timespec {
pub fn new(sec: i64, nsec: i32) -> Timespec {
assert!(nsec >= 0 && nsec < NSEC_PER_SEC);
Timespec { sec: sec, nsec: nsec }
}
}
impl Add<Duration> for Timespec {
type Output = Timespec;
fn add(self, other: Duration) -> Timespec {
let d_sec = other.num_seconds();
// It is safe to unwrap the nanoseconds, because there cannot be
// more than one second left, which fits in i64 and in i32.
let d_nsec = (other - Duration::seconds(d_sec))
.num_nanoseconds().unwrap() as i32;
let mut sec = self.sec + d_sec;
let mut nsec = self.nsec + d_nsec;
if nsec >= NSEC_PER_SEC {
nsec -= NSEC_PER_SEC;
sec += 1;
} else if nsec < 0 {
nsec += NSEC_PER_SEC;
sec -= 1;
}
Timespec::new(sec, nsec)
}
}
impl Sub<Duration> for Timespec {
type Output = Timespec;
fn sub(self, other: Duration) -> Timespec {
let d_sec = other.num_seconds();
// It is safe to unwrap the nanoseconds, because there cannot be
// more than one second left, which fits in i64 and in i32.
let d_nsec = (other - Duration::seconds(d_sec))
.num_nanoseconds().unwrap() as i32;
let mut sec = self.sec - d_sec;
let mut nsec = self.nsec - d_nsec;
if nsec >= NSEC_PER_SEC {
nsec -= NSEC_PER_SEC;
sec += 1;
} else if nsec < 0 {
nsec += NSEC_PER_SEC;
sec -= 1;
}
Timespec::new(sec, nsec)
}
}
impl Sub<Timespec> for Timespec {
type Output = Duration;
fn sub(self, other: Timespec) -> Duration {
let sec = self.sec - other.sec;
let nsec = self.nsec - other.nsec;
Duration::seconds(sec) + Duration::nanoseconds(nsec as i64)
}
}
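// Example (illustrative): Timespec arithmetic via the impls above.
//
//     let t = Timespec::new(10, 500_000_000);
//     assert_eq!(t + Duration::milliseconds(600), Timespec::new(11, 100_000_000));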
/**
* Returns the current time as a `timespec` containing the seconds and
* nanoseconds since 1970-01-01T00:00:00Z.
*/
pub fn get_time() -> Timespec {
let (sec, nsec) = sys::get_time();
Timespec::new(sec, nsec)
}
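// Example (illustrative):
//
//     let now = get_time();
//     println!("{}.{:09} s since the epoch", now.sec, now.nsec);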
/**
* Returns the current value of a high-resolution performance counter
* in nanoseconds since an unspecified epoch.
*/
pub fn precise_time_ns() -> u64 {
sys::get_precise_ns()
}
/**
* Returns the current value of a high-resolution performance counter
* in seconds since an unspecified epoch.
*/
pub fn precise_time_s() -> f64 {
return (precise_time_ns() as f64) / 1000000000.;
}
/// An opaque structure representing a moment in time.
///
/// The only operation that can be performed on a `PreciseTime` is the
/// calculation of the `Duration` of time that lies between them.
///
/// # Examples
///
/// Repeatedly call a function for 1 second:
///
/// ```rust
/// use time::{Duration, PreciseTime};
/// # fn do_some_work() {}
///
/// let start = PreciseTime::now();
///
/// while start.to(PreciseTime::now()) < Duration::seconds(1) {
/// do_some_work();
/// }
/// ```
#[derive(Copy, Clone)]
pub struct PreciseTime(u64);
impl PreciseTime {
/// Returns a `PreciseTime` representing the current moment in time.
pub fn now() -> PreciseTime {
PreciseTime(precise_time_ns())
}
/// Returns a `Duration` representing the span of time from the value of
/// `self` to the value of `later`.
///
/// # Notes
///
/// If `later` represents a time before `self`, the result of this method
/// is unspecified.
///
/// If `later` represents a time more than 293 years after `self`, the
/// result of this method is unspecified.
#[inline]
pub fn to(&self, later: PreciseTime) -> Duration {
// NB: even if later is less than self due to overflow, this will work
// since the subtraction will underflow properly as well.
//
// We could deal with the overflow when casting to an i64, but all that
// gets us is the ability to handle intervals of up to 584 years, which
// seems not very useful :)
Duration::nanoseconds((later.0 - self.0) as i64)
}
}
/// A structure representing a moment in time.
///
/// `SteadyTime`s are generated by a "steady" clock, that is, a clock which
/// never experiences discontinuous jumps and for which time always flows at
/// the same rate.
///
/// # Examples
///
/// Repeatedly call a function for 1 second:
///
/// ```rust
/// # use time::{Duration, SteadyTime};
/// # fn do_some_work() {}
/// let start = SteadyTime::now();
///
/// while SteadyTime::now() - start < Duration::seconds(1) {
/// do_some_work();
/// }
/// ```
#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Debug)]
pub struct SteadyTime(sys::SteadyTime);
impl SteadyTime {
/// Returns a `SteadyTime` representing the current moment in time.
pub fn now() -> SteadyTime {
SteadyTime(sys::SteadyTime::now())
}
}
impl fmt::Display for SteadyTime {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
// TODO: needs a display customization
fmt::Debug::fmt(self, fmt)
}
}
impl Sub for SteadyTime {
type Output = Duration;
fn sub(self, other: SteadyTime) -> Duration {
self.0 - other.0
}
}
impl Sub<Duration> for SteadyTime {
type Output = SteadyTime;
fn sub(self, other: Duration) -> SteadyTime {
SteadyTime(self.0 - other)
}
}
impl Add<Duration> for SteadyTime {
type Output = SteadyTime;
fn add(self, other: Duration) -> SteadyTime {
SteadyTime(self.0 + other)
}
}
#[cfg(not(windows))]
pub fn tzset() {
extern { fn tzset(); }
unsafe { tzset() }
}
#[cfg(windows)]
pub fn tzset() {}
/// Holds a calendar date and time broken down into its components (year, month,
/// day, and so on), also called a broken-down time value.
// FIXME: use c_int instead of i32?
#[repr(C)]
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
pub struct Tm {
/// Seconds after the minute - [0, 60]
pub tm_sec: i32,
/// Minutes after the hour - [0, 59]
pub tm_min: i32,
/// Hours after midnight - [0, 23]
pub tm_hour: i32,
/// Day of the month - [1, 31]
pub tm_mday: i32,
/// Months since January - [0, 11]
pub tm_mon: i32,
/// Years since 1900
pub tm_year: i32,
/// Days since Sunday - [0, 6]. 0 = Sunday, 1 = Monday, ..., 6 = Saturday.
pub tm_wday: i32,
/// Days since January 1 - [0, 365]
pub tm_yday: i32,
/// Daylight Saving Time flag.
///
/// This value is positive if Daylight Saving Time is in effect, zero if
/// Daylight Saving Time is not in effect, and negative if this information
/// is not available.
pub tm_isdst: i32,
/// Identifies the time zone that was used to compute this broken-down time
/// value, including any adjustment for Daylight Saving Time. This is the
/// number of seconds east of UTC. For example, for U.S. Pacific Daylight
/// Time, the value is -7*60*60 = -25200.
pub tm_utcoff: i32,
/// Nanoseconds after the second - [0, 10<sup>9</sup> - 1]
pub tm_nsec: i32,
}
impl Add<Duration> for Tm {
type Output = Tm;
/// The resulting Tm is in UTC.
// FIXME: The resulting Tm should have the same timezone as `self`;
// however, we need a function such as `at_tm(clock: Timespec, offset: i32)`
// for this.
fn add(self, other: Duration) -> Tm {
at_utc(self.to_timespec() + other)
}
}
impl Sub<Duration> for Tm {
type Output = Tm;
/// The resulting Tm is in UTC.
// FIXME: The resulting Tm should have the same timezone as `self`;
// however, we need a function such as `at_tm(clock: Timespec, offset: i32)`
// for this.
fn sub(self, other: Duration) -> Tm {
at_utc(self.to_timespec() - other)
}
}
impl Sub<Tm> for Tm {
type Output = Duration;
fn sub(self, other: Tm) -> Duration {
self.to_timespec() - other.to_timespec()
}
}
impl PartialOrd for Tm {
fn partial_cmp(&self, other: &Tm) -> Option<Ordering> {
self.to_timespec().partial_cmp(&other.to_timespec())
}
}
impl Ord for Tm {
fn cmp(&self, other: &Tm) -> Ordering {
self.to_timespec().cmp(&other.to_timespec())
}
}
pub fn empty_tm() -> Tm {
Tm {
tm_sec: 0,
tm_min: 0,
tm_hour: 0,
tm_mday: 0,
tm_mon: 0,
tm_year: 0,
tm_wday: 0,
tm_yday: 0,
tm_isdst: 0,
tm_utcoff: 0,
tm_nsec: 0,
}
}<|fim▁hole|> let mut tm = empty_tm();
sys::time_to_utc_tm(sec, &mut tm);
tm.tm_nsec = nsec;
tm
}
/// Returns the current time in UTC
pub fn now_utc() -> Tm {
at_utc(get_time())
}
/// Returns the specified time in the local timezone
pub fn at(clock: Timespec) -> Tm {
let Timespec { sec, nsec } = clock;
let mut tm = empty_tm();
sys::time_to_local_tm(sec, &mut tm);
tm.tm_nsec = nsec;
tm
}
/// Returns the current time in the local timezone
pub fn now() -> Tm {
at(get_time())
}
impl Tm {
	/// Convert time to seconds since January 1, 1970
pub fn to_timespec(&self) -> Timespec {
let sec = match self.tm_utcoff {
0 => sys::utc_tm_to_time(self),
_ => sys::local_tm_to_time(self)
};
Timespec::new(sec, self.tm_nsec)
}
/// Convert time to the local timezone
pub fn to_local(&self) -> Tm {
at(self.to_timespec())
}
	/// Convert time to UTC
pub fn to_utc(&self) -> Tm {
match self.tm_utcoff {
0 => *self,
_ => at_utc(self.to_timespec())
}
}
/**
* Returns a TmFmt that outputs according to the `asctime` format in ISO
* C, in the local timezone.
*
* Example: "Thu Jan 1 00:00:00 1970"
*/
pub fn ctime(&self) -> TmFmt {
TmFmt {
tm: self,
format: Fmt::Ctime,
}
}
/**
* Returns a TmFmt that outputs according to the `asctime` format in ISO
* C.
*
* Example: "Thu Jan 1 00:00:00 1970"
*/
pub fn asctime(&self) -> TmFmt {
TmFmt {
tm: self,
format: Fmt::Str("%c"),
}
}
/// Formats the time according to the format string.
pub fn strftime<'a>(&'a self, format: &'a str) -> Result<TmFmt<'a>, ParseError> {
validate_format(TmFmt {
tm: self,
format: Fmt::Str(format),
})
}
/**
* Returns a TmFmt that outputs according to RFC 822.
*
* local: "Thu, 22 Mar 2012 07:53:18 PST"
* utc: "Thu, 22 Mar 2012 14:53:18 GMT"
*/
pub fn rfc822(&self) -> TmFmt {
let fmt = if self.tm_utcoff == 0 {
"%a, %d %b %Y %T GMT"
} else {
"%a, %d %b %Y %T %Z"
};
TmFmt {
tm: self,
format: Fmt::Str(fmt),
}
}
/**
* Returns a TmFmt that outputs according to RFC 822 with Zulu time.
*
* local: "Thu, 22 Mar 2012 07:53:18 -0700"
* utc: "Thu, 22 Mar 2012 14:53:18 -0000"
*/
pub fn rfc822z(&self) -> TmFmt {
TmFmt {
tm: self,
format: Fmt::Str("%a, %d %b %Y %T %z"),
}
}
/**
* Returns a TmFmt that outputs according to RFC 3339. RFC 3339 is
* compatible with ISO 8601.
*
* local: "2012-02-22T07:53:18-07:00"
* utc: "2012-02-22T14:53:18Z"
*/
pub fn rfc3339<'a>(&'a self) -> TmFmt {
TmFmt {
tm: self,
format: Fmt::Rfc3339,
}
}
}
#[derive(Copy, PartialEq, Debug, Clone)]
pub enum ParseError {
InvalidSecond,
InvalidMinute,
InvalidHour,
InvalidDay,
InvalidMonth,
InvalidYear,
InvalidDayOfWeek,
InvalidDayOfMonth,
InvalidDayOfYear,
InvalidZoneOffset,
InvalidTime,
InvalidSecondsSinceEpoch,
MissingFormatConverter,
InvalidFormatSpecifier(char),
UnexpectedCharacter(char, char),
}
impl fmt::Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
InvalidFormatSpecifier(ch) => {
write!(f, "{}: %{}", self.description(), ch)
}
UnexpectedCharacter(a, b) => {
write!(f, "expected: `{}`, found: `{}`", a, b)
}
_ => write!(f, "{}", self.description())
}
}
}
impl Error for ParseError {
fn description(&self) -> &str {
match *self {
InvalidSecond => "Invalid second.",
InvalidMinute => "Invalid minute.",
InvalidHour => "Invalid hour.",
InvalidDay => "Invalid day.",
InvalidMonth => "Invalid month.",
InvalidYear => "Invalid year.",
InvalidDayOfWeek => "Invalid day of the week.",
InvalidDayOfMonth => "Invalid day of the month.",
InvalidDayOfYear => "Invalid day of the year.",
InvalidZoneOffset => "Invalid zone offset.",
InvalidTime => "Invalid time.",
InvalidSecondsSinceEpoch => "Invalid seconds since epoch.",
MissingFormatConverter => "missing format converter after `%`",
InvalidFormatSpecifier(..) => "invalid format specifier",
UnexpectedCharacter(..) => "Unexpected character.",
}
}
}
/// A wrapper around a `Tm` and format string that implements Display.
#[derive(Debug)]
pub struct TmFmt<'a> {
tm: &'a Tm,
format: Fmt<'a>
}
#[derive(Debug)]
enum Fmt<'a> {
Str(&'a str),
Rfc3339,
Ctime,
}
fn validate_format<'a>(fmt: TmFmt<'a>) -> Result<TmFmt<'a>, ParseError> {
match (fmt.tm.tm_wday, fmt.tm.tm_mon) {
(0...6, 0...11) => (),
(_wday, 0...11) => return Err(InvalidDayOfWeek),
(0...6, _mon) => return Err(InvalidMonth),
_ => return Err(InvalidDay)
}
match fmt.format {
Fmt::Str(ref s) => {
let mut chars = s.chars();
loop {
match chars.next() {
Some('%') => {
match chars.next() {
Some('A') | Some('a') | Some('B') | Some('b') |
Some('C') | Some('c') | Some('D') | Some('d') |
Some('e') | Some('F') | Some('f') | Some('G') |
Some('g') | Some('H') | Some('h') | Some('I') |
Some('j') | Some('k') | Some('l') | Some('M') |
Some('m') | Some('n') | Some('P') | Some('p') |
Some('R') | Some('r') | Some('S') | Some('s') |
Some('T') | Some('t') | Some('U') | Some('u') |
Some('V') | Some('v') | Some('W') | Some('w') |
Some('X') | Some('x') | Some('Y') | Some('y') |
Some('Z') | Some('z') | Some('+') | Some('%') => (),
Some(c) => return Err(InvalidFormatSpecifier(c)),
None => return Err(MissingFormatConverter),
}
},
None => break,
_ => ()
}
}
},
_ => ()
}
Ok(fmt)
}
/// Formats the time according to the format string.
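///
/// A minimal usage sketch (values taken from this module's own tests:
/// 1234567890 seconds is 2009-02-13 UTC):
///
/// ```ignore
/// let tm = at_utc(Timespec::new(1234567890, 0));
/// assert_eq!(strftime("%Y-%m-%d", &tm).unwrap(), "2009-02-13");
/// ```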
pub fn strftime(format: &str, tm: &Tm) -> Result<String, ParseError> {
tm.strftime(format).map(|fmt| fmt.to_string())
}
#[cfg(test)]
mod tests {
use super::{Timespec, get_time, precise_time_ns, precise_time_s,
at_utc, at, strptime, PreciseTime, SteadyTime, ParseError, Duration};
use super::ParseError::{InvalidTime, InvalidYear, MissingFormatConverter,
InvalidFormatSpecifier};
use std::sync::{Once, ONCE_INIT, Mutex, MutexGuard, LockResult};
use std::mem;
struct TzReset {
_tzreset: ::sys::TzReset,
_lock: LockResult<MutexGuard<'static, ()>>,
}
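    // Serializes tests that change the process-wide time zone: a leaked global
    // mutex is held for the test's duration, and the inner `TzReset` restores
    // the previous zone when dropped.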
fn set_time_zone() -> TzReset {
static mut LOCK: *mut Mutex<()> = 0 as *mut _;
static INIT: Once = ONCE_INIT;
unsafe {
INIT.call_once(|| {
LOCK = mem::transmute(Box::new(Mutex::new(())));
});
TzReset {
_lock: (*LOCK).lock(),
_tzreset: ::sys::set_los_angeles_time_zone(),
}
}
}
#[test]
fn test_get_time() {
static SOME_RECENT_DATE: i64 = 1325376000i64; // 2012-01-01T00:00:00Z
static SOME_FUTURE_DATE: i64 = 1577836800i64; // 2020-01-01T00:00:00Z
let tv1 = get_time();
debug!("tv1={} sec + {} nsec", tv1.sec, tv1.nsec);
assert!(tv1.sec > SOME_RECENT_DATE);
assert!(tv1.nsec < 1000000000i32);
let tv2 = get_time();
debug!("tv2={} sec + {} nsec", tv2.sec, tv2.nsec);
assert!(tv2.sec >= tv1.sec);
assert!(tv2.sec < SOME_FUTURE_DATE);
assert!(tv2.nsec < 1000000000i32);
if tv2.sec == tv1.sec {
assert!(tv2.nsec >= tv1.nsec);
}
}
#[test]
fn test_precise_time() {
let s0 = precise_time_s();
debug!("s0={} sec", s0);
assert!(s0 > 0.);
let ns0 = precise_time_ns();
let ns1 = precise_time_ns();
debug!("ns0={} ns", ns0);
debug!("ns1={} ns", ns1);
assert!(ns1 >= ns0);
let ns2 = precise_time_ns();
debug!("ns2={} ns", ns2);
assert!(ns2 >= ns1);
}
#[test]
fn test_precise_time_to() {
let t0 = PreciseTime(1000);
let t1 = PreciseTime(1023);
assert_eq!(Duration::nanoseconds(23), t0.to(t1));
}
#[test]
fn test_at_utc() {
let _reset = set_time_zone();
let time = Timespec::new(1234567890, 54321);
let utc = at_utc(time);
assert_eq!(utc.tm_sec, 30);
assert_eq!(utc.tm_min, 31);
assert_eq!(utc.tm_hour, 23);
assert_eq!(utc.tm_mday, 13);
assert_eq!(utc.tm_mon, 1);
assert_eq!(utc.tm_year, 109);
assert_eq!(utc.tm_wday, 5);
assert_eq!(utc.tm_yday, 43);
assert_eq!(utc.tm_isdst, 0);
assert_eq!(utc.tm_utcoff, 0);
assert_eq!(utc.tm_nsec, 54321);
}
#[test]
fn test_at() {
let _reset = set_time_zone();
let time = Timespec::new(1234567890, 54321);
let local = at(time);
debug!("time_at: {:?}", local);
assert_eq!(local.tm_sec, 30);
assert_eq!(local.tm_min, 31);
assert_eq!(local.tm_hour, 15);
assert_eq!(local.tm_mday, 13);
assert_eq!(local.tm_mon, 1);
assert_eq!(local.tm_year, 109);
assert_eq!(local.tm_wday, 5);
assert_eq!(local.tm_yday, 43);
assert_eq!(local.tm_isdst, 0);
assert_eq!(local.tm_utcoff, -28800);
assert_eq!(local.tm_nsec, 54321);
}
#[test]
fn test_to_timespec() {
let _reset = set_time_zone();
let time = Timespec::new(1234567890, 54321);
let utc = at_utc(time);
assert_eq!(utc.to_timespec(), time);
assert_eq!(utc.to_local().to_timespec(), time);
}
#[test]
fn test_conversions() {
let _reset = set_time_zone();
let time = Timespec::new(1234567890, 54321);
let utc = at_utc(time);
let local = at(time);
assert!(local.to_local() == local);
assert!(local.to_utc() == utc);
assert!(local.to_utc().to_local() == local);
assert!(utc.to_utc() == utc);
assert!(utc.to_local() == local);
assert!(utc.to_local().to_utc() == utc);
}
#[test]
fn test_strptime() {
let _reset = set_time_zone();
match strptime("", "") {
Ok(ref tm) => {
assert!(tm.tm_sec == 0);
assert!(tm.tm_min == 0);
assert!(tm.tm_hour == 0);
assert!(tm.tm_mday == 0);
assert!(tm.tm_mon == 0);
assert!(tm.tm_year == 0);
assert!(tm.tm_wday == 0);
assert!(tm.tm_isdst == 0);
assert!(tm.tm_utcoff == 0);
assert!(tm.tm_nsec == 0);
}
Err(_) => ()
}
let format = "%a %b %e %T.%f %Y";
assert_eq!(strptime("", format), Err(ParseError::InvalidDay));
assert_eq!(strptime("Fri Feb 13 15:31:30", format),
Err(InvalidTime));
match strptime("Fri Feb 13 15:31:30.01234 2009", format) {
Err(e) => panic!("{}", e),
Ok(ref tm) => {
assert_eq!(tm.tm_sec, 30);
assert_eq!(tm.tm_min, 31);
assert_eq!(tm.tm_hour, 15);
assert_eq!(tm.tm_mday, 13);
assert_eq!(tm.tm_mon, 1);
assert_eq!(tm.tm_year, 109);
assert_eq!(tm.tm_wday, 5);
assert_eq!(tm.tm_yday, 0);
assert_eq!(tm.tm_isdst, 0);
assert_eq!(tm.tm_utcoff, 0);
assert_eq!(tm.tm_nsec, 12340000);
}
}
fn test(s: &str, format: &str) -> bool {
match strptime(s, format) {
Ok(tm) => {
tm.strftime(format).unwrap().to_string() == s.to_string()
},
Err(e) => panic!("{:?}, s={:?}, format={:?}", e, s, format)
}
}
fn test_oneway(s : &str, format : &str) -> bool {
match strptime(s, format) {
Ok(_) => {
// oneway tests are used when reformatting the parsed Tm
// back into a string can generate a different string
// from the original (i.e. leading zeroes)
true
},
Err(e) => panic!("{:?}, s={:?}, format={:?}", e, s, format)
}
}
let days = [
"Sunday".to_string(),
"Monday".to_string(),
"Tuesday".to_string(),
"Wednesday".to_string(),
"Thursday".to_string(),
"Friday".to_string(),
"Saturday".to_string()
];
for day in days.iter() {
assert!(test(&day, "%A"));
}
let days = [
"Sun".to_string(),
"Mon".to_string(),
"Tue".to_string(),
"Wed".to_string(),
"Thu".to_string(),
"Fri".to_string(),
"Sat".to_string()
];
for day in days.iter() {
assert!(test(&day, "%a"));
}
let months = [
"January".to_string(),
"February".to_string(),
"March".to_string(),
"April".to_string(),
"May".to_string(),
"June".to_string(),
"July".to_string(),
"August".to_string(),
"September".to_string(),
"October".to_string(),
"November".to_string(),
"December".to_string()
];
for day in months.iter() {
assert!(test(&day, "%B"));
}
let months = [
"Jan".to_string(),
"Feb".to_string(),
"Mar".to_string(),
"Apr".to_string(),
"May".to_string(),
"Jun".to_string(),
"Jul".to_string(),
"Aug".to_string(),
"Sep".to_string(),
"Oct".to_string(),
"Nov".to_string(),
"Dec".to_string()
];
for day in months.iter() {
assert!(test(&day, "%b"));
}
assert!(test("19", "%C"));
assert!(test("Fri Feb 3 23:31:30 2009", "%c"));
assert!(test("Fri Feb 13 23:31:30 2009", "%c"));
assert!(test("02/13/09", "%D"));
assert!(test("03", "%d"));
assert!(test("13", "%d"));
assert!(test(" 3", "%e"));
assert!(test("13", "%e"));
assert!(test("2009-02-13", "%F"));
assert!(test("03", "%H"));
assert!(test("13", "%H"));
assert!(test("03", "%I")); // FIXME (#2350): flesh out
assert!(test("11", "%I")); // FIXME (#2350): flesh out
assert!(test("044", "%j"));
assert!(test(" 3", "%k"));
assert!(test("13", "%k"));
assert!(test(" 1", "%l"));
assert!(test("11", "%l"));
assert!(test("03", "%M"));
assert!(test("13", "%M"));
assert!(test("\n", "%n"));
assert!(test("am", "%P"));
assert!(test("pm", "%P"));
assert!(test("AM", "%p"));
assert!(test("PM", "%p"));
assert!(test("23:31", "%R"));
assert!(test("11:31:30 AM", "%r"));
assert!(test("11:31:30 PM", "%r"));
assert!(test("03", "%S"));
assert!(test("13", "%S"));
assert!(test("15:31:30", "%T"));
assert!(test("\t", "%t"));
assert!(test("1", "%u"));
assert!(test("7", "%u"));
assert!(test("13-Feb-2009", "%v"));
assert!(test("0", "%w"));
assert!(test("6", "%w"));
assert!(test("2009", "%Y"));
assert!(test("09", "%y"));
assert!(test_oneway("3", "%d"));
assert!(test_oneway("3", "%H"));
assert!(test_oneway("3", "%e"));
assert!(test_oneway("3", "%M"));
assert!(test_oneway("3", "%S"));
assert!(strptime("-0000", "%z").unwrap().tm_utcoff == 0);
assert!(strptime("-00:00", "%z").unwrap().tm_utcoff == 0);
assert!(strptime("Z", "%z").unwrap().tm_utcoff == 0);
assert_eq!(-28800, strptime("-0800", "%z").unwrap().tm_utcoff);
assert_eq!(-28800, strptime("-08:00", "%z").unwrap().tm_utcoff);
assert_eq!(28800, strptime("+0800", "%z").unwrap().tm_utcoff);
assert_eq!(28800, strptime("+08:00", "%z").unwrap().tm_utcoff);
assert_eq!(5400, strptime("+0130", "%z").unwrap().tm_utcoff);
assert_eq!(5400, strptime("+01:30", "%z").unwrap().tm_utcoff);
assert!(test("%", "%%"));
// Test for #7256
assert_eq!(strptime("360", "%Y-%m-%d"), Err(InvalidYear));
// Test for epoch seconds parsing
{
assert!(test("1428035610", "%s"));
let tm = strptime("1428035610", "%s").unwrap();
assert_eq!(tm.tm_utcoff, 0);
assert_eq!(tm.tm_isdst, 0);
assert_eq!(tm.tm_yday, 92);
assert_eq!(tm.tm_wday, 5);
assert_eq!(tm.tm_year, 115);
assert_eq!(tm.tm_mon, 3);
assert_eq!(tm.tm_mday, 3);
assert_eq!(tm.tm_hour, 4);
}
}
#[test]
fn test_asctime() {
let _reset = set_time_zone();
let time = Timespec::new(1234567890, 54321);
let utc = at_utc(time);
let local = at(time);
debug!("test_ctime: {} {}", utc.asctime(), local.asctime());
assert_eq!(utc.asctime().to_string(), "Fri Feb 13 23:31:30 2009".to_string());
assert_eq!(local.asctime().to_string(), "Fri Feb 13 15:31:30 2009".to_string());
}
#[test]
fn test_ctime() {
let _reset = set_time_zone();
let time = Timespec::new(1234567890, 54321);
let utc = at_utc(time);
let local = at(time);
debug!("test_ctime: {} {}", utc.ctime(), local.ctime());
assert_eq!(utc.ctime().to_string(), "Fri Feb 13 15:31:30 2009".to_string());
assert_eq!(local.ctime().to_string(), "Fri Feb 13 15:31:30 2009".to_string());
}
#[test]
fn test_strftime() {
let _reset = set_time_zone();
let time = Timespec::new(1234567890, 54321);
let utc = at_utc(time);
let local = at(time);
assert_eq!(local.strftime("").unwrap().to_string(), "".to_string());
assert_eq!(local.strftime("%A").unwrap().to_string(), "Friday".to_string());
assert_eq!(local.strftime("%a").unwrap().to_string(), "Fri".to_string());
assert_eq!(local.strftime("%B").unwrap().to_string(), "February".to_string());
assert_eq!(local.strftime("%b").unwrap().to_string(), "Feb".to_string());
assert_eq!(local.strftime("%C").unwrap().to_string(), "20".to_string());
assert_eq!(local.strftime("%c").unwrap().to_string(),
"Fri Feb 13 15:31:30 2009".to_string());
assert_eq!(local.strftime("%D").unwrap().to_string(), "02/13/09".to_string());
assert_eq!(local.strftime("%d").unwrap().to_string(), "13".to_string());
assert_eq!(local.strftime("%e").unwrap().to_string(), "13".to_string());
assert_eq!(local.strftime("%F").unwrap().to_string(), "2009-02-13".to_string());
assert_eq!(local.strftime("%f").unwrap().to_string(), "000054321".to_string());
assert_eq!(local.strftime("%G").unwrap().to_string(), "2009".to_string());
assert_eq!(local.strftime("%g").unwrap().to_string(), "09".to_string());
assert_eq!(local.strftime("%H").unwrap().to_string(), "15".to_string());
assert_eq!(local.strftime("%h").unwrap().to_string(), "Feb".to_string());
assert_eq!(local.strftime("%I").unwrap().to_string(), "03".to_string());
assert_eq!(local.strftime("%j").unwrap().to_string(), "044".to_string());
assert_eq!(local.strftime("%k").unwrap().to_string(), "15".to_string());
assert_eq!(local.strftime("%l").unwrap().to_string(), " 3".to_string());
assert_eq!(local.strftime("%M").unwrap().to_string(), "31".to_string());
assert_eq!(local.strftime("%m").unwrap().to_string(), "02".to_string());
assert_eq!(local.strftime("%n").unwrap().to_string(), "\n".to_string());
assert_eq!(local.strftime("%P").unwrap().to_string(), "pm".to_string());
assert_eq!(local.strftime("%p").unwrap().to_string(), "PM".to_string());
assert_eq!(local.strftime("%R").unwrap().to_string(), "15:31".to_string());
assert_eq!(local.strftime("%r").unwrap().to_string(), "03:31:30 PM".to_string());
assert_eq!(local.strftime("%S").unwrap().to_string(), "30".to_string());
assert_eq!(local.strftime("%s").unwrap().to_string(), "1234567890".to_string());
assert_eq!(local.strftime("%T").unwrap().to_string(), "15:31:30".to_string());
assert_eq!(local.strftime("%t").unwrap().to_string(), "\t".to_string());
assert_eq!(local.strftime("%U").unwrap().to_string(), "06".to_string());
assert_eq!(local.strftime("%u").unwrap().to_string(), "5".to_string());
assert_eq!(local.strftime("%V").unwrap().to_string(), "07".to_string());
assert_eq!(local.strftime("%v").unwrap().to_string(), "13-Feb-2009".to_string());
assert_eq!(local.strftime("%W").unwrap().to_string(), "06".to_string());
assert_eq!(local.strftime("%w").unwrap().to_string(), "5".to_string());
// FIXME (#2350): support locale
assert_eq!(local.strftime("%X").unwrap().to_string(), "15:31:30".to_string());
// FIXME (#2350): support locale
assert_eq!(local.strftime("%x").unwrap().to_string(), "02/13/09".to_string());
assert_eq!(local.strftime("%Y").unwrap().to_string(), "2009".to_string());
assert_eq!(local.strftime("%y").unwrap().to_string(), "09".to_string());
// FIXME (#2350): support locale
assert_eq!(local.strftime("%Z").unwrap().to_string(), "".to_string());
assert_eq!(local.strftime("%z").unwrap().to_string(), "-0800".to_string());
assert_eq!(local.strftime("%+").unwrap().to_string(),
"2009-02-13T15:31:30-08:00".to_string());
assert_eq!(local.strftime("%%").unwrap().to_string(), "%".to_string());
let invalid_specifiers = ["%E", "%J", "%K", "%L", "%N", "%O", "%o", "%Q", "%q"];
for &sp in invalid_specifiers.iter() {
assert_eq!(local.strftime(sp).unwrap_err(),
InvalidFormatSpecifier(sp[1..].chars().next().unwrap()));
}
assert_eq!(local.strftime("%").unwrap_err(), MissingFormatConverter);
assert_eq!(local.strftime("%A %").unwrap_err(), MissingFormatConverter);
assert_eq!(local.asctime().to_string(), "Fri Feb 13 15:31:30 2009".to_string());
assert_eq!(local.ctime().to_string(), "Fri Feb 13 15:31:30 2009".to_string());
assert_eq!(local.rfc822z().to_string(), "Fri, 13 Feb 2009 15:31:30 -0800".to_string());
assert_eq!(local.rfc3339().to_string(), "2009-02-13T15:31:30-08:00".to_string());
assert_eq!(utc.asctime().to_string(), "Fri Feb 13 23:31:30 2009".to_string());
assert_eq!(utc.ctime().to_string(), "Fri Feb 13 15:31:30 2009".to_string());
assert_eq!(utc.rfc822().to_string(), "Fri, 13 Feb 2009 23:31:30 GMT".to_string());
assert_eq!(utc.rfc822z().to_string(), "Fri, 13 Feb 2009 23:31:30 -0000".to_string());
assert_eq!(utc.rfc3339().to_string(), "2009-02-13T23:31:30Z".to_string());
}
#[test]
fn test_timespec_eq_ord() {
let a = &Timespec::new(-2, 1);
let b = &Timespec::new(-1, 2);
let c = &Timespec::new(1, 2);
let d = &Timespec::new(2, 1);
let e = &Timespec::new(2, 1);
assert!(d.eq(e));
assert!(c.ne(e));
assert!(a.lt(b));
assert!(b.lt(c));
assert!(c.lt(d));
assert!(a.le(b));
assert!(b.le(c));
assert!(c.le(d));
assert!(d.le(e));
assert!(e.le(d));
assert!(b.ge(a));
assert!(c.ge(b));
assert!(d.ge(c));
assert!(e.ge(d));
assert!(d.ge(e));
assert!(b.gt(a));
assert!(c.gt(b));
assert!(d.gt(c));
}
#[test]
fn test_timespec_hash() {
use std::hash::{Hash, Hasher};
let c = &Timespec::new(3, 2);
let d = &Timespec::new(2, 1);
let e = &Timespec::new(2, 1);
let mut hasher = ::std::hash::SipHasher::new();
let d_hash:u64 = {
d.hash(&mut hasher);
hasher.finish()
};
hasher = ::std::hash::SipHasher::new();
let e_hash:u64 = {
e.hash(&mut hasher);
hasher.finish()
};
hasher = ::std::hash::SipHasher::new();
let c_hash:u64 = {
c.hash(&mut hasher);
hasher.finish()
};
assert_eq!(d_hash, e_hash);
assert!(c_hash != e_hash);
}
#[test]
fn test_timespec_add() {
let a = Timespec::new(1, 2);
let b = Duration::seconds(2) + Duration::nanoseconds(3);
let c = a + b;
assert_eq!(c.sec, 3);
assert_eq!(c.nsec, 5);
let p = Timespec::new(1, super::NSEC_PER_SEC - 2);
let q = Duration::seconds(2) + Duration::nanoseconds(2);
let r = p + q;
assert_eq!(r.sec, 4);
assert_eq!(r.nsec, 0);
let u = Timespec::new(1, super::NSEC_PER_SEC - 2);
let v = Duration::seconds(2) + Duration::nanoseconds(3);
let w = u + v;
assert_eq!(w.sec, 4);
assert_eq!(w.nsec, 1);
let k = Timespec::new(1, 0);
let l = Duration::nanoseconds(-1);
let m = k + l;
assert_eq!(m.sec, 0);
assert_eq!(m.nsec, 999_999_999);
}
#[test]
fn test_timespec_sub() {
let a = Timespec::new(2, 3);
let b = Timespec::new(1, 2);
let c = a - b;
assert_eq!(c.num_nanoseconds(), Some(super::NSEC_PER_SEC as i64 + 1));
let p = Timespec::new(2, 0);
let q = Timespec::new(1, 2);
let r = p - q;
assert_eq!(r.num_nanoseconds(), Some(super::NSEC_PER_SEC as i64 - 2));
let u = Timespec::new(1, 2);
let v = Timespec::new(2, 3);
let w = u - v;
assert_eq!(w.num_nanoseconds(), Some(-super::NSEC_PER_SEC as i64 - 1));
}
#[test]
fn test_time_sub() {
let a = ::now();
let b = at(a.to_timespec() + Duration::seconds(5));
let c = b - a;
assert_eq!(c.num_nanoseconds(), Some(super::NSEC_PER_SEC as i64 * 5));
}
#[test]
fn test_steadytime_sub() {
let a = SteadyTime::now();
let b = a + Duration::seconds(1);
assert_eq!(b - a, Duration::seconds(1));
assert_eq!(a - b, Duration::seconds(-1));
}
}<|fim▁end|> |
/// Returns the specified time in UTC
pub fn at_utc(clock: Timespec) -> Tm {
let Timespec { sec, nsec } = clock; |
<|file_name|>agent.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
'''
Created on Dec 24, 2010
@author: ivan
'''
import random<|fim▁hole|>all_agents = """
Mozilla/5.0 (X11; U; Linux x86_64; en-US; rv:1.9.1.3) Gecko/20090913 Firefox/3.5.3
Mozilla/5.0 (Windows; U; Windows NT 6.1; en; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.1.3) Gecko/20090824 Firefox/3.5.3 (.NET CLR 3.5.30729)
Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.1) Gecko/20090718 Firefox/3.5.1
Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.1 (KHTML, like Gecko) Chrome/4.0.219.6 Safari/532.1
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; InfoPath.2)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; SLCC1; .NET CLR 2.0.50727; .NET CLR 1.1.4322; .NET CLR 3.5.30729; .NET CLR 3.0.30729)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.2; Win64; x64; Trident/4.0)
Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1; Trident/4.0; SV1; .NET CLR 2.0.50727; InfoPath.2)Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)
Mozilla/4.0 (compatible; MSIE 6.1; Windows XP)
"""
def get_ranmom_agent():
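    """Return one of the user-agent strings above with every digit replaced
    by a random one (a cheap way to vary browser version numbers)."""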
    agents = all_agents
    for i in xrange(10):
        # accumulate replacements on `agents` itself; replacing on all_agents
        # each pass would discard all but the final digit substitution
        agents = agents.replace(str(i), str(random.randint(0, 9)))
    return agents.splitlines()[random.randint(1, 10)]<|fim▁end|>
<|file_name|>OpenSurfaceTypeListener.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | energymodels.OpenSurfaceTypeListener |
<|file_name|>AnnotationParamResourceMetaFactoryImpl.java<|end_file_name|><|fim▁begin|>package io.github.varvelworld.var.ioc.core.annotation.meta.factory;
import io.github.varvelworld.var.ioc.core.annotation.Resource;
import io.github.varvelworld.var.ioc.core.meta.ParamResourceMeta;
import io.github.varvelworld.var.ioc.core.meta.factory.ParamResourceMetaFactory;
import java.lang.reflect.Parameter;
/**
 * Builds the {@link ParamResourceMeta} for a parameter annotated with
 * {@link Resource}: the annotation value is used as the resource id, falling
 * back to the parameter's own name when it is present in the class file
 * (i.e. the code was compiled with {@code -parameters}).
 *
 * Created by luzhonghao on 2016/12/4.
 */
public class AnnotationParamResourceMetaFactoryImpl implements ParamResourceMetaFactory {
private final ParamResourceMeta paramResourceMeta;
public AnnotationParamResourceMetaFactoryImpl(Parameter parameter) {
this(parameter, parameter.getAnnotation(Resource.class));
}
public AnnotationParamResourceMetaFactoryImpl(Parameter parameter, Resource annotation) {
String id = annotation.value();
if(id.isEmpty()) {
if(parameter.isNamePresent()){
id = parameter.getName();<|fim▁hole|> else {
throw new RuntimeException("id is empty");
}
}
this.paramResourceMeta = new ParamResourceMeta(id);
}
@Override
public ParamResourceMeta paramResourceMeta() {
return paramResourceMeta;
}
}<|fim▁end|> | } |
<|file_name|>fabric.fontweight.enum.ts<|end_file_name|><|fim▁begin|>import { Enum } from '../utility/enum'
Enum.register(FontWeight, "FontWeight", { jsStringPrefix: 'ms-fontWeight-' });
export enum FontWeight {
unspecified,
light,
semilight,
regular,<|fim▁hole|><|fim▁end|> | semiBold
} |
<|file_name|>testyacc.py<|end_file_name|><|fim▁begin|># testyacc.py
import unittest
try:
import StringIO
except ImportError:
import io as StringIO
import sys
import os
sys.path.insert(0,"..")
sys.tracebacklimit = 0
import ply.yacc
def check_expected(result,expected):
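    """Compare captured output with the expected text: "WARNING: "/"ERROR: "
    prefixes are stripped from the result, and each result line only has to
    end with its expected line, so absolute file paths are ignored."""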
resultlines = []
for line in result.splitlines():
if line.startswith("WARNING: "):
line = line[9:]
elif line.startswith("ERROR: "):
line = line[7:]
resultlines.append(line)
expectedlines = expected.splitlines()
if len(resultlines) != len(expectedlines):
return False
for rline,eline in zip(resultlines,expectedlines):
if not rline.endswith(eline):
return False
return True
def run_import(module):
code = "import "+module
exec(code)
del sys.modules[module]
# Tests related to errors and warnings when building parsers
class YaccErrorWarningTests(unittest.TestCase):
def setUp(self):
sys.stderr = StringIO.StringIO()
sys.stdout = StringIO.StringIO()
try:
os.remove("parsetab.py")
os.remove("parsetab.pyc")
except OSError:
pass
def tearDown(self):
sys.stderr = sys.__stderr__
sys.stdout = sys.__stdout__
def test_yacc_badargs(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badargs")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_badargs.py:23: Rule 'p_statement_assign' has too many arguments\n"
"yacc_badargs.py:27: Rule 'p_statement_expr' requires an argument\n"
))
def test_yacc_badid(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badid")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_badid.py:32: Illegal name 'bad&rule' in rule 'statement'\n"
"yacc_badid.py:36: Illegal rule name 'bad&rule'\n"
))
def test_yacc_badprec(self):
try:
run_import("yacc_badprec")
except ply.yacc.YaccError:
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"precedence must be a list or tuple\n"
))
def test_yacc_badprec2(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badprec2")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"Bad precedence table\n"
))
def test_yacc_badprec3(self):
run_import("yacc_badprec3")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"Precedence already specified for terminal 'MINUS'\n"
"Generating LALR tables\n"
))<|fim▁hole|> self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badrule")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_badrule.py:24: Syntax error. Expected ':'\n"
"yacc_badrule.py:28: Syntax error in rule 'statement'\n"
"yacc_badrule.py:33: Syntax error. Expected ':'\n"
"yacc_badrule.py:42: Syntax error. Expected ':'\n"
))
def test_yacc_badtok(self):
try:
run_import("yacc_badtok")
except ply.yacc.YaccError:
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"tokens must be a list or tuple\n"))
def test_yacc_dup(self):
run_import("yacc_dup")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_dup.py:27: Function p_statement redefined. Previously defined on line 23\n"
"Token 'EQUALS' defined, but not used\n"
"There is 1 unused token\n"
"Generating LALR tables\n"
))
def test_yacc_error1(self):
try:
run_import("yacc_error1")
except ply.yacc.YaccError:
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_error1.py:61: p_error() requires 1 argument\n"))
def test_yacc_error2(self):
try:
run_import("yacc_error2")
except ply.yacc.YaccError:
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_error2.py:61: p_error() requires 1 argument\n"))
def test_yacc_error3(self):
try:
run_import("yacc_error3")
except ply.yacc.YaccError:
e = sys.exc_info()[1]
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"'p_error' defined, but is not a function or method\n"))
def test_yacc_error4(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_error4")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_error4.py:62: Illegal rule name 'error'. Already defined as a token\n"
))
def test_yacc_inf(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_inf")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"Token 'NUMBER' defined, but not used\n"
"There is 1 unused token\n"
"Infinite recursion detected for symbol 'statement'\n"
"Infinite recursion detected for symbol 'expression'\n"
))
def test_yacc_literal(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_literal")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_literal.py:36: Literal token '**' in rule 'expression' may only be a single character\n"
))
def test_yacc_misplaced(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_misplaced")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_misplaced.py:32: Misplaced '|'\n"
))
def test_yacc_missing1(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_missing1")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_missing1.py:24: Symbol 'location' used, but not defined as a token or a rule\n"
))
def test_yacc_nested(self):
run_import("yacc_nested")
result = sys.stdout.getvalue()
self.assert_(check_expected(result,
"A\n"
"A\n"
"A\n",
))
def test_yacc_nodoc(self):
run_import("yacc_nodoc")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_nodoc.py:27: No documentation string specified in function 'p_statement_expr' (ignored)\n"
"Generating LALR tables\n"
))
def test_yacc_noerror(self):
run_import("yacc_noerror")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"no p_error() function is defined\n"
"Generating LALR tables\n"
))
def test_yacc_nop(self):
run_import("yacc_nop")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_nop.py:27: Possible grammar rule 'statement_expr' defined without p_ prefix\n"
"Generating LALR tables\n"
))
def test_yacc_notfunc(self):
run_import("yacc_notfunc")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"'p_statement_assign' not defined as a function\n"
"Token 'EQUALS' defined, but not used\n"
"There is 1 unused token\n"
"Generating LALR tables\n"
))
def test_yacc_notok(self):
try:
run_import("yacc_notok")
except ply.yacc.YaccError:
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"No token list is defined\n"))
def test_yacc_rr(self):
run_import("yacc_rr")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"Generating LALR tables\n"
"1 reduce/reduce conflict\n"
"reduce/reduce conflict in state 15 resolved using rule (statement -> NAME EQUALS NUMBER)\n"
"rejected rule (expression -> NUMBER)\n"
))
def test_yacc_simple(self):
run_import("yacc_simple")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"Generating LALR tables\n"
))
def test_yacc_sr(self):
run_import("yacc_sr")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"Generating LALR tables\n"
"20 shift/reduce conflicts\n"
))
def test_yacc_term1(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_term1")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_term1.py:24: Illegal rule name 'NUMBER'. Already defined as a token\n"
))
def test_yacc_unused(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_unused")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_unused.py:62: Symbol 'COMMA' used, but not defined as a token or a rule\n"
"Symbol 'COMMA' is unreachable\n"
"Symbol 'exprlist' is unreachable\n"
))
def test_yacc_unused_rule(self):
run_import("yacc_unused_rule")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_unused_rule.py:62: Rule 'integer' defined, but not used\n"
"There is 1 unused rule\n"
"Symbol 'integer' is unreachable\n"
"Generating LALR tables\n"
))
def test_yacc_uprec(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_uprec")
result = sys.stderr.getvalue()
        print(repr(result))
self.assert_(check_expected(result,
"yacc_uprec.py:37: Nothing known about the precedence of 'UMINUS'\n"
))
def test_yacc_uprec2(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_uprec2")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"yacc_uprec2.py:37: Syntax error. Nothing follows %prec\n"
))
def test_yacc_prec1(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_prec1")
result = sys.stderr.getvalue()
self.assert_(check_expected(result,
"Precedence rule 'left' defined for unknown symbol '+'\n"
"Precedence rule 'left' defined for unknown symbol '*'\n"
"Precedence rule 'left' defined for unknown symbol '-'\n"
"Precedence rule 'left' defined for unknown symbol '/'\n"
))
unittest.main()<|fim▁end|> |
def test_yacc_badrule(self): |
<|file_name|>test_loading.py<|end_file_name|><|fim▁begin|>from os.path import dirname
import sys
from django.test import TestCase
from django.conf import settings
from django.test.utils import override_settings
import oscar
from oscar.core.loading import (
get_model, AppNotFoundError, get_classes, get_class, ClassNotFoundError)
from oscar.test.factories import create_product, WishListFactory, UserFactory
from tests import temporary_python_path
class TestClassLoading(TestCase):
"""
Oscar's class loading utilities
"""
def test_load_oscar_classes_correctly(self):
Product, Category = get_classes('catalogue.models', ('Product', 'Category'))
self.assertEqual('oscar.apps.catalogue.models', Product.__module__)
self.assertEqual('oscar.apps.catalogue.models', Category.__module__)
def test_load_oscar_class_correctly(self):
Product = get_class('catalogue.models', 'Product')
self.assertEqual('oscar.apps.catalogue.models', Product.__module__)
def test_load_oscar_class_from_dashboard_subapp(self):
ReportForm = get_class('dashboard.reports.forms', 'ReportForm')
self.assertEqual('oscar.apps.dashboard.reports.forms', ReportForm.__module__)
def test_raise_exception_when_bad_appname_used(self):
with self.assertRaises(AppNotFoundError):
get_classes('fridge.models', ('Product', 'Category'))
def test_raise_exception_when_bad_classname_used(self):
with self.assertRaises(ClassNotFoundError):
get_class('catalogue.models', 'Monkey')
def test_raise_importerror_if_app_raises_importerror(self):
"""
This tests that Oscar doesn't fall back to using the Oscar catalogue
app if the overriding app throws an ImportError.
"""
apps = list(settings.INSTALLED_APPS)
apps[apps.index('oscar.apps.catalogue')] = 'tests._site.import_error_app.catalogue'
with override_settings(INSTALLED_APPS=apps):
with self.assertRaises(ImportError):
get_class('catalogue.app', 'CatalogueApplication')
class ClassLoadingWithLocalOverrideTests(TestCase):
def setUp(self):
self.installed_apps = list(settings.INSTALLED_APPS)
self.installed_apps[self.installed_apps.index('oscar.apps.shipping')] = 'tests._site.shipping'
def test_loading_class_defined_in_local_module(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free,) = get_classes('shipping.methods', ('Free',))
self.assertEqual('tests._site.shipping.methods', Free.__module__)
def test_loading_class_which_is_not_defined_in_local_module(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(FixedPrice,) = get_classes('shipping.methods', ('FixedPrice',))
self.assertEqual('oscar.apps.shipping.methods', FixedPrice.__module__)
def test_loading_class_from_module_not_defined_in_local_app(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Repository,) = get_classes('shipping.repository', ('Repository',))
self.assertEqual('oscar.apps.shipping.repository', Repository.__module__)
def test_loading_classes_defined_in_both_local_and_oscar_modules(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free, FixedPrice) = get_classes('shipping.methods', ('Free', 'FixedPrice'))
self.assertEqual('tests._site.shipping.methods', Free.__module__)
self.assertEqual('oscar.apps.shipping.methods', FixedPrice.__module__)
def test_loading_classes_with_root_app(self):
import tests._site.shipping
path = dirname(dirname(tests._site.shipping.__file__))
with temporary_python_path([path]):
self.installed_apps[
self.installed_apps.index('tests._site.shipping')] = 'shipping'
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free,) = get_classes('shipping.methods', ('Free',))
self.assertEqual('shipping.methods', Free.__module__)
def test_overriding_view_is_possible_without_overriding_app(self):
from oscar.apps.customer.app import application, CustomerApplication
# If test fails, it's helpful to know if it's caused by order of
# execution
self.assertEqual(CustomerApplication().summary_view.__module__,
'tests._site.apps.customer.views')
self.assertEqual(application.summary_view.__module__,
'tests._site.apps.customer.views')
class ClassLoadingWithLocalOverrideWithMultipleSegmentsTests(TestCase):
def setUp(self):
self.installed_apps = list(settings.INSTALLED_APPS)
self.installed_apps[self.installed_apps.index('oscar.apps.shipping')] = 'tests._site.apps.shipping'
def test_loading_class_defined_in_local_module(self):
with override_settings(INSTALLED_APPS=self.installed_apps):
(Free,) = get_classes('shipping.methods', ('Free',))
self.assertEqual('tests._site.apps.shipping.methods', Free.__module__)
class TestGetCoreAppsFunction(TestCase):
"""
oscar.get_core_apps function
"""
def test_returns_core_apps_when_no_overrides_specified(self):
apps = oscar.get_core_apps()
self.assertEqual(oscar.OSCAR_CORE_APPS, apps)
def test_uses_non_dashboard_override_when_specified(self):
apps = oscar.get_core_apps(overrides=['apps.shipping'])
self.assertTrue('apps.shipping' in apps)
self.assertTrue('oscar.apps.shipping' not in apps)
def test_uses_dashboard_override_when_specified(self):
apps = oscar.get_core_apps(overrides=['apps.dashboard.catalogue'])
self.assertTrue('apps.dashboard.catalogue' in apps)
self.assertTrue('oscar.apps.dashboard.catalogue' not in apps)
self.assertTrue('oscar.apps.catalogue' in apps)
class TestOverridingCoreApps(TestCase):
def test_means_the_overriding_model_is_registered_first(self):
klass = get_model('partner', 'StockRecord')
self.assertEqual(
'tests._site.apps.partner.models', klass.__module__)
class TestAppLabelsForModels(TestCase):
def test_all_oscar_models_have_app_labels(self):
from django.apps import apps
models = apps.get_models()
missing = []
for model in models:
# Ignore non-Oscar models
if 'oscar' not in repr(model):
continue
# Don't know how to get the actual model's Meta class. But if
# the parent doesn't have a Meta class, it's doesn't have an
# base in Oscar anyway and is not intended to be overridden
abstract_model = model.__base__
meta_class = getattr(abstract_model, 'Meta', None)
if meta_class is None:
continue
if not hasattr(meta_class, 'app_label'):
missing.append(model)
if missing:
self.fail("Those models don't have an app_label set: %s" % missing)
class TestDynamicLoadingOn3rdPartyApps(TestCase):
core_app_prefix = 'thirdparty_package.apps'
def setUp(self):
self.installed_apps = list(settings.INSTALLED_APPS)
sys.path.append('./tests/_site/')
def tearDown(self):
sys.path.remove('./tests/_site/')
def test_load_core_3rd_party_class_correctly(self):
self.installed_apps.append('thirdparty_package.apps.myapp')
with override_settings(INSTALLED_APPS=self.installed_apps):
Cow, Goat = get_classes('myapp.models', ('Cow', 'Goat'), self.core_app_prefix)
self.assertEqual('thirdparty_package.apps.myapp.models', Cow.__module__)
self.assertEqual('thirdparty_package.apps.myapp.models', Goat.__module__)
def test_load_overriden_3rd_party_class_correctly(self):
self.installed_apps.append('apps.myapp')
with override_settings(INSTALLED_APPS=self.installed_apps):
Cow, Goat = get_classes('myapp.models', ('Cow', 'Goat'), self.core_app_prefix)
self.assertEqual('thirdparty_package.apps.myapp.models', Cow.__module__)
self.assertEqual('apps.myapp.models', Goat.__module__)
class TestMovedClasses(TestCase):
def setUp(self):<|fim▁hole|>
def test_load_formset_old_destination(self):
BaseBasketLineFormSet = get_class('basket.forms', 'BaseBasketLineFormSet')
self.assertEqual('oscar.apps.basket.formsets', BaseBasketLineFormSet.__module__)
StockRecordFormSet = get_class('dashboard.catalogue.forms', 'StockRecordFormSet')
self.assertEqual('oscar.apps.dashboard.catalogue.formsets', StockRecordFormSet.__module__)
OrderedProductFormSet = get_class('dashboard.promotions.forms', 'OrderedProductFormSet')
OrderedProductForm = get_class('dashboard.promotions.forms', 'OrderedProductForm')
# Since OrderedProductFormSet created with metaclass, it has __module__
# attribute pointing to the Django module. Thus, we test if formset was
# loaded correctly by initiating class instance and checking its forms.
self.assertTrue(isinstance(OrderedProductFormSet().forms[0], OrderedProductForm))
LineFormset = get_class('wishlists.forms', 'LineFormset')
WishListLineForm = get_class('wishlists.forms', 'WishListLineForm')
self.assertTrue(isinstance(LineFormset(instance=self.wishlist).forms[0], WishListLineForm))
def test_load_formset_new_destination(self):
BaseBasketLineFormSet = get_class('basket.formsets', 'BaseBasketLineFormSet')
self.assertEqual('oscar.apps.basket.formsets', BaseBasketLineFormSet.__module__)
StockRecordFormSet = get_class('dashboard.catalogue.formsets', 'StockRecordFormSet')
self.assertEqual('oscar.apps.dashboard.catalogue.formsets', StockRecordFormSet.__module__)
OrderedProductFormSet = get_class('dashboard.promotions.formsets', 'OrderedProductFormSet')
OrderedProductForm = get_class('dashboard.promotions.forms', 'OrderedProductForm')
self.assertTrue(isinstance(OrderedProductFormSet().forms[0], OrderedProductForm))
LineFormset = get_class('wishlists.formsets', 'LineFormset')
WishListLineForm = get_class('wishlists.forms', 'WishListLineForm')
self.assertTrue(isinstance(LineFormset(instance=self.wishlist).forms[0], WishListLineForm))
def test_load_formsets_mixed_destination(self):
BaseBasketLineFormSet, BasketLineForm = get_classes('basket.forms', ('BaseBasketLineFormSet', 'BasketLineForm'))
self.assertEqual('oscar.apps.basket.formsets', BaseBasketLineFormSet.__module__)
self.assertEqual('oscar.apps.basket.forms', BasketLineForm.__module__)
StockRecordForm, StockRecordFormSet = get_classes(
'dashboard.catalogue.forms', ('StockRecordForm', 'StockRecordFormSet')
)
self.assertEqual('oscar.apps.dashboard.catalogue.forms', StockRecordForm.__module__)
OrderedProductForm, OrderedProductFormSet = get_classes(
'dashboard.promotions.forms', ('OrderedProductForm', 'OrderedProductFormSet')
)
self.assertEqual('oscar.apps.dashboard.promotions.forms', OrderedProductForm.__module__)
self.assertTrue(isinstance(OrderedProductFormSet().forms[0], OrderedProductForm))
LineFormset, WishListLineForm = get_classes('wishlists.forms', ('LineFormset', 'WishListLineForm'))
self.assertEqual('oscar.apps.wishlists.forms', WishListLineForm.__module__)
self.assertTrue(isinstance(LineFormset(instance=self.wishlist).forms[0], WishListLineForm))<|fim▁end|> | user = UserFactory()
product = create_product()
self.wishlist = WishListFactory(owner=user)
self.wishlist.add(product) |
<|file_name|>unboxed-closures-extern-fn.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Checks that extern fn pointers implement the full range of Fn traits.
#![feature(unboxed_closures)]
use std::ops::{Fn,FnMut,FnOnce};
fn square(x: int) -> int { x * x }
fn call_it<F:Fn(int)->int>(f: &F, x: int) -> int {<|fim▁hole|> f(x)
}
fn call_it_mut<F:FnMut(int)->int>(f: &mut F, x: int) -> int {
f(x)
}
fn call_it_once<F:FnOnce(int)->int>(f: F, x: int) -> int {
f(x)
}
fn main() {
let x = call_it(&square, 22);
let y = call_it_mut(&mut square, 22);
let z = call_it_once(square, 22);
assert_eq!(x, square(22));
assert_eq!(y, square(22));
assert_eq!(z, square(22));
}<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# This file is part of Py6S.
#
# Copyright 2012 Robin Wilson and contributors listed in the CONTRIBUTORS file.
#
# Py6S is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Py6S is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Py6S. If not, see <http://www.gnu.org/licenses/>.
import os
from setuptools import setup
PROJECT_ROOT = os.path.dirname(__file__)
def read_file(filepath, root=PROJECT_ROOT):
"""
Return the contents of the specified `filepath`.
* `root` is the base path and it defaults to the `PROJECT_ROOT` directory.
* `filepath` should be a relative path, starting from `root`.
"""
with open(os.path.join(root, filepath)) as fd:
text = fd.read()
return text
LONG_DESCRIPTION = read_file("README.rst")
SHORT_DESCRIPTION = "A wrapper for the 6S Radiative Transfer Model to make it easy to run simulations with a variety of input parameters, and to produce outputs in an easily processable form."
REQS = [
'pysolar==0.6',
'matplotlib',
'scipy'
]
setup(
name = "Py6S",
packages = ['Py6S', 'Py6S.Params', 'Py6S.SixSHelpers'],
install_requires = REQS,
version = "1.6.2",
author = "Robin Wilson",
author_email = "[email protected]",
description = SHORT_DESCRIPTION,<|fim▁hole|> classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering :: Atmospheric Science",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Software Development :: Libraries :: Python Modules",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 2"
],
)<|fim▁end|> | license = "GPL",
test_suite = 'nose.collector',
url = "http://py6s.rtwilson.com/",
long_description = LONG_DESCRIPTION, |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
if __name__ == "__main__":
app_init()
MANAGER.run()<|fim▁end|> | #!/usr/bin/env python
import dockci.commands
from dockci.server import APP, app_init, MANAGER |
<|file_name|>QBoxSetSlots.cpp<|end_file_name|><|fim▁begin|>/*
Qt5xHb - Bindings libraries for Harbour/xHarbour and Qt Framework 5
Copyright (C) 2020 Marcos Antonio Gambeta <marcosgambeta AT outlook DOT com>
*/
/*
DO NOT EDIT THIS FILE - the content was created using a source code generator
*/
#include "QBoxSetSlots.h"
QBoxSetSlots::QBoxSetSlots( QObject *parent ) : QObject( parent )
{
}
QBoxSetSlots::~QBoxSetSlots()
{
}
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::brushChanged()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "brushChanged()" );
if( cb )
<|fim▁hole|>
hb_itemRelease( psender );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::cleared()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "cleared()" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
hb_itemRelease( psender );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::clicked()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "clicked()" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
hb_itemRelease( psender );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::doubleClicked()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "doubleClicked()" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
hb_itemRelease( psender );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::hovered( bool status )
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "hovered(bool)" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
PHB_ITEM pstatus = hb_itemPutL( NULL, status );
hb_vmEvalBlockV( cb, 2, psender, pstatus );
hb_itemRelease( psender );
hb_itemRelease( pstatus );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::penChanged()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "penChanged()" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
hb_itemRelease( psender );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::pressed()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "pressed()" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
hb_itemRelease( psender );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::released()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "released()" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
hb_itemRelease( psender );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::valueChanged( int index )
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "valueChanged(int)" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
PHB_ITEM pindex = hb_itemPutNI( NULL, index );
hb_vmEvalBlockV( cb, 2, psender, pindex );
hb_itemRelease( psender );
hb_itemRelease( pindex );
}
}
#endif
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
void QBoxSetSlots::valuesChanged()
{
QObject *object = qobject_cast<QObject *>(sender());
PHB_ITEM cb = Qt5xHb::Signals_return_codeblock( object, "valuesChanged()" );
if( cb )
{
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
hb_itemRelease( psender );
}
}
#endif
void QBoxSetSlots_connect_signal( const QString & signal, const QString & slot )
{
#if (QT_VERSION >= QT_VERSION_CHECK(5,7,0))
QBoxSet * obj = (QBoxSet *) Qt5xHb::itemGetPtrStackSelfItem();
if( obj )
{
QBoxSetSlots * s = QCoreApplication::instance()->findChild<QBoxSetSlots *>();
if( s == NULL )
{
s = new QBoxSetSlots();
s->moveToThread( QCoreApplication::instance()->thread() );
s->setParent( QCoreApplication::instance() );
}
hb_retl( Qt5xHb::Signals_connection_disconnection( s, signal, slot ) );
}
else
{
hb_retl( false );
}
#else
hb_retl( false );
#endif
}<|fim▁end|> | {
PHB_ITEM psender = Qt5xHb::Signals_return_qobject( (QObject *) object, "QBOXSET" );
hb_vmEvalBlockV( cb, 1, psender );
|
<|file_name|>group_by.rs<|end_file_name|><|fim▁begin|>/*
Copyright ⓒ 2015 grabbag contributors.
Licensed under the MIT license (see LICENSE or
<http://opensource.org/licenses/MIT>) or the Apache License, Version 2.0
(see LICENSE or <http://www.apache.org/licenses/LICENSE-2.0>), at your option. All
files in the project carrying such notice may not be copied, modified,
or distributed except according to those terms.
*/
use std::cell::RefCell;
use std::cmp::min;<|fim▁hole|>Sequence of iterators containing successive elements of the subject which have the same group according to a group function.
*/
pub trait GroupByIterator<E>: Iterator<Item=E> + Sized {
/**
Creates an iterator that yields a succession of `(group, sub_iterator)` pairs. Each `sub_iterator` yields successive elements of the input iterator that have the same `group`. An element's `group` is computed using the `f` closure.
For example:
```
# extern crate grabbag;
# use grabbag::iter::GroupByIterator;
# fn main () {
let v = vec![7usize, 5, 6, 2, 4, 7, 6, 1, 6, 4, 4, 6, 0, 0, 8, 8, 6, 1, 8, 7];
let is_even = |n: &usize| if *n & 1 == 0 { true } else { false };
for (even, mut ns) in v.into_iter().group_by(is_even) {
println!("{}...", if even { "Evens" } else { "Odds" });
for n in ns {
println!(" - {}", n);
}
}
# }
```
*/
fn group_by<GroupFn: FnMut(&E) -> G, G>(self, group: GroupFn) -> GroupBy<Self, GroupFn, E, G> {
GroupBy {
state: Rc::new(RefCell::new(GroupByShared {
iter: self,
group: group,
last_group: None,
push_back: None,
})),
}
}
}
impl<E, It> GroupByIterator<E> for It where It: Iterator<Item=E> {}
// **NOTE**: Although `Clone` *can* be implemented for this, you *should not* do so, since you cannot clone the underlying `GroupByItemsShared` value.
pub struct GroupBy<It, GroupFn, E, G> {
state: Rc<RefCell<GroupByShared<It, GroupFn, E, G>>>,
}
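// State shared by the master iterator and its sub-iterators: `push_back`
// holds an element a sub-iterator pulled that belongs to the *next* group,
// and `last_group` lets the master skip the rest of a group the caller
// abandoned early.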
pub struct GroupByShared<It, GroupFn, E, G> {
iter: It,
group: GroupFn,
last_group: Option<G>,
push_back: Option<(G, E)>,
}
impl<It, GroupFn, E, G> Iterator for GroupBy<It, GroupFn, E, G> where GroupFn: FnMut(&E) -> G, It: Iterator<Item=E>, G: Clone + Eq {
type Item = (G, Group<It, GroupFn, E, G>);
fn next(&mut self) -> Option<(G, Group<It, GroupFn, E, G>)> {
// First, get a mutable borrow to the underlying state.
let mut state = self.state.borrow_mut();
let state = &mut *state;
// If we have a push-back element, immediately construct a sub iterator.
if let Some((g, e)) = replace(&mut state.push_back, None) {
return Some((
g.clone(),
Group {
state: self.state.clone(),
group_value: g,
first_value: Some(e),
}
));
}
// Otherwise, try to pull the next element from the input iterator.
// Complication: a sub iterator *might* stop *before* the group is exhausted. We need to account for this (so that users can easily skip groups).
let (e, g) = match replace(&mut state.last_group, None) {
None => {
// We don't *have* a previous group, just grab the next element.
let e = match state.iter.next() {
Some(e) => e,
None => return None
};
let g = (state.group)(&e);
(e, g)
},
Some(last_g) => {
// We have to keep pulling elements until the group changes.
let mut e;
let mut g;
loop {
e = match state.iter.next() {
Some(e) => e,
None => return None
};
g = (state.group)(&e);
if g != last_g { break; }
}
(e, g)
}
};
// Remember this group.
state.last_group = Some(g.clone());
// Construct the sub-iterator and yield it.
Some((
g.clone(),
Group {
state: self.state.clone(),
group_value: g,
first_value: Some(e),
}
))
}
fn size_hint(&self) -> (usize, Option<usize>) {
let (lb, mub) = self.state.borrow().iter.size_hint();
let lb = min(lb, 1);
(lb, mub)
}
}
// **NOTE**: Although `Clone` *can* be implemented for this, you *should not* do so, since you cannot clone the underlying `GroupByShared` value.
pub struct Group<It, GroupFn, E, G> {
state: Rc<RefCell<GroupByShared<It, GroupFn, E, G>>>,
group_value: G,
first_value: Option<E>,
}
impl<It, GroupFn, E, G> Iterator for Group<It, GroupFn, E, G> where GroupFn: FnMut(&E) -> G, It: Iterator<Item=E>, G: Eq {
type Item = E;
fn next(&mut self) -> Option<E> {
// If we have a first_value, consume and yield that.
if let Some(e) = replace(&mut self.first_value, None) {
return Some(e)
}
// Get a mutable borrow to the shared state.
let mut state = self.state.borrow_mut();
let state = &mut *state;
let e = match state.iter.next() {
Some(e) => e,
None => return None
};
let g = (state.group)(&e);
match g == self.group_value {
true => {
// Still in the same group.
Some(e)
},
false => {
// Different group! We need to push (g, e) back into the master iterator.
state.push_back = Some((g, e));
None
}
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let state = self.state.borrow();
let lb = if self.first_value.is_some() { 1 } else { 0 };
let (_, mub) = state.iter.size_hint();
(lb, mub)
}
}
#[test]
fn test_group_by() {
{
let v = vec![0usize, 1, 2, 3, 5, 4, 6, 8, 7];
let mut oi = v.into_iter().group_by(|&e| e & 1);
let (g, mut ii) = oi.next().unwrap();
assert_eq!(g, 0);
assert_eq!(ii.next(), Some(0));
assert_eq!(ii.next(), None);
let (g, mut ii) = oi.next().unwrap();
assert_eq!(g, 1);
assert_eq!(ii.next(), Some(1));
assert_eq!(ii.next(), None);
let (g, mut ii) = oi.next().unwrap();
assert_eq!(g, 0);
assert_eq!(ii.next(), Some(2));
assert_eq!(ii.next(), None);
let (g, mut ii) = oi.next().unwrap();
assert_eq!(g, 1);
assert_eq!(ii.next(), Some(3));
assert_eq!(ii.next(), Some(5));
assert_eq!(ii.next(), None);
let (g, mut ii) = oi.next().unwrap();
assert_eq!(g, 0);
assert_eq!(ii.next(), Some(4));
assert_eq!(ii.next(), Some(6));
assert_eq!(ii.next(), Some(8));
assert_eq!(ii.next(), None);
let (g, mut ii) = oi.next().unwrap();
assert_eq!(g, 1);
assert_eq!(ii.next(), Some(7));
assert_eq!(ii.next(), None);
assert!(oi.next().is_none());
}
{
let v = vec![0usize, 1, 2, 3, 5, 4, 6, 8, 7];
let mut oi = v.into_iter().group_by(|&e| e & 1);
let (g, _) = oi.next().unwrap();
assert_eq!(g, 0);
let (g, _) = oi.next().unwrap();
assert_eq!(g, 1);
let (g, _) = oi.next().unwrap();
assert_eq!(g, 0);
let (g, _) = oi.next().unwrap();
assert_eq!(g, 1);
let (g, mut ii) = oi.next().unwrap();
assert_eq!(g, 0);
assert_eq!(ii.next(), Some(4));
assert_eq!(ii.next(), Some(6));
let (g, _) = oi.next().unwrap();
assert_eq!(g, 1);
assert!(oi.next().is_none());
}
}<|fim▁end|> | use std::mem::replace;
use std::rc::Rc;
/** |
<|file_name|>bin2015.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>extern crate rust2018;
use rust2018::do_thing;
fn main() {
do_thing();
}<|fim▁end|> | |
<|file_name|>rico_change.js<|end_file_name|><|fim▁begin|>/**
*
* Copyright 2005 Sabre Airline Solutions
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
**/
//-------------------- rico.js
var Rico = {
Version: '1.1.2',
prototypeVersion: parseFloat(Prototype.Version.split(".")[0] + "." + Prototype.Version.split(".")[1])
}
if((typeof Prototype=='undefined') || Rico.prototypeVersion < 1.3)
throw("Rico requires the Prototype JavaScript framework >= 1.3");
Rico.ArrayExtensions = new Array();
if (Object.prototype.extend) {
Rico.ArrayExtensions[ Rico.ArrayExtensions.length ] = Object.prototype.extend;
}else{
Object.prototype.extend = function(object) {
return Object.extend.apply(this, [this, object]);
}
Rico.ArrayExtensions[ Rico.ArrayExtensions.length ] = Object.prototype.extend;
}
if (Array.prototype.push) {
Rico.ArrayExtensions[ Rico.ArrayExtensions.length ] = Array.prototype.push;
}
if (!Array.prototype.remove) {
Array.prototype.remove = function(dx) {
if( isNaN(dx) || dx > this.length )
<|fim▁hole|> this.length-=1;
};
Rico.ArrayExtensions[ Rico.ArrayExtensions.length ] = Array.prototype.remove;
}
if (!Array.prototype.removeItem) {
Array.prototype.removeItem = function(item) {
for ( var i = 0 ; i < this.length ; i++ )
if ( this[i] == item ) {
this.remove(i);
break;
}
};
Rico.ArrayExtensions[ Rico.ArrayExtensions.length ] = Array.prototype.removeItem;
}
if (!Array.prototype.indices) {
Array.prototype.indices = function() {
var indexArray = new Array();
for ( index in this ) {
var ignoreThis = false;
for ( var i = 0 ; i < Rico.ArrayExtensions.length ; i++ ) {
if ( this[index] == Rico.ArrayExtensions[i] ) {
ignoreThis = true;
break;
}
}
if ( !ignoreThis )
indexArray[ indexArray.length ] = index;
}
return indexArray;
}
Rico.ArrayExtensions[ Rico.ArrayExtensions.length ] = Array.prototype.indices;
}
// Create the loadXML method and xml getter for Mozilla
if ( window.DOMParser &&
window.XMLSerializer &&
window.Node && Node.prototype && Node.prototype.__defineGetter__ ) {
if (!Document.prototype.loadXML) {
Document.prototype.loadXML = function (s) {
var doc2 = (new DOMParser()).parseFromString(s, "text/xml");
while (this.hasChildNodes())
this.removeChild(this.lastChild);
for (var i = 0; i < doc2.childNodes.length; i++) {
this.appendChild(this.importNode(doc2.childNodes[i], true));
}
};
}
Document.prototype.__defineGetter__( "xml",
function () {
return (new XMLSerializer()).serializeToString(this);
}
);
}
document.getElementsByTagAndClassName = function(tagName, className) {
if ( tagName == null )
tagName = '*';
var children = document.getElementsByTagName(tagName) || document.all;
var elements = new Array();
if ( className == null )
return children;
for (var i = 0; i < children.length; i++) {
var child = children[i];
var classNames = child.className.split(' ');
for (var j = 0; j < classNames.length; j++) {
if (classNames[j] == className) {
elements.push(child);
break;
}
}
}
return elements;
}
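// Usage sketch for the helper above (illustrative; the tag and class names are
// hypothetical):
/*
var panels = document.getElementsByTagAndClassName('div', 'panel');
for (var i = 0; i < panels.length; i++)
panels[i].style.border = '1px solid #ccc';
*/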
//-------------------- ricoAccordion.js
Rico.Accordion = Class.create();
Rico.Accordion.prototype = {
initialize: function(container, options) {
this.container = $(container);
this.lastExpandedTab = null;
this.accordionTabs = new Array();
this.setOptions(options);
this._attachBehaviors();
if(!container) return;
this.container.style.borderBottom = '1px solid ' + this.options.borderColor;
// validate onLoadShowTab
if (this.options.onLoadShowTab >= this.accordionTabs.length)
this.options.onLoadShowTab = 0;
// set the initial visual state...
for ( var i=0 ; i < this.accordionTabs.length ; i++ )
{
if (i != this.options.onLoadShowTab){
this.accordionTabs[i].collapse();
this.accordionTabs[i].content.style.display = 'none';
}
}
this.lastExpandedTab = this.accordionTabs[this.options.onLoadShowTab];
if (this.options.panelHeight == 'auto'){
var tabToCheck = (this.options.onLoadShowTab === 0) ? 1 : 0;
var titleBarSize = parseInt(RicoUtil.getElementsComputedStyle(this.accordionTabs[tabToCheck].titleBar, 'height'));
if (isNaN(titleBarSize))
titleBarSize = this.accordionTabs[tabToCheck].titleBar.offsetHeight;
var totalTitleBarSize = this.accordionTabs.length * titleBarSize;
var parentHeight = parseInt(RicoUtil.getElementsComputedStyle(this.container.parentNode, 'height'));
if (isNaN(parentHeight))
parentHeight = this.container.parentNode.offsetHeight;
this.options.panelHeight = parentHeight - totalTitleBarSize-2;
}
this.lastExpandedTab.content.style.height = this.options.panelHeight + "px";
this.lastExpandedTab.showExpanded();
this.lastExpandedTab.titleBar.style.fontWeight = this.options.expandedFontWeight;
},
setOptions: function(options) {
this.options = {
expandedBg : '#545985',
hoverBg : '#63699c',
collapsedBg : '#6b79a5',
expandedTextColor : '#ffffff',
expandedFontWeight : 'bold',
hoverTextColor : '#ffffff',
collapsedTextColor : '#ced7ef',
collapsedFontWeight : 'normal',
borderColor : '#ffffff',
panelHeight : 200,
onHideTab : null,
onShowTab : null,
onLoadShowTab : 0
}
Object.extend(this.options, options || {});
},
showTabByIndex: function( anIndex, animate ) {
var doAnimate = arguments.length == 1 ? true : animate;
this.showTab( this.accordionTabs[anIndex], doAnimate );
},
showTab: function( accordionTab, animate ) {
if ( this.lastExpandedTab == accordionTab )
return;
var doAnimate = arguments.length == 1 ? true : animate;
if ( this.options.onHideTab )
this.options.onHideTab(this.lastExpandedTab);
this.lastExpandedTab.showCollapsed();
var accordion = this;
var lastExpandedTab = this.lastExpandedTab;
this.lastExpandedTab.content.style.height = (this.options.panelHeight - 1) + 'px';
accordionTab.content.style.display = '';
accordionTab.titleBar.style.fontWeight = this.options.expandedFontWeight;
if ( doAnimate ) {
new Rico.Effect.AccordionSize( this.lastExpandedTab.content,
accordionTab.content,
1,
this.options.panelHeight,
100, 10,
{ complete: function() {accordion.showTabDone(lastExpandedTab)} } );
this.lastExpandedTab = accordionTab;
}
else {
this.lastExpandedTab.content.style.height = "1px";
accordionTab.content.style.height = this.options.panelHeight + "px";
this.lastExpandedTab = accordionTab;
this.showTabDone(lastExpandedTab);
}
},
showTabDone: function(collapsedTab) {
collapsedTab.content.style.display = 'none';
this.lastExpandedTab.showExpanded();
if ( this.options.onShowTab )
this.options.onShowTab(this.lastExpandedTab);
},
_attachBehaviors: function() {
var panels = this._getDirectChildrenByTag(this.container, 'DIV');
for ( var i = 0 ; i < panels.length ; i++ ) {
var tabChildren = this._getDirectChildrenByTag(panels[i],'DIV');
if ( tabChildren.length != 2 )
continue; // unexpected
var tabTitleBar = tabChildren[0];
var tabContentBox = tabChildren[1];
this.accordionTabs.push( new Rico.Accordion.Tab(this,tabTitleBar,tabContentBox) );
}
},
_getDirectChildrenByTag: function(e, tagName) {
var kids = new Array();
var allKids = e.childNodes;
for( var i = 0 ; i < allKids.length ; i++ )
if ( allKids[i] && allKids[i].tagName && allKids[i].tagName == tagName )
kids.push(allKids[i]);
return kids;
}
};
Rico.Accordion.Tab = Class.create();
Rico.Accordion.Tab.prototype = {
initialize: function(accordion, titleBar, content) {
this.accordion = accordion;
this.titleBar = titleBar;
this.content = content;
this._attachBehaviors();
},
collapse: function() {
this.showCollapsed();
this.content.style.height = "1px";
},
showCollapsed: function() {
this.expanded = false;
this.titleBar.style.backgroundColor = this.accordion.options.collapsedBg;
this.titleBar.style.color = this.accordion.options.collapsedTextColor;
this.titleBar.style.fontWeight = this.accordion.options.collapsedFontWeight;
this.content.style.overflow = "hidden";
},
showExpanded: function() {
this.expanded = true;
this.titleBar.style.backgroundColor = this.accordion.options.expandedBg;
this.titleBar.style.color = this.accordion.options.expandedTextColor;
this.content.style.overflow = "auto";
},
titleBarClicked: function(e) {
if ( this.accordion.lastExpandedTab == this )
return;
this.accordion.showTab(this);
},
hover: function(e) {
this.titleBar.style.backgroundColor = this.accordion.options.hoverBg;
this.titleBar.style.color = this.accordion.options.hoverTextColor;
},
unhover: function(e) {
if ( this.expanded ) {
this.titleBar.style.backgroundColor = this.accordion.options.expandedBg;
this.titleBar.style.color = this.accordion.options.expandedTextColor;
}
else {
this.titleBar.style.backgroundColor = this.accordion.options.collapsedBg;
this.titleBar.style.color = this.accordion.options.collapsedTextColor;
}
},
_attachBehaviors: function() {
this.content.style.border = "1px solid " + this.accordion.options.borderColor;
this.content.style.borderTopWidth = "0px";
this.content.style.borderBottomWidth = "0px";
this.content.style.margin = "0px";
this.titleBar.onclick = this.titleBarClicked.bindAsEventListener(this);
this.titleBar.onmouseover = this.hover.bindAsEventListener(this);
this.titleBar.onmouseout = this.unhover.bindAsEventListener(this);
}
};
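// Usage sketch: Rico.Accordion expects a container whose direct DIV children
// each hold exactly two DIVs -- a title bar followed by a content panel
// (see _attachBehaviors above). Illustrative only; the id, text and option
// values are hypothetical.
/*
<div id="acc">
<div>
<div>Tab One</div>
<div>First panel content</div>
</div>
<div>
<div>Tab Two</div>
<div>Second panel content</div>
</div>
</div>
<script>
new Rico.Accordion('acc', { panelHeight: 150, onLoadShowTab: 0 });
</script>
*/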
//-------------------- ricoAjaxEngine.js
Rico.AjaxEngine = Class.create();
Rico.AjaxEngine.prototype = {
initialize: function() {
this.ajaxElements = new Array();
this.ajaxObjects = new Array();
this.requestURLS = new Array();
this.options = {};
},
registerAjaxElement: function( anId, anElement ) {
if ( !anElement )
anElement = $(anId);
this.ajaxElements[anId] = anElement;
},
registerAjaxObject: function( anId, anObject ) {
this.ajaxObjects[anId] = anObject;
},
registerRequest: function (requestLogicalName, requestURL) {
this.requestURLS[requestLogicalName] = requestURL;
},
sendRequest: function(requestName, options) {
// Allow for backwards compatibility
if ( arguments.length >= 2 )
if (typeof arguments[1] == 'string')
options = {parameters: this._createQueryString(arguments, 1)};
this.sendRequestWithData(requestName, null, options);
},
sendRequestWithData: function(requestName, xmlDocument, options) {
var requestURL = this.requestURLS[requestName];
if ( requestURL == null )
return;
// Allow for backwards compatibility
if ( arguments.length >= 3 )
if (typeof arguments[2] == 'string')
options.parameters = this._createQueryString(arguments, 2);
new Ajax.Request(requestURL, this._requestOptions(options,xmlDocument));
},
sendRequestAndUpdate: function(requestName,container,options) {
// Allow for backwards compatibility
if ( arguments.length >= 3 )
if (typeof arguments[2] == 'string')
options.parameters = this._createQueryString(arguments, 2);
this.sendRequestWithDataAndUpdate(requestName, null, container, options);
},
sendRequestWithDataAndUpdate: function(requestName,xmlDocument,container,options) {
var requestURL = this.requestURLS[requestName];
if ( requestURL == null )
return;
// Allow for backwards compatibility
if ( arguments.length >= 4 )
if (typeof arguments[3] == 'string')
options.parameters = this._createQueryString(arguments, 3);
var updaterOptions = this._requestOptions(options,xmlDocument);
new Ajax.Updater(container, requestURL, updaterOptions);
},
// Private -- not part of intended engine API --------------------------------------------------------------------
_requestOptions: function(options,xmlDoc) {
var requestHeaders = ['X-Rico-Version', Rico.Version ];
var sendMethod = 'post';
if ( xmlDoc == null )
if (Rico.prototypeVersion < 1.4)
requestHeaders.push( 'Content-type', 'text/xml' );
else
sendMethod = 'get';
if (!options) options = {};
if (!options._RicoOptionsProcessed){
// Check and keep any user onComplete functions
if (options.onComplete)
options.onRicoComplete = options.onComplete;
// Fix onComplete
if (options.overrideOnComplete)
options.onComplete = options.overrideOnComplete;
else
options.onComplete = this._onRequestComplete.bind(this);
options._RicoOptionsProcessed = true;
}
// Set the default options and extend with any user options
this.options = {
requestHeaders: requestHeaders,
parameters: options.parameters,
postBody: xmlDoc,
method: sendMethod,
onComplete: options.onComplete
};
// Set any user options:
Object.extend(this.options, options);
return this.options;
},
_createQueryString: function( theArgs, offset ) {
var queryString = ""
for ( var i = offset ; i < theArgs.length ; i++ ) {
if ( i != offset )
queryString += "&";
var anArg = theArgs[i];
if ( anArg.name != undefined && anArg.value != undefined ) {
queryString += anArg.name + "=" + escape(anArg.value);
}
else {
var ePos = anArg.indexOf('=');
var argName = anArg.substring( 0, ePos );
var argValue = anArg.substring( ePos + 1 );
queryString += argName + "=" + escape(argValue);
}
}
return queryString;
},
_onRequestComplete : function(request) {
if(!request)
return;
// User can set an onFailure option - which will be called by prototype
if (request.status != 200)
return;
var response = request.responseXML.getElementsByTagName("ajax-response");
if (response == null || response.length != 1)
return;
this._processAjaxResponse( response[0].childNodes );
// Check if user has set a onComplete function
var onRicoComplete = this.options.onRicoComplete;
if (onRicoComplete != null)
onRicoComplete();
},
_processAjaxResponse: function( xmlResponseElements ) {
for ( var i = 0 ; i < xmlResponseElements.length ; i++ ) {
var responseElement = xmlResponseElements[i];
// only process nodes of type element.....
if ( responseElement.nodeType != 1 )
continue;
var responseType = responseElement.getAttribute("type");
var responseId = responseElement.getAttribute("id");
if ( responseType == "object" )
this._processAjaxObjectUpdate( this.ajaxObjects[ responseId ], responseElement );
else if ( responseType == "element" )
this._processAjaxElementUpdate( this.ajaxElements[ responseId ], responseElement );
else
alert('unrecognized AjaxResponse type : ' + responseType );
}
},
_processAjaxObjectUpdate: function( ajaxObject, responseElement ) {
ajaxObject.ajaxUpdate( responseElement );
},
_processAjaxElementUpdate: function( ajaxElement, responseElement ) {
ajaxElement.innerHTML = RicoUtil.getContentAsString(responseElement);
}
}
var ajaxEngine = new Rico.AjaxEngine();
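// Usage sketch for the shared engine instance above (illustrative; the URL,
// request name and element id are hypothetical). The server is expected to
// answer with an <ajax-response> document whose child elements carry a
// type ("element" or "object") and the id registered here.
/*
ajaxEngine.registerRequest('getCustomer', '/customer.do');
ajaxEngine.registerAjaxElement('customerDetail'); // DOM id to receive markup
ajaxEngine.sendRequest('getCustomer', 'id=42'); // string args become query parameters
*/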
//-------------------- ricoColor.js
Rico.Color = Class.create();
Rico.Color.prototype = {
initialize: function(red, green, blue) {
this.rgb = { r: red, g : green, b : blue };
},
setRed: function(r) {
this.rgb.r = r;
},
setGreen: function(g) {
this.rgb.g = g;
},
setBlue: function(b) {
this.rgb.b = b;
},
setHue: function(h) {
// get an HSB model, and set the new hue...
var hsb = this.asHSB();
hsb.h = h;
// convert back to RGB...
this.rgb = Rico.Color.HSBtoRGB(hsb.h, hsb.s, hsb.b);
},
setSaturation: function(s) {
// get an HSB model, and set the new saturation...
var hsb = this.asHSB();
hsb.s = s;
// convert back to RGB and set values...
this.rgb = Rico.Color.HSBtoRGB(hsb.h, hsb.s, hsb.b);
},
setBrightness: function(b) {
// get an HSB model, and set the new brightness...
var hsb = this.asHSB();
hsb.b = b;
// convert back to RGB and set values...
this.rgb = Rico.Color.HSBtoRGB( hsb.h, hsb.s, hsb.b );
},
darken: function(percent) {
var hsb = this.asHSB();
this.rgb = Rico.Color.HSBtoRGB(hsb.h, hsb.s, Math.max(hsb.b - percent,0));
},
brighten: function(percent) {
var hsb = this.asHSB();
this.rgb = Rico.Color.HSBtoRGB(hsb.h, hsb.s, Math.min(hsb.b + percent,1));
},
blend: function(other) {
this.rgb.r = Math.floor((this.rgb.r + other.rgb.r)/2);
this.rgb.g = Math.floor((this.rgb.g + other.rgb.g)/2);
this.rgb.b = Math.floor((this.rgb.b + other.rgb.b)/2);
},
isBright: function() {
return this.asHSB().b > 0.5;
},
isDark: function() {
return ! this.isBright();
},
asRGB: function() {
return "rgb(" + this.rgb.r + "," + this.rgb.g + "," + this.rgb.b + ")";
},
asHex: function() {
return "#" + this.rgb.r.toColorPart() + this.rgb.g.toColorPart() + this.rgb.b.toColorPart();
},
asHSB: function() {
return Rico.Color.RGBtoHSB(this.rgb.r, this.rgb.g, this.rgb.b);
},
toString: function() {
return this.asHex();
}
};
Rico.Color.createFromHex = function(hexCode) {
if(hexCode.length==4) {
var shortHexCode = hexCode;
var hexCode = '#';
for(var i=1;i<4;i++) hexCode += (shortHexCode.charAt(i) +
shortHexCode.charAt(i));
}
if ( hexCode.indexOf('#') == 0 )
hexCode = hexCode.substring(1);
var red = hexCode.substring(0,2);
var green = hexCode.substring(2,4);
var blue = hexCode.substring(4,6);
return new Rico.Color( parseInt(red,16), parseInt(green,16), parseInt(blue,16) );
}
/**
* Factory method for creating a color from the background of
* an HTML element.
*/
Rico.Color.createColorFromBackground = function(elem) {
var actualColor = RicoUtil.getElementsComputedStyle($(elem), "backgroundColor", "background-color");
if ( actualColor == "transparent" && elem.parentNode )
return Rico.Color.createColorFromBackground(elem.parentNode);
if ( actualColor == null )
return new Rico.Color(255,255,255);
if ( actualColor.indexOf("rgb(") == 0 ) {
var colors = actualColor.substring(4, actualColor.length - 1 );
var colorArray = colors.split(",");
return new Rico.Color( parseInt( colorArray[0] ),
parseInt( colorArray[1] ),
parseInt( colorArray[2] ) );
}
else if ( actualColor.indexOf("#") == 0 ) {
return Rico.Color.createFromHex(actualColor);
}
else
return new Rico.Color(255,255,255);
}
Rico.Color.HSBtoRGB = function(hue, saturation, brightness) {
var red = 0;
var green = 0;
var blue = 0;
if (saturation == 0) {
red = parseInt(brightness * 255.0 + 0.5);
green = red;
blue = red;
}
else {
var h = (hue - Math.floor(hue)) * 6.0;
var f = h - Math.floor(h);
var p = brightness * (1.0 - saturation);
var q = brightness * (1.0 - saturation * f);
var t = brightness * (1.0 - (saturation * (1.0 - f)));
switch (parseInt(h)) {
case 0:
red = (brightness * 255.0 + 0.5);
green = (t * 255.0 + 0.5);
blue = (p * 255.0 + 0.5);
break;
case 1:
red = (q * 255.0 + 0.5);
green = (brightness * 255.0 + 0.5);
blue = (p * 255.0 + 0.5);
break;
case 2:
red = (p * 255.0 + 0.5);
green = (brightness * 255.0 + 0.5);
blue = (t * 255.0 + 0.5);
break;
case 3:
red = (p * 255.0 + 0.5);
green = (q * 255.0 + 0.5);
blue = (brightness * 255.0 + 0.5);
break;
case 4:
red = (t * 255.0 + 0.5);
green = (p * 255.0 + 0.5);
blue = (brightness * 255.0 + 0.5);
break;
case 5:
red = (brightness * 255.0 + 0.5);
green = (p * 255.0 + 0.5);
blue = (q * 255.0 + 0.5);
break;
}
}
return { r : parseInt(red), g : parseInt(green) , b : parseInt(blue) };
}
Rico.Color.RGBtoHSB = function(r, g, b) {
var hue;
var saturation;
var brightness;
var cmax = (r > g) ? r : g;
if (b > cmax)
cmax = b;
var cmin = (r < g) ? r : g;
if (b < cmin)
cmin = b;
brightness = cmax / 255.0;
if (cmax != 0)
saturation = (cmax - cmin)/cmax;
else
saturation = 0;
if (saturation == 0)
hue = 0;
else {
var redc = (cmax - r)/(cmax - cmin);
var greenc = (cmax - g)/(cmax - cmin);
var bluec = (cmax - b)/(cmax - cmin);
if (r == cmax)
hue = bluec - greenc;
else if (g == cmax)
hue = 2.0 + redc - bluec;
else
hue = 4.0 + greenc - redc;
hue = hue / 6.0;
if (hue < 0)
hue = hue + 1.0;
}
return { h : hue, s : saturation, b : brightness };
}
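// Worked sketch of the color helpers above (illustrative values):
/*
var c = Rico.Color.createFromHex('#6b79a5');
c.darken(0.1); // drop HSB brightness by 0.1, clamped at 0
var hex = c.asHex(); // '#rrggbb' string after the adjustment
var dark = c.isDark(); // true once brightness <= 0.5
*/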
//-------------------- ricoCorner.js
Rico.Corner = {
round: function(e, options) {
var e = $(e);
this._setOptions(options);
var color = this.options.color;
if ( this.options.color == "fromElement" )
color = this._background(e);
var bgColor = this.options.bgColor;
if ( this.options.bgColor == "fromParent" )
bgColor = this._background(e.offsetParent);
this._roundCornersImpl(e, color, bgColor);
},
_roundCornersImpl: function(e, color, bgColor) {
if(this.options.border)
this._renderBorder(e,bgColor);
if(this._isTopRounded())
this._roundTopCorners(e,color,bgColor);
if(this._isBottomRounded())
this._roundBottomCorners(e,color,bgColor);
},
_renderBorder: function(el,bgColor) {
var borderValue = "1px solid " + this._borderColor(bgColor);
var borderL = "border-left: " + borderValue;
var borderR = "border-right: " + borderValue;
var style = "style='" + borderL + ";" + borderR + "'";
el.innerHTML = "<div " + style + ">" + el.innerHTML + "</div>"
},
_roundTopCorners: function(el, color, bgColor) {
var corner = this._createCorner(bgColor);
for(var i=0 ; i < this.options.numSlices ; i++ )
corner.appendChild(this._createCornerSlice(color,bgColor,i,"top"));
el.style.paddingTop = 0;
el.insertBefore(corner,el.firstChild);
},
_roundBottomCorners: function(el, color, bgColor) {
var corner = this._createCorner(bgColor);
for(var i=(this.options.numSlices-1) ; i >= 0 ; i-- )
corner.appendChild(this._createCornerSlice(color,bgColor,i,"bottom"));
el.style.paddingBottom = 0;
el.appendChild(corner);
},
_createCorner: function(bgColor) {
var corner = document.createElement("div");
corner.style.backgroundColor = (this._isTransparent() ? "transparent" : bgColor);
return corner;
},
_createCornerSlice: function(color,bgColor, n, position) {
var slice = document.createElement("span");
var inStyle = slice.style;
inStyle.backgroundColor = color;
inStyle.display = "block";
inStyle.height = "1px";
inStyle.overflow = "hidden";
inStyle.fontSize = "1px";
var borderColor = this._borderColor(color,bgColor);
if ( this.options.border && n == 0 ) {
inStyle.borderTopStyle = "solid";
inStyle.borderTopWidth = "1px";
inStyle.borderLeftWidth = "0px";
inStyle.borderRightWidth = "0px";
inStyle.borderBottomWidth = "0px";
inStyle.height = "0px"; // assumes css compliant box model
inStyle.borderColor = borderColor;
}
else if(borderColor) {
inStyle.borderColor = borderColor;
inStyle.borderStyle = "solid";
inStyle.borderWidth = "0px 1px";
}
if ( !this.options.compact && (n == (this.options.numSlices-1)) )
inStyle.height = "2px";
this._setMargin(slice, n, position);
this._setBorder(slice, n, position);
return slice;
},
_setOptions: function(options) {
this.options = {
corners : "all",
color : "fromElement",
bgColor : "fromParent",
blend : true,
border : false,
compact : false
}
Object.extend(this.options, options || {});
this.options.numSlices = this.options.compact ? 2 : 4;
if ( this._isTransparent() )
this.options.blend = false;
},
_whichSideTop: function() {
if ( this._hasString(this.options.corners, "all", "top") )
return "";
if ( this.options.corners.indexOf("tl") >= 0 && this.options.corners.indexOf("tr") >= 0 )
return "";
if (this.options.corners.indexOf("tl") >= 0)
return "left";
else if (this.options.corners.indexOf("tr") >= 0)
return "right";
return "";
},
_whichSideBottom: function() {
if ( this._hasString(this.options.corners, "all", "bottom") )
return "";
if ( this.options.corners.indexOf("bl")>=0 && this.options.corners.indexOf("br")>=0 )
return "";
if(this.options.corners.indexOf("bl") >=0)
return "left";
else if(this.options.corners.indexOf("br")>=0)
return "right";
return "";
},
_borderColor : function(color,bgColor) {
if ( color == "transparent" )
return bgColor;
else if ( this.options.border )
return this.options.border;
else if ( this.options.blend )
return this._blend( bgColor, color );
else
return "";
},
_setMargin: function(el, n, corners) {
var marginSize = this._marginSize(n);
var whichSide = corners == "top" ? this._whichSideTop() : this._whichSideBottom();
if ( whichSide == "left" ) {
el.style.marginLeft = marginSize + "px"; el.style.marginRight = "0px";
}
else if ( whichSide == "right" ) {
el.style.marginRight = marginSize + "px"; el.style.marginLeft = "0px";
}
else {
el.style.marginLeft = marginSize + "px"; el.style.marginRight = marginSize + "px";
}
},
_setBorder: function(el,n,corners) {
var borderSize = this._borderSize(n);
var whichSide = corners == "top" ? this._whichSideTop() : this._whichSideBottom();
if ( whichSide == "left" ) {
el.style.borderLeftWidth = borderSize + "px"; el.style.borderRightWidth = "0px";
}
else if ( whichSide == "right" ) {
el.style.borderRightWidth = borderSize + "px"; el.style.borderLeftWidth = "0px";
}
else {
el.style.borderLeftWidth = borderSize + "px"; el.style.borderRightWidth = borderSize + "px";
}
if (this.options.border != false) {
el.style.borderLeftWidth = borderSize + "px"; el.style.borderRightWidth = borderSize + "px";
}
},
_marginSize: function(n) {
if ( this._isTransparent() )
return 0;
var marginSizes = [ 5, 3, 2, 1 ];
var blendedMarginSizes = [ 3, 2, 1, 0 ];
var compactMarginSizes = [ 2, 1 ];
var smBlendedMarginSizes = [ 1, 0 ];
if ( this.options.compact && this.options.blend )
return smBlendedMarginSizes[n];
else if ( this.options.compact )
return compactMarginSizes[n];
else if ( this.options.blend )
return blendedMarginSizes[n];
else
return marginSizes[n];
},
_borderSize: function(n) {
var transparentBorderSizes = [ 5, 3, 2, 1 ];
var blendedBorderSizes = [ 2, 1, 1, 1 ];
var compactBorderSizes = [ 1, 0 ];
var actualBorderSizes = [ 0, 2, 0, 0 ];
if ( this.options.compact && (this.options.blend || this._isTransparent()) )
return 1;
else if ( this.options.compact )
return compactBorderSizes[n];
else if ( this.options.blend )
return blendedBorderSizes[n];
else if ( this.options.border )
return actualBorderSizes[n];
else if ( this._isTransparent() )
return transparentBorderSizes[n];
return 0;
},
_hasString: function(str) { for(var i=1 ; i<arguments.length ; i++) if (str.indexOf(arguments[i]) >= 0) return true; return false; },
_blend: function(c1, c2) { var cc1 = Rico.Color.createFromHex(c1); cc1.blend(Rico.Color.createFromHex(c2)); return cc1; },
_background: function(el) { try { return Rico.Color.createColorFromBackground(el).asHex(); } catch(err) { return "#ffffff"; } },
_isTransparent: function() { return this.options.color == "transparent"; },
_isTopRounded: function() { return this._hasString(this.options.corners, "all", "top", "tl", "tr"); },
_isBottomRounded: function() { return this._hasString(this.options.corners, "all", "bottom", "bl", "br"); },
_hasSingleTextChild: function(el) { return el.childNodes.length == 1 && el.childNodes[0].nodeType == 3; }
}
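// Usage sketch for Rico.Corner (illustrative; ids and option values are
// hypothetical):
/*
// Round only the top corners, blending the edge color into the parent background:
Rico.Corner.round('sidebarBox', { corners: 'top', blend: true });
// Compact, transparent variant rounding all four corners:
Rico.Corner.round($('notice'), { color: 'transparent', compact: true });
*/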
//-------------------- ricoDragAndDrop.js
Rico.DragAndDrop = Class.create();
Rico.DragAndDrop.prototype = {
initialize: function() {
this.dropZones = new Array();
this.draggables = new Array();
this.currentDragObjects = new Array();
this.dragElement = null;
this.lastSelectedDraggable = null;
this.currentDragObjectVisible = false;
this.interestedInMotionEvents = false;
this._mouseDown = this._mouseDownHandler.bindAsEventListener(this);
this._mouseMove = this._mouseMoveHandler.bindAsEventListener(this);
this._mouseUp = this._mouseUpHandler.bindAsEventListener(this);
},
registerDropZone: function(aDropZone) {
this.dropZones[ this.dropZones.length ] = aDropZone;
},
deregisterDropZone: function(aDropZone) {
var newDropZones = new Array();
var j = 0;
for ( var i = 0 ; i < this.dropZones.length ; i++ ) {
if ( this.dropZones[i] != aDropZone )
newDropZones[j++] = this.dropZones[i];
}
this.dropZones = newDropZones;
},
clearDropZones: function() {
this.dropZones = new Array();
},
registerDraggable: function( aDraggable ) {
this.draggables[ this.draggables.length ] = aDraggable;
this._addMouseDownHandler( aDraggable );
},
clearSelection: function() {
for ( var i = 0 ; i < this.currentDragObjects.length ; i++ )
this.currentDragObjects[i].deselect();
this.currentDragObjects = new Array();
this.lastSelectedDraggable = null;
},
hasSelection: function() {
return this.currentDragObjects.length > 0;
},
setStartDragFromElement: function( e, mouseDownElement ) {
this.origPos = RicoUtil.toDocumentPosition(mouseDownElement);
this.startx = e.screenX - this.origPos.x
this.starty = e.screenY - this.origPos.y
//this.startComponentX = e.layerX ? e.layerX : e.offsetX;
//this.startComponentY = e.layerY ? e.layerY : e.offsetY;
//this.adjustedForDraggableSize = false;
this.interestedInMotionEvents = this.hasSelection();
this._terminateEvent(e);
},
updateSelection: function( draggable, extendSelection ) {
if ( ! extendSelection )
this.clearSelection();
if ( draggable.isSelected() ) {
this.currentDragObjects.removeItem(draggable);
draggable.deselect();
if ( draggable == this.lastSelectedDraggable )
this.lastSelectedDraggable = null;
}
else {
this.currentDragObjects[ this.currentDragObjects.length ] = draggable;
draggable.select();
this.lastSelectedDraggable = draggable;
}
},
_mouseDownHandler: function(e) {
if ( arguments.length == 0 )
e = event;
// if not button 1 ignore it...
var nsEvent = e.which != undefined;
if ( (nsEvent && e.which != 1) || (!nsEvent && e.button != 1))
return;
var eventTarget = e.target ? e.target : e.srcElement;
var draggableObject = eventTarget.draggable;
var candidate = eventTarget;
while (draggableObject == null && candidate.parentNode) {
candidate = candidate.parentNode;
draggableObject = candidate.draggable;
}
if ( draggableObject == null )
return;
this.updateSelection( draggableObject, e.ctrlKey );
// clear the drop zones' position cache...
if ( this.hasSelection() )
for ( var i = 0 ; i < this.dropZones.length ; i++ )
this.dropZones[i].clearPositionCache();
this.setStartDragFromElement( e, draggableObject.getMouseDownHTMLElement() );
},
_mouseMoveHandler: function(e) {
var nsEvent = e.which != undefined;
if ( !this.interestedInMotionEvents ) {
//this._terminateEvent(e);
return;
}
if ( ! this.hasSelection() )
return;
if ( ! this.currentDragObjectVisible )
this._startDrag(e);
if ( !this.activatedDropZones )
this._activateRegisteredDropZones();
//if ( !this.adjustedForDraggableSize )
// this._adjustForDraggableSize(e);
this._updateDraggableLocation(e);
this._updateDropZonesHover(e);
this._terminateEvent(e);
},
_makeDraggableObjectVisible: function(e)
{
if ( !this.hasSelection() )
return;
var dragElement;
if ( this.currentDragObjects.length > 1 )
dragElement = this.currentDragObjects[0].getMultiObjectDragGUI(this.currentDragObjects);
else
dragElement = this.currentDragObjects[0].getSingleObjectDragGUI();
// go ahead and absolute position it...
if ( RicoUtil.getElementsComputedStyle(dragElement, "position") != "absolute" )
dragElement.style.position = "absolute";
// need to parent him into the document...
if ( dragElement.parentNode == null || dragElement.parentNode.nodeType == 11 )
document.body.appendChild(dragElement);
this.dragElement = dragElement;
this._updateDraggableLocation(e);
this.currentDragObjectVisible = true;
},
/**
_adjustForDraggableSize: function(e) {
var dragElementWidth = this.dragElement.offsetWidth;
var dragElementHeight = this.dragElement.offsetHeight;
if ( this.startComponentX > dragElementWidth )
this.startx -= this.startComponentX - dragElementWidth + 2;
if ( e.offsetY ) {
if ( this.startComponentY > dragElementHeight )
this.starty -= this.startComponentY - dragElementHeight + 2;
}
this.adjustedForDraggableSize = true;
},
**/
_leftOffset: function(e) {
return e.offsetX ? document.body.scrollLeft : 0
},
_topOffset: function(e) {
return e.offsetY ? document.body.scrollTop:0
},
_updateDraggableLocation: function(e) {
var dragObjectStyle = this.dragElement.style;
dragObjectStyle.left = (e.screenX + this._leftOffset(e) - this.startx) + "px"
dragObjectStyle.top = (e.screenY + this._topOffset(e) - this.starty) + "px";
},
_updateDropZonesHover: function(e) {
var n = this.dropZones.length;
for ( var i = 0 ; i < n ; i++ ) {
if ( ! this._mousePointInDropZone( e, this.dropZones[i] ) )
this.dropZones[i].hideHover();
}
for ( var i = 0 ; i < n ; i++ ) {
if ( this._mousePointInDropZone( e, this.dropZones[i] ) ) {
if ( this.dropZones[i].canAccept(this.currentDragObjects) )
this.dropZones[i].showHover();
}
}
},
_startDrag: function(e) {
for ( var i = 0 ; i < this.currentDragObjects.length ; i++ )
this.currentDragObjects[i].startDrag();
this._makeDraggableObjectVisible(e);
},
_mouseUpHandler: function(e) {
if ( ! this.hasSelection() )
return;
var nsEvent = e.which != undefined;
if ( (nsEvent && e.which != 1) || (!nsEvent && e.button != 1))
return;
this.interestedInMotionEvents = false;
if ( this.dragElement == null ) {
this._terminateEvent(e);
return;
}
if ( this._placeDraggableInDropZone(e) )
this._completeDropOperation(e);
else {
this._terminateEvent(e);
new Rico.Effect.Position( this.dragElement,
this.origPos.x,
this.origPos.y,
200,
20,
{ complete : this._doCancelDragProcessing.bind(this) } );
}
Event.stopObserving(document.body, "mousemove", this._mouseMove);
Event.stopObserving(document.body, "mouseup", this._mouseUp);
},
_retTrue: function () {
return true;
},
_completeDropOperation: function(e) {
if ( this.dragElement != this.currentDragObjects[0].getMouseDownHTMLElement() ) {
if ( this.dragElement.parentNode != null )
this.dragElement.parentNode.removeChild(this.dragElement);
}
this._deactivateRegisteredDropZones();
this._endDrag();
this.clearSelection();
this.dragElement = null;
this.currentDragObjectVisible = false;
this._terminateEvent(e);
},
_doCancelDragProcessing: function() {
this._cancelDrag();
if ( this.dragElement != this.currentDragObjects[0].getMouseDownHTMLElement() && this.dragElement)
if ( this.dragElement.parentNode != null )
this.dragElement.parentNode.removeChild(this.dragElement);
this._deactivateRegisteredDropZones();
this.dragElement = null;
this.currentDragObjectVisible = false;
},
_placeDraggableInDropZone: function(e) {
var foundDropZone = false;
var n = this.dropZones.length;
for ( var i = 0 ; i < n ; i++ ) {
if ( this._mousePointInDropZone( e, this.dropZones[i] ) ) {
if ( this.dropZones[i].canAccept(this.currentDragObjects) ) {
this.dropZones[i].hideHover();
this.dropZones[i].accept(this.currentDragObjects);
foundDropZone = true;
break;
}
}
}
return foundDropZone;
},
_cancelDrag: function() {
for ( var i = 0 ; i < this.currentDragObjects.length ; i++ )
this.currentDragObjects[i].cancelDrag();
},
_endDrag: function() {
for ( var i = 0 ; i < this.currentDragObjects.length ; i++ )
this.currentDragObjects[i].endDrag();
},
_mousePointInDropZone: function( e, dropZone ) {
var absoluteRect = dropZone.getAbsoluteRect();
return e.clientX > absoluteRect.left + this._leftOffset(e) &&
e.clientX < absoluteRect.right + this._leftOffset(e) &&
e.clientY > absoluteRect.top + this._topOffset(e) &&
e.clientY < absoluteRect.bottom + this._topOffset(e);
},
_addMouseDownHandler: function( aDraggable )
{
var htmlElement = aDraggable.getMouseDownHTMLElement();
if ( htmlElement != null ) {
htmlElement.draggable = aDraggable;
Event.observe(htmlElement , "mousedown", this._onmousedown.bindAsEventListener(this));
Event.observe(htmlElement, "mousedown", this._mouseDown);
}
},
_activateRegisteredDropZones: function() {
var n = this.dropZones.length;
for ( var i = 0 ; i < n ; i++ ) {
var dropZone = this.dropZones[i];
if ( dropZone.canAccept(this.currentDragObjects) )
dropZone.activate();
}
this.activatedDropZones = true;
},
_deactivateRegisteredDropZones: function() {
var n = this.dropZones.length;
for ( var i = 0 ; i < n ; i++ )
this.dropZones[i].deactivate();
this.activatedDropZones = false;
},
_onmousedown: function () {
Event.observe(document.body, "mousemove", this._mouseMove);
Event.observe(document.body, "mouseup", this._mouseUp);
},
_terminateEvent: function(e) {
if ( e.stopPropagation != undefined )
e.stopPropagation();
else if ( e.cancelBubble != undefined )
e.cancelBubble = true;
if ( e.preventDefault != undefined )
e.preventDefault();
else
e.returnValue = false;
},
initializeEventHandlers: function() {
if ( typeof document.implementation != "undefined" &&
document.implementation.hasFeature("HTML", "1.0") &&
document.implementation.hasFeature("Events", "2.0") &&
document.implementation.hasFeature("CSS", "2.0") ) {
document.addEventListener("mouseup", this._mouseUpHandler.bindAsEventListener(this), false);
document.addEventListener("mousemove", this._mouseMoveHandler.bindAsEventListener(this), false);
}
else {
document.attachEvent( "onmouseup", this._mouseUpHandler.bindAsEventListener(this) );
document.attachEvent( "onmousemove", this._mouseMoveHandler.bindAsEventListener(this) );
}
}
}
var dndMgr = new Rico.DragAndDrop();
dndMgr.initializeEventHandlers();
//-------------------- ricoDraggable.js
Rico.Draggable = Class.create();
Rico.Draggable.prototype = {
initialize: function( type, htmlElement ) {
this.type = type;
this.htmlElement = $(htmlElement);
this.selected = false;
},
/**
* Returns the HTML element that should have a mouse down event
* added to it in order to initiate a drag operation
*
**/
getMouseDownHTMLElement: function() {
return this.htmlElement;
},
select: function() {
this.selected = true;
if ( this.showingSelected )
return;
var htmlElement = this.getMouseDownHTMLElement();
var color = Rico.Color.createColorFromBackground(htmlElement);
color.isBright() ? color.darken(0.033) : color.brighten(0.033);
this.saveBackground = RicoUtil.getElementsComputedStyle(htmlElement, "backgroundColor", "background-color");
htmlElement.style.backgroundColor = color.asHex();
this.showingSelected = true;
},
deselect: function() {
this.selected = false;
if ( !this.showingSelected )
return;
var htmlElement = this.getMouseDownHTMLElement();
htmlElement.style.backgroundColor = this.saveBackground;
this.showingSelected = false;
},
isSelected: function() {
return this.selected;
},
startDrag: function() {
},
cancelDrag: function() {
},
endDrag: function() {
},
getSingleObjectDragGUI: function() {
return this.htmlElement;
},
getMultiObjectDragGUI: function( draggables ) {
return this.htmlElement;
},
getDroppedGUI: function() {
return this.htmlElement;
},
toString: function() {
return this.type + ":" + this.htmlElement + ":";
}
}
//-------------------- ricoDropzone.js
Rico.Dropzone = Class.create();
Rico.Dropzone.prototype = {
initialize: function( htmlElement ) {
this.htmlElement = $(htmlElement);
this.absoluteRect = null;
},
getHTMLElement: function() {
return this.htmlElement;
},
clearPositionCache: function() {
this.absoluteRect = null;
},
getAbsoluteRect: function() {
if ( this.absoluteRect == null ) {
var htmlElement = this.getHTMLElement();
var pos = RicoUtil.toViewportPosition(htmlElement);
this.absoluteRect = {
top: pos.y,
left: pos.x,
bottom: pos.y + htmlElement.offsetHeight,
right: pos.x + htmlElement.offsetWidth
};
}
return this.absoluteRect;
},
activate: function() {
var htmlElement = this.getHTMLElement();
if (htmlElement == null || this.showingActive)
return;
this.showingActive = true;
this.saveBackgroundColor = htmlElement.style.backgroundColor;
var fallbackColor = "#ffea84";
var currentColor = Rico.Color.createColorFromBackground(htmlElement);
if ( currentColor == null )
htmlElement.style.backgroundColor = fallbackColor;
else {
currentColor.isBright() ? currentColor.darken(0.2) : currentColor.brighten(0.2);
htmlElement.style.backgroundColor = currentColor.asHex();
}
},
deactivate: function() {
var htmlElement = this.getHTMLElement();
if (htmlElement == null || !this.showingActive)
return;
htmlElement.style.backgroundColor = this.saveBackgroundColor;
this.showingActive = false;
this.saveBackgroundColor = null;
},
showHover: function() {
var htmlElement = this.getHTMLElement();
if ( htmlElement == null || this.showingHover )
return;
this.saveBorderWidth = htmlElement.style.borderWidth;
this.saveBorderStyle = htmlElement.style.borderStyle;
this.saveBorderColor = htmlElement.style.borderColor;
this.showingHover = true;
htmlElement.style.borderWidth = "1px";
htmlElement.style.borderStyle = "solid";
//htmlElement.style.borderColor = "#ff9900";
htmlElement.style.borderColor = "#ffff00";
},
hideHover: function() {
var htmlElement = this.getHTMLElement();
if ( htmlElement == null || !this.showingHover )
return;
htmlElement.style.borderWidth = this.saveBorderWidth;
htmlElement.style.borderStyle = this.saveBorderStyle;
htmlElement.style.borderColor = this.saveBorderColor;
this.showingHover = false;
},
canAccept: function(draggableObjects) {
return true;
},
accept: function(draggableObjects) {
var htmlElement = this.getHTMLElement();
if ( htmlElement == null )
return;
var n = draggableObjects.length;
for ( var i = 0 ; i < n ; i++ )
{
var theGUI = draggableObjects[i].getDroppedGUI();
if ( RicoUtil.getElementsComputedStyle( theGUI, "position" ) == "absolute" )
{
theGUI.style.position = "static";
theGUI.style.top = "";
theGUI.style.top = "";
}
htmlElement.appendChild(theGUI);
}
}
}
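// Wiring sketch for the drag-and-drop manager (illustrative; the type name and
// element ids are hypothetical). Both classes register with the global dndMgr
// created above; real applications typically subclass Rico.Draggable and
// override Rico.Dropzone's canAccept/accept.
/*
dndMgr.registerDraggable(new Rico.Draggable('card', 'cardDiv'));
dndMgr.registerDropZone(new Rico.Dropzone('trayDiv'));
*/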
//-------------------- ricoEffects.js
Rico.Effect = {};
Rico.Effect.SizeAndPosition = Class.create();
Rico.Effect.SizeAndPosition.prototype = {
initialize: function(element, x, y, w, h, duration, steps, options) {
this.element = $(element);
this.x = x;
this.y = y;
this.w = w;
this.h = h;
this.duration = duration;
this.steps = steps;
this.options = arguments[7] || {};
this.sizeAndPosition();
},
sizeAndPosition: function() {
if (this.isFinished()) {
if(this.options.complete) this.options.complete(this);
return;
}
if (this.timer)
clearTimeout(this.timer);
var stepDuration = Math.round(this.duration/this.steps) ;
// Get original values: x,y = top left corner; w,h = width height
var currentX = this.element.offsetLeft;
var currentY = this.element.offsetTop;
var currentW = this.element.offsetWidth;
var currentH = this.element.offsetHeight;
// If values not set, or zero, we do not modify them, and take original as final as well
this.x = (this.x) ? this.x : currentX;
this.y = (this.y) ? this.y : currentY;
this.w = (this.w) ? this.w : currentW;
this.h = (this.h) ? this.h : currentH;
// how much do we need to modify our values for each step?
var difX = this.steps > 0 ? (this.x - currentX)/this.steps : 0;
var difY = this.steps > 0 ? (this.y - currentY)/this.steps : 0;
var difW = this.steps > 0 ? (this.w - currentW)/this.steps : 0;
var difH = this.steps > 0 ? (this.h - currentH)/this.steps : 0;
this.moveBy(difX, difY);
this.resizeBy(difW, difH);
this.duration -= stepDuration;
this.steps--;
this.timer = setTimeout(this.sizeAndPosition.bind(this), stepDuration);
},
isFinished: function() {
return this.steps <= 0;
},
moveBy: function( difX, difY ) {
var currentLeft = this.element.offsetLeft;
var currentTop = this.element.offsetTop;
var intDifX = parseInt(difX);
var intDifY = parseInt(difY);
var style = this.element.style;
if ( intDifX != 0 )
style.left = (currentLeft + intDifX) + "px";
if ( intDifY != 0 )
style.top = (currentTop + intDifY) + "px";
},
resizeBy: function( difW, difH ) {
var currentWidth = this.element.offsetWidth;
var currentHeight = this.element.offsetHeight;
var intDifW = parseInt(difW);
var intDifH = parseInt(difH);
var style = this.element.style;
if ( intDifW != 0 )
style.width = (currentWidth + intDifW) + "px";
if ( intDifH != 0 )
style.height = (currentHeight + intDifH) + "px";
}
}
Rico.Effect.Size = Class.create();
Rico.Effect.Size.prototype = {
initialize: function(element, w, h, duration, steps, options) {
new Rico.Effect.SizeAndPosition(element, null, null, w, h, duration, steps, options);
}
}
Rico.Effect.Position = Class.create();
Rico.Effect.Position.prototype = {
initialize: function(element, x, y, duration, steps, options) {
new Rico.Effect.SizeAndPosition(element, x, y, null, null, duration, steps, options);
}
}
Rico.Effect.Round = Class.create();
Rico.Effect.Round.prototype = {
initialize: function(tagName, className, options) {
var elements = document.getElementsByTagAndClassName(tagName,className);
for ( var i = 0 ; i < elements.length ; i++ )
Rico.Corner.round( elements[i], options );
}
};
Rico.Effect.FadeTo = Class.create();
Rico.Effect.FadeTo.prototype = {
initialize: function( element, opacity, duration, steps, options) {
this.element = $(element);
this.opacity = opacity;
this.duration = duration;
this.steps = steps;
this.options = arguments[4] || {};
this.fadeTo();
},
fadeTo: function() {
if (this.isFinished()) {
if(this.options.complete) this.options.complete(this);
return;
}
if (this.timer)
clearTimeout(this.timer);
var stepDuration = Math.round(this.duration/this.steps) ;
var currentOpacity = this.getElementOpacity();
var delta = this.steps > 0 ? (this.opacity - currentOpacity)/this.steps : 0;
this.changeOpacityBy(delta);
this.duration -= stepDuration;
this.steps--;
this.timer = setTimeout(this.fadeTo.bind(this), stepDuration);
},
changeOpacityBy: function(v) {
var currentOpacity = this.getElementOpacity();
var newOpacity = Math.max(0, Math.min(currentOpacity+v, 1));
this.element.ricoOpacity = newOpacity;
this.element.style.filter = "alpha(opacity:"+Math.round(newOpacity*100)+")";
this.element.style.opacity = newOpacity; /*//*/;
},
isFinished: function() {
return this.steps <= 0;
},
getElementOpacity: function() {
if ( this.element.ricoOpacity == undefined ) {
var opacity = RicoUtil.getElementsComputedStyle(this.element, 'opacity');
this.element.ricoOpacity = opacity != undefined ? opacity : 1.0;
}
return parseFloat(this.element.ricoOpacity);
}
}
Rico.Effect.AccordionSize = Class.create();
Rico.Effect.AccordionSize.prototype = {
initialize: function(e1, e2, start, end, duration, steps, options) {
this.e1 = $(e1);
this.e2 = $(e2);
this.start = start;
this.end = end;
this.duration = duration;
this.steps = steps;
this.options = arguments[6] || {};
this.accordionSize();
},
accordionSize: function() {
if (this.isFinished()) {
// just in case there are rounding errors or such...
this.e1.style.height = this.start + "px";
this.e2.style.height = this.end + "px";
if(this.options.complete)
this.options.complete(this);
return;
}
if (this.timer)
clearTimeout(this.timer);
var stepDuration = Math.round(this.duration/this.steps) ;
var diff = this.steps > 0 ? (parseInt(this.e1.offsetHeight) - this.start)/this.steps : 0;
this.resizeBy(diff);
this.duration -= stepDuration;
this.steps--;
this.timer = setTimeout(this.accordionSize.bind(this), stepDuration);
},
isFinished: function() {
return this.steps <= 0;
},
resizeBy: function(diff) {
var h1Height = this.e1.offsetHeight;
var h2Height = this.e2.offsetHeight;
var intDiff = parseInt(diff);
if ( diff != 0 ) {
this.e1.style.height = (h1Height - intDiff) + "px";
this.e2.style.height = (h2Height + intDiff) + "px";
}
}
};
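// Usage sketch for the effects above (illustrative; ids and timings are
// hypothetical). Durations are milliseconds, split across `steps` timer ticks,
// and position effects assume an absolutely positioned element:
/*
// Fade a notice to 20% opacity over ~500ms in 10 steps, then hide it:
new Rico.Effect.FadeTo('notice', 0.2, 500, 10,
{ complete: function() { $('notice').style.display = 'none'; } });
// Animate an element to 300x200 pixels at (40, 60):
new Rico.Effect.SizeAndPosition('panel', 40, 60, 300, 200, 400, 20);
*/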
//-------------------- ricoLiveGrid.js
// Rico.LiveGridMetaData -----------------------------------------------------
Rico.LiveGridMetaData = Class.create();
Rico.LiveGridMetaData.prototype = {
initialize: function( pageSize, totalRows, columnCount, options ) {
this.pageSize = pageSize;
this.totalRows = totalRows;
this.setOptions(options);
this.ArrowHeight = 16;
this.columnCount = columnCount;
},
setOptions: function(options) {
this.options = {
largeBufferSize : 7.0, // 7 pages
nearLimitFactor : 0.2 // 20% of buffer
};
Object.extend(this.options, options || {});
},
getPageSize: function() {
return this.pageSize;
},
getTotalRows: function() {
return this.totalRows;
},
setTotalRows: function(n) {
this.totalRows = n;
},
getLargeBufferSize: function() {
return parseInt(this.options.largeBufferSize * this.pageSize);
},
getLimitTolerance: function() {
return parseInt(this.getLargeBufferSize() * this.options.nearLimitFactor);
}
};
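// Worked numbers for the defaults above (illustrative): with 10 visible rows
// per page, largeBufferSize 7.0 fetches 70 rows at a time and nearLimitFactor
// 0.2 triggers a refetch within 14 rows of either buffer edge.
/*
var md = new Rico.LiveGridMetaData(10, 500, 4);
md.getLargeBufferSize(); // 70
md.getLimitTolerance(); // 14
*/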
// Rico.LiveGridScroller -----------------------------------------------------
Rico.LiveGridScroller = Class.create();
Rico.LiveGridScroller.prototype = {
initialize: function(liveGrid, viewPort) {
this.isIE = navigator.userAgent.toLowerCase().indexOf("msie") >= 0;
this.liveGrid = liveGrid;
this.metaData = liveGrid.metaData;
this.createScrollBar();
this.scrollTimeout = null;
this.lastScrollPos = 0;
this.viewPort = viewPort;
this.rows = new Array();
},
isUnPlugged: function() {
return this.scrollerDiv.onscroll == null;
},
plugin: function() {
this.scrollerDiv.onscroll = this.handleScroll.bindAsEventListener(this);
},
unplug: function() {
this.scrollerDiv.onscroll = null;
},
sizeIEHeaderHack: function() {
if ( !this.isIE ) return;
var headerTable = $(this.liveGrid.tableId + "_header");
if ( headerTable )
headerTable.rows[0].cells[0].style.width =
(headerTable.rows[0].cells[0].offsetWidth + 1) + "px";
},
createScrollBar: function() {
var visibleHeight = this.liveGrid.viewPort.visibleHeight();
// create the outer div...
this.scrollerDiv = document.createElement("div");
var scrollerStyle = this.scrollerDiv.style;
scrollerStyle.borderRight = this.liveGrid.options.scrollerBorderRight;
scrollerStyle.position = "relative";
scrollerStyle.left = this.isIE ? "-6px" : "-3px";
scrollerStyle.width = "19px";
scrollerStyle.height = visibleHeight + "px";
scrollerStyle.overflow = "auto";
// create the inner div...
this.heightDiv = document.createElement("div");
this.heightDiv.style.width = "1px";
this.heightDiv.style.height = parseInt(visibleHeight *
this.metaData.getTotalRows()/this.metaData.getPageSize()) + "px" ;
this.scrollerDiv.appendChild(this.heightDiv);
this.scrollerDiv.onscroll = this.handleScroll.bindAsEventListener(this);
var table = this.liveGrid.table;
table.parentNode.parentNode.insertBefore( this.scrollerDiv, table.parentNode.nextSibling );
var eventName = this.isIE ? "mousewheel" : "DOMMouseScroll";
Event.observe(table, eventName,
function(evt) {
if (evt.wheelDelta>=0 || evt.detail < 0) //wheel-up
this.scrollerDiv.scrollTop -= (2*this.viewPort.rowHeight);
else
this.scrollerDiv.scrollTop += (2*this.viewPort.rowHeight);
this.handleScroll(false);
}.bindAsEventListener(this),
false);
},
updateSize: function() {
var table = this.liveGrid.table;
var visibleHeight = this.viewPort.visibleHeight();
this.heightDiv.style.height = parseInt(visibleHeight *
this.metaData.getTotalRows()/this.metaData.getPageSize()) + "px";
},
rowToPixel: function(rowOffset) {
return (rowOffset / this.metaData.getTotalRows()) * this.heightDiv.offsetHeight
},
moveScroll: function(rowOffset) {
this.scrollerDiv.scrollTop = this.rowToPixel(rowOffset);
if ( this.metaData.options.onscroll )
this.metaData.options.onscroll( this.liveGrid, rowOffset );
},
handleScroll: function() {
if ( this.scrollTimeout )
clearTimeout( this.scrollTimeout );
var scrollDiff = this.lastScrollPos-this.scrollerDiv.scrollTop;
if (scrollDiff != 0.00) {
var r = this.scrollerDiv.scrollTop % this.viewPort.rowHeight;
if (r != 0) {
this.unplug();
if ( scrollDiff < 0 ) {
this.scrollerDiv.scrollTop += (this.viewPort.rowHeight-r);
} else {
this.scrollerDiv.scrollTop -= r;
}
this.plugin();
}
}
var contentOffset = parseInt(this.scrollerDiv.scrollTop / this.viewPort.rowHeight);
this.liveGrid.requestContentRefresh(contentOffset);
this.viewPort.scrollTo(this.scrollerDiv.scrollTop);
if ( this.metaData.options.onscroll )
this.metaData.options.onscroll( this.liveGrid, contentOffset );
this.scrollTimeout = setTimeout(this.scrollIdle.bind(this), 1200 );
this.lastScrollPos = this.scrollerDiv.scrollTop;
},
scrollIdle: function() {
if ( this.metaData.options.onscrollidle )
this.metaData.options.onscrollidle();
}
};
// Rico.LiveGridBuffer -----------------------------------------------------
Rico.LiveGridBuffer = Class.create();
Rico.LiveGridBuffer.prototype = {
initialize: function(metaData, viewPort) {
this.startPos = 0;
this.size = 0;
this.metaData = metaData;
this.rows = new Array();
this.updateInProgress = false;
this.viewPort = viewPort;
this.maxBufferSize = metaData.getLargeBufferSize() * 2;
this.maxFetchSize = metaData.getLargeBufferSize();
this.lastOffset = 0;
},
getBlankRow: function() {
if (!this.blankRow ) {
this.blankRow = new Array();
for ( var i=0; i < this.metaData.columnCount ; i++ )
this.blankRow[i] = "&nbsp;";
}
return this.blankRow;
},
loadRows: function(ajaxResponse) {
var rowsElement = ajaxResponse.getElementsByTagName('rows')[0];
this.updateUI = rowsElement.getAttribute("update_ui") == "true"
var newRows = new Array()
var trs = rowsElement.getElementsByTagName("tr");
for ( var i=0 ; i < trs.length; i++ ) {
var row = newRows[i] = new Array();
var cells = trs[i].getElementsByTagName("td");
for ( var j=0; j < cells.length ; j++ ) {
var cell = cells[j];
var convertSpaces = cell.getAttribute("convert_spaces") == "true";
var cellContent = RicoUtil.getContentAsString(cell);
row[j] = convertSpaces ? this.convertSpaces(cellContent) : cellContent;
if (!row[j])
row[j] = '&nbsp;';
}
}
return newRows;
},
update: function(ajaxResponse, start) {
var newRows = this.loadRows(ajaxResponse);
if (this.rows.length == 0) { // initial load
this.rows = newRows;
this.size = this.rows.length;
this.startPos = start;
return;
}
if (start > this.startPos) { //appending
if (this.startPos + this.rows.length < start) {
this.rows = newRows;
this.startPos = start;
} else {
this.rows = this.rows.concat( newRows.slice(0, newRows.length));
if (this.rows.length > this.maxBufferSize) {
var fullSize = this.rows.length;
this.rows = this.rows.slice(this.rows.length - this.maxBufferSize, this.rows.length)
this.startPos = this.startPos + (fullSize - this.rows.length);
}
}
} else { //prepending
if (start + newRows.length < this.startPos) {
this.rows = newRows;
} else {
this.rows = newRows.slice(0, this.startPos).concat(this.rows);
if (this.rows.length > this.maxBufferSize)
this.rows = this.rows.slice(0, this.maxBufferSize)
}
this.startPos = start;
}
this.size = this.rows.length;
},
clear: function() {
this.rows = new Array();
this.startPos = 0;
this.size = 0;
},
isOverlapping: function(start, size) {
return ((start < this.endPos()) && (this.startPos < start + size)) || (this.endPos() == 0)
},
isInRange: function(position) {
return (position >= this.startPos) && (position + this.metaData.getPageSize() <= this.endPos());
//&& this.size() != 0;
},
isNearingTopLimit: function(position) {
return position - this.startPos < this.metaData.getLimitTolerance();
},
endPos: function() {
return this.startPos + this.rows.length;
},
isNearingBottomLimit: function(position) {
return this.endPos() - (position + this.metaData.getPageSize()) < this.metaData.getLimitTolerance();
},
isAtTop: function() {
return this.startPos == 0;
},
isAtBottom: function() {
return this.endPos() == this.metaData.getTotalRows();
},
isNearingLimit: function(position) {
return ( !this.isAtTop() && this.isNearingTopLimit(position)) ||
( !this.isAtBottom() && this.isNearingBottomLimit(position) )
},
getFetchSize: function(offset) {
var adjustedOffset = this.getFetchOffset(offset);
var adjustedSize = 0;
if (adjustedOffset >= this.startPos) { //appending
var endFetchOffset = this.maxFetchSize + adjustedOffset;
if (endFetchOffset > this.metaData.totalRows)
endFetchOffset = this.metaData.totalRows;
adjustedSize = endFetchOffset - adjustedOffset;
if(adjustedOffset == 0 && adjustedSize < this.maxFetchSize){
adjustedSize = this.maxFetchSize;
}
} else {//prepending
var adjustedSize = this.startPos - adjustedOffset;
if (adjustedSize > this.maxFetchSize)
adjustedSize = this.maxFetchSize;
}
return adjustedSize;
},
getFetchOffset: function(offset) {
var adjustedOffset = offset;
if (offset > this.startPos) //appending
adjustedOffset = (offset > this.endPos()) ? offset : this.endPos();
else { //prepending
if (offset + this.maxFetchSize >= this.startPos) {
var adjustedOffset = this.startPos - this.maxFetchSize;
if (adjustedOffset < 0)
adjustedOffset = 0;
}
}
this.lastOffset = adjustedOffset;
return adjustedOffset;
},
getRows: function(start, count) {
var begPos = start - this.startPos
var endPos = begPos + count
// requested range extends past the buffered rows; clamp to what we have
if ( endPos > this.size )
endPos = this.size
var results = new Array()
var index = 0;
for ( var i=begPos ; i < endPos; i++ ) {
results[index++] = this.rows[i]
}
return results
},
convertSpaces: function(s) {
return s.split(" ").join(" ");
}
};
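// Payload sketch: loadRows() above expects the ajax-response to contain a
// <rows> element shaped roughly like this (illustrative values):
/*
<rows update_ui="true">
<tr><td>Acme Corp</td><td convert_spaces="true">NY 10001</td></tr>
<tr><td>Beta LLC</td><td>Boston</td></tr>
</rows>
*/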
//Rico.GridViewPort --------------------------------------------------
Rico.GridViewPort = Class.create();
Rico.GridViewPort.prototype = {
initialize: function(table, rowHeight, visibleRows, buffer, liveGrid) {
this.lastDisplayedStartPos = 0;
this.div = table.parentNode;
this.table = table
this.rowHeight = rowHeight;
this.div.style.height = (this.rowHeight * visibleRows) + "px";
this.div.style.overflow = "hidden";
this.buffer = buffer;
this.liveGrid = liveGrid;
this.visibleRows = visibleRows + 1;
this.lastPixelOffset = 0;
this.startPos = 0;
},
populateRow: function(htmlRow, row) {
for (var j=0; j < row.length; j++) {
htmlRow.cells[j].innerHTML = row[j]
}
},
bufferChanged: function() {
this.refreshContents( parseInt(this.lastPixelOffset / this.rowHeight));
},
clearRows: function() {
if (!this.isBlank) {
this.liveGrid.table.className = this.liveGrid.options.loadingClass;
for (var i=0; i < this.visibleRows; i++)
this.populateRow(this.table.rows[i], this.buffer.getBlankRow());
this.isBlank = true;
}
},
clearContents: function() {
this.clearRows();
this.scrollTo(0);
this.startPos = 0;
this.lastStartPos = -1;
},
refreshContents: function(startPos) {
if (startPos == this.lastRowPos && !this.isPartialBlank && !this.isBlank) {
return;
}
if ((startPos + this.visibleRows < this.buffer.startPos)
|| (this.buffer.startPos + this.buffer.size < startPos)
|| (this.buffer.size == 0)) {
this.clearRows();
return;
}
this.isBlank = false;
var viewPrecedesBuffer = this.buffer.startPos > startPos
var contentStartPos = viewPrecedesBuffer ? this.buffer.startPos: startPos;
var contentEndPos = (this.buffer.startPos + this.buffer.size < startPos + this.visibleRows)
? this.buffer.startPos + this.buffer.size
: startPos + this.visibleRows;
var rowSize = contentEndPos - contentStartPos;
var rows = this.buffer.getRows(contentStartPos, rowSize );
var blankSize = this.visibleRows - rowSize;
var blankOffset = viewPrecedesBuffer ? 0: rowSize;
var contentOffset = viewPrecedesBuffer ? blankSize: 0;
for (var i=0; i < rows.length; i++) {//initialize what we have
this.populateRow(this.table.rows[i + contentOffset], rows[i]);
}
for (var i=0; i < blankSize; i++) {// blank out the rest
this.populateRow(this.table.rows[i + blankOffset], this.buffer.getBlankRow());
}
this.isPartialBlank = blankSize > 0;
this.lastRowPos = startPos;
this.liveGrid.table.className = this.liveGrid.options.tableClass;
// Check if user has set a onRefreshComplete function
var onRefreshComplete = this.liveGrid.options.onRefreshComplete;
if (onRefreshComplete != null)
onRefreshComplete();
},
scrollTo: function(pixelOffset) {
if (this.lastPixelOffset == pixelOffset)
return;
this.refreshContents(parseInt(pixelOffset / this.rowHeight))
this.div.scrollTop = pixelOffset % this.rowHeight
this.lastPixelOffset = pixelOffset;
},
visibleHeight: function() {
return parseInt(RicoUtil.getElementsComputedStyle(this.div, 'height'));
}
};
Rico.LiveGridRequest = Class.create();
Rico.LiveGridRequest.prototype = {
initialize: function( requestOffset, options ) {
this.requestOffset = requestOffset;
}
};
// Rico.LiveGrid -----------------------------------------------------
Rico.LiveGrid = Class.create();
Rico.LiveGrid.prototype = {
initialize: function( tableId, visibleRows, totalRows, url, options, ajaxOptions ) {
this.options = {
tableClass: $(tableId).className,
loadingClass: $(tableId).className,
scrollerBorderRight: '1px solid #ababab',
bufferTimeout: 20000,
sortAscendImg: 'images/sort_asc.gif',
sortDescendImg: 'images/sort_desc.gif',
sortImageWidth: 9,
sortImageHeight: 5,
ajaxSortURLParms: [],
onRefreshComplete: null,
requestParameters: null,
inlineStyles: true
};
Object.extend(this.options, options || {});
this.ajaxOptions = {parameters: null};
Object.extend(this.ajaxOptions, ajaxOptions || {});
this.tableId = tableId;
this.table = $(tableId);
this.addLiveGridHtml();
var columnCount = this.table.rows[0].cells.length;
this.metaData = new Rico.LiveGridMetaData(visibleRows, totalRows, columnCount, options);
this.buffer = new Rico.LiveGridBuffer(this.metaData);
var rowCount = this.table.rows.length;
this.viewPort = new Rico.GridViewPort(this.table,
this.table.offsetHeight/rowCount,
visibleRows,
this.buffer, this);
this.scroller = new Rico.LiveGridScroller(this,this.viewPort);
this.options.sortHandler = this.sortHandler.bind(this);
if ( $(tableId + '_header') )
this.sort = new Rico.LiveGridSort(tableId + '_header', this.options)
this.processingRequest = null;
this.unprocessedRequest = null;
this.initAjax(url);
if ( this.options.prefetchBuffer || this.options.prefetchOffset > 0) {
var offset = 0;
if (this.options.offset ) {
offset = this.options.offset;
this.scroller.moveScroll(offset);
this.viewPort.scrollTo(this.scroller.rowToPixel(offset));
}
if (this.options.sortCol) {
this.sortCol = this.options.sortCol;
this.sortDir = this.options.sortDir;
}
this.requestContentRefresh(offset);
}
},
addLiveGridHtml: function() {
// Check to see if need to create a header table.
if (this.table.getElementsByTagName("thead").length > 0){
// Create Table this.tableId+'_header'
var tableHeader = this.table.cloneNode(true);
tableHeader.setAttribute('id', this.tableId+'_header');
tableHeader.setAttribute('class', this.table.className+'_header');
// Clean up and insert. Note: tBodies is a live collection, so removing
// nodes while indexing forward would skip every other one; drain it from
// the front instead.
while( tableHeader.tBodies.length > 0 )
tableHeader.removeChild(tableHeader.tBodies[0]);
this.table.deleteTHead();
this.table.parentNode.insertBefore(tableHeader,this.table);
}
new Insertion.Before(this.table, "<div id='"+this.tableId+"_container'></div>");
this.table.previousSibling.appendChild(this.table);
new Insertion.Before(this.table,"<div id='"+this.tableId+"_viewport' style='float:left;'></div>");
this.table.previousSibling.appendChild(this.table);
},
resetContents: function() {
this.scroller.moveScroll(0);
this.buffer.clear();
this.viewPort.clearContents();
},
sortHandler: function(column) {
if (!column) return;
this.sortCol = column.name;
this.sortDir = column.currentSort;
this.resetContents();
this.requestContentRefresh(0);
},
adjustRowSize: function() {
},
setTotalRows: function( newTotalRows ) {
this.resetContents();
this.metaData.setTotalRows(newTotalRows);
this.scroller.updateSize();
},
initAjax: function(url) {
ajaxEngine.registerRequest( this.tableId + '_request', url );
ajaxEngine.registerAjaxObject( this.tableId + '_updater', this );
},
invokeAjax: function() {
},
handleTimedOut: function() {
//server did not respond within options.bufferTimeout milliseconds (20 seconds
//by default)... assume that there could have been an error or something, and
//allow requests to be processed again...
this.processingRequest = null;
this.processQueuedRequest();
},
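// fetchBuffer builds the Ajax query string the server is expected to handle.
// The values below are illustrative only (they depend on runtime state):
//   "id=myGrid&page_size=30&offset=120&sort_col=name&sort_dir=ASC"
// with any configured requestParameters prepended.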
fetchBuffer: function(offset) {
if ( this.buffer.isInRange(offset) &&
!this.buffer.isNearingLimit(offset)) {
return;
}
if (this.processingRequest) {
this.unprocessedRequest = new Rico.LiveGridRequest(offset);
return;
}
var bufferStartPos = this.buffer.getFetchOffset(offset);
this.processingRequest = new Rico.LiveGridRequest(offset);
this.processingRequest.bufferOffset = bufferStartPos;
var fetchSize = this.buffer.getFetchSize(offset);
var partialLoaded = false;
var queryString;
if (this.options.requestParameters)
queryString = this._createQueryString(this.options.requestParameters, 0);
queryString = (queryString == null) ? '' : queryString + '&';
queryString = queryString+'id='+this.tableId+'&page_size='+fetchSize+'&offset='+bufferStartPos;
if (this.sortCol)
queryString = queryString+'&sort_col='+escape(this.sortCol)+'&sort_dir='+this.sortDir;
this.ajaxOptions.parameters = queryString;
ajaxEngine.sendRequest( this.tableId + '_request', this.ajaxOptions );
this.timeoutHandler = setTimeout( this.handleTimedOut.bind(this), this.options.bufferTimeout);
},
setRequestParams: function() {
this.options.requestParameters = [];
for ( var i=0 ; i < arguments.length ; i++ )
this.options.requestParameters[i] = arguments[i];
},
requestContentRefresh: function(contentOffset) {
this.fetchBuffer(contentOffset);
},
ajaxUpdate: function(ajaxResponse) {
try {
clearTimeout( this.timeoutHandler );
this.buffer.update(ajaxResponse,this.processingRequest.bufferOffset);
this.viewPort.bufferChanged();
}
catch(err) {}
finally {this.processingRequest = null; }
this.processQueuedRequest();
},
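// _createQueryString accepts either {name: ..., value: ...} objects or raw
// "name=value" strings; the two styles may be mixed. Illustrative call with
// hypothetical values:
//   this._createQueryString([{name: 'a', value: 1}, 'b=2'], 0)  // -> "a=1&b=2"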
_createQueryString: function( theArgs, offset ) {
var queryString = ""
if (!theArgs)
return queryString;
for ( var i = offset ; i < theArgs.length ; i++ ) {
if ( i != offset )
queryString += "&";
var anArg = theArgs[i];
if ( anArg.name != undefined && anArg.value != undefined ) {
queryString += anArg.name + "=" + escape(anArg.value);
}
else {
var ePos = anArg.indexOf('=');
var argName = anArg.substring( 0, ePos );
var argValue = anArg.substring( ePos + 1 );
queryString += argName + "=" + escape(argValue);
}
}
return queryString;
},
processQueuedRequest: function() {
if (this.unprocessedRequest != null) {
this.requestContentRefresh(this.unprocessedRequest.requestOffset);
this.unprocessedRequest = null;
}
}
};
//-------------------- ricoLiveGridSort.js
Rico.LiveGridSort = Class.create();
Rico.LiveGridSort.prototype = {
initialize: function(headerTableId, options) {
this.headerTableId = headerTableId;
this.headerTable = $(headerTableId);
this.options = options;
this.setOptions();
this.applySortBehavior();
if ( this.options.sortCol ) {
this.setSortUI( this.options.sortCol, this.options.sortDir );
}
},
setSortUI: function( columnName, sortDirection ) {
var cols = this.options.columns;
for ( var i = 0 ; i < cols.length ; i++ ) {
if ( cols[i].name == columnName ) {
this.setColumnSort(i, sortDirection);
break;
}
}
},
setOptions: function() {
// preload the images...
new Image().src = this.options.sortAscendImg;
new Image().src = this.options.sortDescendImg;
this.sort = this.options.sortHandler;
if ( !this.options.columns )
this.options.columns = this.introspectForColumnInfo();
else {
// allow client to pass { columns: [ ["a", true], ["b", false] ] }
// and convert to an array of Rico.TableColumn objs...
this.options.columns = this.convertToTableColumns(this.options.columns);
}
},
applySortBehavior: function() {
var headerRow = this.headerTable.rows[0];
var headerCells = headerRow.cells;
for ( var i = 0 ; i < headerCells.length ; i++ ) {
this.addSortBehaviorToColumn( i, headerCells[i] );
}
},
addSortBehaviorToColumn: function( n, cell ) {
if ( this.options.columns[n].isSortable() ) {
cell.id = this.headerTableId + '_' + n;
cell.style.cursor = 'pointer';
cell.onclick = this.headerCellClicked.bindAsEventListener(this);
cell.innerHTML = cell.innerHTML + '<span id="' + this.headerTableId + '_img_' + n + '">'
+ ' </span>';
}
},
// event handler....
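// The clicked cell's id has the form headerTableId + '_' + columnIndex (see
// addSortBehaviorToColumn above), so an id like 'myTable_header_2' -- an
// illustrative value -- parses to columnNumber 2 below.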
headerCellClicked: function(evt) {
var eventTarget = evt.target ? evt.target : evt.srcElement;
var cellId = eventTarget.id;
var columnNumber = parseInt(cellId.substring( cellId.lastIndexOf('_') + 1 ));
var sortedColumnIndex = this.getSortedColumnIndex();
if ( sortedColumnIndex != -1 ) {
if ( sortedColumnIndex != columnNumber ) {
this.removeColumnSort(sortedColumnIndex);
this.setColumnSort(columnNumber, Rico.TableColumn.SORT_ASC);
}
else
this.toggleColumnSort(sortedColumnIndex);
}
else
this.setColumnSort(columnNumber, Rico.TableColumn.SORT_ASC);
if (this.options.sortHandler) {
this.options.sortHandler(this.options.columns[columnNumber]);
}
},
removeColumnSort: function(n) {
this.options.columns[n].setUnsorted();
this.setSortImage(n);
},
setColumnSort: function(n, direction) {
if (isNaN(n)) return;
this.options.columns[n].setSorted(direction);
this.setSortImage(n);
},
toggleColumnSort: function(n) {
this.options.columns[n].toggleSort();
this.setSortImage(n);
},
setSortImage: function(n) {
var sortDirection = this.options.columns[n].getSortDirection();
var sortImageSpan = $( this.headerTableId + '_img_' + n );
if ( sortDirection == Rico.TableColumn.UNSORTED )
sortImageSpan.innerHTML = ' ';
else if ( sortDirection == Rico.TableColumn.SORT_ASC )
sortImageSpan.innerHTML = ' <img width="' + this.options.sortImageWidth + '" ' +
'height="'+ this.options.sortImageHeight + '" ' +
'src="' + this.options.sortAscendImg + '"/>';
else if ( sortDirection == Rico.TableColumn.SORT_DESC )
sortImageSpan.innerHTML = ' <img width="' + this.options.sortImageWidth + '" ' +
'height="'+ this.options.sortImageHeight + '" ' +
'src="' + this.options.sortDescendImg + '"/>';
},
getSortedColumnIndex: function() {
var cols = this.options.columns;
for ( var i = 0 ; i < cols.length ; i++ ) {
if ( cols[i].isSorted() )
return i;
}
return -1;
},
introspectForColumnInfo: function() {
var columns = new Array();
var headerRow = this.headerTable.rows[0];
var headerCells = headerRow.cells;
for ( var i = 0 ; i < headerCells.length ; i++ )
columns.push( new Rico.TableColumn( this.deriveColumnNameFromCell(headerCells[i],i), true ) );
return columns;
},
convertToTableColumns: function(cols) {
var columns = new Array();
for ( var i = 0 ; i < cols.length ; i++ )
columns.push( new Rico.TableColumn( cols[i][0], cols[i][1] ) );
return columns;
},
deriveColumnNameFromCell: function(cell,columnNumber) {
var cellContent = cell.innerText != undefined ? cell.innerText : cell.textContent;
return cellContent ? cellContent.toLowerCase().split(' ').join('_') : "col_" + columnNumber;
}
};
Rico.TableColumn = Class.create();
Rico.TableColumn.UNSORTED = 0;
Rico.TableColumn.SORT_ASC = "ASC";
Rico.TableColumn.SORT_DESC = "DESC";
Rico.TableColumn.prototype = {
initialize: function(name, sortable) {
this.name = name;
this.sortable = sortable;
this.currentSort = Rico.TableColumn.UNSORTED;
},
isSortable: function() {
return this.sortable;
},
isSorted: function() {
return this.currentSort != Rico.TableColumn.UNSORTED;
},
getSortDirection: function() {
return this.currentSort;
},
toggleSort: function() {
if ( this.currentSort == Rico.TableColumn.UNSORTED || this.currentSort == Rico.TableColumn.SORT_DESC )
this.currentSort = Rico.TableColumn.SORT_ASC;
else if ( this.currentSort == Rico.TableColumn.SORT_ASC )
this.currentSort = Rico.TableColumn.SORT_DESC;
},
setUnsorted: function() {
this.setSorted(Rico.TableColumn.UNSORTED);
},
setSorted: function(direction) {
// direction must be one of Rico.TableColumn.UNSORTED, .SORT_ASC, or .SORT_DESC...
this.currentSort = direction;
}
};
//-------------------- ricoUtil.js
var RicoUtil = {
getElementsComputedStyle: function ( htmlElement, cssProperty, mozillaEquivalentCSS) {
if ( arguments.length == 2 )
mozillaEquivalentCSS = cssProperty;
var el = $(htmlElement);
if ( el.currentStyle )
return el.currentStyle[cssProperty];
else
return document.defaultView.getComputedStyle(el, null).getPropertyValue(mozillaEquivalentCSS);
},
createXmlDocument : function() {
if (document.implementation && document.implementation.createDocument) {
var doc = document.implementation.createDocument("", "", null);
if (doc.readyState == null) {
doc.readyState = 1;
doc.addEventListener("load", function () {
doc.readyState = 4;
if (typeof doc.onreadystatechange == "function")
doc.onreadystatechange();
}, false);
}
return doc;
}
if (window.ActiveXObject)
return Try.these(
function() { return new ActiveXObject('MSXML2.DomDocument') },
function() { return new ActiveXObject('Microsoft.DomDocument')},
function() { return new ActiveXObject('MSXML.DomDocument') },
function() { return new ActiveXObject('MSXML3.DomDocument') }
) || false;
return null;
},
getContentAsString: function( parentNode ) {
return parentNode.xml != undefined ?
this._getContentAsStringIE(parentNode) :
this._getContentAsStringMozilla(parentNode);
},
_getContentAsStringIE: function(parentNode) {
var contentStr = "";
for ( var i = 0 ; i < parentNode.childNodes.length ; i++ ) {
var n = parentNode.childNodes[i];
if (n.nodeType == 4) {
contentStr += n.nodeValue;
}
else {
contentStr += n.xml;
}
}
return contentStr;
},
_getContentAsStringMozilla: function(parentNode) {
var xmlSerializer = new XMLSerializer();
var contentStr = "";
for ( var i = 0 ; i < parentNode.childNodes.length ; i++ ) {
var n = parentNode.childNodes[i];
if (n.nodeType == 4) { // CDATA node
contentStr += n.nodeValue;
}
else {
contentStr += xmlSerializer.serializeToString(n);
}
}
return contentStr;
},
toViewportPosition: function(element) {
return this._toAbsolute(element,true);
},
toDocumentPosition: function(element) {
return this._toAbsolute(element,false);
},
/**
* Compute the elements position in terms of the window viewport
* so that it can be compared to the position of the mouse (dnd)
* This is additions of all the offsetTop,offsetLeft values up the
* offsetParent hierarchy, ...taking into account any scrollTop,
* scrollLeft values along the way...
*
* IE has a bug and reports an incorrect offsetLeft for elements within
* a relatively positioned parent!!!
**/
_toAbsolute: function(element,accountForDocScroll) {
if ( navigator.userAgent.toLowerCase().indexOf("msie") == -1 )
return this._toAbsoluteMozilla(element,accountForDocScroll);
var x = 0;
var y = 0;
var parent = element;
while ( parent ) {
var borderXOffset = 0;
var borderYOffset = 0;
if ( parent != element ) {
var borderXOffset = parseInt(this.getElementsComputedStyle(parent, "borderLeftWidth" ));
var borderYOffset = parseInt(this.getElementsComputedStyle(parent, "borderTopWidth" ));
borderXOffset = isNaN(borderXOffset) ? 0 : borderXOffset;
borderYOffset = isNaN(borderYOffset) ? 0 : borderYOffset;
}
x += parent.offsetLeft - parent.scrollLeft + borderXOffset;
y += parent.offsetTop - parent.scrollTop + borderYOffset;
parent = parent.offsetParent;
}
if ( accountForDocScroll ) {
x -= this.docScrollLeft();
y -= this.docScrollTop();
}
return { x:x, y:y };
},
/**
* Mozilla does not report all of the parents up the hierarchy via the
* offsetParent property the way IE does. So we use offsetParent for the
* offset calculation, but navigate up via the parentNode property to
* accumulate the scrollTop/scrollLeft adjustments along the way...
*
**/
_toAbsoluteMozilla: function(element,accountForDocScroll) {
var x = 0;
var y = 0;
var parent = element;
while ( parent ) {
x += parent.offsetLeft;
y += parent.offsetTop;
parent = parent.offsetParent;
}
parent = element;
while ( parent &&
parent != document.body &&
parent != document.documentElement ) {
if ( parent.scrollLeft )
x -= parent.scrollLeft;
if ( parent.scrollTop )
y -= parent.scrollTop;
parent = parent.parentNode;
}
if ( accountForDocScroll ) {
x -= this.docScrollLeft();
y -= this.docScrollTop();
}
return { x:x, y:y };
},
docScrollLeft: function() {
if ( window.pageXOffset )
return window.pageXOffset;
else if ( document.documentElement && document.documentElement.scrollLeft )
return document.documentElement.scrollLeft;
else if ( document.body )
return document.body.scrollLeft;
else
return 0;
},
docScrollTop: function() {
if ( window.pageYOffset )
return window.pageYOffset;
else if ( document.documentElement && document.documentElement.scrollTop )
return document.documentElement.scrollTop;
else if ( document.body )
return document.body.scrollTop;
else
return 0;
}
};<|fim▁end|> | return false;
for( var i=0,n=0; i<this.length; i++ )
if( i != dx )
this[n++]=this[i];
|
<|file_name|>atomic_state_client.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2017 Google Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import debounce from 'lodash/debounce';
import throttle from 'lodash/throttle';
import {RefCounted} from 'neuroglancer/util/disposable';
import {getRandomHexString} from 'neuroglancer/util/random';
import {Signal} from 'neuroglancer/util/signal';<|fim▁hole|> clientGeneration = -1;
private connected_ = false;
receiveUpdateRequested = new Signal<(lastGeneration: string) => void>();
sendUpdateRequested = new Signal<(value: any, generation: string) => void>();
private lastServerState: string|undefined;
private sendUpdates: boolean;
set connected(value: boolean) {
if (value !== this.connected_) {
this.connected_ = value;
if (value === true) {
if (this.receiveUpdates) {
this.receiveUpdateRequested.dispatch(this.serverGeneration);
}
this.handleStateChanged();
}
}
}
get connected() {
return this.connected_;
}
/**
* @param updateDelayMilliseconds If `null`, this client is receive-only. No updates are sent.
* @param receiveUpdates If `false`, this client doesn't receive updates.
*/
constructor(
public state: Trackable, updateDelayMilliseconds: number|null = 100,
public receiveUpdates = true) {
super();
if (updateDelayMilliseconds !== null) {
this.sendUpdates = true;
this.registerDisposer(state.changed.add(this.registerCancellable(throttle(
this.registerCancellable(debounce(() => this.handleStateChanged(), 0)),
updateDelayMilliseconds, {leading: false}))));
} else {
this.sendUpdates = false;
}
}
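// Hypothetical wiring sketch -- `state` and the `socket` transport with its
// methods are assumed here, not provided by this module:
//
//   const client = new AtomicStateClient(state, /*updateDelayMilliseconds=*/100);
//   client.sendUpdateRequested.add((value, generation) => socket.sendState(value, generation));
//   client.receiveUpdateRequested.add(lastGeneration => socket.requestState(lastGeneration));
//   client.connected = true;  // dispatches an initial receive request if receiveUpdates is set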
setState(value: any, generation: string) {
if (!this.receiveUpdates) {
return;
}
if (generation !== this.serverGeneration) {
this.lastServerState = JSON.stringify(value);
this.state.reset();
this.state.restoreState(value);
this.serverGeneration = generation;
this.clientGeneration = this.state.changed.count;
}
}
private handleStateChanged() {
if (!this.sendUpdates) {
return;
}
if (!this.connected_ || (this.receiveUpdates && this.serverGeneration === '') ||
this.clientGeneration === this.state.changed.count) {
return;
}
const newStateJson = getCachedJson(this.state).value;
const newStateEncoded = JSON.stringify(newStateJson);
if (newStateEncoded === this.lastServerState) {
// Avoid sending back the exact same state just received from or sent to the server. This is
// also important for making things work in the presence of multiple simultaneous clients.
this.clientGeneration = this.state.changed.count;
return;
}
const generation = getRandomHexString(160);
this.serverGeneration = generation;
this.lastServerState = newStateEncoded;
this.sendUpdateRequested.dispatch(newStateJson, generation);
}
}<|fim▁end|> | import {getCachedJson, Trackable} from 'neuroglancer/util/trackable';
export class AtomicStateClient extends RefCounted {
serverGeneration = ''; |
<|file_name|>imagenet_test.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for ImageNet."""
import tensorflow as tf
import uncertainty_baselines as ub
# TODO(dusenberrymw): Use TFDS mocking.
class ImageNetDatasetTest(ub.datasets.DatasetTest):
# TODO(dusenberrymw): Rename to `test_dataset_size`.
def testDatasetSize(self):
super()._testDatasetSize(
ub.datasets.ImageNetDataset, (224, 224, 3), validation_percent=0.1)
def test_expected_features(self):
builder = ub.datasets.ImageNetDataset('train')
dataset = builder.load(batch_size=1)
self.assertEqual(list(dataset.element_spec.keys()), ['features', 'labels'])
builder_with_file_name = ub.datasets.ImageNetDataset(
'train', include_file_name=True)
dataset_with_file_name = builder_with_file_name.load(batch_size=1)
self.assertEqual(
list(dataset_with_file_name.element_spec.keys()),
['features', 'labels', 'file_name'])
if __name__ == '__main__':<|fim▁hole|><|fim▁end|> | tf.test.main() |
<|file_name|>PhoneRegionCode504Constants.java<|end_file_name|><|fim▁begin|><|fim▁hole|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package de.knightsoftnet.validators.client.data;
import com.google.gwt.i18n.client.Constants;
import java.util.Map;
/**
* set of phone country region codes.
*
* @author Manfred Tremmel
*
*/
public interface PhoneRegionCode504Constants extends Constants {
Map<String, String> phoneRegionCodes504();
}<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>""" Setup file.
"""
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.rst')) as f:
README = f.read()
requires = ['cornice', 'metlog-py', 'mozsvc', 'PasteScript', 'waitress',
            'PyBrowserID', 'Requests', 'webtest']
setup(name='fxap',
version='0.1',
description='fxap',
long_description=README,
license='MPLv2.0',
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application"
],
keywords="web services",
author='',
author_email='',
url='',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,<|fim▁hole|> [paste.app_factory]
main = fxap:main
""",
paster_plugins=['pyramid'],
)<|fim▁end|> | entry_points = """\ |
<|file_name|>upperair_soundings.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016,2017 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
===========================
Upper Air Sounding Tutorial
===========================
Upper air analysis is a staple of many synoptic and mesoscale analysis
problems. In this tutorial we will gather weather balloon data, plot it,
perform a series of thermodynamic calculations, and summarize the results.
To learn more about the Skew-T diagram and its use in weather analysis and
forecasting, check out `this <https://homes.comet.ucar.edu/~alanbol/aws-tr-79-006.pdf>`_
Air Weather Service guide.
"""
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import numpy as np
import pandas as pd
import metpy.calc as mpcalc
from metpy.cbook import get_test_data
from metpy.plots import Hodograph, SkewT
from metpy.units import units
#########################################################################
# Getting Data
# ------------
#
# Upper air data can be obtained using the siphon package, but for this tutorial we will use
# some of MetPy's sample data. This event is the Veterans Day tornado outbreak in 2002.
col_names = ['pressure', 'height', 'temperature', 'dewpoint', 'direction', 'speed']
df = pd.read_fwf(get_test_data('nov11_sounding.txt', as_file_obj=False),
skiprows=5, usecols=[0, 1, 2, 3, 6, 7], names=col_names)
df['u_wind'], df['v_wind'] = mpcalc.get_wind_components(df['speed'],
np.deg2rad(df['direction']))
# Drop any rows with all NaN values for T, Td, winds
df = df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed',
'u_wind', 'v_wind'), how='all').reset_index(drop=True)
##########################################################################
# We will pull the data out of the example dataset into individual variables and
# assign units.
p = df['pressure'].values * units.hPa
T = df['temperature'].values * units.degC
Td = df['dewpoint'].values * units.degC
wind_speed = df['speed'].values * units.knots
wind_dir = df['direction'].values * units.degrees
u, v = mpcalc.get_wind_components(wind_speed, wind_dir)
##########################################################################
# Thermodynamic Calculations
# --------------------------
#
# Often times we will want to calculate some thermodynamic parameters of a
# sounding. The MetPy calc module has many such calculations already implemented!
#
# * **Lifting Condensation Level (LCL)** - The level at which an air parcel's
# relative humidity becomes 100% when lifted along a dry adiabatic path.
# * **Parcel Path** - Path followed by a hypothetical parcel of air, beginning
# at the surface temperature/pressure and rising dry adiabatically until
# reaching the LCL, then rising moist adiabatically.
# Calculate the LCL
lcl_pressure, lcl_temperature = mpcalc.lcl(p[0], T[0], Td[0])
print(lcl_pressure, lcl_temperature)
# Calculate the parcel profile.
parcel_prof = mpcalc.parcel_profile(p, T[0], Td[0]).to('degC')
##########################################################################
# Basic Skew-T Plotting
# ---------------------
#
# The Skew-T (log-P) diagram is the standard way to view rawinsonde data. The
# y-axis is height in pressure coordinates and the x-axis is temperature. The
# y coordinates are plotted on a logarithmic scale and the x coordinate system
# is skewed. An explanation of skew-T interpretation is beyond the scope of this
# tutorial, but here we will plot one that can be used for analysis or
# publication.
#
# The most basic skew-T can be plotted with only five lines of Python.
# These lines perform the following tasks:
#
# 1. Create a ``Figure`` object and set the size of the figure.
#
# 2. Create a ``SkewT`` object
#
# 3. Plot the pressure and temperature (note that the pressure,
# the independent variable, is first even though it is plotted on the y-axis).
#
# 4. Plot the pressure and dewpoint temperature.
#
# 5. Plot the wind barbs at the appropriate pressure using the u and v wind
# components.
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
skew = SkewT(fig)
# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r', linewidth=2)
skew.plot(p, Td, 'g', linewidth=2)
skew.plot_barbs(p, u, v)
# Show the plot
plt.show()
##########################################################################
# Advanced Skew-T Plotting
# ------------------------
#
# Fiducial lines indicating dry adiabats, moist adiabats, and mixing ratio are
# useful when performing further analysis on the Skew-T diagram. Often the
# 0C isotherm is emphasized and areas of CAPE and CIN are shaded.
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
skew = SkewT(fig, rotation=30)
# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')
skew.plot_barbs(p, u, v)
skew.ax.set_ylim(1000, 100)
skew.ax.set_xlim(-40, 60)
# Plot LCL temperature as black dot
skew.plot(lcl_pressure, lcl_temperature, 'ko', markerfacecolor='black')
# Plot the parcel profile as a black line
skew.plot(p, parcel_prof, 'k', linewidth=2)
# Shade areas of CAPE and CIN
skew.shade_cin(p, T, parcel_prof)
skew.shade_cape(p, T, parcel_prof)
# Plot a zero degree isotherm
skew.ax.axvline(0, color='c', linestyle='--', linewidth=2)
# Add the relevant special lines
skew.plot_dry_adiabats()
skew.plot_moist_adiabats()
skew.plot_mixing_lines()
# Show the plot
plt.show()
##########################################################################
# Adding a Hodograph
# ------------------
#
# A hodograph is a polar representation of the wind profile measured by the rawinsonde.
# Winds at different levels are plotted as vectors with their tails at the origin, the angle
# from the vertical axis representing the direction, and the length representing the speed.
# The line plotted on the hodograph is a line connecting the tips of these vectors,<|fim▁hole|>
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
skew = SkewT(fig, rotation=30)
# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')
skew.plot_barbs(p, u, v)
skew.ax.set_ylim(1000, 100)
skew.ax.set_xlim(-40, 60)
# Plot LCL as black dot
skew.plot(lcl_pressure, lcl_temperature, 'ko', markerfacecolor='black')
# Plot the parcel profile as a black line
skew.plot(p, parcel_prof, 'k', linewidth=2)
# Shade areas of CAPE and CIN
skew.shade_cin(p, T, parcel_prof)
skew.shade_cape(p, T, parcel_prof)
# Plot a zero degree isotherm
skew.ax.axvline(0, color='c', linestyle='--', linewidth=2)
# Add the relevant special lines
skew.plot_dry_adiabats()
skew.plot_moist_adiabats()
skew.plot_mixing_lines()
# Create a hodograph
# Create an inset axes object that is 40% width and height of the
# figure and put it in the upper right hand corner.
ax_hod = inset_axes(skew.ax, '40%', '40%', loc=1)
h = Hodograph(ax_hod, component_range=80.)
h.add_grid(increment=20)
h.plot_colormapped(u, v, wind_speed) # Plot a line colored by wind speed
# Show the plot
plt.show()<|fim▁end|> | # which are not drawn. |
<|file_name|>plot.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Alcatel-Lucent Enterprise
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from facette.utils import *
from facette.v1.plotserie import PlotSerie
import json
PLOT_ID = "id"
PLOT_NAME = "name"
PLOT_DESCRIPTION = "description"
PLOT_TYPE = "type"
PLOT_SERIES = "series"
PLOT_STACK_MODE = "stack_mode"
PLOT_START = "start"<|fim▁hole|>PLOT_END = "end"
PLOT_STEP = "step"
PLOT_MODIFIED = "modified"
PLOT_UNIT_LABEL = "unit_label"
PLOT_UNIT_TYPE = "unit_type"
GRAPH_TYPE_AREA = 1
GRAPH_TYPE_LINE = 2
STACK_MODE_NONE = 1
STACK_MODE_NORMAL = 2
STACK_MODE_PERCENT = 3
class Plot:
def __init__(self, js=""):
self.plot = {}
self.id = facette_to_json(PLOT_ID, js, self.plot)
self.name = facette_to_json(PLOT_NAME, js, self.plot)
self.description = facette_to_json(PLOT_DESCRIPTION, js, self.plot)
self.type = facette_to_json(PLOT_TYPE, js, self.plot)
self.stack_mode = facette_to_json(PLOT_STACK_MODE, js, self.plot)
self.start = facette_to_json(PLOT_START, js, self.plot)
self.end = facette_to_json(PLOT_END, js, self.plot)
self.step = facette_to_json(PLOT_STEP, js, self.plot)
self.modified = facette_to_json(PLOT_MODIFIED, js, self.plot)
self.unit_label = facette_to_json(PLOT_UNIT_LABEL, js, self.plot)
self.unit_type = facette_to_json(PLOT_UNIT_TYPE, js, self.plot)
self.series = []
if js.get(PLOT_SERIES):
for x in js[PLOT_SERIES]:
e = PlotSerie(x)
self.series.append(e)
self.plot[PLOT_SERIES] = self.series
def __str__(self):
js = self.plot
series = []
for s in self.series:
series.append(json.loads(str(s)))
js[PLOT_SERIES] = series
return json.dumps(js)
def __repr__(self):
return str(self)<|fim▁end|> | |
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp');
var karma = require('karma').server;
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');
var rename = require('gulp-rename');
var path = require('path');
var plumber = require('gulp-plumber');
var runSequence = require('run-sequence');
var jshint = require('gulp-jshint');
/**
* File patterns
**/
// Root directory
var rootDirectory = path.resolve('./');
// Source directory for build process
var sourceDirectory = path.join(rootDirectory, './src');
var sourceFiles = [
// Make sure module files are handled first
path.join(sourceDirectory, '/**/*.module.js'),
// Then add all JavaScript files
path.join(sourceDirectory, '/**/*.js')
];
var lintFiles = [
'gulpfile.js',
// Karma configuration
'karma-*.conf.js'
].concat(sourceFiles);
gulp.task('build', function() {
gulp.src(sourceFiles)
.pipe(plumber())
.pipe(concat('df-validator.js'))
.pipe(gulp.dest('./dist/'))
.pipe(uglify())
.pipe(rename('df-validator.min.js'))
.pipe(gulp.dest('./dist'));
});
/**
* Process
*/
gulp.task('process-all', function (done) {
runSequence(/*'jshint',*/ 'test-src', 'build', done);
});
/**
* Watch task
*/
gulp.task('watch', function () {
// Watch JavaScript files
gulp.watch(sourceFiles, ['process-all']);
});
/**
* Validate source JavaScript
*/
gulp.task('jshint', function () {
return gulp.src(lintFiles)
.pipe(plumber())
.pipe(jshint())
.pipe(jshint.reporter('jshint-stylish'))
.pipe(jshint.reporter('fail'));
});<|fim▁hole|> * Run test once and exit
*/
gulp.task('test-src', function (done) {
karma.start({
configFile: __dirname + '/karma-src.conf.js',
singleRun: true
}, done);
});
/**
* Run test once and exit
*/
gulp.task('test-dist-concatenated', function (done) {
karma.start({
configFile: __dirname + '/karma-dist-concatenated.conf.js',
singleRun: true
}, done);
});
/**
* Run test once and exit
*/
gulp.task('test-dist-minified', function (done) {
karma.start({
configFile: __dirname + '/karma-dist-minified.conf.js',
singleRun: true
}, done);
});
gulp.task('default', function () {
runSequence('process-all', 'watch');
});<|fim▁end|> |
/** |
<|file_name|>error.js<|end_file_name|><|fim▁begin|>'use strict';
const buildType = process.config.target_defaults.default_configuration;
const assert = require('assert');
if (process.argv[2] === 'fatal') {
const binding = require(process.argv[3]);
binding.error.throwFatalError();
return;
}
test(`./build/${buildType}/binding.node`);
test(`./build/${buildType}/binding_noexcept.node`);
function test(bindingPath) {
const binding = require(bindingPath);
assert.throws(() => binding.error.throwApiError('test'), function(err) {
return err instanceof Error && err.message.includes('Invalid');
});
assert.throws(() => binding.error.throwJSError('test'), function(err) {
return err instanceof Error && err.message === 'test';
});
assert.throws(() => binding.error.throwTypeError('test'), function(err) {
return err instanceof TypeError && err.message === 'test';
});
assert.throws(() => binding.error.throwRangeError('test'), function(err) {
return err instanceof RangeError && err.message === 'test';
});
assert.throws(
() => binding.error.doNotCatch(<|fim▁hole|> function(err) {
return err instanceof TypeError && err.message === 'test' && !err.caught;
});
assert.throws(
() => binding.error.catchAndRethrowError(
() => {
throw new TypeError('test');
}),
function(err) {
return err instanceof TypeError && err.message === 'test' && err.caught;
});
const err = binding.error.catchError(
() => { throw new TypeError('test'); });
assert(err instanceof TypeError);
assert.strictEqual(err.message, 'test');
const msg = binding.error.catchErrorMessage(
() => { throw new TypeError('test'); });
assert.strictEqual(msg, 'test');
assert.throws(() => binding.error.throwErrorThatEscapesScope('test'), function(err) {
return err instanceof Error && err.message === 'test';
});
assert.throws(() => binding.error.catchAndRethrowErrorThatEscapesScope('test'), function(err) {
return err instanceof Error && err.message === 'test' && err.caught;
});
const p = require('./napi_child').spawnSync(
process.execPath, [ __filename, 'fatal', bindingPath ]);
assert.ifError(p.error);
assert.ok(p.stderr.toString().includes(
'FATAL ERROR: Error::ThrowFatalError This is a fatal error'));
}<|fim▁end|> | () => {
throw new TypeError('test');
}), |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Models for Student Identity Verification
This is where we put any models relating to establishing the real-life identity
of a student over a period of time. Right now, the only models are the abstract
`PhotoVerification`, and its one concrete implementation
`SoftwareSecurePhotoVerification`. The hope is to keep as much of the
photo verification process as generic as possible.
"""
import functools
import json
import logging
import os.path
import uuid
from datetime import timedelta
from email.utils import formatdate
import requests
import six
from django.conf import settings
from django.contrib.auth.models import User
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.urls import reverse
from django.db import models
from django.dispatch import receiver
from django.utils.functional import cached_property
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy
from model_utils import Choices
from model_utils.models import StatusModel, TimeStampedModel
from opaque_keys.edx.django.models import CourseKeyField
from lms.djangoapps.verify_student.ssencrypt import (
encrypt_and_encode,
generate_signed_message,
random_aes_key,
rsa_encrypt
)
from lms.djangoapps.verify_student.utils import earliest_allowed_verification_date  # used by get_initial_verification below
from openedx.core.djangoapps.signals.signals import LEARNER_NOW_VERIFIED
from openedx.core.storage import get_storage
<|fim▁hole|>log = logging.getLogger(__name__)
def generateUUID(): # pylint: disable=invalid-name
""" Utility function; generates UUIDs """
return str(uuid.uuid4())
class VerificationException(Exception):
pass
def status_before_must_be(*valid_start_statuses):
"""
Helper decorator with arguments to make sure that an object with a `status`
attribute is in one of a list of acceptable status states before a method
is called. You could use it in a class definition like:
@status_before_must_be("submitted", "approved", "denied")
def refund_user(self, user_id):
# Do logic here...
If the object has a status that is not listed when the `refund_user` method
is invoked, it will throw a `VerificationException`. This is just to avoid
distracting boilerplate when looking at a Model that needs to go through a
workflow process.
"""
def decorator_func(func):
"""
Decorator function that gets returned
"""
@functools.wraps(func)
def with_status_check(obj, *args, **kwargs):
if obj.status not in valid_start_statuses:
exception_msg = (
u"Error calling {} {}: status is '{}', must be one of: {}"
).format(func, obj, obj.status, valid_start_statuses)
raise VerificationException(exception_msg)
return func(obj, *args, **kwargs)
return with_status_check
return decorator_func
class IDVerificationAttempt(StatusModel):
"""
Each IDVerificationAttempt represents a Student's attempt to establish
their identity through one of several methods that inherit from this Model,
including PhotoVerification and SSOVerification.
.. pii: The User's name is stored in this and sub-models
.. pii_types: name
.. pii_retirement: retained
"""
STATUS = Choices('created', 'ready', 'submitted', 'must_retry', 'approved', 'denied')
user = models.ForeignKey(User, db_index=True, on_delete=models.CASCADE)
# They can change their name later on, so we want to copy the value here so
# we always preserve what it was at the time they requested. We only copy
# this value during the mark_ready() step. Prior to that, you should be
# displaying the user's name from their user.profile.name.
name = models.CharField(blank=True, max_length=255)
created_at = models.DateTimeField(auto_now_add=True, db_index=True)
updated_at = models.DateTimeField(auto_now=True, db_index=True)
class Meta(object):
app_label = "verify_student"
abstract = True
ordering = ['-created_at']
@property
def expiration_datetime(self):
"""Datetime that the verification will expire. """
days_good_for = settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
return self.created_at + timedelta(days=days_good_for)
def should_display_status_to_user(self):
"""Whether or not the status from this attempt should be displayed to the user."""
raise NotImplementedError
def active_at_datetime(self, deadline):
"""Check whether the verification was active at a particular datetime.
Arguments:
deadline (datetime): The date at which the verification was active
(created before and expiration datetime is after today).
Returns:
bool
"""
return (
self.created_at < deadline and
self.expiration_datetime > now()
)
class ManualVerification(IDVerificationAttempt):
"""
Each ManualVerification represents a user's verification that bypasses the need for
any other verification.
.. pii: The User's name is stored in the parent model
.. pii_types: name
.. pii_retirement: retained
"""
reason = models.CharField(
max_length=255,
blank=True,
help_text=(
'Specifies the reason for manual verification of the user.'
)
)
class Meta(object):
app_label = 'verify_student'
def __unicode__(self):
return 'ManualIDVerification for {name}, status: {status}'.format(
name=self.name,
status=self.status,
)
def should_display_status_to_user(self):
"""
Whether or not the status should be displayed to the user.
"""
return False
class SSOVerification(IDVerificationAttempt):
"""
Each SSOVerification represents a Student's attempt to establish their identity
by signing in with SSO. ID verification through SSO bypasses the need for
photo verification.
.. no_pii:
"""
OAUTH2 = 'third_party_auth.models.OAuth2ProviderConfig'
SAML = 'third_party_auth.models.SAMLProviderConfig'
LTI = 'third_party_auth.models.LTIProviderConfig'
IDENTITY_PROVIDER_TYPE_CHOICES = (
(OAUTH2, 'OAuth2 Provider'),
(SAML, 'SAML Provider'),
(LTI, 'LTI Provider'),
)
identity_provider_type = models.CharField(
max_length=100,
blank=False,
choices=IDENTITY_PROVIDER_TYPE_CHOICES,
default=SAML,
help_text=(
'Specifies which type of Identity Provider this verification originated from.'
)
)
identity_provider_slug = models.SlugField(
max_length=30, db_index=True, default='default',
help_text=(
'The slug uniquely identifying the Identity Provider this verification originated from.'
))
class Meta(object):
app_label = "verify_student"
def __unicode__(self):
return 'SSOIDVerification for {name}, status: {status}'.format(
name=self.name,
status=self.status,
)
def should_display_status_to_user(self):
"""Whether or not the status from this attempt should be displayed to the user."""
return False
class PhotoVerification(IDVerificationAttempt):
"""
Each PhotoVerification represents a Student's attempt to establish
their identity by uploading a photo of themselves and a picture ID. An
attempt actually has a number of fields that need to be filled out at
different steps of the approval process. While it's useful as a Django Model
for the querying facilities, **you should only edit a `PhotoVerification`
object through the methods provided**. Initialize them with a user:
attempt = PhotoVerification(user=user)
We track this attempt through various states:
`created`
Initial creation and state we're in after uploading the images.
`ready`
The user has uploaded their images and checked that they can read the
images. There's a separate state here because it may be the case that we
don't actually submit this attempt for review until payment is made.
`submitted`
Submitted for review. The review may be done by a staff member or an
external service. The user cannot make changes once in this state.
`must_retry`
We submitted this, but there was an error on submission (i.e. we did not
get a 200 when we POSTed to Software Secure)
`approved`
An admin or an external service has confirmed that the user's photo and
photo ID match up, and that the photo ID's name matches the user's.
`denied`
The request has been denied. See `error_msg` for details on why. An
admin might later override this and change to `approved`, but the
student cannot re-open this attempt -- they have to create another
attempt and submit it instead.
Because this Model inherits from IDVerificationAttempt, which inherits
from StatusModel, we can also do things like:
attempt.status == PhotoVerification.STATUS.created
attempt.status == "created"
pending_requests = PhotoVerification.submitted.all()
.. pii: The User's name is stored in the parent model, this one stores links to face and photo ID images
.. pii_types: name, image
.. pii_retirement: retained
"""
######################## Fields Set During Creation ########################
# See class docstring for description of status states
# Where we place the uploaded image files (e.g. S3 URLs)
face_image_url = models.URLField(blank=True, max_length=255)
photo_id_image_url = models.URLField(blank=True, max_length=255)
# Randomly generated UUID so that external services can post back the
# results of checking a user's photo submission without use exposing actual
# user IDs or something too easily guessable.
receipt_id = models.CharField(
db_index=True,
default=generateUUID,
max_length=255,
)
# Indicates whether or not a user wants to see the verification status
# displayed on their dash. Right now, only relevant for allowing students
# to "dismiss" a failed midcourse reverification message
# TODO: This field is deprecated.
display = models.BooleanField(db_index=True, default=True)
######################## Fields Set When Submitting ########################
submitted_at = models.DateTimeField(null=True, db_index=True)
#################### Fields Set During Approval/Denial #####################
# If the review was done by an internal staff member, mark who it was.
reviewing_user = models.ForeignKey(
User,
db_index=True,
default=None,
null=True,
related_name="photo_verifications_reviewed",
on_delete=models.CASCADE,
)
# Mark the name of the service used to evaluate this attempt (e.g
# Software Secure).
reviewing_service = models.CharField(blank=True, max_length=255)
# If status is "denied", this should contain text explaining why.
error_msg = models.TextField(blank=True)
# Non-required field. External services can add any arbitrary codes as time
# goes on. We don't try to define an exhaustive list -- this is just
# capturing it so that we can later query for the common problems.
error_code = models.CharField(blank=True, max_length=50)
class Meta(object):
app_label = "verify_student"
abstract = True
ordering = ['-created_at']
def parsed_error_msg(self):
"""
Sometimes, the error message we've received needs to be parsed into
something more human readable
The default behavior is to return the current error message as is.
"""
return self.error_msg
@status_before_must_be("created")
def upload_face_image(self, img):
raise NotImplementedError
@status_before_must_be("created")
def upload_photo_id_image(self, img):
raise NotImplementedError
@status_before_must_be("created")
def mark_ready(self):
"""
Mark that the user data in this attempt is correct. In order to
succeed, the user must have uploaded the necessary images
(`face_image_url`, `photo_id_image_url`). This method will also copy
their name from their user profile. Prior to marking it ready, we read
this value directly from their profile, since they're free to change it.
This often happens because people put in less formal versions of their
name on signup, but realize they want something different to go on a
formal document.
Valid attempt statuses when calling this method:
`created`
Status after method completes: `ready`
Other fields that will be set by this method:
`name`
State Transitions:
`created` → `ready`
This is what happens when the user confirms to us that the pictures
they uploaded are good. Note that we don't actually do a submission
anywhere yet.
"""
# At any point prior to this, they can change their names via their
# student dashboard. But at this point, we lock the value into the
# attempt.
self.name = self.user.profile.name
self.status = "ready"
self.save()
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def approve(self, user_id=None, service=""):
"""
Approve this attempt. `user_id`
Valid attempt statuses when calling this method:
`submitted`, `approved`, `denied`
Status after method completes: `approved`
Other fields that will be set by this method:
`reviewed_by_user_id`, `reviewed_by_service`, `error_msg`
State Transitions:
`submitted` → `approved`
This is the usual flow, whether initiated by a staff user or an
external validation service.
`approved` → `approved`
No-op. First one to approve it wins.
`denied` → `approved`
This might happen if a staff member wants to override a decision
made by an external service or another staff member (say, in
response to a support request). In this case, the previous values
of `reviewed_by_user_id` and `reviewed_by_service` will be changed
to whoever is doing the approving, and `error_msg` will be reset.
The only record that this record was ever denied would be in our
logs. This should be a relatively rare occurrence.
"""
# If someone approves an outdated version of this, the first one wins
if self.status == "approved":
return
log.info(u"Verification for user '{user_id}' approved by '{reviewer}'.".format(
user_id=self.user, reviewer=user_id
))
self.error_msg = "" # reset, in case this attempt was denied before
self.error_code = "" # reset, in case this attempt was denied before
self.reviewing_user = user_id
self.reviewing_service = service
self.status = "approved"
self.save()
# Emit signal to find and generate eligible certificates
LEARNER_NOW_VERIFIED.send_robust(
sender=PhotoVerification,
user=self.user
)
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def deny(self,
error_msg,
error_code="",
reviewing_user=None,
reviewing_service=""):
"""
Deny this attempt.
Valid attempt statuses when calling this method:
`submitted`, `approved`, `denied`
Status after method completes: `denied`
Other fields that will be set by this method:
`reviewed_by_user_id`, `reviewed_by_service`, `error_msg`,
`error_code`
State Transitions:
`submitted` → `denied`
This is the usual flow, whether initiated by a staff user or an
external validation service.
`approved` → `denied`
This might happen if a staff member wants to override a decision
made by an external service or another staff member, or just correct
a mistake made during the approval process. In this case, the
previous values of `reviewed_by_user_id` and `reviewed_by_service`
will be changed to whoever is doing the denying. The only record
that this record was ever approved would be in our logs. This should
be a relatively rare occurrence.
`denied` → `denied`
Update the error message and reviewing_user/reviewing_service. Just
lets you amend the error message in case there were additional
details to be made.
"""
log.info(u"Verification for user '{user_id}' denied by '{reviewer}'.".format(
user_id=self.user, reviewer=reviewing_user
))
self.error_msg = error_msg
self.error_code = error_code
self.reviewing_user = reviewing_user
self.reviewing_service = reviewing_service
self.status = "denied"
self.save()
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def system_error(self,
error_msg,
error_code="",
reviewing_user=None,
reviewing_service=""):
"""
Mark that this attempt could not be completed because of a system error.
Status should be moved to `must_retry`. For example, if Software Secure
reported to us that they couldn't process our submission because they
couldn't decrypt the image we sent.
"""
if self.status in ["approved", "denied"]:
return # If we were already approved or denied, just leave it.
self.error_msg = error_msg
self.error_code = error_code
self.reviewing_user = reviewing_user
self.reviewing_service = reviewing_service
self.status = "must_retry"
self.save()
@classmethod
def retire_user(cls, user_id):
"""
Retire user as part of GDPR Phase I
Returns 'True' if records found
:param user_id: int
:return: bool
"""
try:
user_obj = User.objects.get(id=user_id)
except User.DoesNotExist:
return False
photo_objects = cls.objects.filter(
user=user_obj
).update(
name='',
face_image_url='',
photo_id_image_url='',
photo_id_key=''
)
return photo_objects > 0
class SoftwareSecurePhotoVerification(PhotoVerification):
"""
Model to verify identity using a service provided by Software Secure. Much
of the logic is inherited from `PhotoVerification`, but this class
encrypts the photos.
Software Secure (http://www.softwaresecure.com/) is a remote proctoring
service that also does identity verification. A student uses their webcam
to upload two images: one of their face, one of a photo ID. Due to the
sensitive nature of the data, the following security precautions are taken:
1. The snapshot of their face is encrypted using AES-256 in CBC mode. All
face photos are encrypted with the same key, and this key is known to
both Software Secure and edx-platform.
2. The snapshot of a user's photo ID is also encrypted using AES-256, but
the key is randomly generated using os.urandom. Every verification
attempt has a new key. The AES key is then encrypted using a public key
provided by Software Secure. We store only the RSA-encrypted AES key.
Since edx-platform does not have Software Secure's private RSA key, it
means that we can no longer even read photo ID.
3. The encrypted photos are base64 encoded and stored in an S3 bucket that
edx-platform does not have read access to.
Note: this model handles *initial* verifications (which you must perform
at the time you register for a verified cert).
.. pii: The User's name is stored in the parent model, this one stores links to face and photo ID images
.. pii_types: name, image
.. pii_retirement: retained
"""
# This is a base64.urlsafe_encode(rsa_encrypt(photo_id_aes_key), ss_pub_key)
# So first we generate a random AES-256 key to encrypt our photo ID with.
# Then we RSA encrypt it with Software Secure's public key. Then we base64
# encode that. The result is saved here. Actual expected length is 344.
photo_id_key = models.TextField(max_length=1024)
IMAGE_LINK_DURATION = 5 * 60 * 60 * 24 # 5 days in seconds
copy_id_photo_from = models.ForeignKey("self", null=True, blank=True, on_delete=models.CASCADE)
# Fields for functionality of sending email when verification expires
# expiry_date: The date when the SoftwareSecurePhotoVerification will expire
# expiry_email_date: This field is used to maintain a check for learners to which email
# to notify for expired verification is already sent.
expiry_date = models.DateTimeField(null=True, blank=True, db_index=True)
expiry_email_date = models.DateTimeField(null=True, blank=True, db_index=True)
@status_before_must_be("must_retry", "submitted", "approved", "denied")
def approve(self, user_id=None, service=""):
"""
Approve the verification attempt for user
Valid attempt statuses when calling this method:
`submitted`, `approved`, `denied`
After method completes:
status is set to `approved`
expiry_date is set to one year from now
"""
self.expiry_date = now() + timedelta(
days=settings.VERIFY_STUDENT["DAYS_GOOD_FOR"]
)
super(SoftwareSecurePhotoVerification, self).approve(user_id, service)
@classmethod
def get_initial_verification(cls, user, earliest_allowed_date=None):
"""Get initial verification for a user with the 'photo_id_key'.
Arguments:
user(User): user object
earliest_allowed_date(datetime): override expiration date for initial verification
Return:
SoftwareSecurePhotoVerification (object) or None
"""
init_verification = cls.objects.filter(
user=user,
status__in=["submitted", "approved"],
created_at__gte=(
earliest_allowed_date or earliest_allowed_verification_date()
)
).exclude(photo_id_key='')
return init_verification.latest('created_at') if init_verification.exists() else None
@status_before_must_be("created")
def upload_face_image(self, img_data):
"""
Upload an image of the user's face. `img_data` should be a raw
bytestream of a PNG image. This method will take the data, encrypt it
using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to the
storage backend.
Yes, encoding it to base64 adds compute and disk usage without much real
benefit, but that's what the other end of this API is expecting to get.
"""
# Skip this whole thing if we're running acceptance tests or if we're
# developing and aren't interested in working on student identity
# verification functionality. If you do want to work on it, you have to
# explicitly enable these in your private settings.
if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
return
aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
aes_key = aes_key_str.decode("hex")
path = self._get_path("face")
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
self._storage.save(path, buff)
@status_before_must_be("created")
def upload_photo_id_image(self, img_data):
"""
Upload an the user's photo ID image. `img_data` should be a raw
bytestream of a PNG image. This method will take the data, encrypt it
using a randomly generated AES key, encode it with base64 and save it
to the storage backend. The random key is also encrypted using Software
Secure's public RSA key and stored in our `photo_id_key` field.
Yes, encoding it to base64 adds compute and disk usage without much real
benefit, but that's what the other end of this API is expecting to get.
"""
# Skip this whole thing if we're running acceptance tests or if we're
# developing and aren't interested in working on student identity
# verification functionality. If you do want to work on it, you have to
# explicitly enable these in your private settings.
if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
# fake photo id key is set only for initial verification
self.photo_id_key = 'fake-photo-id-key'
self.save()
return
aes_key = random_aes_key()
rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
rsa_encrypted_aes_key = rsa_encrypt(aes_key, rsa_key_str)
# Save this to the storage backend
path = self._get_path("photo_id")
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
self._storage.save(path, buff)
# Update our record fields
self.photo_id_key = rsa_encrypted_aes_key.encode('base64')
self.save()
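# The two upload methods above implement a simple envelope-encryption scheme.
# Rough sketch of the flow (descriptive names, except where they match the
# helpers actually used above):
#
#   aes_key = random_aes_key()                          # fresh key per attempt
#   ciphertext = encrypt_and_encode(img_data, aes_key)  # AES, then base64
#   wrapped_key = rsa_encrypt(aes_key, rsa_public_key)  # Software Secure's key
#   self._storage.save(path, ContentFile(ciphertext))
#   self.photo_id_key = wrapped_key.encode('base64')
#
# Only Software Secure, holding the matching RSA private key, can unwrap the
# AES key and decrypt the photo ID image.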
@status_before_must_be("must_retry", "ready", "submitted")
def submit(self, copy_id_photo_from=None):
"""
Submit our verification attempt to Software Secure for validation. This
will set our status to "submitted" if the post is successful, and
"must_retry" if the post fails.
Keyword Arguments:
copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
data from this attempt. This is used for reverification, in which new face photos
are sent with previously-submitted ID photos.
"""
try:
response = self.send_request(copy_id_photo_from=copy_id_photo_from)
if response.ok:
self.submitted_at = now()
self.status = "submitted"
self.save()
else:
self.status = "must_retry"
self.error_msg = response.text
self.save()
except Exception: # pylint: disable=broad-except
log.exception(
u'Software Secure submission failed for user %s, setting status to must_retry',
self.user.username
)
self.status = "must_retry"
self.save()
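# Retry sketch (illustrative, not from this file): attempts left in
# "must_retry" after a failure can simply be re-submitted later, e.g.
#
#   for attempt in SoftwareSecurePhotoVerification.objects.filter(status="must_retry"):
#       attempt.submit()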
def parsed_error_msg(self):
"""
Parse the error messages we receive from SoftwareSecure
Error messages are written in the form:
`[{"photoIdReasons": ["Not provided"]}]`
Returns:
str[]: List of error messages.
"""
parsed_errors = []
error_map = {
'EdX name not provided': 'name_mismatch',
'Name mismatch': 'name_mismatch',
'Photo/ID Photo mismatch': 'photos_mismatched',
'ID name not provided': 'id_image_missing_name',
'Invalid Id': 'id_invalid',
'No text': 'id_invalid',
'Not provided': 'id_image_missing',
'Photo hidden/No photo': 'id_image_not_clear',
'Text not clear': 'id_image_not_clear',
'Face out of view': 'user_image_not_clear',
'Image not clear': 'user_image_not_clear',
'Photo not provided': 'user_image_missing',
}
try:
messages = set()
message_groups = json.loads(self.error_msg)
for message_group in message_groups:
messages = messages.union(set(*six.itervalues(message_group)))
for message in messages:
parsed_error = error_map.get(message)
if parsed_error:
parsed_errors.append(parsed_error)
else:
log.debug(u'Ignoring photo verification error message: %s', message)
except Exception: # pylint: disable=broad-except
log.exception(u'Failed to parse error message for SoftwareSecurePhotoVerification %d', self.pk)
return parsed_errors
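# Worked example (hypothetical input): if Software Secure returned
#   error_msg = '[{"photoIdReasons": ["Not provided"]}]'
# then json.loads() yields one message group, the message set becomes
# {"Not provided"}, and this method returns ['id_image_missing'].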
def image_url(self, name, override_receipt_id=None):
"""
We dynamically generate this, since we want the expiration clock to
start when the message is created, not when the record is created.
Arguments:
name (str): Name of the image (e.g. "photo_id" or "face")
Keyword Arguments:
override_receipt_id (str): If provided, use this receipt ID instead
of the ID for this attempt. This is useful for reverification
where we need to construct a URL to a previously-submitted
photo ID image.
Returns:
string: The expiring URL for the image.
"""
path = self._get_path(name, override_receipt_id=override_receipt_id)
return self._storage.url(path)
@cached_property
def _storage(self):
"""
Return the configured django storage backend.
"""
config = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]
# Default to the S3 backend for backward compatibility
storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto.S3BotoStorage")
storage_kwargs = config.get("STORAGE_KWARGS", {})
# Map old settings to the parameters expected by the storage backend
if "AWS_ACCESS_KEY" in config:
storage_kwargs["access_key"] = config["AWS_ACCESS_KEY"]
if "AWS_SECRET_KEY" in config:
storage_kwargs["secret_key"] = config["AWS_SECRET_KEY"]
if "S3_BUCKET" in config:
storage_kwargs["bucket"] = config["S3_BUCKET"]
storage_kwargs["querystring_expire"] = self.IMAGE_LINK_DURATION
return get_storage(storage_class, **storage_kwargs)
def _get_path(self, prefix, override_receipt_id=None):
"""
Returns the path to a resource with this instance's `receipt_id`.
If `override_receipt_id` is given, the path to that resource will be
retrieved instead. This allows us to retrieve images submitted in
previous attempts (used for reverification, where we send a new face
photo with the same photo ID from a previous attempt).
"""
receipt_id = self.receipt_id if override_receipt_id is None else override_receipt_id
return os.path.join(prefix, receipt_id)
def _encrypted_user_photo_key_str(self):
"""
Software Secure needs to have both UserPhoto and PhotoID decrypted in
the same manner. So even though this is going to be the same for every
request, we're also using RSA encryption to encrypt the AES key for
faces.
"""
face_aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
face_aes_key = face_aes_key_str.decode("hex")
rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
rsa_encrypted_face_aes_key = rsa_encrypt(face_aes_key, rsa_key_str)
return rsa_encrypted_face_aes_key.encode("base64")
def create_request(self, copy_id_photo_from=None):
"""
Construct the HTTP request to the photo verification service.
Keyword Arguments:
copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
data from this attempt. This is used for reverification, in which new face photos
are sent with previously-submitted ID photos.
Returns:
tuple of (header, body), where both `header` and `body` are dictionaries.
"""
access_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]
secret_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_SECRET_KEY"]
scheme = "https" if settings.HTTPS == "on" else "http"
callback_url = "{}://{}{}".format(
scheme, settings.SITE_NAME, reverse('verify_student_results_callback')
)
# If we're copying the photo ID image from a previous verification attempt,
# then we need to send the old image data with the correct image key.
photo_id_url = (
self.image_url("photo_id")
if copy_id_photo_from is None
else self.image_url("photo_id", override_receipt_id=copy_id_photo_from.receipt_id)
)
photo_id_key = (
self.photo_id_key
if copy_id_photo_from is None else
copy_id_photo_from.photo_id_key
)
body = {
"EdX-ID": str(self.receipt_id),
"ExpectedName": self.name,
"PhotoID": photo_id_url,
"PhotoIDKey": photo_id_key,
"SendResponseTo": callback_url,
"UserPhoto": self.image_url("face"),
"UserPhotoKey": self._encrypted_user_photo_key_str(),
}
headers = {
"Content-Type": "application/json",
"Date": formatdate(timeval=None, localtime=False, usegmt=True)
}
_message, _sig, authorization = generate_signed_message(
"POST", headers, body, access_key, secret_key
)
headers['Authorization'] = authorization
return headers, body
def request_message_txt(self):
"""
This is the body of the request we send across. This is never actually
used in the code, but exists for debugging purposes -- you can call
`print attempt.request_message_txt()` on the console and get a readable
rendering of the request that would be sent across, without actually
sending anything.
"""
headers, body = self.create_request()
header_txt = "\n".join(
u"{}: {}".format(h, v) for h, v in sorted(headers.items())
)
body_txt = json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8')
return header_txt + "\n\n" + body_txt
def send_request(self, copy_id_photo_from=None):
"""
Assembles a submission to Software Secure and sends it via HTTPS.
Keyword Arguments:
copy_id_photo_from (SoftwareSecurePhotoVerification): If provided, re-send the ID photo
data from this attempt. This is used for reverification, in which new face photos
are sent with previously-submitted ID photos.
Returns:
request.Response
"""
# If AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING is True, we want to
# skip posting anything to Software Secure. We actually don't even
# create the message because that would require encryption and message
# signing that rely on settings.VERIFY_STUDENT values that aren't set
# in dev. So we just pretend like we successfully posted
if settings.FEATURES.get('AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'):
fake_response = requests.Response()
fake_response.status_code = 200
return fake_response
headers, body = self.create_request(copy_id_photo_from=copy_id_photo_from)
response = requests.post(
settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_URL"],
headers=headers,
data=json.dumps(body, indent=2, sort_keys=True, ensure_ascii=False).encode('utf-8'),
verify=False
)
log.info(u"Sent request to Software Secure for receipt ID %s.", self.receipt_id)
if copy_id_photo_from is not None:
log.info(
(
u"Software Secure attempt with receipt ID %s used the same photo ID "
u"data as the receipt with ID %s"
),
self.receipt_id, copy_id_photo_from.receipt_id
)
log.debug("Headers:\n{}\n\n".format(headers))
log.debug("Body:\n{}\n\n".format(body))
log.debug(u"Return code: {}".format(response.status_code))
log.debug(u"Return message:\n\n{}\n\n".format(response.text))
return response
def should_display_status_to_user(self):
"""Whether or not the status from this attempt should be displayed to the user."""
return True
class VerificationDeadline(TimeStampedModel):
"""
Represent a verification deadline for a particular course.
The verification deadline is the datetime after which
users are no longer allowed to submit photos for initial verification
in a course.
Note that this is NOT the same as the "upgrade" deadline, after
which a user is no longer allowed to upgrade to a verified enrollment.
If no verification deadline record exists for a course,
then that course does not have a deadline. This means that users
can submit photos at any time.
.. no_pii:
"""
class Meta(object):
app_label = "verify_student"
course_key = CourseKeyField(
max_length=255,
db_index=True,
unique=True,
help_text=ugettext_lazy(u"The course for which this deadline applies"),
)
deadline = models.DateTimeField(
help_text=ugettext_lazy(
u"The datetime after which users are no longer allowed "
"to submit photos for verification."
)
)
# The system prefers to set this automatically based on default settings. But
# if the field is set manually we want a way to indicate that so we don't
# overwrite the manual setting of the field.
deadline_is_explicit = models.BooleanField(default=False)
ALL_DEADLINES_CACHE_KEY = "verify_student.all_verification_deadlines"
@classmethod
def set_deadline(cls, course_key, deadline, is_explicit=False):
"""
Configure the verification deadline for a course.
If `deadline` is `None`, then the course will have no verification
deadline. In this case, users will be able to verify for the course
at any time.
Arguments:
course_key (CourseKey): Identifier for the course.
deadline (datetime or None): The verification deadline.
"""
if deadline is None:
VerificationDeadline.objects.filter(course_key=course_key).delete()
else:
record, created = VerificationDeadline.objects.get_or_create(
course_key=course_key,
defaults={"deadline": deadline, "deadline_is_explicit": is_explicit}
)
if not created:
record.deadline = deadline
record.deadline_is_explicit = is_explicit
record.save()
@classmethod
def deadlines_for_courses(cls, course_keys):
"""
Retrieve verification deadlines for particular courses.
Arguments:
course_keys (list): List of `CourseKey`s.
Returns:
dict: Map of course keys to datetimes (verification deadlines)
"""
all_deadlines = cache.get(cls.ALL_DEADLINES_CACHE_KEY)
if all_deadlines is None:
all_deadlines = {
deadline.course_key: deadline.deadline
for deadline in VerificationDeadline.objects.all()
}
cache.set(cls.ALL_DEADLINES_CACHE_KEY, all_deadlines)
return {
course_key: all_deadlines[course_key]
for course_key in course_keys
if course_key in all_deadlines
}
@classmethod
def deadline_for_course(cls, course_key):
"""
Retrieve the verification deadline for a particular course.
Arguments:
course_key (CourseKey): The identifier for the course.
Returns:
datetime or None
"""
try:
deadline = cls.objects.get(course_key=course_key)
return deadline.deadline
except cls.DoesNotExist:
return None
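# Illustrative usage sketch (the datetime value is hypothetical):
#
#   VerificationDeadline.set_deadline(course_key, some_utc_datetime)
#   VerificationDeadline.deadlines_for_courses([course_key])  # warms the cache
#   VerificationDeadline.set_deadline(course_key, None)       # deletes the record
#
# The post_save/post_delete receivers below drop ALL_DEADLINES_CACHE_KEY, so
# the next deadlines_for_courses() call rebuilds the cache from the database.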
@receiver(models.signals.post_save, sender=VerificationDeadline)
@receiver(models.signals.post_delete, sender=VerificationDeadline)
def invalidate_deadline_caches(sender, **kwargs): # pylint: disable=unused-argument
"""Invalidate the cached verification deadline information. """
cache.delete(VerificationDeadline.ALL_DEADLINES_CACHE_KEY)<|fim▁end|> | from .utils import earliest_allowed_verification_date
|
<|file_name|>get_aggregates.rs<|end_file_name|><|fim▁begin|>use cli::arg_types::Interval;
use std::str::FromStr; // Use of #from_str.
use itertools::Itertools;
use api::client::{TellerClient, ApiServiceResult, Transaction};
use api::client::parse_utc_date_from_transaction;
use chrono::{Date, Datelike, UTC};
pub type Balances = HistoricalAmountsWithCurrency;
pub type Outgoings = HistoricalAmountsWithCurrency;
pub type Incomings = HistoricalAmountsWithCurrency;
pub type IntervalAmount = (String, String);
type DateStringToTransactions = (String, Vec<Transaction>);
#[derive(Debug)]
pub struct HistoricalAmountsWithCurrency {
pub historical_amounts: Vec<IntervalAmount>,
pub currency: String,
}
impl HistoricalAmountsWithCurrency {
pub fn new<S: Into<String>>(historical_amounts: Vec<IntervalAmount>,
currency: S)
-> HistoricalAmountsWithCurrency {
HistoricalAmountsWithCurrency {
historical_amounts: historical_amounts,
currency: currency.into(),
}
}
}
pub trait GetBalances {
fn get_balances(&self,
account_id: &str,
interval: &Interval,
from: &Date<UTC>,
to: &Date<UTC>) -> ApiServiceResult<Balances>;
}
pub trait GetOutgoings {<|fim▁hole|> to: &Date<UTC>) -> ApiServiceResult<Outgoings>;
}
pub trait GetIncomings {
fn get_incomings(&self,
account_id: &str,
interval: &Interval,
from: &Date<UTC>,
to: &Date<UTC>) -> ApiServiceResult<Incomings>;
}
fn to_grouped_transaction_aggregates(transactions: Vec<Transaction>,
from: &Date<UTC>,
to: &Date<UTC>,
interval: &Interval,
aggregate_txs: &Fn(DateStringToTransactions) -> (String, i64))
-> Vec<(String, i64)> {
let group_format = |date: Date<UTC>| -> String {
date.format("%m-%Y").to_string()
};
let get_txs_from_group = |current_date_str: &str, grouped_transactions: &Vec<DateStringToTransactions>| -> Vec<Transaction> {
match grouped_transactions.iter().find(|g| g.0 == current_date_str) {
Some(g) => g.1.clone(), // I had to make Transaction cloneable to do this...
None => vec![],
}
};
let month_year_grouped_transactions = transactions.into_iter().group_by(|t| {
let transaction_date = parse_utc_date_from_transaction(&t);
match *interval {
Interval::Monthly => {
let group_name = group_format(transaction_date);
group_name
}
}
}).collect();
let mut month_year_transactions: Vec<DateStringToTransactions> = vec![];
let mut current_date = from.clone();
let end_date = to.clone();
while current_date <= end_date {
let current_date_str = group_format(current_date);
let txs = get_txs_from_group(¤t_date_str, &month_year_grouped_transactions);
month_year_transactions.push((current_date_str, txs));
let next_date = if current_date.month() < 12 {
current_date.with_month(current_date.month() + 1).unwrap()
} else {
current_date.with_year(current_date.year() + 1).unwrap().with_month(1).unwrap()
};
current_date = next_date;
}
let mut month_year_aggregates = month_year_transactions.into_iter()
.map(aggregate_txs)
.collect::<Vec<(String, i64)>>();
month_year_aggregates.reverse();
month_year_aggregates
}
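// Worked example (hypothetical data): with from = 2015-01-01, to = 2015-03-31
// and Interval::Monthly, the while-loop above always emits the buckets
// "01-2015", "02-2015" and "03-2015", even for months with no transactions
// (those months get an empty Vec, so `aggregate_txs` still sees them and can
// report 0). The final reverse() orders the aggregates newest-first.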
impl<'a> GetBalances for TellerClient<'a> {
// The amount shown is for the beginning of a month before
// any transactions have come in or out.
//
// NOTE: Balances will not work correctly if based off a different month
// than the current account balance returned by get_accounts.
fn get_balances(&self,
account_id: &str,
interval: &Interval,
from: &Date<UTC>,
to: &Date<UTC>)
-> ApiServiceResult<Balances> {
let sum_all = |myt: (String, Vec<Transaction>)| {
let to_cent_integer = |t: &Transaction| {
(f64::from_str(&t.amount).unwrap() * 100f64).round() as i64
};
let group_name = myt.0;
let amount = myt.1.iter().map(to_cent_integer).fold(0i64, |sum, v| sum + v);
(group_name, amount)
};
let account = try!(self.get_account(&account_id));
let current_balance = (f64::from_str(&account.balance).unwrap() * 100f64).round() as i64;
let currency = account.currency;
let transactions = self.get_transactions(&account_id, &from, &to).unwrap_or(vec![]);
let month_year_total_transactions = to_grouped_transaction_aggregates(transactions,
&from,
&to,
&interval,
&sum_all);
let mut historical_amounts: Vec<IntervalAmount> = vec![];
historical_amounts.push(("current".to_string(),
format!("{:.2}", current_balance as f64 / 100f64)));
let mut last_balance = current_balance;
for mytt in month_year_total_transactions {
last_balance = last_balance - mytt.1;
historical_amounts.push((mytt.0.to_string(),
format!("{:.2}", last_balance as f64 / 100f64)));
}
historical_amounts.reverse();
Ok(HistoricalAmountsWithCurrency::new(historical_amounts, currency))
}
}
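// Back-computation sketch, using the numbers from the tests below: the
// account's current balance is 1000.00. December's transactions net to
// +50.00, so the balance at the start of December was 1000.00 - 50.00 =
// 950.00; November nets to -60.00, giving 950.00 - (-60.00) = 1010.00 at the
// start of November, and so on back to "01-2015".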
impl<'a> GetOutgoings for TellerClient<'a> {
fn get_outgoings(&self,
account_id: &str,
interval: &Interval,
from: &Date<UTC>,
to: &Date<UTC>)
-> ApiServiceResult<Outgoings> {
let sum_outgoings = |myt: (String, Vec<Transaction>)| {
let to_cent_integer = |t: &Transaction| {
(f64::from_str(&t.amount).unwrap() * 100f64).round() as i64
};
let group_name = myt.0;
let amount = myt.1
.iter()
.map(to_cent_integer)
.filter(|ci| *ci < 0)
.fold(0i64, |sum, v| sum + v);
(group_name, amount)
};
let account = try!(self.get_account(&account_id));
let currency = account.currency;
let transactions = self.get_transactions(&account_id, &from, &to).unwrap_or(vec![]);
let month_year_total_outgoing = to_grouped_transaction_aggregates(transactions,
&from,
&to,
&interval,
&sum_outgoings);
let from_cent_integer_to_float_string = |amount: i64| format!("{:.2}", amount as f64 / 100f64);
let mut historical_amounts: Vec<IntervalAmount> = vec![];
for mytt in month_year_total_outgoing {
historical_amounts.push((mytt.0.to_string(),
from_cent_integer_to_float_string(mytt.1.abs())));
}
historical_amounts.reverse();
Ok(HistoricalAmountsWithCurrency::new(historical_amounts, currency))
}
}
impl<'a> GetIncomings for TellerClient<'a> {
fn get_incomings(&self,
account_id: &str,
interval: &Interval,
from: &Date<UTC>,
to: &Date<UTC>)
-> ApiServiceResult<Incomings> {
let sum_incomings = |myt: (String, Vec<Transaction>)| {
let to_cent_integer = |t: &Transaction| {
(f64::from_str(&t.amount).unwrap() * 100f64).round() as i64
};
let group_name = myt.0;
let amount = myt.1
.iter()
.map(to_cent_integer)
.filter(|ci| *ci > 0)
.fold(0i64, |sum, v| sum + v);
(group_name, amount)
};
let account = try!(self.get_account(&account_id));
let currency = account.currency;
let transactions = self.get_transactions(&account_id, &from, &to).unwrap_or(vec![]);
let month_year_total_incoming = to_grouped_transaction_aggregates(transactions,
&from,
&to,
&interval,
&sum_incomings);
let from_cent_integer_to_float_string = |amount: i64| format!("{:.2}", amount as f64 / 100f64);
let mut historical_amounts: Vec<IntervalAmount> = vec![];
for mytt in month_year_total_incoming {
historical_amounts.push((mytt.0.to_string(),
from_cent_integer_to_float_string(mytt.1)));
}
historical_amounts.reverse();
Ok(HistoricalAmountsWithCurrency::new(historical_amounts, currency))
}
}
#[cfg(test)]
mod tests {
use cli::arg_types::Interval;
use api::client::{TellerClient, generate_utc_date_from_date_str};
use super::{GetBalances, GetOutgoings, GetIncomings};
use hyper;
mock_connector_in_order!(GetAccountFollowedByGetTransactions {
include_str!("../mocks/get-account.http")
include_str!("../mocks/get-transactions.http")
});
#[test]
fn can_get_balances() {
let c = hyper::client::Client::with_connector(GetAccountFollowedByGetTransactions::default());
let teller = TellerClient::new_with_hyper_client("fake-auth-token", c);
let from = generate_utc_date_from_date_str("2015-01-01");
let to = generate_utc_date_from_date_str("2015-12-31");
let agg = teller.get_balances("123", &Interval::Monthly, &from, &to).unwrap();
assert_eq!("GBP", agg.currency);
assert_eq!("01-2015", agg.historical_amounts[0].0);
assert_eq!("858.97", agg.historical_amounts[0].1);
assert_eq!("02-2015", agg.historical_amounts[1].0);
assert_eq!("835.00", agg.historical_amounts[1].1);
assert_eq!("03-2015", agg.historical_amounts[2].0);
assert_eq!("835.00", agg.historical_amounts[2].1);
assert_eq!("04-2015", agg.historical_amounts[3].0);
assert_eq!("835.00", agg.historical_amounts[3].1);
assert_eq!("05-2015", agg.historical_amounts[4].0);
assert_eq!("835.00", agg.historical_amounts[4].1);
assert_eq!("06-2015", agg.historical_amounts[5].0);
assert_eq!("810.00", agg.historical_amounts[5].1);
assert_eq!("07-2015", agg.historical_amounts[6].0);
assert_eq!("760.00", agg.historical_amounts[6].1);
assert_eq!("08-2015", agg.historical_amounts[7].0);
assert_eq!("910.00", agg.historical_amounts[7].1);
assert_eq!("09-2015", agg.historical_amounts[8].0);
assert_eq!("1010.00", agg.historical_amounts[8].1);
assert_eq!("10-2015", agg.historical_amounts[9].0);
assert_eq!("960.00", agg.historical_amounts[9].1);
assert_eq!("11-2015", agg.historical_amounts[10].0);
assert_eq!("1010.00", agg.historical_amounts[10].1);
assert_eq!("12-2015", agg.historical_amounts[11].0);
assert_eq!("950.00", agg.historical_amounts[11].1);
assert_eq!("current", agg.historical_amounts[12].0);
assert_eq!("1000.00", agg.historical_amounts[12].1);
}
#[test]
fn can_get_outgoings() {
let c = hyper::client::Client::with_connector(GetAccountFollowedByGetTransactions::default());
let teller = TellerClient::new_with_hyper_client("fake-auth-token", c);
let from = generate_utc_date_from_date_str("2015-01-01");
let to = generate_utc_date_from_date_str("2015-12-31");
let agg = teller.get_outgoings("123", &Interval::Monthly, &from, &to).unwrap();
assert_eq!("GBP", agg.currency);
assert_eq!("01-2015", agg.historical_amounts[0].0);
assert_eq!("23.97", agg.historical_amounts[0].1);
assert_eq!("02-2015", agg.historical_amounts[1].0);
assert_eq!("0.00", agg.historical_amounts[1].1);
assert_eq!("03-2015", agg.historical_amounts[2].0);
assert_eq!("0.00", agg.historical_amounts[2].1);
assert_eq!("04-2015", agg.historical_amounts[3].0);
assert_eq!("0.00", agg.historical_amounts[3].1);
assert_eq!("05-2015", agg.historical_amounts[4].0);
assert_eq!("25.00", agg.historical_amounts[4].1);
assert_eq!("06-2015", agg.historical_amounts[5].0);
assert_eq!("50.00", agg.historical_amounts[5].1);
assert_eq!("07-2015", agg.historical_amounts[6].0);
assert_eq!("0.00", agg.historical_amounts[6].1);
assert_eq!("08-2015", agg.historical_amounts[7].0);
assert_eq!("0.00", agg.historical_amounts[7].1);
assert_eq!("09-2015", agg.historical_amounts[8].0);
assert_eq!("50.00", agg.historical_amounts[8].1);
assert_eq!("10-2015", agg.historical_amounts[9].0);
assert_eq!("0.00", agg.historical_amounts[9].1);
assert_eq!("11-2015", agg.historical_amounts[10].0);
assert_eq!("60.00", agg.historical_amounts[10].1);
assert_eq!("12-2015", agg.historical_amounts[11].0);
assert_eq!("0.00", agg.historical_amounts[11].1);
}
#[test]
fn can_get_incomings() {
let c = hyper::client::Client::with_connector(GetAccountFollowedByGetTransactions::default());
let teller = TellerClient::new_with_hyper_client("fake-auth-token", c);
let from = generate_utc_date_from_date_str("2015-01-01");
let to = generate_utc_date_from_date_str("2015-12-31");
let agg = teller.get_incomings("123", &Interval::Monthly, &from, &to).unwrap();
assert_eq!("GBP", agg.currency);
assert_eq!("01-2015", agg.historical_amounts[0].0);
assert_eq!("0.00", agg.historical_amounts[0].1);
assert_eq!("02-2015", agg.historical_amounts[1].0);
assert_eq!("0.00", agg.historical_amounts[1].1);
assert_eq!("03-2015", agg.historical_amounts[2].0);
assert_eq!("0.00", agg.historical_amounts[2].1);
assert_eq!("04-2015", agg.historical_amounts[3].0);
assert_eq!("0.00", agg.historical_amounts[3].1);
assert_eq!("05-2015", agg.historical_amounts[4].0);
assert_eq!("0.00", agg.historical_amounts[4].1);
assert_eq!("06-2015", agg.historical_amounts[5].0);
assert_eq!("0.00", agg.historical_amounts[5].1);
assert_eq!("07-2015", agg.historical_amounts[6].0);
assert_eq!("150.00", agg.historical_amounts[6].1);
assert_eq!("08-2015", agg.historical_amounts[7].0);
assert_eq!("100.00", agg.historical_amounts[7].1);
assert_eq!("09-2015", agg.historical_amounts[8].0);
assert_eq!("0.00", agg.historical_amounts[8].1);
assert_eq!("10-2015", agg.historical_amounts[9].0);
assert_eq!("50.00", agg.historical_amounts[9].1);
assert_eq!("11-2015", agg.historical_amounts[10].0);
assert_eq!("0.00", agg.historical_amounts[10].1);
assert_eq!("12-2015", agg.historical_amounts[11].0);
assert_eq!("50.00", agg.historical_amounts[11].1);
}
}<|fim▁end|> | fn get_outgoings(&self,
account_id: &str,
interval: &Interval,
from: &Date<UTC>, |
<|file_name|>image_utils.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base classes and utilities for image datasets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import io
import os
import numpy as np
from tensor2tensor.data_generators import generator_utils
from tensor2tensor.data_generators import problem
from tensor2tensor.data_generators import text_encoder
from tensor2tensor.layers import common_layers<|fim▁hole|>from tensor2tensor.utils import metrics
from tensor2tensor.utils import registry
import tensorflow as tf
def matplotlib_pyplot():
import matplotlib # pylint: disable=g-import-not-at-top
matplotlib.use("agg")
import matplotlib.pyplot as plt # pylint: disable=g-import-not-at-top
return plt
def image_to_tf_summary_value(image, tag):
"""Converts a NumPy image to a tf.Summary.Value object.
Args:
image: 3-D NumPy array.
tag: name for tf.Summary.Value for display in tensorboard.
Returns:
image_summary: A tf.Summary.Value object.
"""
curr_image = np.asarray(image, dtype=np.uint8)
height, width, n_channels = curr_image.shape
s = io.BytesIO()
matplotlib_pyplot().imsave(s, curr_image, format="png")
img_sum = tf.Summary.Image(encoded_image_string=s.getvalue(),
height=height, width=width,
colorspace=n_channels)
return tf.Summary.Value(tag=tag, image=img_sum)
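# Minimal usage sketch (`writer` is a hypothetical tf.summary.FileWriter):
#
#   img = np.zeros((8, 8, 3), dtype=np.uint8)
#   value = image_to_tf_summary_value(img, tag="sample")
#   writer.add_summary(tf.Summary(value=[value]), global_step=0)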
def convert_predictions_to_image_summaries(hook_args):
"""Optionally converts images from hooks_args to image summaries.
Args:
hook_args: DecodeHookArgs namedtuple
Returns:
summaries: list of tf.Summary values if
hook_args.decode_hparams.display_decoded_images is True, else an empty list.
"""
decode_hparams = hook_args.decode_hparams
if not decode_hparams.display_decoded_images:
return []
predictions = hook_args.predictions[0]
# Display ten random inputs and outputs so that tensorboard does not hang.
all_summaries = []
rand_predictions = np.random.choice(predictions, size=10)
for ind, prediction in enumerate(rand_predictions):
output_summary = image_to_tf_summary_value(
prediction["outputs"], tag="%d_output" % ind)
input_summary = image_to_tf_summary_value(
prediction["inputs"], tag="%d_input" % ind)
all_summaries.append(input_summary)
all_summaries.append(output_summary)
return all_summaries
def resize_by_area(img, size):
"""image resize function used by quite a few image problems."""
return tf.to_int64(
tf.image.resize_images(img, [size, size], tf.image.ResizeMethod.AREA))
def make_multiscale(image, resolutions,
resize_method=tf.image.ResizeMethod.BICUBIC,
num_channels=3):
"""Returns list of scaled images, one for each resolution.
Args:
image: Tensor of shape [height, height, num_channels].
resolutions: List of heights that image's height is resized to.
resize_method: tf.image.ResizeMethod.
num_channels: Number of channels in image.
Returns:
List of Tensors, one for each resolution with shape given by
[resolutions[i], resolutions[i], num_channels].
"""
scaled_images = []
for height in resolutions:
scaled_image = tf.image.resize_images(
image,
size=[height, height], # assuming that height = width
method=resize_method)
scaled_image = tf.to_int64(scaled_image)
scaled_image.set_shape([height, height, num_channels])
scaled_images.append(scaled_image)
return scaled_images
def make_multiscale_dilated(image, resolutions, num_channels=3):
"""Returns list of scaled images, one for each resolution.
Resizes by skipping every nth pixel.
Args:
image: Tensor of shape [height, height, num_channels].
resolutions: List of heights that image's height is resized to. The function
assumes VALID padding, so the original image's height must be divisible
by each resolution's height to return the exact resolution size.
num_channels: Number of channels in image.
Returns:
List of Tensors, one for each resolution, with shape
[resolutions[i], resolutions[i], num_channels] if the resolutions evenly
divide the original image's height; otherwise the height and width are
whatever the strided skipping yields (ceil(height / dilation_rate)).
"""
image_height = common_layers.shape_list(image)[0]
scaled_images = []
for height in resolutions:
dilation_rate = image_height // height # assuming height = width
scaled_image = image[::dilation_rate, ::dilation_rate]
scaled_image = tf.to_int64(scaled_image)
scaled_image.set_shape([None, None, num_channels])
scaled_images.append(scaled_image)
return scaled_images
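# Worked example: for a 32x32x3 image and resolutions=[16, 8], the dilation
# rates are 32 // 16 = 2 and 32 // 8 = 4, so image[::2, ::2] is 16x16 and
# image[::4, ::4] is 8x8. If a resolution does not divide 32 evenly, the
# strided slice yields ceil(32 / rate) pixels per side instead.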
class ImageProblem(problem.Problem):
"""Base class for problems with images."""
@property
def num_channels(self):
"""Number of color channels."""
return 3
@property
def vocab_size(self):
"""Number of pixel values."""
return 256
def example_reading_spec(self):
data_fields = {
"image/encoded": tf.FixedLenFeature((), tf.string),
"image/format": tf.FixedLenFeature((), tf.string),
}
data_items_to_decoders = {
"inputs":
tf.contrib.slim.tfexample_decoder.Image(
image_key="image/encoded",
format_key="image/format",
channels=self.num_channels),
}
return data_fields, data_items_to_decoders
def preprocess_example(self, example, mode, hparams):
if not self._was_reversed:
example["inputs"] = tf.image.per_image_standardization(example["inputs"])
return example
def eval_metrics(self):
eval_metrics = [
metrics.Metrics.ACC, metrics.Metrics.ACC_TOP5,
metrics.Metrics.ACC_PER_SEQ, metrics.Metrics.NEG_LOG_PERPLEXITY
]
if self._was_reversed:
eval_metrics += [metrics.Metrics.IMAGE_SUMMARY]
return eval_metrics
@property
def decode_hooks(self):
return [convert_predictions_to_image_summaries]
class Image2ClassProblem(ImageProblem):
"""Base class for image classification problems."""
@property
def is_small(self):
raise NotImplementedError()
@property
def num_classes(self):
raise NotImplementedError()
@property
def train_shards(self):
raise NotImplementedError()
@property
def dev_shards(self):
return 1
@property
def class_labels(self):
return ["ID_%d" % i for i in range(self.num_classes)]
def feature_encoders(self, data_dir):
del data_dir
return {
"inputs": text_encoder.ImageEncoder(channels=self.num_channels),
"targets": text_encoder.ClassLabelEncoder(self.class_labels)
}
def generator(self, data_dir, tmp_dir, is_training):
raise NotImplementedError()
def example_reading_spec(self):
label_key = "image/class/label"
data_fields, data_items_to_decoders = (
super(Image2ClassProblem, self).example_reading_spec())
data_fields[label_key] = tf.FixedLenFeature((1,), tf.int64)
data_items_to_decoders[
"targets"] = tf.contrib.slim.tfexample_decoder.Tensor(label_key)
return data_fields, data_items_to_decoders
def hparams(self, defaults, unused_model_hparams):
p = defaults
p.input_modality = {"inputs": (registry.Modalities.IMAGE, 256)}
p.target_modality = (registry.Modalities.CLASS_LABEL, self.num_classes)
p.batch_size_multiplier = 4 if self.is_small else 256
p.loss_multiplier = 3.0 if self.is_small else 1.0
if self._was_reversed:
p.loss_multiplier = 1.0
p.input_space_id = problem.SpaceID.IMAGE
p.target_space_id = problem.SpaceID.IMAGE_LABEL
def generate_data(self, data_dir, tmp_dir, task_id=-1):
generator_utils.generate_dataset_and_shuffle(
self.generator(data_dir, tmp_dir, True),
self.training_filepaths(data_dir, self.train_shards, shuffled=False),
self.generator(data_dir, tmp_dir, False),
self.dev_filepaths(data_dir, self.dev_shards, shuffled=False))
def encode_images_as_png(images):
"""Yield images encoded as pngs."""
if tf.contrib.eager.in_eager_mode():
for image in images:
yield tf.image.encode_png(image).numpy()
else:
(height, width, channels) = images[0].shape
with tf.Graph().as_default():
image_t = tf.placeholder(dtype=tf.uint8, shape=(height, width, channels))
encoded_image_t = tf.image.encode_png(image_t)
with tf.Session() as sess:
for image in images:
enc_string = sess.run(encoded_image_t, feed_dict={image_t: image})
yield enc_string
def image_generator(images, labels):
"""Generator for images that takes image and labels lists and creates pngs.
Args:
images: list of images given as [width x height x channels] numpy arrays.
labels: list of ints, same length as images.
Yields:
A dictionary representing the images with the following fields:
* image/encoded: the string encoding the image as PNG,
* image/format: the string "png" representing image format,
* image/class/label: an integer representing the label,
* image/height: an integer representing the height,
* image/width: an integer representing the width.
Every field is actually a singleton list of the corresponding type.
Raises:
ValueError: if images is an empty list.
"""
if not images:
raise ValueError("Must provide some images for the generator.")
width, height, _ = images[0].shape
for (enc_image, label) in zip(encode_images_as_png(images), labels):
yield {
"image/encoded": [enc_image],
"image/format": ["png"],
"image/class/label": [int(label)],
"image/height": [height],
"image/width": [width]
}
class Image2TextProblem(ImageProblem):
"""Base class for image-to-text problems."""
@property
def is_character_level(self):
raise NotImplementedError()
@property
def vocab_problem(self):
raise NotImplementedError() # Not needed if self.is_character_level.
@property
def target_space_id(self):
raise NotImplementedError()
@property
def train_shards(self):
raise NotImplementedError()
@property
def dev_shards(self):
raise NotImplementedError()
def generator(self, data_dir, tmp_dir, is_training):
raise NotImplementedError()
def example_reading_spec(self):
label_key = "image/class/label"
data_fields, data_items_to_decoders = (
super(Image2TextProblem, self).example_reading_spec())
data_fields[label_key] = tf.VarLenFeature(tf.int64)
data_items_to_decoders[
"targets"] = tf.contrib.slim.tfexample_decoder.Tensor(label_key)
return data_fields, data_items_to_decoders
def feature_encoders(self, data_dir):
if self.is_character_level:
encoder = text_encoder.ByteTextEncoder()
else:
vocab_filename = os.path.join(
data_dir, self.vocab_problem.vocab_filename)
encoder = text_encoder.SubwordTextEncoder(vocab_filename)
input_encoder = text_encoder.ImageEncoder(channels=self.num_channels)
return {"inputs": input_encoder, "targets": encoder}
def hparams(self, defaults, unused_model_hparams):
p = defaults
p.input_modality = {"inputs": (registry.Modalities.IMAGE, 256)}
encoder = self._encoders["targets"]
p.target_modality = (registry.Modalities.SYMBOL, encoder.vocab_size)
p.batch_size_multiplier = 256
p.loss_multiplier = 1.0
p.input_space_id = problem.SpaceID.IMAGE
p.target_space_id = self.target_space_id
def generate_data(self, data_dir, tmp_dir, task_id=-1):
generator_utils.generate_dataset_and_shuffle(
self.generator(data_dir, tmp_dir, True),
self.training_filepaths(data_dir, self.train_shards, shuffled=False),
self.generator(data_dir, tmp_dir, False),
self.dev_filepaths(data_dir, self.dev_shards, shuffled=False))
def image_augmentation(images, do_colors=False, crop_size=None):
"""Image augmentation: cropping, flipping, and color transforms."""
if crop_size is None:
crop_size = [299, 299]
images = tf.random_crop(images, crop_size + [3])
images = tf.image.random_flip_left_right(images)
if do_colors: # More augmentation, but might be slow.
images = tf.image.random_brightness(images, max_delta=32. / 255.)
images = tf.image.random_saturation(images, lower=0.5, upper=1.5)
images = tf.image.random_hue(images, max_delta=0.2)
images = tf.image.random_contrast(images, lower=0.5, upper=1.5)
return images
def cifar_image_augmentation(images):
"""Image augmentation suitable for CIFAR-10/100.
As described in https://arxiv.org/pdf/1608.06993v3.pdf (page 5).
Args:
images: a Tensor.
Returns:
Tensor of the same shape as images.
"""
images = tf.image.resize_image_with_crop_or_pad(images, 40, 40)
images = tf.random_crop(images, [32, 32, 3])
images = tf.image.random_flip_left_right(images)
return images
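# Typical hookup (sketch): an image problem can apply this during training in
# preprocess_example, e.g.
#
#   if mode == tf.estimator.ModeKeys.TRAIN:
#     example["inputs"] = cifar_image_augmentation(example["inputs"])
#
# Padding to 40x40 followed by a random 32x32 crop shifts the image by up to
# +/-4 pixels in each direction before the random horizontal flip.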
def random_shift(image, wsr=0.1, hsr=0.1):
"""Apply random horizontal and vertical shift to images.
This is the default data-augmentation strategy used on CIFAR in Glow.
Args:
image: a 3-D Tensor
wsr: Width shift range, as a float fraction of the width.
hsr: Height shift range, as a float fraction of the width.
Returns:
images: images translated by the provided wsr and hsr.
"""
height, width, _ = common_layers.shape_list(image)
width_range, height_range = wsr*width, hsr*height
height_translations = tf.random_uniform((1,), -height_range, height_range)
width_translations = tf.random_uniform((1,), -width_range, width_range)
translations = tf.concat((height_translations, width_translations), axis=0)
return tf.contrib.image.translate(image, translations=translations)<|fim▁end|> | |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from communities.models import Community, SendToOption
from datetime import datetime, date, time
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from ocd.formfields import HTMLArea, OCSplitDateTime, OCCheckboxSelectMultiple
from users.models import OCUser, Membership
import floppyforms as forms
from haystack.forms import SearchForm, ModelSearchForm
class EditUpcomingMeetingForm(forms.ModelForm):
class Meta:
model = Community
fields = (
'upcoming_meeting_title',
'upcoming_meeting_location',
'upcoming_meeting_scheduled_at',
# 'voting_ends_at',
'upcoming_meeting_comments',
)
widgets = {
'upcoming_meeting_title': forms.TextInput,
'upcoming_meeting_scheduled_at': OCSplitDateTime,
'upcoming_meeting_location': forms.TextInput,
# 'voting_ends_at': OCSplitDateTime,
'upcoming_meeting_comments': HTMLArea,
}
def __init__(self, *args, **kwargs):
super(EditUpcomingMeetingForm, self).__init__(*args, **kwargs)
self.fields['upcoming_meeting_title'].label = _('Title')
self.fields['upcoming_meeting_scheduled_at'].label = _('Scheduled at')
self.fields['upcoming_meeting_location'].label = _('Location')
self.fields['upcoming_meeting_comments'].label = _('Background')
"""
removed this function as we don't include voting_end_time in the form any more.
# ----------------------------------------------------------------------------
def clean(self):
#prevent voting end time from illegal values (past time,
#time after meeting schedule)
try:
voting_ends_at = self.cleaned_data['voting_ends_at']
except KeyError:
voting_ends_at = None
try:
meeting_time = self.cleaned_data['upcoming_meeting_scheduled_at']
except KeyError:
meeting_time = None
if voting_ends_at:
if voting_ends_at <= timezone.now():
raise forms.ValidationError(_("End voting time cannot be set to the past"))
if meeting_time and voting_ends_at > meeting_time:
raise forms.ValidationError(_("End voting time cannot be set to after the meeting time"))
return self.cleaned_data
"""
def save(self):
c = super(EditUpcomingMeetingForm, self).save()
c.voting_ends_at = datetime.combine(date(2025, 1, 1), time(12, 0, 0))
c.save()
return c
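# Illustrative view-level usage (sketch; `request` and `community` come from
# the surrounding view code):
#
#   form = EditUpcomingMeetingForm(request.POST, instance=community)
#   if form.is_valid():
#       form.save()  # also pins voting_ends_at, as above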
class PublishUpcomingMeetingForm(forms.ModelForm):
send_to = forms.TypedChoiceField(label=_("Send to"), coerce=int,
choices=SendToOption.choices,
widget=forms.RadioSelect)
class Meta:
model = Community
fields = ()
class EditUpcomingMeetingSummaryForm(forms.ModelForm):
class Meta:
model = Community
fields = (
'upcoming_meeting_summary',
)
widgets = {
'upcoming_meeting_summary': HTMLArea,
}
class UpcomingMeetingParticipantsForm(forms.ModelForm):
board = forms.MultipleChoiceField(widget=OCCheckboxSelectMultiple, required=False)
class Meta:
model = Community
fields = (
'upcoming_meeting_participants',
'upcoming_meeting_guests',
)
<|fim▁hole|> 'upcoming_meeting_participants': OCCheckboxSelectMultiple,
'upcoming_meeting_guests': forms.Textarea,
}
def __init__(self, *args, **kwargs):
super(UpcomingMeetingParticipantsForm, self).__init__(*args, **kwargs)
participants = self.instance.upcoming_meeting_participants.values_list(
'id', flat=True)
board_in = []
board_choices = []
for b in self.instance.get_board_members():
board_choices.append((b.id, b.display_name,))
if b.id in participants:
board_in.append(b.id)
self.fields['board'].choices = board_choices
self.initial['board'] = board_in
self.fields['upcoming_meeting_participants'].queryset = self.instance.get_members()
self.fields['upcoming_meeting_participants'].label = ""
class CommunitySearchForm(ModelSearchForm):
pass
# def search(self):
# # First, store the SearchQuerySet received from other processing.
# sqs = super(DateRangeSearchForm, self).search()
#
# if not self.is_valid():
# return self.no_query_found()
#
# return sqs<|fim▁end|> | widgets = { |
<|file_name|>0035_siteconfiguration_base_cookie_domain.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0034_auto_20170613_2039'),
]
operations = [
migrations.AddField(
model_name='siteconfiguration',
name='base_cookie_domain',
field=models.CharField(blank=True, default=b'', help_text='Base cookie domain used to share cookies across services.', max_length=255, verbose_name='Base Cookie Domain'),
),
]<|fim▁end|> | # Generated by Django 1.10.7 on 2017-06-15 06:37 |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use num::{ToPrimitive, FromPrimitive};
use std::ptr;
use EventPump;
use rect::Rect;
use video::Window;
use sys::keyboard as ll;
mod keycode;
mod scancode;
pub use self::keycode::Keycode;
pub use self::scancode::Scancode;
bitflags! {
flags Mod: u32 {
const NOMOD = 0x0000,
const LSHIFTMOD = 0x0001,
const RSHIFTMOD = 0x0002,
const LCTRLMOD = 0x0040,
const RCTRLMOD = 0x0080,
const LALTMOD = 0x0100,
const RALTMOD = 0x0200,<|fim▁hole|> const RGUIMOD = 0x0800,
const NUMMOD = 0x1000,
const CAPSMOD = 0x2000,
const MODEMOD = 0x4000,
const RESERVEDMOD = 0x8000
}
}
pub struct KeyboardState<'a> {
keyboard_state: &'a [u8]
}
impl<'a> KeyboardState<'a> {
pub fn new(_e: &'a EventPump) -> KeyboardState<'a> {
let keyboard_state = unsafe {
let mut count = 0;
let state_ptr = ll::SDL_GetKeyboardState(&mut count);
::std::slice::from_raw_parts(state_ptr, count as usize)
};
KeyboardState {
keyboard_state: keyboard_state
}
}
/// Returns true if the scancode is pressed.
///
/// # Example
/// ```no_run
/// use sdl2::keyboard::Scancode;
///
/// fn is_a_pressed(e: &sdl2::EventPump) -> bool {
/// e.keyboard_state().is_scancode_pressed(Scancode::A)
/// }
/// ```
pub fn is_scancode_pressed(&self, scancode: Scancode) -> bool {
self.keyboard_state[ToPrimitive::to_isize(&scancode).unwrap() as usize] != 0
}
/// Returns an iterator all scancodes with a boolean indicating if the scancode is pressed.
pub fn scancodes(&self) -> ScancodeIterator {
ScancodeIterator {
index: 0,
keyboard_state: self.keyboard_state
}
}
/// Returns an iterator of pressed scancodes.
///
/// # Example
/// ```no_run
/// use sdl2::keyboard::Keycode;
/// use sdl2::keyboard::Scancode;
/// use std::collections::HashSet;
///
/// fn pressed_scancode_set(e: &sdl2::EventPump) -> HashSet<Scancode> {
/// e.keyboard_state().pressed_scancodes().collect()
/// }
///
/// fn pressed_keycode_set(e: &sdl2::EventPump) -> HashSet<Keycode> {
/// e.keyboard_state().pressed_scancodes()
/// .filter_map(Keycode::from_scancode)
/// .collect()
/// }
///
/// fn newly_pressed(old: &HashSet<Scancode>, new: &HashSet<Scancode>) -> HashSet<Scancode> {
/// new - old
/// // sugar for: new.difference(old).collect()
/// }
/// ```
pub fn pressed_scancodes(&self) -> PressedScancodeIterator {
PressedScancodeIterator {
iter: self.scancodes()
}
}
}
pub struct ScancodeIterator<'a> {
index: usize,
keyboard_state: &'a [u8]
}
impl<'a> Iterator for ScancodeIterator<'a> {
type Item = (Scancode, bool);
fn next(&mut self) -> Option<(Scancode, bool)> {
if self.index < self.keyboard_state.len() {
let index = self.index;
self.index += 1;
if let Some(scancode) = FromPrimitive::from_usize(index) {
let pressed = self.keyboard_state[index] != 0;
Some((scancode, pressed))
} else {
self.next()
}
} else {
None
}
}
}
pub struct PressedScancodeIterator<'a> {
iter: ScancodeIterator<'a>
}
impl<'a> Iterator for PressedScancodeIterator<'a> {
type Item = Scancode;
fn next(&mut self) -> Option<Scancode> {
while let Some((scancode, pressed)) = self.iter.next() {
if pressed { return Some(scancode) }
}
None
}
}
impl ::Sdl {
#[inline]
pub fn keyboard(&self) -> KeyboardUtil {
KeyboardUtil {
_sdldrop: self.sdldrop()
}
}
}
impl ::VideoSubsystem {
#[inline]
pub fn text_input(&self) -> TextInputUtil {
TextInputUtil {
_subsystem: self.clone()
}
}
}
/// Keyboard utility functions. Access with `Sdl::keyboard()`.
///
/// ```no_run
/// let sdl_context = sdl2::init().unwrap();
///
/// let focused = sdl_context.keyboard().focused_window_id().is_some();
/// ```
pub struct KeyboardUtil {
_sdldrop: ::std::rc::Rc<::SdlDrop>
}
impl KeyboardUtil {
/// Gets the id of the window which currently has keyboard focus.
pub fn focused_window_id(&self) -> Option<u32> {
let raw = unsafe { ll::SDL_GetKeyboardFocus() };
if raw == ptr::null_mut() {
None
} else {
let id = unsafe { ::sys::video::SDL_GetWindowID(raw) };
Some(id)
}
}
pub fn mod_state(&self) -> Mod {
unsafe { Mod::from_bits(ll::SDL_GetModState()).unwrap() }
}
pub fn set_mod_state(&self, flags: Mod) {
unsafe { ll::SDL_SetModState(flags.bits()); }
}
}
/// Text input utility functions. Access with `VideoSubsystem::text_input()`.
///
/// These functions require the video subsystem to be initialized and are not thread-safe.
///
/// ```no_run
/// let sdl_context = sdl2::init().unwrap();
/// let video_subsystem = sdl_context.video().unwrap();
///
/// // Start accepting text input events...
/// video_subsystem.text_input().start();
/// ```
pub struct TextInputUtil {
_subsystem: ::VideoSubsystem
}
impl TextInputUtil {
pub fn start(&self) {
unsafe { ll::SDL_StartTextInput(); }
}
pub fn is_active(&self) -> bool {
unsafe { ll::SDL_IsTextInputActive() == 1 }
}
pub fn stop(&self) {
unsafe { ll::SDL_StopTextInput(); }
}
pub fn set_rect(&self, rect: &Rect) {
unsafe { ll::SDL_SetTextInputRect(rect.raw()); }
}
pub fn has_screen_keyboard_support(&self) -> bool {
unsafe { ll::SDL_HasScreenKeyboardSupport() == 1 }
}
pub fn is_screen_keyboard_shown(&self, window: &Window) -> bool {
unsafe { ll::SDL_IsScreenKeyboardShown(window.raw()) == 1 }
}
}<|fim▁end|> | const LGUIMOD = 0x0400, |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"net/http"
"github.com/golang/example/stringutil"
)<|fim▁hole|>
// Handler is the entry point for this fission function
func Handler(w http.ResponseWriter, r *http.Request) {
msg := stringutil.Reverse(stringutil.Reverse("Vendor Example Test"))
w.Write([]byte(msg))
}<|fim▁end|> | |
<|file_name|>associated-types-duplicate-binding-in-env.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(dead_code)]
// Check that we do not report ambiguities when the same predicate
// appears in the environment twice. Issue #21965.
// pretty-expanded FIXME #23616
trait Foo {
type B;
fn get() -> Self::B;
}
<|fim▁hole|> <T as Foo>::get()
}
fn main() {
}<|fim▁end|> | fn foo<T>() -> ()
where T : Foo<B=()>, T : Foo<B=()>
{ |
<|file_name|> AudioPreferencesPanel.java<|end_file_name|><|fim▁begin|>/*
* In the name of Allah
* This file is part of The "Quran Teacher or Learn Arabic" Project. Use is subject to
* license terms.
*
* @author: Fazle Rabbi Rahat
*
*/
package QuranTeacher;
import javax.swing.ButtonGroup;
import javax.swing.JComboBox;
import javax.swing.JPanel;
import java.awt.GridBagLayout;
import java.awt.Color;
import javax.swing.JLabel;
import java.awt.GridBagConstraints;
import java.awt.Font;
import java.awt.Insets;
import javax.swing.JRadioButton;
import java.awt.event.ItemListener;
import java.awt.event.ItemEvent;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import QuranTeacher.Preferences.AudioPreferences;
import javax.swing.JTextArea;
public class AudioPreferencesPanel extends JPanel {<|fim▁hole|> */
private static final long serialVersionUID = 1L;
private List<String> QariNames=new ArrayList<>();
private static List<String> audioSourceLinks=new ArrayList<>();
private AudioPreferences audioSetupPrefs;
private JRadioButton rdbtnOn;
private JRadioButton rdbtnOff;
private JComboBox<String>comboBox;
/**
* Create the panel.
*/
@SuppressWarnings({ "rawtypes", "unchecked" })
public AudioPreferencesPanel(final AudioPreferences audioPrefs) {
this.audioSetupPrefs=audioPrefs;
setBackground(Color.DARK_GRAY);
setForeground(Color.RED);
GridBagLayout gridBagLayout = new GridBagLayout();
gridBagLayout.columnWidths = new int[]{0, 0, 0, 0};
gridBagLayout.rowHeights = new int[]{0, 0, 0, 0, 32, 0};
gridBagLayout.columnWeights = new double[]{0.0, 1.0, 0.0, Double.MIN_VALUE};
gridBagLayout.rowWeights = new double[]{0.0, 0.0, 0.0, 0.0, Double.MIN_VALUE, 1.0};
setLayout(gridBagLayout);
JLabel lblHeader = new JLabel("Recitation Preferences");
lblHeader.setForeground(Color.MAGENTA);
lblHeader.setFont(new Font("Tahoma", Font.PLAIN, 18));
GridBagConstraints gbc_lblHeader = new GridBagConstraints();
gbc_lblHeader.gridwidth = 4;
gbc_lblHeader.insets = new Insets(0, 0, 5, 0);
gbc_lblHeader.gridx = 0;
gbc_lblHeader.gridy = 0;
add(lblHeader, gbc_lblHeader);
JLabel lblAudioState = new JLabel("Recitation State :");
lblAudioState.setFont(new Font("Tahoma", Font.PLAIN, 18));
lblAudioState.setForeground(Color.ORANGE);
GridBagConstraints gbc_lblAudioState = new GridBagConstraints();
gbc_lblAudioState.insets = new Insets(0, 0, 5, 5);
gbc_lblAudioState.gridx = 0;
gbc_lblAudioState.gridy = 2;
add(lblAudioState, gbc_lblAudioState);
rdbtnOn = new JRadioButton("ON");
rdbtnOn.setFont(new Font("Tahoma", Font.PLAIN, 18));
rdbtnOn.addItemListener(new ItemListener() {
public void itemStateChanged(ItemEvent e) {
if(e.getStateChange()==ItemEvent.SELECTED)
{
audioSetupPrefs.setAudioON(true);
//System.out.println("On");
}
else
{
audioSetupPrefs.setAudioON(false);
//System.out.println("Off");
}
}
});
rdbtnOn.setBackground(Color.DARK_GRAY);
rdbtnOn.setForeground(Color.GREEN);
GridBagConstraints gbc_rdbtnOn = new GridBagConstraints();
gbc_rdbtnOn.insets = new Insets(0, 0, 5, 5);
gbc_rdbtnOn.gridx = 1;
gbc_rdbtnOn.gridy = 2;
add(rdbtnOn, gbc_rdbtnOn);
rdbtnOff = new JRadioButton("Off");
rdbtnOff.setFont(new Font("Tahoma", Font.PLAIN, 18));
rdbtnOff.setBackground(Color.DARK_GRAY);
rdbtnOff.setForeground(Color.GREEN);
GridBagConstraints gbc_rdbtnOff = new GridBagConstraints();
gbc_rdbtnOff.insets = new Insets(0, 0, 5, 5);
gbc_rdbtnOff.gridx = 2;
gbc_rdbtnOff.gridy = 2;
add(rdbtnOff, gbc_rdbtnOff);
ButtonGroup buttonGroup=new ButtonGroup();
buttonGroup.add(rdbtnOn);
buttonGroup.add(rdbtnOff);
JLabel lblSelectQari = new JLabel("Select Qari :");
lblSelectQari.setForeground(Color.ORANGE);
lblSelectQari.setFont(new Font("Tahoma", Font.PLAIN, 18));
GridBagConstraints gbc_lblSelectQari = new GridBagConstraints();
gbc_lblSelectQari.anchor = GridBagConstraints.WEST;
gbc_lblSelectQari.insets = new Insets(0, 0, 5, 5);
gbc_lblSelectQari.gridx = 0;
gbc_lblSelectQari.gridy = 4;
add(lblSelectQari, gbc_lblSelectQari);
storeQariSource();
comboBox = new JComboBox(QariNames.toArray());
comboBox.addItemListener(new ItemListener() {
public void itemStateChanged(ItemEvent e) {
audioSetupPrefs.setAudioSourceIndex(comboBox.getSelectedIndex());
//System.out.println(AudioPreferences.audioSource);
}
});
comboBox.setFont(new Font("Tahoma", Font.PLAIN, 16));
int k=audioSetupPrefs.getAudioSourceIndex();
if(k<0 || k>=QariNames.size())k=0;
comboBox.setSelectedIndex(k);
GridBagConstraints gbc_comboBox = new GridBagConstraints();
gbc_comboBox.gridwidth = 0;
gbc_comboBox.insets = new Insets(0, 0, 5, 0);
gbc_comboBox.fill = GridBagConstraints.HORIZONTAL;
gbc_comboBox.gridx = 1;
gbc_comboBox.gridy = 4;
add(comboBox, gbc_comboBox);
JTextArea txtrNote = new JTextArea();
txtrNote.setFont(new Font("Monospaced", Font.PLAIN, 16));
txtrNote.setEditable(false);
txtrNote.setLineWrap(true);
txtrNote.setWrapStyleWord(true);
txtrNote.setForeground(Color.PINK);
txtrNote.setBackground(Color.DARK_GRAY);
txtrNote.setText("Note: If you change Qari name, it will take effect only for the \"next to be downoaded\" recitation files. So, the Qari for the previously downloaded files will not change. ");
GridBagConstraints gbc_txtrNote = new GridBagConstraints();
gbc_txtrNote.gridwidth = 0;
gbc_txtrNote.insets = new Insets(0, 0, 0, 5);
gbc_txtrNote.fill = GridBagConstraints.BOTH;
gbc_txtrNote.gridx = 0;
gbc_txtrNote.gridy = 5;
add(txtrNote, gbc_txtrNote);
updateButtonGroup();
}
private void updateButtonGroup() {
if(audioSetupPrefs.isAudioON())
rdbtnOn.setSelected(true);
else
rdbtnOff.setSelected(true);
}
private void storeQariSource()
{
InputStream inStream=this.getClass().getResourceAsStream("files/AudioLinks");
BufferedReader reader=new BufferedReader(new InputStreamReader(inStream));
String text;
try {
while((text=reader.readLine())!=null)
{
if(text.startsWith("name"))
{
QariNames.add(text.split("=")[1]);
}
else if(text.startsWith("link"))
{
audioSourceLinks.add(text.split("=")[1]);
}
}
reader.close();
} catch (IOException e) {
e.printStackTrace();
}
}
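// The parser above expects "files/AudioLinks" to hold line pairs of the form
// below (illustrative values, not the real resource):
//
//   name=Some Reciter
//   link=http://example.com/recitations/
//
// "name" lines fill QariNames and "link" lines fill audioSourceLinks, so
// index i of one list corresponds to index i of the other.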
public static String getAudioSourceLink(int index) {
return audioSourceLinks.get(index);
}
public void updateSetupPanel()
{
updateButtonGroup();
int k=audioSetupPrefs.getAudioSourceIndex();
if(k<0 || k>=QariNames.size())k=0;
comboBox.setSelectedIndex(k);
}
}<|fim▁end|> |
/**
* Preferences panel to handle audio preferences. It doesn't extend
* the PreferencesPanel class
<|file_name|>Cluster.java<|end_file_name|><|fim▁begin|>package com.cluit.util.dataTypes;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
import com.cluit.util.Const;
import com.cluit.util.AoP.MethodMapper;
import com.cluit.util.methods.ClusteringUtils;
import com.cluit.util.structures.KeyPriorityQueue_Max;
import com.cluit.util.structures.Pair;
/**A cluster is a collection of entries.
*
* The class has a lot of utility functions related to clusters such as calculating centoid, finding the entry furthest
* from the centoid and so on.
*
* @author Simon
*
*/
public class Cluster {
//*******************************************************************************************************
//region VARIABLES
//*******************************************************************************************************
private Entry centoid;
public Pair<Double, Entry> cache = new Pair<Double, Entry>( 0.0, new Entry() );
private final int dimensions;
private final Set<Entry> members = new HashSet<>();
private final KeyPriorityQueue_Max<Entry> distanceQueue = new KeyPriorityQueue_Max<Entry>();
//endregion *********************************************************************************************
//region CONSTRUCTOR
//*******************************************************************************************************
/**Creates a cluster with its centoid at the given position.
*
* @param position The centoid's coordinates; must have 1 or more dimensions
*/
public Cluster(double[] position) {
if( position.length < 1){
API_Exeption("A cluster's position must be defined and have 1 or more dimenstions!");
}
this.centoid = new Entry(position);
this.dimensions = centoid.getDimensions();
};
//endregion *********************************************************************************************
//region STATIC METHODS
//*******************************************************************************************************
/**Calculates a central point (centoid) from a collection of entries. Note that all entries must have the same dimensionality.
*
* @param entries
* @return A new entry, with a position that is the mean of all parameter entries (NULL if entries.length == 0)
*/
public static Entry calculateCentoid(Entry[] entries){
if( entries.length == 0)
return null;
//Fetch dimensionality for the entries and set up the coordinate array
int dim = entries[0].getDimensions();
double[] centoidCoordinates = new double[dim];
//Add all entries' positions together (for example, add all entries' x-values together in one array slot,
//and all y-values together in the next array slot).
for( Entry p : entries ){
for( int i = 0; i < p.getDimensions(); i++ )
centoidCoordinates[i] += p.getCoordinateAt(i);
}
<|fim▁hole|> centoidCoordinates[i] /= entries.length;
return new Entry(centoidCoordinates);
}
/**Calculates the sum of squared errors for a given set of entries, given a centroid.<br>
 * The calculation is simple: for each point, calculate the Euclidean distance from that point to the centroid, and square the distance.
 *
 * @param centoid The mean position of the entries (see {@link Cluster#calculateCentoid} )
* @param entries
* @return
*/
public static double calculateSquaredError(Entry centoid, Entry[] entries){
double out = 0;
double dist = 0;
for(Entry e : entries ){
dist = ClusteringUtils.eucDistance(centoid, e);
out += (dist*dist);
}
return out;
}
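// Worked example (illustrative): for entries (0,0) and (2,2) the centroid is (1,1);
// each entry lies at Euclidean distance sqrt(2) from it, so the sum of squared
// errors is 2 + 2 = 4.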
//endregion *********************************************************************************************
//region PUBLIC
//*******************************************************************************************************
public int getNumberOfMembers(){
return distanceQueue.size() == members.size() ? distanceQueue.size() : -1;
}
/**Returns the distance to the centroid for the point which is farthest from the centroid
*
* @return The distance, if there are any members of the cluster. -1 otherwise
*/
public double getFurthestMembersDistance(){
if( distanceQueue.size() == 0 )
return -1;
return distanceQueue.peekKey();
}
/** Calculates a new centroid for the cluster. This method also updates each point's distance to the centroid.
 * <br><br>
 * Complexity = <b>O(n * d)</b>,
 * where <b>n</b> is the number of elements in the cluster and
 * where <b>d</b> is the number of dimensions for each point
*/
public void calculateCentoid(){
int dim = centoid.getDimensions();
double[] newCentoidCoordinates = new double[dim];
for( Entry p : distanceQueue.values() ){
for( int i = 0; i < p.getDimensions(); i++ )
newCentoidCoordinates[i] += p.getCoordinateAt(i);
}
for( int i = 0; i < newCentoidCoordinates.length; i++)
newCentoidCoordinates[i] /= distanceQueue.size();
centoid = new Entry(newCentoidCoordinates );
updateMemberDistances();
}
/**Fetches a <b>copy</b> of the centroid of the cluster
 *
 * @return A new Entry, which is a copy of the cluster's centroid
*/
public Entry getCentoid(){
return new Entry(centoid);
}
/**Adds an entry to the cluster. The same entry cannot be added twice to the same cluster.
 * This does not automatically update the cluster centroid; to do that, call {@code calculateCentoid()}.
*
* @param e
* @return True if the entry was added, false if it was not
*/
public boolean add(Entry e){
if( e.getDimensions() != dimensions ){
API_Exeption("An entry cannot be added to a cluster if their dimenstions does not match! Cluster.dim = "+dimensions+" Entry.dim = "+e.getDimensions() );
return false;
}
if( members.contains(e) ){
API_Exeption("An entry cannot be added to a cluster twice! The entry "+e+" is already present in the cluster" );
return false;
}
double dist;
if( e == cache.right )
dist = cache.left;
else
dist = ClusteringUtils.eucDistance(e, centoid);
boolean a = distanceQueue.put(dist, e);
boolean b = members.add(e);
return a & b;
}
/**Removes a point from the cluster
*
* @param e The point to be removed
* @return True if it was found. False if the point wasn't found.
*/
public boolean removeEntry(Entry e){
boolean a = distanceQueue.remove(e);
boolean b = members.remove(e);
return a & b;
}
/**Calculates a point's distance to the cluster's centroid.
 * The result is cached (the cache stores only 1 element), to prevent
 * the result from having to be re-computed in the near future.
 * <br>It is therefore recommended that whenever a point checks its distance to
 * all clusters, it should be added to a cluster before another point checks
 * its distances.
*
* @param p The point
* @return Distance to the centoid
*/
public double distanceToCentoid(Entry p){
double dist = ClusteringUtils.eucDistance(p, centoid);
cache = new Pair<Double, Entry>(dist, p);
return dist;
}
/**Checks whether a given entry is a member of this cluster or not
 *
 * @param e The entry
 * @return True if the entry is found within the cluster
*/
public boolean isMember(Entry e) {
return members.contains(e);
}
/**Fetches an array of all entries that are present within this cluster. This array can have a length of 0, in case no
 * entries are registered within this cluster
*/
public Entry[] getMembers() {
return members.toArray( new Entry[0] );
}
/**Calculates the sum of squared errors for this cluster
*
* @return
*/
public double getSquaredError(){
return Cluster.calculateSquaredError(centoid, getMembers()) ;
}
public String toString(){
String out = "[ ";
for( Entry e : members ){
out += e.toString() + " : ";
}
return members.size() > 0 ? out.substring(0, out.length() - 3) + " ]" : "[ ]";
}
//endregion *********************************************************************************************
//region PRIVATE
//*******************************************************************************************************
/**Updates each member's distance to the centroid
*
*/
private void updateMemberDistances() {
ArrayList<Entry> list = distanceQueue.values();
distanceQueue.clear();
for(Entry p : list){
double newDistance = ClusteringUtils.eucDistance(centoid, p);
distanceQueue.add(newDistance, p);
}
}
private int API_Exeption(String s){
MethodMapper.invoke(Const.METHOD_EXCEPTION_GENERAL, "Error in Cluster.java! " + s +" " + com.cluit.util.methods.MiscUtils.getStackPos(), new Exception() );
return -1;
}
//endregion *********************************************************************************************
//*******************************************************************************************************
}<|fim▁end|> | //Divide each position by the number of entries (to get the mean of each dimension's position
for( int i = 0; i < centoidCoordinates.length; i++) |
<|file_name|>queue.js<|end_file_name|><|fim▁begin|>class Queue {
constructor() {
this.items = [];
}
add(item) {
this.items.push(item);
}
remove() {<|fim▁hole|> return this.items.shift();
}
peek() {
// Return the item at the front of the queue (the next item remove() would return).
return this.items[0];
}
isEmpty() {
return this.items.length === 0;
}
}
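// Usage sketch (illustrative):
//   const q = new Queue();
//   q.add('a'); q.add('b');
//   q.peek();   // 'a' -- the front of the queue
//   q.remove(); // 'a'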
module.exports = Queue;<|fim▁end|> | |
<|file_name|>config.js<|end_file_name|><|fim▁begin|>module.exports = {
testClient: 'http://localhost:8089/dist/',
mochaTimeout: 10000,
testLayerIds: [0],
seleniumTimeouts: {<|fim▁hole|> script: 5000,
implicit: 1000,
pageLoad: 5000
}
}<|fim▁end|> | |
<|file_name|>ReduceConllResponse.java<|end_file_name|><|fim▁begin|>package eu.newsreader.conversion;
import eu.newsreader.util.Util;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
/**
* Created by piek on 12/15/14.
*/
public class ReduceConllResponse {
static public void main (String[] args) {
try {
String pathToKeyFolder = "";
String pathToResponseFolder = "";
pathToKeyFolder = "/Users/piek/Desktop/NWR/NWR-benchmark/coreference/corpus_CONLL/corpus_airbus/events/key";
pathToResponseFolder = "/Users/piek/Desktop/NWR/NWR-benchmark/coreference/corpus_CONLL/corpus_airbus/events/response";
for (int i = 0; i < args.length; i++) {<|fim▁hole|> }
else if (arg.equalsIgnoreCase("--response") && args.length>(i+1)) {
pathToResponseFolder = args[i+1];
}
}
ArrayList<File> keyFiles = Util.makeFlatFileList(new File(pathToKeyFolder));
ArrayList<File> responseFiles = Util.makeFlatFileList(new File(pathToResponseFolder));
System.out.println("keyFiles = " + keyFiles.size());
System.out.println("responseFiles = " + responseFiles.size());
for (int i = 0; i < keyFiles.size(); i++) {
File keyFile = keyFiles.get(i);
ArrayList<String> sentenceIds = Util.getSentenceIdsConllFile(keyFile);
// System.out.println("sentenceIds.toString() = " + sentenceIds.toString());
String keyS1 = Util.readFirstSentence(keyFile);
boolean MATCH = false;
for (int j = 0; j < responseFiles.size(); j++) {
File responseFile = responseFiles.get(j);
String responseS1 = Util.readFirstSentence(responseFile);
if (keyS1.equals(responseS1)) {
String reducedResponse = Util.reduceConllFileForSentenceIds(responseFile, sentenceIds);
// System.out.println("reducedResponse = " + reducedResponse);
OutputStream responseFos = new FileOutputStream(responseFile);
responseFos.write(reducedResponse.getBytes());
responseFos.close();
MATCH = true;
break;
}
}
if (!MATCH) {
System.out.println("NO MATCH for keyFile = " + keyFile.getName());
System.out.println("sentenceIds = " + sentenceIds.toString());
}
// break;
}
} catch (Exception e) {
e.printStackTrace();
}
}
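// Illustrative invocation (paths are placeholders):
//   java eu.newsreader.conversion.ReduceConllResponse --key /path/to/key --response /path/to/response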
}<|fim▁end|> | String arg = args[i];
if (arg.equalsIgnoreCase("--key") && args.length>(i+1)) {
pathToKeyFolder = args[i+1]; |
<|file_name|>interface.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
// Package codegururevieweriface provides an interface to enable mocking the Amazon CodeGuru Reviewer service client
// for testing your code.
//
// It is important to note that this interface will have breaking changes
// when the service model is updated and adds new API operations, paginators,
// and waiters.
package codegururevieweriface
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/service/codegurureviewer"
)
// CodeGuruReviewerAPI provides an interface to enable mocking the
// codegurureviewer.CodeGuruReviewer service client's API operation,
// paginators, and waiters. This make unit testing your code that calls out
// to the SDK's service client's calls easier.
//
// The best way to use this interface is so the SDK's service client's calls
// can be stubbed out for unit testing your code with the SDK without needing
// to inject custom request handlers into the SDK's request pipeline.
//
// // myFunc uses an SDK service client to make a request to
// // Amazon CodeGuru Reviewer.
// func myFunc(svc codegururevieweriface.CodeGuruReviewerAPI) bool {
// // Make svc.AssociateRepository request
// }
//
// func main() {
// sess := session.New()
// svc := codegurureviewer.New(sess)
//
// myFunc(svc)
// }
//<|fim▁hole|>//
// // Define a mock struct to be used in your unit tests of myFunc.
// type mockCodeGuruReviewerClient struct {
// codegururevieweriface.CodeGuruReviewerAPI
// }
// func (m *mockCodeGuruReviewerClient) AssociateRepository(input *codegurureviewer.AssociateRepositoryInput) (*codegurureviewer.AssociateRepositoryOutput, error) {
// // mock response/functionality
// }
//
// func TestMyFunc(t *testing.T) {
// // Setup Test
// mockSvc := &mockCodeGuruReviewerClient{}
//
// myfunc(mockSvc)
//
// // Verify myFunc's functionality
// }
//
// It is important to note that this interface will have breaking changes
// when the service model is updated and adds new API operations, paginators,
// and waiters. Its suggested to use the pattern above for testing, or using
// tooling to generate mocks to satisfy the interfaces.
type CodeGuruReviewerAPI interface {
AssociateRepository(*codegurureviewer.AssociateRepositoryInput) (*codegurureviewer.AssociateRepositoryOutput, error)
AssociateRepositoryWithContext(aws.Context, *codegurureviewer.AssociateRepositoryInput, ...request.Option) (*codegurureviewer.AssociateRepositoryOutput, error)
AssociateRepositoryRequest(*codegurureviewer.AssociateRepositoryInput) (*request.Request, *codegurureviewer.AssociateRepositoryOutput)
CreateCodeReview(*codegurureviewer.CreateCodeReviewInput) (*codegurureviewer.CreateCodeReviewOutput, error)
CreateCodeReviewWithContext(aws.Context, *codegurureviewer.CreateCodeReviewInput, ...request.Option) (*codegurureviewer.CreateCodeReviewOutput, error)
CreateCodeReviewRequest(*codegurureviewer.CreateCodeReviewInput) (*request.Request, *codegurureviewer.CreateCodeReviewOutput)
DescribeCodeReview(*codegurureviewer.DescribeCodeReviewInput) (*codegurureviewer.DescribeCodeReviewOutput, error)
DescribeCodeReviewWithContext(aws.Context, *codegurureviewer.DescribeCodeReviewInput, ...request.Option) (*codegurureviewer.DescribeCodeReviewOutput, error)
DescribeCodeReviewRequest(*codegurureviewer.DescribeCodeReviewInput) (*request.Request, *codegurureviewer.DescribeCodeReviewOutput)
DescribeRecommendationFeedback(*codegurureviewer.DescribeRecommendationFeedbackInput) (*codegurureviewer.DescribeRecommendationFeedbackOutput, error)
DescribeRecommendationFeedbackWithContext(aws.Context, *codegurureviewer.DescribeRecommendationFeedbackInput, ...request.Option) (*codegurureviewer.DescribeRecommendationFeedbackOutput, error)
DescribeRecommendationFeedbackRequest(*codegurureviewer.DescribeRecommendationFeedbackInput) (*request.Request, *codegurureviewer.DescribeRecommendationFeedbackOutput)
DescribeRepositoryAssociation(*codegurureviewer.DescribeRepositoryAssociationInput) (*codegurureviewer.DescribeRepositoryAssociationOutput, error)
DescribeRepositoryAssociationWithContext(aws.Context, *codegurureviewer.DescribeRepositoryAssociationInput, ...request.Option) (*codegurureviewer.DescribeRepositoryAssociationOutput, error)
DescribeRepositoryAssociationRequest(*codegurureviewer.DescribeRepositoryAssociationInput) (*request.Request, *codegurureviewer.DescribeRepositoryAssociationOutput)
DisassociateRepository(*codegurureviewer.DisassociateRepositoryInput) (*codegurureviewer.DisassociateRepositoryOutput, error)
DisassociateRepositoryWithContext(aws.Context, *codegurureviewer.DisassociateRepositoryInput, ...request.Option) (*codegurureviewer.DisassociateRepositoryOutput, error)
DisassociateRepositoryRequest(*codegurureviewer.DisassociateRepositoryInput) (*request.Request, *codegurureviewer.DisassociateRepositoryOutput)
ListCodeReviews(*codegurureviewer.ListCodeReviewsInput) (*codegurureviewer.ListCodeReviewsOutput, error)
ListCodeReviewsWithContext(aws.Context, *codegurureviewer.ListCodeReviewsInput, ...request.Option) (*codegurureviewer.ListCodeReviewsOutput, error)
ListCodeReviewsRequest(*codegurureviewer.ListCodeReviewsInput) (*request.Request, *codegurureviewer.ListCodeReviewsOutput)
ListCodeReviewsPages(*codegurureviewer.ListCodeReviewsInput, func(*codegurureviewer.ListCodeReviewsOutput, bool) bool) error
ListCodeReviewsPagesWithContext(aws.Context, *codegurureviewer.ListCodeReviewsInput, func(*codegurureviewer.ListCodeReviewsOutput, bool) bool, ...request.Option) error
ListRecommendationFeedback(*codegurureviewer.ListRecommendationFeedbackInput) (*codegurureviewer.ListRecommendationFeedbackOutput, error)
ListRecommendationFeedbackWithContext(aws.Context, *codegurureviewer.ListRecommendationFeedbackInput, ...request.Option) (*codegurureviewer.ListRecommendationFeedbackOutput, error)
ListRecommendationFeedbackRequest(*codegurureviewer.ListRecommendationFeedbackInput) (*request.Request, *codegurureviewer.ListRecommendationFeedbackOutput)
ListRecommendationFeedbackPages(*codegurureviewer.ListRecommendationFeedbackInput, func(*codegurureviewer.ListRecommendationFeedbackOutput, bool) bool) error
ListRecommendationFeedbackPagesWithContext(aws.Context, *codegurureviewer.ListRecommendationFeedbackInput, func(*codegurureviewer.ListRecommendationFeedbackOutput, bool) bool, ...request.Option) error
ListRecommendations(*codegurureviewer.ListRecommendationsInput) (*codegurureviewer.ListRecommendationsOutput, error)
ListRecommendationsWithContext(aws.Context, *codegurureviewer.ListRecommendationsInput, ...request.Option) (*codegurureviewer.ListRecommendationsOutput, error)
ListRecommendationsRequest(*codegurureviewer.ListRecommendationsInput) (*request.Request, *codegurureviewer.ListRecommendationsOutput)
ListRecommendationsPages(*codegurureviewer.ListRecommendationsInput, func(*codegurureviewer.ListRecommendationsOutput, bool) bool) error
ListRecommendationsPagesWithContext(aws.Context, *codegurureviewer.ListRecommendationsInput, func(*codegurureviewer.ListRecommendationsOutput, bool) bool, ...request.Option) error
ListRepositoryAssociations(*codegurureviewer.ListRepositoryAssociationsInput) (*codegurureviewer.ListRepositoryAssociationsOutput, error)
ListRepositoryAssociationsWithContext(aws.Context, *codegurureviewer.ListRepositoryAssociationsInput, ...request.Option) (*codegurureviewer.ListRepositoryAssociationsOutput, error)
ListRepositoryAssociationsRequest(*codegurureviewer.ListRepositoryAssociationsInput) (*request.Request, *codegurureviewer.ListRepositoryAssociationsOutput)
ListRepositoryAssociationsPages(*codegurureviewer.ListRepositoryAssociationsInput, func(*codegurureviewer.ListRepositoryAssociationsOutput, bool) bool) error
ListRepositoryAssociationsPagesWithContext(aws.Context, *codegurureviewer.ListRepositoryAssociationsInput, func(*codegurureviewer.ListRepositoryAssociationsOutput, bool) bool, ...request.Option) error
ListTagsForResource(*codegurureviewer.ListTagsForResourceInput) (*codegurureviewer.ListTagsForResourceOutput, error)
ListTagsForResourceWithContext(aws.Context, *codegurureviewer.ListTagsForResourceInput, ...request.Option) (*codegurureviewer.ListTagsForResourceOutput, error)
ListTagsForResourceRequest(*codegurureviewer.ListTagsForResourceInput) (*request.Request, *codegurureviewer.ListTagsForResourceOutput)
PutRecommendationFeedback(*codegurureviewer.PutRecommendationFeedbackInput) (*codegurureviewer.PutRecommendationFeedbackOutput, error)
PutRecommendationFeedbackWithContext(aws.Context, *codegurureviewer.PutRecommendationFeedbackInput, ...request.Option) (*codegurureviewer.PutRecommendationFeedbackOutput, error)
PutRecommendationFeedbackRequest(*codegurureviewer.PutRecommendationFeedbackInput) (*request.Request, *codegurureviewer.PutRecommendationFeedbackOutput)
TagResource(*codegurureviewer.TagResourceInput) (*codegurureviewer.TagResourceOutput, error)
TagResourceWithContext(aws.Context, *codegurureviewer.TagResourceInput, ...request.Option) (*codegurureviewer.TagResourceOutput, error)
TagResourceRequest(*codegurureviewer.TagResourceInput) (*request.Request, *codegurureviewer.TagResourceOutput)
UntagResource(*codegurureviewer.UntagResourceInput) (*codegurureviewer.UntagResourceOutput, error)
UntagResourceWithContext(aws.Context, *codegurureviewer.UntagResourceInput, ...request.Option) (*codegurureviewer.UntagResourceOutput, error)
UntagResourceRequest(*codegurureviewer.UntagResourceInput) (*request.Request, *codegurureviewer.UntagResourceOutput)
}
var _ CodeGuruReviewerAPI = (*codegurureviewer.CodeGuruReviewer)(nil)<|fim▁end|> | // In your _test.go file: |
<|file_name|>client_utils.py<|end_file_name|><|fim▁begin|>"""Client Utilities
Factor out code shared by both the resync and resync-explorer
clients.
Copyright 2012,2013 Simeon Warner
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License
"""
import logging
import logging.config
import optparse
import sys
from resync.client import ClientFatalError
from resync.explorer import Explorer
from resync.utils import UTCFormatter
def init_logging(to_file=False, logfile=None, default_logfile='/tmp/resync.log',
human=True, verbose=False, eval_mode=False,
default_logger='client', extra_loggers=None):
"""Initialize logging
Use of log levels:
DEBUG - very verbose, for evaluation of output (-e)
INFO - verbose, only seen by users if they ask for it (-v)
WARNING - messages output to console
Logging to a file: If to_file is True then output will be written to
a file. This will be logfile if set, else default_logfile (which may
also be overridden).
"""
fmt = '%(asctime)s | %(name)s | %(levelname)s | %(message)s'
formatter = UTCFormatter(fmt)
if human:
# Create a special handler designed just for human readable output
hh = logging.StreamHandler()
hh.setLevel( logging.INFO if (verbose) else logging.WARNING )
hh.setFormatter(logging.Formatter(fmt='%(message)s'))
if to_file:
if (logfile is None):
logfile = default_logfile
fh = logging.FileHandler(filename=logfile, mode='a')
fh.setFormatter(formatter)
fh.setLevel( logging.DEBUG if (eval_mode) else logging.INFO )
loggers = [default_logger,'resync']
if (extra_loggers is not None):
for logger in extra_loggers:
loggers.append(logger)
for logger in loggers:
log = logging.getLogger(logger)
log.setLevel(logging.DEBUG) #control at handler instead
if human:
log.addHandler(hh)
if to_file:
log.addHandler(fh)
log=logging.getLogger(default_logger)
if (to_file):
log.info("Writing detailed log to %s" % (logfile))
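# Illustrative call, relying only on the defaults above:
#   init_logging(to_file=True, verbose=True)
# would echo INFO messages to the console and append a detailed log to /tmp/resync.log.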
def count_true_args(*args):
"""Count number of list of arguments that evaluate True"""
count=0
for arg in args:
if (arg):
count+=1
return(count)
def parse_links(args_link):
links=[]
if (args_link is not None):
for link_str in args_link:
try:
links.append(parse_link(link_str))
except ValueError as e:
raise ClientFatalError("Bad --link option '%s' (%s)"%(link_str,str(e)))
return(links)
def parse_link(link_str):
"""Parse --link option to add to <rs:ln> links
Input string of the form: rel,href,att1=val1,att2=val2
"""
atts={}
help_str = "--link option '%s' (format rel,href,att1=val1...)"%(link_str)
try:
segs = link_str.split(',')
# First segments are relation and subject
atts['rel'] = segs.pop(0)
atts['href'] = segs.pop(0)
if (atts['href']==''):
raise ClientFatalError("Missing uri in " + help_str)
# Remaining segments are attributes
for term in segs:
(k,v)=term.split('=')
if (k=='' or v==''):
raise ClientFatalError("Bad attribute (%s) in " % (term) + help_str)
atts[k]=v
except ValueError as e:
raise ClientFatalError("Bad component of " + help_str)
except IndexError as e:
raise ClientFatalError("Incomplete component of " + help_str)
return(atts)
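# Illustrative example (the URI is a placeholder):
#   parse_link("describedby,http://example.org/desc,type=application/xml")
#   -> {'rel': 'describedby', 'href': 'http://example.org/desc', 'type': 'application/xml'}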
def parse_capabilities(caps_str):
"""Parse list of capabilities in --capabilitylist option
<|fim▁hole|> try:
segs = caps_str.split(',')
for term in segs:
(k,v)=term.split('=')
capabilities[k]=v
except ValueError as e:
raise ClientFatalError("Bad component of --capabilitylist option '%s' (%s)"%(caps_str,str(e)))
return(capabilities)
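# Illustrative example (the URI is a placeholder):
#   parse_capabilities("resourcelist=http://example.org/rl.xml")
#   -> {'resourcelist': 'http://example.org/rl.xml'}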
def parse_capability_lists(cls_str):
"""Parse list of capability lists in --capabilitylistindex option
Input string of the form: uri,uri
"""
return(cls_str.split(','))<|fim▁end|> | Input string of the form: cap_name=uri,cap_name=uri
"""
capabilities={} |
<|file_name|>prefetch.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::trace::JSTraceable;
use crate::dom::document::Document;
use crate::dom::htmlimageelement::image_fetch_request;
use crate::dom::htmlscriptelement::script_fetch_request;
use crate::stylesheet_loader::stylesheet_fetch_request;
use html5ever::buffer_queue::BufferQueue;
use html5ever::tokenizer::states::RawKind;
use html5ever::tokenizer::Tag;
use html5ever::tokenizer::TagKind;
use html5ever::tokenizer::Token;
use html5ever::tokenizer::TokenSink;
use html5ever::tokenizer::TokenSinkResult;
use html5ever::tokenizer::Tokenizer as HtmlTokenizer;
use html5ever::tokenizer::TokenizerResult;
use html5ever::Attribute;
use html5ever::LocalName;
use js::jsapi::JSTracer;
use msg::constellation_msg::PipelineId;
use net_traits::request::CorsSettings;
use net_traits::request::Referrer;
use net_traits::CoreResourceMsg;
use net_traits::FetchChannels;
use net_traits::IpcSend;
use net_traits::ReferrerPolicy;
use net_traits::ResourceThreads;
use servo_url::ImmutableOrigin;
use servo_url::ServoUrl;
#[derive(JSTraceable, MallocSizeOf)]
#[must_root]
pub struct Tokenizer {
#[ignore_malloc_size_of = "Defined in html5ever"]
inner: HtmlTokenizer<PrefetchSink>,
}
#[allow(unsafe_code)]
unsafe impl JSTraceable for HtmlTokenizer<PrefetchSink> {
unsafe fn trace(&self, trc: *mut JSTracer) {
self.sink.trace(trc)
}
}
impl Tokenizer {
pub fn new(document: &Document) -> Self {
let sink = PrefetchSink {
origin: document.origin().immutable().clone(),
pipeline_id: document.global().pipeline_id(),
base_url: None,
document_url: document.url(),
referrer: Referrer::ReferrerUrl(document.url()),
referrer_policy: document.get_referrer_policy(),
resource_threads: document.loader().resource_threads().clone(),
// Initially we set prefetching to false, and only set it
// true after the first script tag, since that is what will
// block the main parser.
prefetching: false,
};
let options = Default::default();
let inner = HtmlTokenizer::new(sink, options);
Tokenizer { inner }
}
pub fn feed(&mut self, input: &mut BufferQueue) {
while let TokenizerResult::Script(PrefetchHandle) = self.inner.feed(input) {}
}
}
#[derive(JSTraceable)]
struct PrefetchSink {
origin: ImmutableOrigin,
pipeline_id: PipelineId,
document_url: ServoUrl,
base_url: Option<ServoUrl>,
referrer: Referrer,
referrer_policy: Option<ReferrerPolicy>,
resource_threads: ResourceThreads,
prefetching: bool,
}
/// The prefetch tokenizer produces trivial results
struct PrefetchHandle;
impl TokenSink for PrefetchSink {
type Handle = PrefetchHandle;
fn process_token(
&mut self,
token: Token,
_line_number: u64,
) -> TokenSinkResult<PrefetchHandle> {
let tag = match token {
Token::TagToken(ref tag) => tag,
_ => return TokenSinkResult::Continue,
};
match (tag.kind, &tag.name) {
(TagKind::StartTag, local_name!("script")) if self.prefetching => {
if let Some(url) = self.get_url(tag, local_name!("src")) {
debug!("Prefetch script {}", url);
let cors_setting = self.get_cors_settings(tag, local_name!("crossorigin"));
let integrity_metadata = self
.get_attr(tag, local_name!("integrity"))
.map(|attr| String::from(&attr.value))
.unwrap_or_default();
let request = script_fetch_request(
url,
cors_setting,
self.origin.clone(),
self.pipeline_id,
self.referrer.clone(),
self.referrer_policy,
integrity_metadata,
);
let _ = self
.resource_threads
.send(CoreResourceMsg::Fetch(request, FetchChannels::Prefetch));
}
TokenSinkResult::RawData(RawKind::ScriptData)
},
(TagKind::StartTag, local_name!("img")) if self.prefetching => {
if let Some(url) = self.get_url(tag, local_name!("src")) {
debug!("Prefetch {} {}", tag.name, url);
let request = image_fetch_request(url, self.origin.clone(), self.pipeline_id);
let _ = self
.resource_threads
.send(CoreResourceMsg::Fetch(request, FetchChannels::Prefetch));
}<|fim▁hole|> if rel.value.eq_ignore_ascii_case("stylesheet") {
if let Some(url) = self.get_url(tag, local_name!("href")) {
debug!("Prefetch {} {}", tag.name, url);
let cors_setting =
self.get_cors_settings(tag, local_name!("crossorigin"));
let integrity_metadata = self
.get_attr(tag, local_name!("integrity"))
.map(|attr| String::from(&attr.value))
.unwrap_or_default();
let request = stylesheet_fetch_request(
url,
cors_setting,
self.origin.clone(),
self.pipeline_id,
self.referrer.clone(),
self.referrer_policy,
integrity_metadata,
);
let _ = self
.resource_threads
.send(CoreResourceMsg::Fetch(request, FetchChannels::Prefetch));
}
}
}
TokenSinkResult::Continue
},
(TagKind::StartTag, local_name!("script")) => {
TokenSinkResult::RawData(RawKind::ScriptData)
},
(TagKind::EndTag, local_name!("script")) => {
// After the first script tag, the main parser is blocked, so it's worth prefetching.
self.prefetching = true;
TokenSinkResult::Script(PrefetchHandle)
},
(TagKind::StartTag, local_name!("base")) => {
if let Some(url) = self.get_url(tag, local_name!("href")) {
if self.base_url.is_none() {
debug!("Setting base {}", url);
self.base_url = Some(url);
}
}
TokenSinkResult::Continue
},
_ => TokenSinkResult::Continue,
}
}
}
impl PrefetchSink {
fn get_attr<'a>(&'a self, tag: &'a Tag, name: LocalName) -> Option<&'a Attribute> {
tag.attrs.iter().find(|attr| attr.name.local == name)
}
fn get_url(&self, tag: &Tag, name: LocalName) -> Option<ServoUrl> {
let attr = self.get_attr(tag, name)?;
let base = self.base_url.as_ref().unwrap_or(&self.document_url);
ServoUrl::parse_with_base(Some(base), &attr.value).ok()
}
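    // Illustrative resolution: after <base href="https://example.org/a/"> has been
    // seen, an attribute value of "img/x.png" resolves to https://example.org/a/img/x.png.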
fn get_cors_settings(&self, tag: &Tag, name: LocalName) -> Option<CorsSettings> {
let crossorigin = self.get_attr(tag, name)?;
if crossorigin.value.eq_ignore_ascii_case("anonymous") {
Some(CorsSettings::Anonymous)
} else if crossorigin.value.eq_ignore_ascii_case("use-credentials") {
Some(CorsSettings::UseCredentials)
} else {
None
}
}
}<|fim▁end|> | TokenSinkResult::Continue
},
(TagKind::StartTag, local_name!("link")) if self.prefetching => {
if let Some(rel) = self.get_attr(tag, local_name!("rel")) { |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
from django.views.generic.base import TemplateView
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from blog.models import Post
from taggit.models import Tag
class BlogHomeView(ListView):
template_name = 'blog/home.html'
context_object_name = 'posts'
paginate_by = 10
def get_queryset(self):
posts = Post.objects.order_by('-pub_date')
if self.request.user.is_superuser:
return posts
else:
return posts.filter(is_published=True)
class BlogPostView(DetailView):
context_object_name = 'post'
template_name = 'blog/post.html'
def get_queryset(self):
if self.request.user.is_superuser:
return Post.objects.all()
return Post.objects.filter(is_published=True)
class BlogTagView(TemplateView):
template_name = 'blog/tag.html'
def get_context_data(self, **kwargs):
context = super(BlogTagView, self).get_context_data(**kwargs)
tagslug = self.kwargs['slug']
tag = Tag.objects.get(slug=tagslug)
context['tag'] = tag.name
context['taggedposts'] = (Post.objects
.filter(is_published=True)
.filter(tags__name=tag.name)
.distinct())
context['published_tags'] = Post.objects.filter(is_published=True)
return context
class BlogRssFeed(Feed):
title = "Brandon Waskiewicz's blog"
link = '/blog/'
description = 'Inside my head'
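    # Hypothetical URL wiring (route paths and names are assumptions, not part of this app):
    #   path('blog/feed/rss/', BlogRssFeed(), name='blog_rss')
    #   path('blog/feed/atom/', BlogAtomFeed(), name='blog_atom')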
def items(self):<|fim▁hole|> def item_title(self, item):
return item.name
def item_description(self, item):
return item.get_preview()
class BlogAtomFeed(BlogRssFeed):
feed_type = Atom1Feed
subtitle = BlogRssFeed.title<|fim▁end|> | return Post.objects.filter(is_published=True).order_by('-pub_date')
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2015-2017 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from .linkstate import LinkState # noqa
from .node.local_router_id import LocalRouterID # noqa
from .node.name import NodeName # noqa
from .node.isisarea import ISISArea # noqa
from .node.sr_capabilities import SRCapabilities # noqa
from .node.sr_algorithm import SRAlgorithm # noqa
from .node.node_msd import NodeMSD # noqa
from .node.nodeflags import NodeFlags # noqa
from .node.opa_node_attr import OpaNodeAttr # noqa
from .node.sid_or_label import SIDorLabel # noqa
from .node.srlb import SRLB # noqa
from .link.admingroup import AdminGroup # noqa
from .link.remote_router_id import RemoteRouterID # noqa
from .link.max_bw import MaxBandwidth # noqa
from .link.max_rsv_bw import MaxResvBandwidth # noqa
from .link.unsrv_bw import UnrsvBandwidth # noqa
from .link.te_metric import TeMetric # noqa
from .link.link_name import LinkName # noqa
from .link.igp_metric import IGPMetric # noqa
from .link.adj_seg_id import AdjSegID # noqa
from .link.link_identifiers import LinkIdentifiers # noqa
from .link.link_msd import LinkMSD # noqa
from .link.lan_adj_sid import LanAdjSegID # noqa
from .link.srlg import SRLGList # noqa
from .link.mplsmask import MplsMask # noqa
from .link.protection_type import ProtectionType # noqa
from .link.opa_link_attr import OpaLinkAttr # noqa
from .link.peer_node_sid import PeerNodeSID # noqa
from .link.peer_adj_sid import PeerAdjSID # noqa
from .link.peer_set_sid import PeerSetSID # noqa
from .link.unidirect_link_delay import UnidirectLinkDelay # noqa
from .link.min_max_link_delay import MinMaxUnidirectLinkDelay # noqa
from .link.unidirect_delay_var import UnidirectDelayVar # noqa
from .link.unidirect_packet_loss import UnidirectPacketLoss # noqa
from .link.unidirect_residual_bw import UnidirectResidualBw # noqa
from .link.unidirect_avail_bw import UnidirectAvailBw # noqa
from .link.unidirect_bw_util import UnidirectBwUtil # noqa
from .prefix.prefix_metric import PrefixMetric # noqa
from .prefix.prefix_sid import PrefixSID # noqa<|fim▁hole|>from .prefix.igp_route_tag_list import IGPRouteTagList # noqa
from .prefix.ext_igp_route_tag_list import ExtIGPRouteTagList # noqa
from .prefix.ospf_forward_addr import OspfForwardingAddr # noqa<|fim▁end|> | from .prefix.prefix_igp_attr import PrefixIGPAttr # noqa
from .prefix.src_router_id import SrcRouterID # noqa
from .prefix.igpflags import IGPFlags # noqa |
<|file_name|>IUnaryOperator.java<|end_file_name|><|fim▁begin|>// Catalano Fuzzy Library
// The Catalano Framework
//
// Copyright © Diego Catalano, 2013
// diego.catalano at live.com
//
// Copyright © Andrew Kirillov, 2007-2008
// andrew.kirillov at gmail.com
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
//
package Catalano.Fuzzy;
/**
* Interface with the common methods of Fuzzy Unary Operator.
* @author Diego Catalano
*/
public interface IUnaryOperator {
/**
* Calculates the numerical result of a Unary operation applied to one fuzzy membership value.
* @param membership A fuzzy membership value, [0..1].
 * @return The numerical result of the operation applied to {@code membership}.
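 * <p>Illustrative implementation: a fuzzy NOT operator would simply return {@code 1 - membership}.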
*/<|fim▁hole|> float Evaluate( float membership );
}<|fim▁end|> |