prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>na_ontap_cluster_peer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# (c) 2018-2019, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
author: NetApp Ansible Team (@carchi8py) <[email protected]>
description:
- Create/Delete cluster peer relations on ONTAP
extends_documentation_fragment:
- netapp.na_ontap
module: na_ontap_cluster_peer
options:
state:
choices: ['present', 'absent']
description:
- Whether the specified cluster peer should exist or not.
default: present
source_intercluster_lifs:
description:
- List of intercluster addresses of the source cluster.
- Used as peer-addresses in destination cluster.
- All these intercluster lifs should belong to the source cluster.
version_added: "2.8"
aliases:
- source_intercluster_lif
dest_intercluster_lifs:
description:
- List of intercluster addresses of the destination cluster.
- Used as peer-addresses in source cluster.
- All these intercluster lifs should belong to the destination cluster.
version_added: "2.8"
aliases:
- dest_intercluster_lif
passphrase:
description:
- The arbitrary passphrase that matches the one given to the peer cluster.
source_cluster_name:
description:
- The name of the source cluster name in the peer relation to be deleted.
dest_cluster_name:
description:
- The name of the destination cluster name in the peer relation to be deleted.
- Required for delete
dest_hostname:
description:
- Destination cluster IP or hostname which needs to be peered
- Required to complete the peering process at destination cluster.
required: True
dest_username:
description:
- Destination username.
- Optional if this is same as source username.
dest_password:
description:
- Destination password.
- Optional if this is same as source password.
short_description: NetApp ONTAP Manage Cluster peering
version_added: "2.7"
'''
EXAMPLES = """
- name: Create cluster peer
na_ontap_cluster_peer:
state: present
source_intercluster_lifs: 1.2.3.4,1.2.3.5
dest_intercluster_lifs: 1.2.3.6,1.2.3.7
passphrase: XXXX
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
dest_hostname: "{{ dest_netapp_hostname }}"
- name: Delete cluster peer
na_ontap_cluster_peer:
state: absent
source_cluster_name: test-source-cluster
dest_cluster_name: test-dest-cluster
hostname: "{{ netapp_hostname }}"
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
dest_hostname: "{{ dest_netapp_hostname }}"
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_module import NetAppModule
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppONTAPClusterPeer(object):
"""
Class with cluster peer methods
"""
def __init__(self):
self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
self.argument_spec.update(dict(
state=dict(required=False, type='str', choices=['present', 'absent'], default='present'),
source_intercluster_lifs=dict(required=False, type='list', aliases=['source_intercluster_lif']),
dest_intercluster_lifs=dict(required=False, type='list', aliases=['dest_intercluster_lif']),
passphrase=dict(required=False, type='str', no_log=True),
dest_hostname=dict(required=True, type='str'),
dest_username=dict(required=False, type='str'),
dest_password=dict(required=False, type='str', no_log=True),
source_cluster_name=dict(required=False, type='str'),
dest_cluster_name=dict(required=False, type='str')
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
required_together=[['source_intercluster_lifs', 'dest_intercluster_lifs']],
required_if=[('state', 'absent', ['source_cluster_name', 'dest_cluster_name'])],
supports_check_mode=True
)
self.na_helper = NetAppModule()
self.parameters = self.na_helper.set_parameters(self.module.params)
if HAS_NETAPP_LIB is False:
self.module.fail_json(msg="the python NetApp-Lib module is required")
else:
self.server = netapp_utils.setup_na_ontap_zapi(module=self.module)
# set destination server connection
self.module.params['hostname'] = self.parameters['dest_hostname']
if self.parameters.get('dest_username'):
self.module.params['username'] = self.parameters['dest_username']
if self.parameters.get('dest_password'):
self.module.params['password'] = self.parameters['dest_password']
self.dest_server = netapp_utils.setup_na_ontap_zapi(module=self.module)
# reset to source host connection for asup logs
self.module.params['hostname'] = self.parameters['hostname']
def cluster_peer_get_iter(self, cluster):
"""
Compose NaElement object to query current source cluster using peer-cluster-name and peer-addresses parameters
:param cluster: type of cluster (source or destination)
:return: NaElement object for cluster-get-iter with query
"""
cluster_peer_get = netapp_utils.zapi.NaElement('cluster-peer-get-iter')
query = netapp_utils.zapi.NaElement('query')
cluster_peer_info = netapp_utils.zapi.NaElement('cluster-peer-info')
if cluster == 'source':
peer_lifs, peer_cluster = 'dest_intercluster_lifs', 'dest_cluster_name'
else:
peer_lifs, peer_cluster = 'source_intercluster_lifs', 'source_cluster_name'
if self.parameters.get(peer_lifs):
peer_addresses = netapp_utils.zapi.NaElement('peer-addresses')
for peer in self.parameters.get(peer_lifs):
peer_addresses.add_new_child('remote-inet-address', peer)
cluster_peer_info.add_child_elem(peer_addresses)
if self.parameters.get(peer_cluster):
cluster_peer_info.add_new_child('cluster-name', self.parameters[peer_cluster])
query.add_child_elem(cluster_peer_info)
cluster_peer_get.add_child_elem(query)
return cluster_peer_get
def cluster_peer_get(self, cluster):
"""
Get current cluster peer info
:param cluster: type of cluster (source or destination)
:return: Dictionary of current cluster peer details if query successful, else return None
"""<|fim▁hole|> if cluster == 'source':
server = self.server
else:
server = self.dest_server
try:
result = server.invoke_successfully(cluster_peer_get_iter, enable_tunneling=True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error fetching cluster peer %s: %s'
% (self.parameters['dest_cluster_name'], to_native(error)),
exception=traceback.format_exc())
# return cluster peer details
if result.get_child_by_name('num-records') and \
int(result.get_child_content('num-records')) >= 1:
cluster_peer_info = result.get_child_by_name('attributes-list').get_child_by_name('cluster-peer-info')
cluster_info['cluster_name'] = cluster_peer_info.get_child_content('cluster-name')
peers = cluster_peer_info.get_child_by_name('peer-addresses')
cluster_info['peer-addresses'] = [peer.get_content() for peer in peers.get_children()]
return cluster_info
return None
def cluster_peer_delete(self, cluster):
"""
Delete a cluster peer on source or destination
For source cluster, peer cluster-name = destination cluster name and vice-versa
:param cluster: type of cluster (source or destination)
:return:
"""
if cluster == 'source':
server, peer_cluster_name = self.server, self.parameters['dest_cluster_name']
else:
server, peer_cluster_name = self.dest_server, self.parameters['source_cluster_name']
cluster_peer_delete = netapp_utils.zapi.NaElement.create_node_with_children(
'cluster-peer-delete', **{'cluster-name': peer_cluster_name})
try:
server.invoke_successfully(cluster_peer_delete, enable_tunneling=True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error deleting cluster peer %s: %s'
% (peer_cluster_name, to_native(error)),
exception=traceback.format_exc())
def cluster_peer_create(self, cluster):
"""
Create a cluster peer on source or destination
For source cluster, peer addresses = destination inter-cluster LIFs and vice-versa
:param cluster: type of cluster (source or destination)
:return: None
"""
cluster_peer_create = netapp_utils.zapi.NaElement.create_node_with_children('cluster-peer-create')
if self.parameters.get('passphrase') is not None:
cluster_peer_create.add_new_child('passphrase', self.parameters['passphrase'])
peer_addresses = netapp_utils.zapi.NaElement('peer-addresses')
if cluster == 'source':
server, peer_address = self.server, self.parameters['dest_intercluster_lifs']
else:
server, peer_address = self.dest_server, self.parameters['source_intercluster_lifs']
for each in peer_address:
peer_addresses.add_new_child('remote-inet-address', each)
cluster_peer_create.add_child_elem(peer_addresses)
try:
server.invoke_successfully(cluster_peer_create, enable_tunneling=True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg='Error creating cluster peer %s: %s'
% (peer_address, to_native(error)),
exception=traceback.format_exc())
def apply(self):
"""
Apply action to cluster peer
:return: None
"""
self.asup_log_for_cserver("na_ontap_cluster_peer")
source = self.cluster_peer_get('source')
destination = self.cluster_peer_get('destination')
source_action = self.na_helper.get_cd_action(source, self.parameters)
destination_action = self.na_helper.get_cd_action(destination, self.parameters)
self.na_helper.changed = False
# create only if expected cluster peer relation is not present on both source and destination clusters
if source_action == 'create' and destination_action == 'create':
self.cluster_peer_create('source')
self.cluster_peer_create('destination')
self.na_helper.changed = True
# delete peer relation in cluster where relation is present
else:
if source_action == 'delete':
self.cluster_peer_delete('source')
self.na_helper.changed = True
if destination_action == 'delete':
self.cluster_peer_delete('destination')
self.na_helper.changed = True
self.module.exit_json(changed=self.na_helper.changed)
def asup_log_for_cserver(self, event_name):
"""
Fetch admin vserver for the given cluster
Create and Autosupport log event with the given module name
:param event_name: Name of the event log
:return: None
"""
results = netapp_utils.get_cserver(self.server)
cserver = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=results)
netapp_utils.ems_log_event(event_name, cserver)
def main():
"""
Execute action
:return: None
"""
community_obj = NetAppONTAPClusterPeer()
community_obj.apply()
if __name__ == '__main__':
main()<|fim▁end|>
|
cluster_peer_get_iter = self.cluster_peer_get_iter(cluster)
result, cluster_info = None, dict()
|
<|file_name|>PastebinCom.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
############################################################################
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
############################################################################
from module.plugins.internal.SimpleCrypter import SimpleCrypter
<|fim▁hole|>
class PastebinCom(SimpleCrypter):
__name__ = "PastebinCom"
__type__ = "crypter"
__pattern__ = r"http://(?:w{3}.)?pastebin\.com/\w+"
__version__ = "0.01"
__description__ = """Pastebin.com Plugin"""
__author_name__ = ("stickell")
__author_mail__ = ("[email protected]")
LINK_PATTERN = r'<div class="de\d+">(https?://[^ <]+)(?:[^<]*)</div>'
TITLE_PATTERN = r'<div class="paste_box_line1" title="(?P<title>[^"]+)">'<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Author: echel0n <[email protected]>
# URL: https://sickrage.ca
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
class SickRageException(Exception):
"""
Generic SiCKRAGE Exception - should never be thrown, only sub-classed
"""
class AuthException(SickRageException):
"""
Your authentication information are incorrect
"""
class CantRefreshShowException(SickRageException):
"""
The show can't be refreshed right now
"""
class CantRemoveShowException(SickRageException):
"""
The show can't removed right now
"""
class CantUpdateShowException(SickRageException):
"""
The show can't be updated right now
"""
class EpisodeDeletedException(SickRageException):
"""
This episode has been deleted
"""
<|fim▁hole|>class EpisodeNotFoundException(SickRageException):
"""
The episode wasn't found on the Indexer
"""
class EpisodePostProcessingFailedException(SickRageException):
"""
The episode post-processing failed
"""
class EpisodeDirectoryNotFoundException(SickRageException):
"""
The episode directory was not found
"""
class FailedPostProcessingFailedException(SickRageException):
"""
The failed post-processing failed
"""
class MultipleEpisodesInDatabaseException(SickRageException):
"""
Multiple episodes were found in the database! The database must be fixed first
"""
class MultipleShowsInDatabaseException(SickRageException):
"""
Multiple shows were found in the database! The database must be fixed first
"""
class MultipleShowObjectsException(SickRageException):
"""
Multiple objects for the same show were found! Something is very wrong
"""
class NoNFOException(SickRageException):
"""
No NFO was found
"""
class ShowNotFoundException(SickRageException):
"""
The show wasn't found on the Indexer
"""<|fim▁end|>
| |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
sentry.db.models
~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
from copy import copy
import logging
import six
from bitfield.types import BitHandler
from django.db import models
from django.db.models import signals
from django.db.models.query_utils import DeferredAttribute
from django.utils import timezone
from .fields.bounded import BoundedBigAutoField
from .manager import BaseManager
from .query import update
__all__ = ('BaseModel', 'Model', 'sane_repr')
UNSAVED = object()
DEFERRED = object()
def sane_repr(*attrs):
if 'id' not in attrs and 'pk' not in attrs:
attrs = ('id', ) + attrs
def _repr(self):
cls = type(self).__name__
pairs = ('%s=%s' % (a, repr(getattr(self, a, None))) for a in attrs)
return u'<%s at 0x%x: %s>' % (cls, id(self), ', '.join(pairs))
return _repr
class BaseModel(models.Model):
class Meta:
abstract = True
objects = BaseManager()
update = update
def __init__(self, *args, **kwargs):
super(BaseModel, self).__init__(*args, **kwargs)
self._update_tracked_data()
def __getstate__(self):
d = self.__dict__.copy()
# we cant serialize weakrefs
d.pop('_Model__data', None)
return d
def __hash__(self):
# Django decided that it shouldnt let us hash objects even though they have
# memory addresses. We need that behavior, so let's revert.
if self.pk:
return models.Model.__hash__(self)
return id(self)
def __reduce__(self):
(model_unpickle, stuff, _) = super(BaseModel, self).__reduce__()
return (model_unpickle, stuff, self.__getstate__())
def __setstate__(self, state):
self.__dict__.update(state)
self._update_tracked_data()
def __get_field_value(self, field):
if isinstance(type(field).__dict__.get(field.attname), DeferredAttribute):
return DEFERRED
if isinstance(field, models.ForeignKey):
return getattr(self, field.column, None)
return getattr(self, field.attname, None)
def _update_tracked_data(self):
"Updates a local copy of attributes values"
if self.id:
data = {}
for f in self._meta.fields:
# XXX(dcramer): this is how Django determines this (copypasta from Model)
if isinstance(type(f).__dict__.get(f.attname),
DeferredAttribute) or f.column is None:
continue
try:
v = self.__get_field_value(f)
except AttributeError as e:
# this case can come up from pickling
logging.exception(six.text_type(e))
else:
if isinstance(v, BitHandler):
v = copy(v)
data[f.column] = v
self.__data = data
else:
self.__data = UNSAVED
def _update_timestamps(self):
if hasattr(self, 'date_updated'):
self.date_updated = timezone.now()
def has_changed(self, field_name):
"Returns ``True`` if ``field`` has changed since initialization."
if self.__data is UNSAVED:
return False
field = self._meta.get_field(field_name)
value = self.__get_field_value(field)
if value is DEFERRED:
return False
return self.__data.get(field_name) != value
def old_value(self, field_name):
"Returns the previous value of ``field``"
if self.__data is UNSAVED:
return None
value = self.__data.get(field_name)
if value is DEFERRED:<|fim▁hole|>class Model(BaseModel):
id = BoundedBigAutoField(primary_key=True)
class Meta:
abstract = True
__repr__ = sane_repr('id')
def __model_post_save(instance, **kwargs):
if not isinstance(instance, BaseModel):
return
instance._update_tracked_data()
def __model_pre_save(instance, **kwargs):
if not isinstance(instance, BaseModel):
return
instance._update_timestamps()
def __model_class_prepared(sender, **kwargs):
if not issubclass(sender, BaseModel):
return
if not hasattr(sender, '__core__'):
raise ValueError('{!r} model has not defined __core__'.format(sender))
signals.pre_save.connect(__model_pre_save)
signals.post_save.connect(__model_post_save)
signals.class_prepared.connect(__model_class_prepared)<|fim▁end|>
|
return None
return self.__data.get(field_name)
|
<|file_name|>salutations.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2006, The Dojo Foundation
All Rights Reserved.
Licensed under the Academic Free License version 2.1 or above OR the
modified BSD license. For more information on Dojo licensing, see:
http://dojotoolkit.org/community/licensing.shtml
*/<|fim▁hole|> hello: "Aloha"
}<|fim▁end|>
|
{
|
<|file_name|>0002_auto__chg_field_message_updated.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Message.updated'
db.alter_column(u'mailer_message', 'updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True))
def backwards(self, orm):
<|fim▁hole|> models = {
u'mailer.message': {
'Meta': {'object_name': 'Message'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_data': ('django.db.models.fields.TextField', [], {}),
'priority': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'recipients': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'subject': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['mailer']<|fim▁end|>
|
# Changing field 'Message.updated'
db.alter_column(u'mailer_message', 'updated', self.gf('django.db.models.fields.DateTimeField')())
|
<|file_name|>bravo_mailer.py<|end_file_name|><|fim▁begin|>import smtplib
import argparse
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
def main(args):
# Allow HTML-formatted emails (very simplistic atm, should be expanded if used)
msg = MIMEMultipart("alternative")
if args["body"].startswith("<html>", 0, 10):<|fim▁hole|> msg.attach(MIMEText(args["body"],"html"))
else:
msg.attach(MIMEText(args["body"],"plain"))
msg["Subject"] = args["sub"]
msg["From"] = args["from"]
msg["To"] = args["to"]
s = smtplib.SMTP(args["smtp"])
# If authentication is required:
# s.starttls()
# s.login(user, pass)
s.sendmail(args["from"], [args["to"]], msg.as_string())
s.quit()
if __name__ == "__main__":
p = argparse.ArgumentParser(description="Send an email")
p.add_argument("--to", "-t", required=True, help="To address")
p.add_argument("--from", "-f", required=True, help="From address")
p.add_argument("--sub", "-s", required=True, help="Subject")
p.add_argument("--body", "-b", required=True, help="Message body")
p.add_argument("--smtp", default="localhost", help="SMTP server")
args = p.parse_args()
main(vars(args))<|fim▁end|>
| |
<|file_name|>load.go<|end_file_name|><|fim▁begin|>package fsmonitor
import (
"context"
"errors"
log "gopkg.in/inconshreveable/log15.v2"
"github.com/lxc/lxd/lxd/fsmonitor/drivers"
"github.com/lxc/lxd/lxd/storage/filesystem"
"github.com/lxc/lxd/shared/logger"
"github.com/lxc/lxd/shared/logging"
)
// New creates a new FSMonitor instance.
func New(ctx context.Context, path string) (FSMonitor, error) {
startMonitor := func(driverName string) (drivers.Driver, logger.Logger, error) {
logger := logging.AddContext(logger.Log, log.Ctx{"driver": driverName})
driver, err := drivers.Load(ctx, logger, driverName, path)
if err != nil {
return nil, nil, err
}
return driver, logger, nil<|fim▁hole|> return nil, errors.New("Path needs to be a mountpoint")
}
driver, monLogger, err := startMonitor("fanotify")
if err != nil {
logger.Warn("Failed to initialize fanotify, falling back on fsnotify", log.Ctx{"err": err})
driver, monLogger, err = startMonitor("fsnotify")
if err != nil {
return nil, err
}
}
logger.Debug("Initialized filesystem monitor", log.Ctx{"path": path})
monitor := fsMonitor{
driver: driver,
logger: monLogger,
}
return &monitor, nil
}<|fim▁end|>
|
}
if !filesystem.IsMountPoint(path) {
|
<|file_name|>ExamConflictStatisticsInfo.java<|end_file_name|><|fim▁begin|>/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.solver.exam.ui;
import java.io.PrintWriter;
import java.io.Serializable;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.Vector;
import javax.servlet.jsp.JspWriter;
import org.cpsolver.exam.model.Exam;
import org.cpsolver.exam.model.ExamDistributionConstraint;
import org.cpsolver.exam.model.ExamInstructor;
import org.cpsolver.exam.model.ExamPlacement;
import org.cpsolver.exam.model.ExamRoom;
import org.cpsolver.exam.model.ExamRoomPlacement;
import org.cpsolver.exam.model.ExamStudent;
import org.cpsolver.ifs.extension.AssignedValue;
import org.cpsolver.ifs.extension.ConflictStatistics;
import org.cpsolver.ifs.model.Constraint;
import org.dom4j.Element;
import org.unitime.timetable.model.PreferenceLevel;
import org.unitime.timetable.solver.ui.TimetableInfo;
import org.unitime.timetable.webutil.timegrid.ExamGridTable;
/**
* @author Tomas Muller
*/
public class ExamConflictStatisticsInfo implements TimetableInfo, Serializable {
private static final long serialVersionUID = 7L;
public static int sVersion = 7; // to be able to do some changes in the future
public static final int sConstraintTypeRoom = 1;
public static final int sConstraintTypeInstructor = 2;
public static final int sConstraintTypeGroup = 3;
public static final int sConstraintTypeStudent = 4;
private Hashtable iVariables = new Hashtable();
public Collection getCBS() { return iVariables.values(); }
public CBSVariable getCBS(Long classId) { return (CBSVariable)iVariables.get(classId); }
public void load(ConflictStatistics cbs) {
load(cbs, null);
}
public ExamConflictStatisticsInfo getConflictStatisticsSubInfo(Vector variables) {
ExamConflictStatisticsInfo ret = new ExamConflictStatisticsInfo();
for (Enumeration e=variables.elements();e.hasMoreElements();) {
Exam exam = (Exam)e.nextElement();
CBSVariable var = (CBSVariable)iVariables.get(exam.getId());
if (var!=null)
ret.iVariables.put(exam.getId(),var);
}
return ret;
}
public void merge(ExamConflictStatisticsInfo info) {
if (info!=null) iVariables.putAll(info.iVariables);
}
public void load(ConflictStatistics cbs, Long examId) {
iVariables.clear();
for (Iterator i1=cbs.getNoGoods().entrySet().iterator();i1.hasNext();) {
Map.Entry entry = (Map.Entry)i1.next();
AssignedValue assignment = (AssignedValue)entry.getKey();
ExamPlacement placement = (ExamPlacement)assignment.getValue();
Exam exam = (Exam)placement.variable();
if (examId!=null && !examId.equals(exam.getId())) continue;
CBSVariable var = (CBSVariable)iVariables.get(exam.getId());
if (var==null) {
String pref = PreferenceLevel.sNeutral;//SolverGridModel.hardConflicts2pref(exam,null);
var = new CBSVariable(exam.getId(),exam.getName(),pref);
iVariables.put(exam.getId(),var);
}
Vector roomIds = new Vector();
Vector roomNames = new Vector();
Vector roomPrefs = new Vector();
for (Iterator i=new TreeSet(placement.getRoomPlacements()).iterator();i.hasNext();) {
ExamRoomPlacement room = (ExamRoomPlacement)i.next();
roomIds.add(room.getId());
roomNames.add(room.getName());
roomPrefs.add(exam.getRoomPlacements().size()==placement.getRoomPlacements().size()?PreferenceLevel.sIntLevelRequired:room.getPenalty(placement.getPeriod()));
}
CBSValue val = new CBSValue(var,
placement.getPeriod().getId(),
placement.getPeriod().getDayStr()+" "+placement.getPeriod().getTimeStr(),
(exam.getPeriodPlacements().size()==1?PreferenceLevel.sIntLevelRequired:placement.getPeriodPlacement().getPenalty()),
roomIds, roomNames, roomPrefs);
var.values().add(val);
List noGoods = (List)entry.getValue();
Hashtable constr2assignments = new Hashtable();
for (Iterator e2=noGoods.iterator();e2.hasNext();) {
AssignedValue noGood = (AssignedValue)e2.next();
if (noGood.getConstraint()==null) continue;
Vector aaa = (Vector)constr2assignments.get(noGood.getConstraint());
if (aaa == null) {
aaa = new Vector();
constr2assignments.put(noGood.getConstraint(), aaa);
}
aaa.addElement(noGood);
}
for (Iterator i2=constr2assignments.entrySet().iterator();i2.hasNext();) {
Map.Entry entry2 = (Map.Entry)i2.next();
Constraint constraint = (Constraint)entry2.getKey();
Vector noGoodsThisConstraint = (Vector)entry2.getValue();
CBSConstraint con = null;
if (constraint instanceof ExamRoom) {
con = new CBSConstraint(val, sConstraintTypeRoom, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired);
} else if (constraint instanceof ExamInstructor) {
con = new CBSConstraint(val, sConstraintTypeInstructor, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired);
} else if (constraint instanceof ExamStudent) {
con = new CBSConstraint(val, sConstraintTypeStudent, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired);
} else if (constraint instanceof ExamDistributionConstraint) {
con = new CBSConstraint(val, sConstraintTypeGroup, constraint.getId(), ((ExamDistributionConstraint)constraint).getTypeString(), (constraint.isHard()?PreferenceLevel.sRequired:PreferenceLevel.int2prolog(((ExamDistributionConstraint)constraint).getWeight())));
} else {
con = new CBSConstraint(val, -1, constraint.getId(), constraint.getName(), PreferenceLevel.sRequired);
}
val.constraints().add(con);
for (Enumeration e3=noGoodsThisConstraint.elements();e3.hasMoreElements();) {
AssignedValue ass = (AssignedValue)e3.nextElement();
ExamPlacement p = (ExamPlacement)ass.getValue();
Exam x = (Exam)p.variable();
String pr = PreferenceLevel.sNeutral;//SolverGridModel.hardConflicts2pref(x,p);
Vector aroomIds = new Vector();
Vector aroomNames = new Vector();
Vector aroomPrefs = new Vector();
for (Iterator i=new TreeSet(p.getRoomPlacements()).iterator();i.hasNext();) {
ExamRoomPlacement room = (ExamRoomPlacement)i.next();
aroomIds.add(room.getId());
aroomNames.add(room.getName());
aroomPrefs.add(x.getRoomPlacements().size()==p.getRoomPlacements().size()?PreferenceLevel.sIntLevelRequired:room.getPenalty(p.getPeriod()));
}
CBSAssignment a = new CBSAssignment(con,
x.getId(),
x.getName(),
pr,
p.getPeriod().getId(),
p.getPeriod().getDayStr()+" "+p.getPeriod().getTimeStr(),
(x.getPeriodPlacements().size()==1?PreferenceLevel.sIntLevelRequired:p.getPeriodPlacement().getPenalty()),
aroomIds,
aroomNames,
aroomPrefs);
con.assignments().add(a);
a.incCounter((int)ass.getCounter(0));
}
}
}
}
public void load(Element root) {
int version = Integer.parseInt(root.attributeValue("version"));
if (version==sVersion) {
iVariables.clear();
for (Iterator i1=root.elementIterator("var");i1.hasNext();) {
CBSVariable var = new CBSVariable((Element)i1.next());
iVariables.put(Long.valueOf(var.getId()),var);
}
}
}
public void save(Element root) {
root.addAttribute("version", String.valueOf(sVersion));
for (Iterator i1=iVariables.values().iterator();i1.hasNext();) {
((CBSVariable)i1.next()).save(root.addElement("var"));
}
}
public static interface Counter {
public int getCounter();
public void incCounter(int value);
}
/**
 * Variable node of the conflict-based statistics tree: one examination.
 * Holds child {@link CBSValue} nodes (assignments of this exam) and, in the
 * constraint-oriented tree, a reference to its parent constraint.
 * Serializable and XML round-trippable via {@link #save(Element)}.
 */
public static class CBSVariable implements Counter, Comparable, Serializable {
    private static final long serialVersionUID = 1L;
    int iCounter = 0;                    // accumulated conflict count (sum of children)
    long iExamId;                        // examination unique id
    String iName;                        // examination display name
    HashSet iValues = new HashSet();     // child CBSValue nodes
    CBSConstraint iConstraint = null;    // parent constraint (constraint-oriented tree only)
    String iPref = null;                 // preference code, used for HTML coloring

    /** Top-level variable node (variable-oriented tree). */
    CBSVariable(long examId, String name, String pref) {
        iExamId = examId;
        iName = name;
        iPref = pref;
    }

    /**
     * Variable node nested under a constraint (constraint-oriented tree).
     * NOTE(review): the parameter names appear swapped -- callers (see the
     * constraint-based branch of printHtml) pass the exam id as {@code classId}
     * and the exam name as {@code examId}; behavior is correct, names mislead.
     */
    CBSVariable(CBSConstraint constraint, long classId, String examId, String pref) {
        iConstraint = constraint;
        iExamId = classId;
        iName = examId;
        iPref = pref;
    }

    /** Restore a variable node (and its values) from its XML element. */
    CBSVariable(Element element) {
        iExamId = Long.parseLong(element.attributeValue("exam"));
        iName = element.attributeValue("name");
        iPref = element.attributeValue("pref");
        for (Iterator i = element.elementIterator("val"); i.hasNext();)
            iValues.add(new CBSValue(this, (Element) i.next()));
    }

    public long getId() { return iExamId; }
    public int getCounter() { return iCounter; }
    public String getName() { return iName; }
    public String getPref() { return iPref; }

    /** Increment this node's counter and propagate to the parent constraint, if any. */
    public void incCounter(int value) {
        iCounter += value;
        if (iConstraint != null) iConstraint.incCounter(value);
    }

    /** Child CBSValue nodes. */
    public Set values() { return iValues; }

    public int hashCode() {
        return (Long.valueOf(iExamId)).hashCode();
    }

    /** Equal iff the exam ids match (consistent with hashCode). */
    public boolean equals(Object o) {
        if (o == null || !(o instanceof CBSVariable)) return false;
        return ((CBSVariable) o).getId() == getId();
    }

    /** Order by descending counter, ties broken by name. */
    public int compareTo(Object o) {
        if (o == null || !(o instanceof CBSVariable)) return -1;
        int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSVariable) o).getCounter()));
        if (ret != 0) return ret;
        return toString().compareTo(o.toString());
    }

    public String toString() {
        return iName;
    }

    /** Serialize this node (and its values) into the given XML element. */
    public void save(Element element) {
        element.addAttribute("exam", String.valueOf(iExamId));
        element.addAttribute("name", iName);
        if (iPref != null)
            element.addAttribute("pref", iPref);
        for (Iterator i = iValues.iterator(); i.hasNext();)
            ((CBSValue) i.next()).save(element.addElement("val"));
    }
}
/**
 * Value node of the conflict-based statistics tree: one assignment of an exam
 * (a period plus a list of rooms). Holds child constraint nodes
 * (variable-oriented tree) or conflicting assignments (constraint-oriented tree).
 */
public static class CBSValue implements Counter, Comparable, Serializable {
    private static final long serialVersionUID = 1L;
    int iCounter = 0;                      // accumulated conflict count
    Long iPeriodId;                        // examination period id
    String iPeriodName;                    // period display name
    int iPeriodPref;                       // period preference (int level), used for coloring
    Vector iRoomIds;                       // room ids (parallel to iRoomNames/iRoomPrefs)
    String iInstructorName = null;         // NOTE(review): never read or written in this file
    Vector iRoomNames;
    Vector iRoomPrefs;
    CBSVariable iVariable = null;          // parent variable node
    HashSet iConstraints = new HashSet();  // child CBSConstraint nodes
    HashSet iAssignments = new HashSet();  // conflicting CBSAssignment nodes (constraint-oriented tree)
    int iLength;                           // NOTE(review): never read or written in this file

    /** Create a value node for the given parent variable and assignment data. */
    CBSValue(CBSVariable var, Long periodId, String periodName, int periodPref, Vector roomIds, Vector roomNames, Vector roomPrefs) {
        iVariable = var; iRoomIds = roomIds; iRoomNames = roomNames; iRoomPrefs = roomPrefs;
        iPeriodId = periodId; iPeriodName = periodName; iPeriodPref = periodPref;
    }

    /** Restore a value node (and its constraints) from its XML element. */
    CBSValue(CBSVariable var, Element element) {
        iVariable = var;
        iPeriodId = Long.valueOf(element.attributeValue("period"));
        iPeriodName = element.attributeValue("name");
        iPeriodPref = Integer.parseInt(element.attributeValue("pref"));
        iRoomIds = new Vector();
        iRoomNames = new Vector();
        iRoomPrefs = new Vector();
        for (Iterator i = element.elementIterator("room"); i.hasNext();) {
            Element r = (Element) i.next();
            iRoomIds.addElement(Integer.valueOf(r.attributeValue("id")));
            iRoomNames.addElement(r.attributeValue("name"));
            iRoomPrefs.addElement(Integer.valueOf(r.attributeValue("pref")));
        }
        for (Iterator i = element.elementIterator("cons"); i.hasNext();)
            iConstraints.add(new CBSConstraint(this, (Element) i.next()));
    }

    public CBSVariable variable() { return iVariable; }
    public Long getPeriodId() { return iPeriodId; }
    public String getPeriodName() { return iPeriodName; }
    public int getPeriodPref() { return iPeriodPref; }
    public Vector getRoomNames() { return iRoomNames; }
    public Vector getRoomPrefs() { return iRoomPrefs; }

    public String toString() {
        return iPeriodName + " " + iRoomNames;
    }

    public int getCounter() { return iCounter; }

    /** Increment this node's counter and propagate to the parent variable, if any. */
    public void incCounter(int value) {
        iCounter += value;
        if (iVariable != null) iVariable.incCounter(value);
    }

    public Vector getRoomIds() {
        return iRoomIds;
    }

    public Set constraints() { return iConstraints; }
    public Set assignments() { return iAssignments; }

    public int hashCode() {
        return combine(iPeriodId.hashCode(), (iRoomIds == null ? 0 : iRoomIds.hashCode()));
    }

    /** Equal iff both the period and the room list match (consistent with hashCode). */
    public boolean equals(Object o) {
        if (o == null || !(o instanceof CBSValue)) return false;
        CBSValue v = (CBSValue) o;
        return v.getRoomIds().equals(getRoomIds()) && v.getPeriodId().equals(getPeriodId());
    }

    /** Order by descending counter, ties broken by string representation. */
    public int compareTo(Object o) {
        if (o == null || !(o instanceof CBSValue)) return -1;
        int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSValue) o).getCounter()));
        if (ret != 0) return ret;
        return toString().compareTo(o.toString());
    }

    /** Serialize this node (and its constraints) into the given XML element. */
    public void save(Element element) {
        element.addAttribute("period", String.valueOf(iPeriodId));
        element.addAttribute("pref", String.valueOf(iPeriodPref));
        element.addAttribute("name", iPeriodName);
        for (int i = 0; i < iRoomIds.size(); i++) {
            Element r = element.addElement("room");
            r.addAttribute("id", iRoomIds.elementAt(i).toString());
            r.addAttribute("name", iRoomNames.elementAt(i).toString());
            r.addAttribute("pref", iRoomPrefs.elementAt(i).toString());
        }
        for (Iterator i = iConstraints.iterator(); i.hasNext();)
            ((CBSConstraint) i.next()).save(element.addElement("cons"));
    }
}
/**
 * Constraint node of the conflict-based statistics tree (distribution,
 * instructor, room or student conflict). Holds conflicting assignments
 * ("nogoods") and, in the constraint-oriented tree, child variable nodes.
 */
public static class CBSConstraint implements Counter, Comparable, Serializable {
    private static final long serialVersionUID = 1L;
    CBSValue iValue;                        // parent value (variable-oriented tree), null at top level
    int iCounter = 0;                       // accumulated conflict count
    long iId;                               // constraint unique id
    String iName = null;                    // constraint display name (may be null)
    int iType;                              // one of the sConstraintType* codes
    HashSet iAssignments = new HashSet();   // conflicting CBSAssignment nodes ("nogoods")
    HashSet iVariables = new HashSet();     // child CBSVariable nodes (constraint-oriented tree)
    String iPref;                           // preference code, used for HTML coloring

    /** Top-level constraint node (constraint-oriented tree). */
    CBSConstraint(int type, long id, String name, String pref) {
        iId = id;
        iType = type;
        iName = name;
        iPref = pref;
    }

    /** Constraint node nested under a value (variable-oriented tree). */
    CBSConstraint(CBSValue value, int type, long id, String name, String pref) {
        iId = id;
        iType = type;
        iValue = value;
        iName = name;
        iPref = pref;
    }

    /** Restore a constraint node (and its nogoods) from its XML element. */
    CBSConstraint(CBSValue value, Element element) {
        iValue = value;
        // BUG FIX: iId is a long and is saved with String.valueOf(long), but was
        // read back with Integer.parseInt, which throws/overflows for ids beyond
        // the int range. Parse as long, consistent with CBSVariable("exam").
        iId = Long.parseLong(element.attributeValue("id"));
        iType = Integer.parseInt(element.attributeValue("type"));
        iName = element.attributeValue("name");
        iPref = element.attributeValue("pref");
        for (Iterator i = element.elementIterator("nogood"); i.hasNext();)
            iAssignments.add(new CBSAssignment(this, (Element) i.next()));
    }

    public long getId() { return iId; }
    public int getType() { return iType; }
    public String getName() { return iName; }
    public CBSValue value() { return iValue; }
    public Set variables() { return iVariables; }
    public Set assignments() { return iAssignments; }
    public String getPref() { return iPref; }
    public int getCounter() { return iCounter; }

    /** Increment this node's counter and propagate to the parent value, if any. */
    public void incCounter(int value) {
        iCounter += value;
        if (iValue != null) iValue.incCounter(value);
    }

    public int hashCode() {
        return combine((int) iId, iType);
    }

    /** Equal iff both id and type match (consistent with hashCode). */
    public boolean equals(Object o) {
        if (o == null || !(o instanceof CBSConstraint)) return false;
        CBSConstraint c = (CBSConstraint) o;
        return c.getId() == getId() && c.getType() == getType();
    }

    /** Order by descending counter, ties broken by string representation. */
    public int compareTo(Object o) {
        if (o == null || !(o instanceof CBSConstraint)) return -1;
        int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSConstraint) o).getCounter()));
        if (ret != 0) return ret;
        return toString().compareTo(o.toString());
    }

    /** Serialize this node (and its nogoods) into the given XML element. */
    public void save(Element element) {
        element.addAttribute("id", String.valueOf(iId));
        element.addAttribute("type", String.valueOf(iType));
        if (iName != null)
            element.addAttribute("name", iName);
        if (iPref != null)
            element.addAttribute("pref", iPref);
        for (Iterator i = iAssignments.iterator(); i.hasNext();)
            ((CBSAssignment) i.next()).save(element.addElement("nogood"));
    }
}
public static class CBSAssignment implements Counter, Comparable, Serializable {
private static final long serialVersionUID = 1L;
CBSConstraint iConstraint;
Long iExamId;
String iExamName;
String iExamPref;
Long iPeriodId;
String iPeriodName;
int iPeriodPref;
int iCounter = 0;
Vector iRoomIds;
Vector iRoomPrefs;
Vector iRoomNames;
CBSAssignment(CBSConstraint constraint, Long examId, String examName, String examPref, Long periodId, String periodName, int periodPref, Vector roomIds, Vector roomNames, Vector roomPrefs) {
iExamId = examId; iExamName = examName; iExamPref = examPref;
iPeriodId = periodId; iPeriodName = periodName; iPeriodPref = periodPref;
iRoomIds = roomIds; iRoomNames = roomNames; iRoomPrefs = roomPrefs;
iConstraint = constraint;
}
CBSAssignment(CBSConstraint constraint, Element element) {
iConstraint = constraint;
iExamId = Long.valueOf(element.attributeValue("exam"));
iExamName = element.attributeValue("name");
iExamPref = element.attributeValue("pref");
iRoomIds = new Vector();
iRoomNames = new Vector();
iRoomPrefs = new Vector();
for (Iterator i=element.elementIterator("room");i.hasNext();) {
Element r = (Element)i.next();
iRoomIds.addElement(Integer.valueOf(r.attributeValue("id")));
iRoomNames.addElement(r.attributeValue("name"));
iRoomPrefs.addElement(Integer.valueOf(r.attributeValue("pref")));
}
iPeriodId = Long.valueOf(element.attributeValue("period"));
iPeriodName = element.attributeValue("periodName");
iPeriodPref = Integer.parseInt(element.attributeValue("periodPref"));
incCounter(Integer.parseInt(element.attributeValue("cnt")));
}
public Long getId() { return iExamId; }
public CBSConstraint getConstraint() { return iConstraint; }
public String getName() { return iExamName; }
public String getPref() { return iExamPref; }
public Long getPeriodId() { return iPeriodId; }
public String getPeriodName() { return iPeriodName; }
public int getPeriodPref() { return iPeriodPref; }<|fim▁hole|> public Vector getRoomIds() {
return iRoomIds;
}
public Vector getRoomPrefs() { return iRoomPrefs; }
public int hashCode() {
return combine(iExamId.hashCode(),combine(iRoomIds.hashCode(),iPeriodId.hashCode()));
}
public int getCounter() { return iCounter; }
public void incCounter(int value) {
iCounter+=value;
if (iConstraint!=null) iConstraint.incCounter(value);
}
public boolean equals(Object o) {
if (o==null || !(o instanceof CBSAssignment)) return false;
CBSAssignment a = (CBSAssignment)o;
return a.getId().equals(getId()) && a.getRoomIds().equals(getRoomIds()) && a.getPeriodId().equals(getPeriodId());
}
public int compareTo(Object o) {
if (o==null || !(o instanceof CBSAssignment)) return -1;
int ret = -(Integer.valueOf(iCounter)).compareTo(Integer.valueOf(((CBSAssignment)o).getCounter()));
if (ret!=0) return ret;
return toString().compareTo(o.toString());
}
public void save(Element element) {
element.addAttribute("exam",String.valueOf(iExamId));
element.addAttribute("name",iExamName);
element.addAttribute("pref",iExamPref);
for (int i=0;i<iRoomIds.size();i++) {
Element r = element.addElement("room");
r.addAttribute("id",iRoomIds.elementAt(i).toString());
r.addAttribute("name",iRoomNames.elementAt(i).toString());
r.addAttribute("pref",iRoomPrefs.elementAt(i).toString());
}
element.addAttribute("period", String.valueOf(iPeriodId));
element.addAttribute("periodName", iPeriodName);
element.addAttribute("periodPref", String.valueOf(iPeriodPref));
element.addAttribute("cnt", String.valueOf(iCounter));
}
}
/**
 * Hash-combine two ints by interleaving their low 15 bits:
 * bit i of {@code a} lands at position 2i, bit i of {@code b} at position 2i+1.
 */
private static int combine(int a, int b) {
    int hash = 0;
    for (int bit = 0; bit < 15; bit++) {
        int mask = 1 << bit;
        hash |= (a & mask) << bit;
        hash |= (b & mask) << (bit + 1);
    }
    return hash;
}
//--------- toHtml -------------------------------------------------
// Icon paths for the collapsible HTML tree. Declared final: these are
// constants and were previously mutable public/private statics.
private static final String IMG_BASE = "images/";
private static final String IMG_EXPAND = IMG_BASE + "expand_node_btn.gif";
private static final String IMG_COLLAPSE = IMG_BASE + "collapse_node_btn.gif";
private static final String IMG_LEAF = IMG_BASE + "end_node_btn.gif";
/** Tree oriented by variables (exams) at the top level. */
public static final int TYPE_VARIABLE_BASED = 0;
/** Tree oriented by constraints at the top level. */
public static final int TYPE_CONSTRAINT_BASED = 1;
/**
 * Emit one collapsible tree node: the expand/collapse image, the (optionally
 * clickable) label, and the OPENING div of the children container.
 * Must be closed later with a matching {@link #end_item(PrintWriter)}.
 *
 * @param id          unique menu id, used to build the __idMenu*/__idMenuDiv* element ids
 * @param name        HTML label of the node
 * @param description optional tooltip/suffix text (may be null)
 * @param page        optional href/onclick attribute string (may be null)
 * @param isCollapsed initial state of the children container
 */
private void menu_item(PrintWriter out, String id, String name, String description, String page, boolean isCollapsed) {
    out.println("<div style=\"margin-left:5px;\">");
    out.println("<A style=\"border:0;background:0\" id=\"__idMenu"+id+"\" href=\"javascript:toggle('"+id+"')\" name=\""+name+"\">");
    out.println("<img id=\"__idMenuImg"+id+"\" border=\"0\" src=\""+(isCollapsed ? IMG_EXPAND : IMG_COLLAPSE)+"\" align=\"absmiddle\"></A>");
    out.println(" <A class='noFancyLinks' target=\"__idContentFrame\" "+(page == null ? "" : page+" onmouseover=\"this.style.cursor='hand';this.style.cursor='pointer';\" ")+"title=\""+(description == null ? "" : description)+"\" >"+ name+(description == null?"":" <font color='gray'>[" + description + "]</font>")+"</A><br>");
    out.println("</div>");
    // Children container; left open on purpose -- closed by end_item().
    out.println("<div ID=\"__idMenuDiv"+id+"\" style=\"display:"+(isCollapsed ? "none" : "block")+";position:relative;margin-left:18px;\">");
}
/**
 * Emit one leaf node of the tree (no children container): leaf image plus an
 * optionally clickable label.
 *
 * @param name        HTML label of the leaf
 * @param description optional tooltip/suffix text (may be null)
 * @param page        optional href/onclick attribute string (may be null)
 */
private void leaf_item(PrintWriter out, String name, String description, String page) {
    out.println("<div style=\"margin-left:5px;\">");
    out.println("<img border=\"0\" src=\""+IMG_LEAF+"\" align=\"absmiddle\">");
    out.println(" <A class='noFancyLinks' target=\"__idContentFrame\" "+(page == null ? "" : page + " onmouseover=\"this.style.cursor='hand';this.style.cursor='pointer';\" ")+"title=\""+(description == null ? "" : description)+"\" >"+name+(description == null ? "" : " <font color='gray'>[" + description + "]</font>")+"</A><br>");
    out.println("</div>");
}
/** Close the children container opened by {@link #menu_item}. */
private void end_item(PrintWriter out) {
    out.println("</div>");
}
/**
 * Emit a collapsible node for an exam (variable), colored by its preference and
 * prefixed with its conflict counter; optionally clickable to open the
 * examination assignment dialog.
 */
private void unassignedVariableMenuItem(PrintWriter out, String menuId, CBSVariable variable, boolean clickable) {
    String name =
        "<font color='"+PreferenceLevel.prolog2color(variable.getPref())+"'>"+
        variable.getName()+
        "</font>";
    String description = null;
    String onClick = null;
    if (clickable)
        onClick = "onclick=\"(parent ? parent : window).showGwtDialog('Examination Assignment', 'examInfo.do?examId="+variable.getId()+"&op=Reset','900','90%');\"";
    menu_item(out, menuId, variable.getCounter() + "× " + name, description, onClick, true);
}
/**
 * Emit a collapsible node for one assignment (period + rooms) of an exam, each
 * part colored by its preference; optionally clickable to try this assignment
 * in the examination dialog (rooms are passed as a ':'-separated id list).
 */
private void unassignmentMenuItem(PrintWriter out, String menuId, CBSValue value, boolean clickable) {
    String name =
        "<font color='"+PreferenceLevel.int2color(value.getPeriodPref())+"'>"+
        value.getPeriodName()+
        "</font> ";
    String roomLink = "";
    for (int i=0;i<value.getRoomIds().size();i++) {
        name += (i>0?", ":"")+"<font color='"+PreferenceLevel.int2color(((Integer)value.getRoomPrefs().elementAt(i)).intValue())+"'>"+ value.getRoomNames().elementAt(i)+"</font>";
        roomLink += (i>0?":":"")+value.getRoomIds().elementAt(i);
    }
    String description = null;
    String onClick = null;
    if (clickable)
        onClick = "onclick=\"(parent ? parent : window).showGwtDialog('Examination Assignment', 'examInfo.do?examId="+value.variable().getId()+"&period="+value.getPeriodId()+"&room="+roomLink+"&op=Try&reset=1','900','90%');\"";
    menu_item(out, menuId, value.getCounter() + "× " + name, description, onClick, true);
}
/**
 * Emit a collapsible node for a constraint, labeled by its type (distribution,
 * instructor, room, student) and colored by its preference. Instructor and room
 * constraints can link to the corresponding examination grid.
 */
private void constraintMenuItem(PrintWriter out, String menuId, CBSConstraint constraint, boolean clickable) {
    String name = "<font color='"+PreferenceLevel.prolog2color(constraint.getPref())+"'>";
    String link = null;
    switch (constraint.getType()) {
        case sConstraintTypeGroup :
            name += "Distribution "+constraint.getName();
            break;
        case sConstraintTypeInstructor :
            name += "Instructor "+constraint.getName();
            if (clickable) link = "examGrid.do?filter="+constraint.getName()+"&resource="+ExamGridTable.sResourceInstructor+"&op=Cbs";
            break;
        case sConstraintTypeRoom :
            name += "Room "+constraint.getName();
            if (clickable) link = "examGrid.do?filter="+constraint.getName()+"&resource="+ExamGridTable.sResourceRoom+"&op=Cbs";
            break;
        case sConstraintTypeStudent :
            name += "Student "+constraint.getName();
            break;
        default :
            name += (constraint.getName()==null?"Unknown":constraint.getName());
    }
    name += "</font>";
    String description = null;
    String onClick = null;
    if (link!=null)
        onClick = "href=\""+link+"\"";
    menu_item(out, menuId, constraint.getCounter() + "× " + name, description, onClick, true);
}
/**
 * Emit a leaf for one conflicting assignment: "exam &larr; period rooms", each
 * part colored by its preference; optionally clickable to try the assignment
 * in the examination dialog.
 */
private void assignmentLeafItem(PrintWriter out, CBSAssignment assignment, boolean clickable) {
    String name =
        "<font color='"+PreferenceLevel.prolog2color(assignment.getPref())+"'>"+
        assignment.getName()+
        "</font> ← "+
        "<font color='"+PreferenceLevel.int2color(assignment.getPeriodPref())+"'>"+
        assignment.getPeriodName()+
        "</font> ";
    String roomLink = "";
    for (int i=0;i<assignment.getRoomIds().size();i++) {
        name += (i>0?", ":"")+"<font color='"+PreferenceLevel.int2color(((Integer)assignment.getRoomPrefs().elementAt(i)).intValue())+"'>"+ assignment.getRoomNames().elementAt(i)+"</font>";
        roomLink += (i>0?":":"")+assignment.getRoomIds().elementAt(i);
    }
    String onClick = null;
    if (clickable)
        onClick = "onclick=\"(parent ? parent : window).showGwtDialog('Examination Assignment', 'examInfo.do?examId="+assignment.getId()+"&period="+assignment.getPeriodId()+"&room="+roomLink+"&op=Try&reset=1','900','90%');\"";
    leaf_item(out, assignment.getCounter()+"× "+name, null, onClick);
}
/** JSP convenience overload: wrap the JSP writer and print the header without the inline stylesheet. */
public static void printHtmlHeader(JspWriter jsp) {
    printHtmlHeader(new PrintWriter(jsp), false);
}
/**
 * Print the HTML prologue for the statistics tree: an optional inline stylesheet
 * and the toggle() JavaScript that expands/collapses menu_item containers.
 *
 * @param style if true, also emit the inline CSS block
 */
public static void printHtmlHeader(PrintWriter out, boolean style) {
    if (style) {
        out.println("<style type=\"text/css\">");
        out.println("<!--");
        out.println("A:link { color: blue; text-decoration: none; border:0; background:0; }");
        out.println("A:visited { color: blue; text-decoration: none; border:0; background:0; }");
        out.println("A:active { color: blue; text-decoration: none; border:0; background:0; }");
        out.println("A:hover { color: blue; text-decoration: none; border:0; background:0; }");
        out.println(".TextBody { background-color: white; color:black; font-size: 12px; }");
        out.println(".WelcomeHead { color: black; margin-top: 0px; margin-left: 0px; font-weight: bold; text-align: right; font-size: 30px; font-family: Comic Sans MS}");
        out.println("-->");
        out.println("</style>");
        out.println();
    }
    // toggle(item): flip visibility of __idMenuDiv<item> and swap the +/- icon.
    out.println("<script language=\"javascript\" type=\"text/javascript\">");
    out.println("function toggle(item) {");
    out.println("  obj=document.getElementById(\"__idMenuDiv\"+item);");
    out.println("  visible=(obj.style.display!=\"none\");");
    out.println("  img=document.getElementById(\"__idMenuImg\" + item);");
    out.println("  menu=document.getElementById(\"__idMenu\" + item);");
    out.println("  if (visible) {obj.style.display=\"none\";img.src=\""+IMG_EXPAND+"\";}");
    out.println("  else {obj.style.display=\"block\";img.src=\""+IMG_COLLAPSE+"\";}");
    out.println("}");
    out.println("</script>");
    out.flush();
}
/**
 * Sort the counters in descending counter order and keep the smallest prefix
 * whose counters sum to at least ceil(limit * total); returns that prefix.
 */
private Vector filter(Collection counters, double limit) {
    Vector sorted = new Vector(counters);
    Collections.sort(sorted);
    int total = 0;
    for (Iterator i = sorted.iterator(); i.hasNext();)
        total += ((Counter) i.next()).getCounter();
    int threshold = (int) Math.ceil(limit * total);
    Vector kept = new Vector();
    int running = 0;
    for (Iterator i = sorted.iterator(); i.hasNext();) {
        Counter counter = (Counter) i.next();
        kept.addElement(counter);
        running += counter.getCounter();
        if (running >= threshold) break;
    }
    return kept;
}
/**
 * Print conflict-based statistics in HTML format, all exams, the same
 * {@code limit} fraction applied at every tree level.
 */
public void printHtml(JspWriter jsp, double limit, int type, boolean clickable) {
    printHtml(jsp, null, new double[] {limit,limit,limit,limit}, type, clickable);
}
/**
 * Print conflict-based statistics in HTML format, all exams, the same
 * {@code limit} fraction applied at every tree level.
 */
public void printHtml(PrintWriter out, double limit, int type, boolean clickable) {
    printHtml(out, null, new double[] {limit,limit,limit,limit}, type, clickable);
}
/**
 * Print conflict-based statistics in HTML format, all exams, one limit
 * fraction per tree level ({@code limit[0..3]}).
 */
public void printHtml(JspWriter jsp, double[] limit, int type, boolean clickable) {
    printHtml(jsp, null, limit, type, clickable);
}
/**
 * Print conflict-based statistics in HTML format, all exams, one limit
 * fraction per tree level ({@code limit[0..3]}).
 */
public void printHtml(PrintWriter out, double[] limit, int type, boolean clickable) {
    printHtml(out, null, limit, type, clickable);
}
/**
 * Print conflict-based statistics in HTML format, restricted to the exam with
 * id {@code classId}, the same {@code limit} fraction at every level.
 */
public void printHtml(JspWriter jsp, Long classId, double limit, int type, boolean clickable) {
    printHtml(jsp, classId, new double[] {limit,limit,limit,limit}, type, clickable);
}
/**
 * Print conflict-based statistics in HTML format, restricted to the exam with
 * id {@code classId}, the same {@code limit} fraction at every level.
 */
public void printHtml(PrintWriter out, Long classId, double limit, int type, boolean clickable) {
    printHtml(out, classId, new double[] {limit,limit,limit,limit}, type, clickable);
}
/**
 * Print conflict-based statistics in HTML format, restricted to one exam,
 * per-level limits; wraps the JSP writer and delegates to the main overload.
 */
public void printHtml(JspWriter jsp, Long classId, double[] limit, int type, boolean clickable) {
    PrintWriter out = new PrintWriter(jsp);
    printHtml(out, classId, limit, type, clickable);
}
/**
 * Print conflict-based statistics in HTML format (main implementation).
 *
 * @param out       target writer (flushed before returning)
 * @param classId   restrict output to this exam id; null for all exams
 * @param limit     per-level fractions [0..1]; at each tree level only the
 *                  top nodes covering at least that fraction of the total
 *                  counter are shown (see {@link #filter})
 * @param type      TYPE_VARIABLE_BASED (exam - assignment - constraint -
 *                  conflicting assignment) or TYPE_CONSTRAINT_BASED
 *                  (constraint - exam - assignment - conflicting assignment)
 * @param clickable if true, items link to assignment dialogs / grids
 */
public void printHtml(PrintWriter out, Long classId, double[] limit, int type, boolean clickable) {
    if (type == TYPE_VARIABLE_BASED) {
        Vector vars = filter(iVariables.values(), limit[0]);
        if (classId != null) {
            // Single-exam view: ignore the filtered list, show just that exam.
            CBSVariable var = (CBSVariable) iVariables.get(classId);
            vars.clear();
            if (var != null) vars.add(var);
        }
        for (Enumeration e1 = vars.elements(); e1.hasMoreElements();) {
            CBSVariable variable = (CBSVariable) e1.nextElement();
            String m1 = String.valueOf(variable.getId());
            if (classId == null)
                unassignedVariableMenuItem(out, m1, variable, clickable);
            Vector vals = filter(variable.values(), limit[1]);
            int id = 0;
            for (Enumeration e2 = vals.elements(); e2.hasMoreElements();) {
                CBSValue value = (CBSValue) e2.nextElement();
                String m2 = m1 + "." + (id++);
                unassignmentMenuItem(out, m2, value, clickable);
                Vector constraints = filter(value.constraints(), limit[2]);
                for (Enumeration e3 = constraints.elements(); e3.hasMoreElements();) {
                    CBSConstraint constraint = (CBSConstraint) e3.nextElement();
                    // NOTE(review): unlike the other menu ids there is no "."
                    // between m2 and the constraint type here, so ids such as
                    // "5.1"+"12.3" and "5.11"+"2.3" could collide -- verify.
                    String m3 = m2 + constraint.getType() + "." + constraint.getId();
                    constraintMenuItem(out, m3, constraint, clickable);
                    Vector assignments = filter(constraint.assignments(), limit[3]);
                    for (Enumeration e4 = assignments.elements(); e4.hasMoreElements();) {
                        CBSAssignment assignment = (CBSAssignment) e4.nextElement();
                        assignmentLeafItem(out, assignment, clickable);
                    }
                    end_item(out);
                }
                end_item(out);
            }
            end_item(out);
        }
    } else if (type == TYPE_CONSTRAINT_BASED) {
        // Re-hang the variable-oriented tree with constraints at the top:
        // build shadow (x-prefixed) constraint/variable/value nodes keyed by
        // "type.id", accumulating counters from the original assignments.
        Hashtable constraints = new Hashtable();
        for (Enumeration e1 = iVariables.elements(); e1.hasMoreElements();) {
            CBSVariable variable = (CBSVariable) e1.nextElement();
            if (classId != null && classId.longValue() != variable.getId())
                continue;
            for (Iterator e2 = variable.values().iterator(); e2.hasNext();) {
                CBSValue value = (CBSValue) e2.next();
                for (Iterator e3 = value.constraints().iterator(); e3.hasNext();) {
                    CBSConstraint constraint = (CBSConstraint) e3.next();
                    CBSConstraint xConstraint = (CBSConstraint) constraints.get(constraint.getType() + "." + constraint.getId());
                    if (xConstraint == null) {
                        xConstraint = new CBSConstraint(constraint.getType(), constraint.getId(), constraint.getName(), constraint.getPref());
                        constraints.put(constraint.getType() + "." + constraint.getId(), xConstraint);
                    }
                    // Find (or create) the shadow variable for this exam under the constraint.
                    CBSVariable xVariable = null;
                    for (Iterator i = xConstraint.variables().iterator(); i.hasNext();) {
                        CBSVariable v = (CBSVariable) i.next();
                        if (v.getId() == variable.getId()) {
                            xVariable = v; break;
                        }
                    }
                    if (xVariable == null) {
                        xVariable = new CBSVariable(xConstraint, variable.getId(), variable.getName(), variable.getPref());
                        xConstraint.variables().add(xVariable);
                    }
                    CBSValue xValue = new CBSValue(xVariable,
                        value.getPeriodId(), value.getPeriodName(), value.getPeriodPref(),
                        value.getRoomIds(), value.getRoomNames(), value.getRoomPrefs());
                    xVariable.values().add(xValue);
                    for (Iterator e4 = constraint.assignments().iterator(); e4.hasNext();) {
                        CBSAssignment assignment = (CBSAssignment) e4.next();
                        xValue.assignments().add(assignment);
                        // Propagates up through xVariable and xConstraint.
                        xValue.incCounter(assignment.getCounter());
                    }
                }
            }
        }
        Vector consts = filter(constraints.values(), limit[0]);
        for (Enumeration e1 = consts.elements(); e1.hasMoreElements();) {
            CBSConstraint constraint = (CBSConstraint) e1.nextElement();
            String m1 = constraint.getType() + "." + constraint.getId();
            constraintMenuItem(out, m1, constraint, clickable);
            Vector variables = filter(constraint.variables(), limit[1]);
            Collections.sort(variables);
            for (Enumeration e2 = variables.elements(); e2.hasMoreElements();) {
                CBSVariable variable = (CBSVariable) e2.nextElement();
                String m2 = m1 + "." + variable.getId();
                if (classId == null)
                    unassignedVariableMenuItem(out, m2, variable, clickable);
                Vector vals = filter(variable.values(), limit[2]);
                int id = 0;
                for (Enumeration e3 = vals.elements(); e3.hasMoreElements();) {
                    CBSValue value = (CBSValue) e3.nextElement();
                    String m3 = m2 + "." + (id++);
                    unassignmentMenuItem(out, m3, value, clickable);
                    Vector assignments = filter(value.assignments(), limit[3]);
                    for (Enumeration e4 = assignments.elements(); e4.hasMoreElements();) {
                        CBSAssignment assignment = (CBSAssignment) e4.nextElement();
                        assignmentLeafItem(out, assignment, clickable);
                    }
                    end_item(out);
                }
                if (classId == null)
                    end_item(out);
            }
            end_item(out);
        }
    }
    out.flush();
}
/** Always reports success; this implementation does not persist anything to disk. */
public boolean saveToFile() {
    return true;
}
}<|fim▁end|>
|
public String toString() {
return iExamName+" "+iPeriodName+" "+iRoomNames;
}
public Vector getRoomNames() { return iRoomNames; }
|
<|file_name|>constants.py<|end_file_name|><|fim▁begin|># Bzrflag
# Copyright 2008-2011 Brigham Young University
#
# This file is part of Bzrflag.
#
# Bzrflag is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# Bzrflag is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# Bzrflag. If not, see <http://www.gnu.org/licenses/>.
#
# Inquiries regarding any further use of Bzrflag, please contact the Copyright
# Licensing Office, Brigham Young University, 3760 HBLL, Provo, UT 84602,
# (801) 422-9339 or 422-3821, e-mail [email protected].
"""BZFlag Constants
These constants are originally defined in src/common/global.cxx in the BZFlag
repository. There are more than a hundred BZFlag constants that are in
global.cxx but are not included in the list of BZFlag constants.
"""
from __future__ import division # Must be at the beginning of the file!
__author__ = "BYU AML Lab <[email protected]>"<|fim▁hole|>import logging
# FIX: `math` and `logging` are used below but their imports were displaced
# out of the visible module body (extraction garbling); import them here so
# the module is self-contained.
import logging
import math

logger = logging.getLogger('constants')

# Colors
COLORNAME = ('rogue', 'red', 'green', 'blue', 'purple')

# Tanks
TANKANGVEL = math.pi / 4
TANKLENGTH = 6
TANKRADIUS = 0.72 * TANKLENGTH
TANKSPEED = 25
LINEARACCEL = 0.5
ANGULARACCEL = 0.5
TANKWIDTH = 2.8
TANKALIVE = 'alive'
TANKDEAD = 'dead'
# Off-map position used for tanks that are not on the field.
DEADZONE = -999999.0, -999999.0

# Shots
MAXSHOTS = 10
SHOTRADIUS = 0.5
SHOTRANGE = 350
SHOTSPEED = 100
# Reload time equals a shot's time of flight over its full range.
RELOADTIME = SHOTRANGE / SHOTSPEED
SHOTALIVE = 'alive'
SHOTDEAD = 'dead'

# Flags
FLAGRADIUS = 2.5
INITPOINTS = 2000
CAPTUREPOINTS = 4000

# Rules
EXPLODETIME = 5

# Graphics
BASE_PATTERN = '%s_basetop.png'
SHOT_PATTERN = '%s_bolt.png'
FLAG_PATTERN = '%s_flag.png'
TANK_PATTERN = '%s_tank.png'
TILESCALE = 0.1
SHOTSCALE = 3
FLAGSCALE = 3
TANKSCALE = 1.2
FONTSIZE = 16

# A higher loop timeout decreases CPU usage but also decreases the frame rate.
LOOP_TIMEOUT = 0.01

# Server
BACKLOG = 5

# Game
RESPAWNTRIES = 1000
|
__copyright__ = "Copyright 2008-2011 Brigham Young University"
__license__ = "GNU GPL"
import math
|
<|file_name|>previewbrowser.py<|end_file_name|><|fim▁begin|>"""
Preview Browser Widget.
"""
from xml.sax.saxutils import escape
from PyQt4.QtGui import (
QWidget, QLabel, QListView, QAction, QVBoxLayout, QHBoxLayout, QSizePolicy,
QStyleOption, QStylePainter
)
from PyQt4.QtSvg import QSvgWidget
from PyQt4.QtCore import (
Qt, QSize, QByteArray, QModelIndex, QEvent
)
from PyQt4.QtCore import pyqtSignal as Signal
from ..utils import check_type
from ..gui.dropshadow import DropShadowFrame
from . import previewmodel
# Fallback SVG markup shown in the preview pane when an item has no
# thumbnail (currently an empty document).
NO_PREVIEW_SVG = """
"""

# Default description template; filled with escaped `name`/`description`
# item data (see PreviewBrowser.__update).
DESCRIPTION_TEMPLATE = """
<h3 class=item-heading>{name}</h3>
<p class=item-description>
{description}
</p>
"""

# Fixed (width, height) of the large preview image, in pixels.
PREVIEW_SIZE = (440, 295)
class LinearIconView(QListView):
    """A static, single-row icon list with horizontal scrolling only."""

    def __init__(self, *args, **kwargs):
        QListView.__init__(self, *args, **kwargs)
        self.setViewMode(QListView.IconMode)
        self.setWrapping(False)
        self.setWordWrap(True)
        self.setSelectionMode(QListView.SingleSelection)
        self.setEditTriggers(QListView.NoEditTriggers)
        self.setMovement(QListView.Static)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        self.setSizePolicy(QSizePolicy.Expanding,
                           QSizePolicy.Fixed)
        self.setIconSize(QSize(120, 80))

    def sizeHint(self):
        """Preferred size: fixed width, height of one item row (or a small
        default when the model is empty)."""
        if not self.model().rowCount():
            return QSize(200, 140)
        scroll_height = self.horizontalScrollBar().sizeHint().height()
        row_height = self.sizeHintForRow(0) + scroll_height
        _, margin_top, _, margin_bottom = self.getContentsMargins()
        total = row_height + margin_top + margin_bottom + self.verticalOffset()
        return QSize(200, total)
class TextLabel(QWidget):
    """A plain text label widget with support for elided text.

    Unlike a plain QLabel, the text is elided (per the configured
    ``Qt.TextElideMode``) to fit the widget's current width.
    """

    def __init__(self, *args, **kwargs):
        QWidget.__init__(self, *args, **kwargs)
        self.setSizePolicy(QSizePolicy.Expanding,
                           QSizePolicy.Preferred)
        self.__text = ""
        self.__textElideMode = Qt.ElideMiddle
        self.__sizeHint = None  # cached sizeHint(); invalidated by __update()
        self.__alignment = Qt.AlignLeft | Qt.AlignVCenter

    def setText(self, text):
        """Set the `text` string to display."""
        check_type(text, str)
        if self.__text != text:
            self.__text = str(text)
            self.__update()

    def text(self):
        """Return the displayed text."""
        return self.__text

    def setTextElideMode(self, mode):
        """Set elide mode (`Qt.TextElideMode`)."""
        if self.__textElideMode != mode:
            self.__textElideMode = mode
            self.__update()

    def elideMode(self):
        """Return the current elide mode (`Qt.TextElideMode`)."""
        # BUG FIX: previously returned the nonexistent `self.__elideMode`
        # attribute (AttributeError); the state is kept in `__textElideMode`
        # (see __init__ / setTextElideMode).
        return self.__textElideMode

    def setAlignment(self, align):
        """Set text alignment (`Qt.Alignment`)."""
        if self.__alignment != align:
            self.__alignment = align
            self.__update()

    def sizeHint(self):
        # Cache the hint; it only depends on the font metrics.
        if self.__sizeHint is None:
            option = QStyleOption()
            option.initFrom(self)
            metrics = option.fontMetrics
            self.__sizeHint = QSize(200, metrics.height())
        return self.__sizeHint

    def paintEvent(self, event):
        # Elide the text to the current width on every paint.
        painter = QStylePainter(self)
        option = QStyleOption()
        option.initFrom(self)
        rect = option.rect
        metrics = option.fontMetrics
        text = metrics.elidedText(self.__text, self.__textElideMode,
                                  rect.width())
        painter.drawItemText(rect, self.__alignment,
                             option.palette, self.isEnabled(), text,
                             self.foregroundRole())
        painter.end()

    def changeEvent(self, event):
        # A font change invalidates the cached size hint.
        if event.type() == QEvent.FontChange:
            self.__update()
        return QWidget.changeEvent(self, event)

    def __update(self):
        """Invalidate the cached size hint and schedule a repaint."""
        self.__sizeHint = None
        self.updateGeometry()
        self.update()
"""A Preview Browser for recent/premade scheme selection.
"""
# Emitted when the current previewed item changes
currentIndexChanged = Signal(int)
# Emitted when an item is double clicked in the preview list.
activated = Signal(int)
def __init__(self, *args):
QWidget.__init__(self, *args)
self.__model = None
self.__currentIndex = -1
self.__template = DESCRIPTION_TEMPLATE
self.__setupUi()
def __setupUi(self):
vlayout = QVBoxLayout()
vlayout.setContentsMargins(0, 0, 0, 0)
top_layout = QHBoxLayout()
top_layout.setContentsMargins(12, 12, 12, 12)
# Top row with full text description and a large preview
# image.
self.__label = QLabel(self, objectName="description-label",
wordWrap=True,
alignment=Qt.AlignTop | Qt.AlignLeft)
self.__label.setWordWrap(True)
self.__label.setFixedSize(220, PREVIEW_SIZE[1])
self.__image = QSvgWidget(self, objectName="preview-image")
self.__image.setFixedSize(*PREVIEW_SIZE)
self.__imageFrame = DropShadowFrame(self)
self.__imageFrame.setWidget(self.__image)
# Path text below the description and image
path_layout = QHBoxLayout()
path_layout.setContentsMargins(12, 0, 12, 0)
path_label = QLabel("<b>{0!s}</b>".format(self.tr("Path:")), self,
objectName="path-label")
self.__path = TextLabel(self, objectName="path-text")
path_layout.addWidget(path_label)
path_layout.addWidget(self.__path)
self.__selectAction = \
QAction(self.tr("Select"), self,
objectName="select-action",
)
top_layout.addWidget(self.__label, 1,
alignment=Qt.AlignTop | Qt.AlignLeft)
top_layout.addWidget(self.__image, 1,
alignment=Qt.AlignTop | Qt.AlignRight)
vlayout.addLayout(top_layout)
vlayout.addLayout(path_layout)
# An list view with small preview icons.
self.__previewList = LinearIconView(objectName="preview-list-view")
self.__previewList.doubleClicked.connect(self.__onDoubleClicked)
vlayout.addWidget(self.__previewList)
self.setLayout(vlayout)
def setModel(self, model):
"""Set the item model for preview.
"""
if self.__model != model:
if self.__model:
s_model = self.__previewList.selectionModel()
s_model.selectionChanged.disconnect(self.__onSelectionChanged)
self.__model.dataChanged.disconnect(self.__onDataChanged)
self.__model = model
self.__previewList.setModel(model)
if model:
s_model = self.__previewList.selectionModel()
s_model.selectionChanged.connect(self.__onSelectionChanged)
self.__model.dataChanged.connect(self.__onDataChanged)
if model and model.rowCount():
self.setCurrentIndex(0)
def model(self):
"""Return the item model.
"""
return self.__model
def setPreviewDelegate(self, delegate):
"""Set the delegate to render the preview images.
"""
raise NotImplementedError
def setDescriptionTemplate(self, template):
self.__template = template
self.__update()
def setCurrentIndex(self, index):
"""Set the selected preview item index.
"""
if self.__model is not None and self.__model.rowCount():
index = min(index, self.__model.rowCount() - 1)
index = self.__model.index(index, 0)
sel_model = self.__previewList.selectionModel()
# This emits selectionChanged signal and triggers
# __onSelectionChanged, currentIndex is updated there.
sel_model.select(index, sel_model.ClearAndSelect)
elif self.__currentIndex != -1:
self.__currentIndex = -1
self.__update()
self.currentIndexChanged.emit(-1)
def currentIndex(self):
"""Return the current selected index.
"""
return self.__currentIndex
def __onSelectionChanged(self, *args):
"""Selected item in the preview list has changed.
Set the new description and large preview image.
"""
rows = self.__previewList.selectedIndexes()
if rows:
index = rows[0]
self.__currentIndex = index.row()
else:
index = QModelIndex()
self.__currentIndex = -1
self.__update()
self.currentIndexChanged.emit(self.__currentIndex)
def __onDataChanged(self, topleft, bottomRight):
"""Data changed, update the preview if current index in the changed
range.
"""
if self.__currentIndex <= topleft.row() and \
self.__currentIndex >= bottomRight.row():
self.__update()
def __onDoubleClicked(self, index):
"""Double click on an item in the preview item list.
"""
self.activated.emit(index.row())
def __update(self):
"""Update the current description.
"""
if self.__currentIndex != -1:
index = self.model().index(self.__currentIndex, 0)
else:
index = QModelIndex()
if not index.isValid():
description = ""
name = ""
path = ""
svg = NO_PREVIEW_SVG
else:
description = str(index.data(Qt.WhatsThisRole))
if not description:
description = "No description."
description = escape(description)
description = description.replace("\n", "<br/>")
name = str(index.data(Qt.DisplayRole))
if not name:
name = "Untitled"
name = escape(name)
path = str(index.data(Qt.StatusTipRole))
svg = str(index.data(previewmodel.ThumbnailSVGRole))
desc_text = self.__template.format(description=description, name=name)
self.__label.setText(desc_text)
self.__path.setText(path)
if not svg:
svg = NO_PREVIEW_SVG
if svg:
self.__image.load(QByteArray(svg.encode("utf-8")))<|fim▁end|>
|
"""
|
<|file_name|>users.module.js<|end_file_name|><|fim▁begin|><|fim▁hole|> "use strict";
angular
.module('app.users', [
'app.core'
]);
})();<|fim▁end|>
|
(function() {
|
<|file_name|>randombackground.py<|end_file_name|><|fim▁begin|>import os, random
rfilename=random.choice(os.listdir("/storage/pictures"))
rextension=os.path.splitext(rfilename)[1]
picturespath='/storage/pictures/'
<|fim▁hole|> if filename.startswith("random"):
extension=os.path.splitext(filename)[1]
newname=picturespath + str(random.random()).rsplit('.',1)[1] + extension
# rename the existing random wallpaper to something random
filename=picturespath+filename
os.rename(filename, newname)
# now rename the newly randomly founded file to be random
rfilename=picturespath+rfilename
os.rename(rfilename, picturespath+'random'+rextension)<|fim▁end|>
|
#TODO Probably dont need a forloop can possibly do random*
#TODO What if the directory is empty?
for filename in os.listdir(picturespath):
|
<|file_name|>0002_auto__del_positions__add_position.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db<|fim▁hole|>
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'Positions'
db.delete_table(u'positions_positions')
# Adding model 'Position'
db.create_table(u'positions_position', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
('date', self.gf('django.db.models.fields.DateField')(default=datetime.datetime(2013, 8, 21, 0, 0))),
('content', self.gf('django.db.models.fields.TextField')()),
))
db.send_create_signal(u'positions', ['Position'])
def backwards(self, orm):
# Adding model 'Positions'
db.create_table(u'positions_positions', (
('content', self.gf('django.db.models.fields.TextField')()),
('date', self.gf('django.db.models.fields.DateField')(default=datetime.datetime(2013, 8, 21, 0, 0))),
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
))
db.send_create_signal(u'positions', ['Positions'])
# Deleting model 'Position'
db.delete_table(u'positions_position')
models = {
u'positions.position': {
'Meta': {'object_name': 'Position'},
'content': ('django.db.models.fields.TextField', [], {}),
'date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 8, 21, 0, 0)'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['positions']<|fim▁end|>
|
from south.v2 import SchemaMigration
from django.db import models
|
<|file_name|>runner.py<|end_file_name|><|fim▁begin|>#
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
from ast import literal_eval
from threading import Thread
from ovirtscheduler import utils
class PythonMethodRunner(Thread):
def __init__(self, path, module, cls, method, args, request_id=''):
super(PythonMethodRunner, self).__init__(group=None)
logger = logging.getLogger()
self._log_adapter = utils.RequestAdapter(<|fim▁hole|> self._result = None
self._error = None
self._process = None
self._script = self.createScript(module, cls, method, args)
self.request_id = request_id
def run(self):
try:
self._log_adapter.debug(
'running %s in %s' % (self._script, self._path))
self._process = utils.createProcess(self._script, self._path)
(result, error) = self._process.communicate()
if not isinstance(result, str):
result = result.decode()
try:
self._result = literal_eval(result)
except Exception as ex:
if not error:
self._error = "Unable to parse result: %s" \
" got error : %s " % (result, ex)
if error:
self._error = error
except Exception as ex:
self._error = ex
if self._error:
self._log_adapter.error("script %s got error %s" %
(self._script, self._error))
def getResults(self):
return self._result
def getErrors(self):
return self._error
def getReturnCode(self):
return self._process.returncode
def stop(self):
return utils.killProcess(self._process)
def createScript(self, module, cls, method, args):
command_template = "import {m}; {m}.{c}().{method}{args}"
command_string = command_template\
.format(m=module,
c=cls,
method=method,
args=repr(utils.createFunctionArgs(args)))
return ["python3", "-c", command_string]<|fim▁end|>
|
logger,
{'method': 'PythonMethodRunner',
'request_id': request_id})
self._path = path
|
<|file_name|>player.py<|end_file_name|><|fim▁begin|>import functools
import itertools
import json
import multiprocessing
import os
import shutil
import sys
import time
import cv2
import numpy
import utility.config
import utility.cv
import utility.geometry
import utility.gui
import utility.image
import utility.log
# Explicitly disable OpenCL. Querying for OpenCL support breaks when multiprocessing.
cv2.ocl.setUseOpenCL(False)
# Create multiprocessing pool. Uses `multiprocessing.cpu_count()` processes by default.
pool = multiprocessing.Pool()
# Load all templates
template_refs = utility.cv.load_template_refs()
template_game_over = utility.cv.load_template_game_over()
# Setup empty trace directory
trace_directory = "trace"
if os.path.exists(trace_directory):
shutil.rmtree(trace_directory)
os.mkdir(trace_directory)
# Wait for game to start
while True:
screenshot = utility.image.downscale(utility.image.screenshot())
if utility.cv.match_template(screenshot, template_game_over)["score"] < 0.5:
# Game over screen cleared
utility.log.separator()
break
utility.log.info("Waiting for game to start...")
time.sleep(1)
# Begin player run loop
while True:
start = time.time()
# Grab screenshot
screenshot_original = utility.image.screenshot()
screenshot = utility.image.downscale(screenshot_original)
utility.log.performance("screenshot", start)
# Calculate character and jump matches
#
# See http://stackoverflow.com/questions/1408356/keyboard-interrupts-with-pythons-multiprocessing-pool
matches = []
map_fn = functools.partial(utility.cv.multi_match_template, screenshot)
map_args = template_refs
map_results = pool.map_async(map_fn, map_args).get(1)
utility.log.performance("multi_match_template", start)
for (idx, match_template_multiple_results) in enumerate(map_results):
for result in match_template_multiple_results:
# Adjust vertical center for character type towards bottom
if result["type"] == "character":
result["center"] = {
"x": result["center"]["x"],
"y": result["y1"] + ((result["y2"] - result["y1"]) * utility.config.character_vertical_center)
}
# Filter any conflicts from existing matches
conflicting_matches = []
def keep(match):
if match["type"] != result["type"]:
# Not conflicting by type
return True
if match["type"] == "jump" and match["action"] != result["action"]:
# Not conflicting by jump action
return True
if not utility.geometry.rects_overlap(match, result):
# Not conflicting by overlap
return True
# Conflicts with result
return False
matches = [m for m in matches if keep(m)]
# Determine best match to keep
best_match = result
for match in conflicting_matches:
if match["score"] > best_match["score"]:
# Conflicting match has higher score
best_match = match
continue
# Save best match
matches.append(best_match)
utility.log.performance("matches", start)
# Determine action
possible_actions = utility.geometry.calculate_actions(matches)
utility.log.performance("calculate_actions", start)
for action in possible_actions:
if action["action"] == "double" and action["distance"] <= utility.config.double_jump_action_distance:
# Double jump
utility.log.info("double click")
utility.gui.mouse_double_click()
break
elif action["action"] == "single" and action["distance"] <= utility.config.single_jump_action_distance:
# Single jump
utility.log.info("single click")
utility.gui.mouse_click()
break
else:<|fim▁hole|>
# Highlight results
composite_image = utility.image.highlight_regions(screenshot, matches)
utility.log.performance("highlight_regions", start)
# Present composite image
# utility.image.show(composite_image)
# utility.log.performance("show", start)
# Log trace
utility.log.trace(trace_directory, screenshot_original, composite_image, matches, possible_actions)
utility.log.performance("trace", start)
# Match game over
game_over = (len(matches) == 0 and utility.cv.match_template(screenshot, template_game_over)["score"] > 0.5)
# Log total
utility.log.performance("total", start)
utility.log.separator()
# Check exit condition
if game_over:
# Game ended
break<|fim▁end|>
|
# Try next action
continue
utility.log.performance("execute action", start)
|
<|file_name|>utils.go<|end_file_name|><|fim▁begin|>package schema
import (
"crypto/md5"
"crypto/rand"
"fmt"
"reflect"
"strings"
"time"
)
var (
// Now is a field hook handler that returns the current time, to be used in
// schema with OnInit and OnUpdate
Now = func(value interface{}) interface{} {
return time.Now()
}
// NewID is a field hook handler that generates a new unique id if none exist,
// to be used in schema with OnInit
NewID = func(value interface{}) interface{} {
if value == nil {
r := make([]byte, 128)
rand.Read(r)
value = fmt.Sprintf("%x", md5.Sum(r))
}
return value
}
// IDField is a common schema field configuration that generate an UUID for new item id.
IDField = Field{
Required: true,
ReadOnly: true,
OnInit: &NewID,
Validator: &String{
Regexp: "^[0-9a-f]{32}$",
},
}
// CreatedField is a common schema field configuration for "created" fields. It stores
// the creation date of the item.
CreatedField = Field{
Required: true,
ReadOnly: true,
OnInit: &Now,
Validator: &Time{},
}
// UpdatedField is a common schema field configuration for "updated" fields. It stores
// the current date each time the item is modified.
UpdatedField = Field{
Required: true,
ReadOnly: true,
OnInit: &Now,
OnUpdate: &Now,
Validator: &Time{},
}
)
// isNumber takes an interface as input, and returns a float64 if the type is
// compatible (int* or float*)
func isNumber(n interface{}) (float64, bool) {
switch n := n.(type) {
case int:
return float64(n), true
case int8:
return float64(n), true
case int16:
return float64(n), true
case int32:
return float64(n), true
case int64:
return float64(n), true
case uint:
return float64(n), true
case uint8:
return float64(n), true
case uint16:
return float64(n), true
case uint32:
return float64(n), true
case uint64:
return float64(n), true
case float32:
return float64(n), true
case float64:
return n, true
default:
return 0, false
}
}
// isIn returns true if on the the elements in exp is equal to value.
// The exp argument may be an item or a list of item to match.
func isIn(exp interface{}, value interface{}) bool {
values, ok := exp.([]interface{})
if !ok {
values = []interface{}{exp}
}
for _, v := range values {
if reflect.DeepEqual(v, value) {
return true
}
}
return false
}
// getField gets the value of a given field by supporting sub-field path.
// A get on field.subfield is equivalent to payload["field"]["subfield].
func getField(payload map[string]interface{}, name string) interface{} {
// Split the name to get the current level name on first element and
// the rest of the path as second element if dot notation is used
// (i.e.: field.subfield.subsubfield -> field, subfield.subsubfield)
path := strings.SplitN(name, ".", 2)
if value, found := payload[path[0]]; found {
if len(path) == 2 {
if subPayload, ok := value.(map[string]interface{}); ok {<|fim▁hole|> return getField(subPayload, path[1])
}
// The requested depth does not exist
return nil
}
// Full path has been found
return value
}
return nil
}<|fim▁end|>
|
// Check next level
|
<|file_name|>consts.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-<|fim▁hole|>CALL = 'C'
PUT = 'P'<|fim▁end|>
|
"""
Package-wide constants.
"""
|
<|file_name|>DOM.js<|end_file_name|><|fim▁begin|>/**
* DOM manipulation methods
*/
Firestorm.DOM = {
/**
* When turning HTML into nodes - it must be inserted into appropriate tags to stay valid
*/
_wrap_map: {
select: [1, "<select multiple='multiple'>", "</select>"],
fieldset: [1, "<fieldset>", "</fieldset>"],
table: [1, "<table>", "</table>"],
tbody: [2, "<table><tbody>", "</tbody></table>"],
thead: [2, "<table><tbody>", "</tbody></table>"],
tfoot: [2, "<table><tbody>", "</tbody></table>"],
tr: [3, "<table><tbody><tr>", "</tr></tbody></table>"],
colgroup: [2, "<table><tbody></tbody><colgroup>", "</colgroup></table>"],
map: [1, "<map>", "</map>"]
},
/**
* Workaround for browser bugs in IE. Equals to value of `STRIPS_INNER_HTML_SCRIPT_AND_STYLE_TAGS` capability
*/
_needs_shy: false,
/**
* Workaround for browser bugs in IE. Equals to value of `MOVES_WHITESPACE_BEFORE_SCRIPT` capability
*/
_moves_whitespace: false,
/**
* Init the object: choose appropriate methods for DOM manipulation, depending on browser capabilities
*/
init: function() {
var e = Firestorm.Environment;
this._needs_shy = e.capabilities[Firestorm.CAPABILITY_NAMES.STRIPS_INNER_HTML_SCRIPT_AND_STYLE_TAGS];
this._moves_whitespace = e.capabilities[Firestorm.CAPABILITY_NAMES.MOVES_WHITESPACE_BEFORE_SCRIPT];
if (Firestorm.schema.dom.PREFER_RANGE_API && e.capabilities[Firestorm.CAPABILITY_NAMES.SUPPORTS_RANGE]) {
this.insertHTMLBefore = this.insertHTMLBefore_Range;
this.insertHTMLAfter = this.insertHTMLAfter_Range;
this.insertHTMLTop = this.insertHTMLTop_Range;
this.insertHTMLBottom = this.insertHTMLBottom_Range;
this.clearOuterRange = this.clearOuterRange_Range;
this.clearInnerRange = this.clearInnerRange_Range;
this.replaceInnerRange = this.replaceInnerRange_Range;
this.moveRegionAfter = this.moveRegionAfter_Range;
this.moveRegionBefore = this.moveRegionBefore_Range;
} else {
this.insertHTMLBefore = this.insertHTMLBefore_Nodes;
this.insertHTMLAfter = this.insertHTMLAfter_Nodes;
this.insertHTMLTop = this.insertHTMLTop_Nodes;
this.insertHTMLBottom = this.insertHTMLBottom_Nodes;
this.clearOuterRange = this.clearOuterRange_Nodes;
this.clearInnerRange = this.clearInnerRange_Nodes;
this.replaceInnerRange = this.replaceInnerRange_Nodes;
this.moveRegionAfter = this.moveRegionAfter_Nodes;
this.moveRegionBefore = this.moveRegionBefore_Nodes;
}
},
/**
* Turn given HTML into DOM nodes and insert them before the given element
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLBefore: function(element, html) { Firestorm.t(1); },
/**
* Turn given HTML into DOM nodes and insert them after the given element
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLAfter: function(element, html) { Firestorm.t(1); },
/**
* Turn given HTML into DOM nodes and insert them inside the given element, at the top of it
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLTop: function(element, html) { Firestorm.t(1); },
/**
* Turn given HTML into DOM nodes and insert them inside the given element, at the bottom
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLBottom: function(element, html) { Firestorm.t(1); },
/**
* Remove all HTML nodes between the given elements and elements themselves
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
*/
clearOuterRange: function(start_element, end_element) { Firestorm.t(1); },
/**
* Remove all HTML nodes between the given elements
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
*/
clearInnerRange: function(start_element, end_element) { Firestorm.t(1); },
/**
* Remove all HTML nodes between the elements and insert the given html there
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
* @param {string} html
*/
replaceInnerRange: function(start_element, end_element, html) { Firestorm.t(1); },
/**
* Move `region_start_element`, `region_end_element` and all elements between them before `target`
* @param {HTMLElement} target
* @param {HTMLElement} region_start_element
* @param {HTMLElement} region_end_element
*/
moveRegionBefore: function(target, region_start_element, region_end_element) { Firestorm.t(1); },
/**
* Move `region_start_element`, `region_end_element` and all elements between them after `target`
* @param {HTMLElement} target
* @param {HTMLElement} region_start_element
* @param {HTMLElement} region_end_element
*/
moveRegionAfter: function(target, region_start_element, region_end_element) { Firestorm.t(1); },
/**
* Turn HTML into nodes and insert them relatively to the given element
* @param {HTMLElement} element
* @param {string} html
* @param {_eInsertPosition} [position='Bottom']
*/
insertHTML: function(element, html, position) {
this['insertHTML' + (position || 'Bottom')](element, html);
},
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// nodes api
/**
* Set the element's innerHTML, taking into account various browser bugs
* @param {HTMLElement} element
* @param {string} html
*/
_setInnerHTML: function(element, html) {
var matches,
count,
i,
script;
if (this._moves_whitespace) {
matches = [];
// Right now we only check for script tags with ids with the goal of targeting morphs.
// Remove space before script to insert it later.
html = html.replace(/(\s+)(<script id='([^']+)')/g, function(match, spaces, tag, id) {
matches.push([id, spaces]);
return tag;
});
}
element.innerHTML = html;
// If we have to do any whitespace adjustments, do them now
if (matches && matches.length > 0) {
count = matches.length;
for (i = 0; i < count; i++) {
script = Firestorm.Element.findChildById(element, matches[i][0]);
script.parentNode.insertBefore(document.createTextNode(matches[i][1]), script);
}
}
},
/**
* Given a parent node and some HTML, generate a set of nodes. Return the first
* node, which will allow us to traverse the rest using nextSibling.
*
* In cases of certain elements like tables and lists we cannot just assign innerHTML and get the nodes,
* cause innerHTML is either readonly on them in IE, or it would destroy some of the content
*
* @param {HTMLElement} parentNode
* @param {string} html
**/
_firstNodeFor: function(parentNode, html) {
var map = this._wrap_map[parentNode.nodeName.toLowerCase()] || [ 0, "", "" ],
depth = map[0],
start = map[1],
end = map[2],
element,
i,
shy_element;
if (this._needs_shy) {
// make the first tag an invisible text node to retain scripts and styles at the beginning
html = '­' + html;
}
element = document.createElement('div');
this._setInnerHTML(element, start + html + end);
for (i = 0; i <= depth; i++) {
element = element.firstChild;
}
if (this._needs_shy) {
// Look for ­ to remove it.
shy_element = element;
// Sometimes we get nameless elements with the shy inside
while (shy_element.nodeType === 1 && !shy_element.nodeName) {
shy_element = shy_element.firstChild;
}
// At this point it's the actual unicode character.
if (shy_element.nodeType === 3 && shy_element.nodeValue.charAt(0) === "\u00AD") {
shy_element.nodeValue = shy_element.nodeValue.slice(1);
}
}
return element;
},
/**
* Remove everything between two tags
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
*/
clearInnerRange_Nodes: function(start_element, end_element) {
var parent_node = start_element.parentNode,
node = start_element.nextSibling;
while (node && node !== end_element) {
parent_node.removeChild(node);
node = start_element.nextSibling;
}
},
/**
* Version of clearOuterRange, which manipulates HTML nodes
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
*/
clearOuterRange_Nodes: function(start_element, end_element) {
this.clearInnerRange_Nodes(start_element, end_element);
start_element.parentNode.removeChild(start_element);
end_element.parentNode.removeChild(end_element);
},
/**
* Version of replaceInnerRange, which manipulates HTML nodes
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
* @param {string} html
*/
replaceInnerRange_Nodes: function(start_element, end_element, html) {
this.clearInnerRange_Nodes(start_element, end_element);
this.insertHTMLBefore_Nodes(end_element, html);
},
/**
* Turn HTML into nodes with respect to the parent node and sequentially insert them before `insert_before` element
* @param {HTMLElement} parent_node
* @param {string} html
* @param {HTMLElement} insert_before
*/
_insertHTMLBefore: function(parent_node, html, insert_before) {
var node,
next_sibling;
node = this._firstNodeFor(parent_node, html);
while (node) {
next_sibling = node.nextSibling;
parent_node.insertBefore(node, insert_before);
node = next_sibling;
}
},
/**
* Version of insertHTMLAfter which works with nodes
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLAfter_Nodes: function(element, html) {
this._insertHTMLBefore(element.parentNode, html, element.nextSibling);
},
/**
* Version of insertHTMLBefore which works with nodes
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLBefore_Nodes: function(element, html) {
this._insertHTMLBefore(element.parentNode, html, element);
},
/**
* Version of insertHTMLTop which works with nodes
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLTop_Nodes: function(element, html) {
this._insertHTMLBefore(element, html, element.firstChild);
},
/**
* Version of insertHTMLBottom which works with nodes
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLBottom_Nodes: function(element, html) {
this._insertHTMLBefore(element, html, null);
},
/**
* Perform movement of a range of nodes
* @param {HTMLElement} parent
* @param {HTMLElement} target
* @param {HTMLElement} node
* @param {HTMLElement} region_end_element
*/
_moveRegionBefore: function(parent, target, node, region_end_element) {
var next_sibling;
while (node && node !== region_end_element) {
next_sibling = node.nextSibling;
parent.insertBefore(node, target);
node = next_sibling;
}
parent.insertBefore(region_end_element, target);
},
/**
* Version of `moveRegionBefore`, which works with DOM nodes.
* @param {HTMLElement} target
* @param {HTMLElement} region_start_element
* @param {HTMLElement} region_end_element
*/
moveRegionBefore_Nodes: function(target, region_start_element, region_end_element) {
this._moveRegionBefore(target.parentNode, target, region_start_element, region_end_element);
},
/**
* Version of `moveRegionAfter`, which works with DOM nodes.
* @param {HTMLElement} target
* @param {HTMLElement} region_start_element
* @param {HTMLElement} region_end_element
*/
moveRegionAfter_Nodes: function(target, region_start_element, region_end_element) {
this._moveRegionBefore(target.parentNode, target.nextSibling, region_start_element, region_end_element);
},
// endL nodes api
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// range api
/**
* Create a Range object, with limits between the given elements
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
* @returns {Range|TextRange}
*/
_createInnerRange: function(start_element, end_element) {
var range = document.createRange();
range.setStartAfter(start_element);
range.setEndBefore(end_element);
return range;
},
/**
* Create a Range object, which includes the given elements
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
* @returns {Range|TextRange}
*/
_createOuterRange: function(start_element, end_element) {
var range = document.createRange();
range.setStartBefore(start_element);
range.setEndAfter(end_element);
return range;
},
/**
* Version of replaceInnerRange, which works with Range API
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
* @param {string} html
*/
replaceInnerRange_Range: function(start_element, end_element, html) {
var range = this._createInnerRange(start_element, end_element);
range.deleteContents();
range.insertNode(range.createContextualFragment(html));
},
/**
* Version of clearOuterRange, which works with Range API
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
*/
clearOuterRange_Range: function(start_element, end_element) {
var range = this._createOuterRange(start_element, end_element);
range.deleteContents();
},
/**
* Version of clearInnerRange, which works with Range API
* @param {HTMLElement} start_element
* @param {HTMLElement} end_element
*/
clearInnerRange_Range: function(start_element, end_element) {
var range = this._createInnerRange(start_element, end_element);
range.deleteContents();
},
/**
* Version of insertHTMLAfter, which works with Range API
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLAfter_Range: function(element, html) {
var range = document.createRange();
range.setStartAfter(element);
range.setEndAfter(element);
range.insertNode(range.createContextualFragment(html));
},
/**
* Version of insertHTMLBefore, which works with Range API
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLBefore_Range: function(element, html) {
var range = document.createRange();
range.setStartBefore(element);
range.setEndBefore(element);
range.insertNode(range.createContextualFragment(html));
},
/**
* Version of insertHTMLTop, which works with Range API
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLTop_Range: function(element, html) {
var range = document.createRange();
range.setStart(element, 0);<|fim▁hole|> },
/**
* Version of insertHTMLBottom, which works with Range API
* @param {HTMLElement} element
* @param {string} html
*/
insertHTMLBottom_Range: function(element, html) {
var last_child = element.lastChild,
range;
if (last_child) {
range = document.createRange();
range.setStartAfter(last_child);
range.collapse(true);
range.insertNode(range.createContextualFragment(html));
} else {
this.insertHTMLTop_Range(element, html);
}
},
/**
* Version of `moveRegionBefore`, which works with ranges
* @param {HTMLElement} target
* @param {HTMLElement} region_start_element
* @param {HTMLElement} region_end_element
*/
moveRegionBefore_Range: function(target, region_start_element, region_end_element) {
target.parentNode.insertBefore(
this._createOuterRange(region_start_element, region_end_element).extractContents(),
target
);
},
/**
* Version of `moveRegionAfter`, which works with ranges
* @param {HTMLElement} target
* @param {HTMLElement} region_start_element
* @param {HTMLElement} region_end_element
*/
moveRegionAfter_Range: function(target, region_start_element, region_end_element) {
target.parentNode.insertBefore(
this._createOuterRange(region_start_element, region_end_element).extractContents(),
target.nextSibling
);
}
// end: range api
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
};<|fim▁end|>
|
range.collapse(true);
range.insertNode(range.createContextualFragment(html));
|
<|file_name|>sanity-check-users-and-groups.py<|end_file_name|><|fim▁begin|>####
#### Give a report on the "sanity" of the users and groups YAML
#### metadata files.
####
#### Example usage to analyze the usual suspects:
#### python3 sanity-check-users-and-groups.py --help
#### Get report of current problems:
#### python3 ./scripts/sanity-check-users-and-groups.py --users metadata/users.yaml --groups metadata/groups.yaml
#### Attempt to repair file (note that we go through json2yaml as libyaml output does not seem compatible with kwalify):
#### python3 ./scripts/sanity-check-users-and-groups.py --users metadata/users.yaml --groups metadata/groups.yaml --repair --output /tmp/output.json && json2yaml --depth 10 /tmp/output.json > /tmp/users.yaml
#### Check new yaml:
#### kwalify -E -f metadata/users.schema.yaml /tmp/users.yaml
#### Run report on new yaml.
#### reset && python3 ./scripts/sanity-check-users-and-groups.py --users /tmp/users.yaml --groups metadata/groups.yaml
import sys
import argparse
import logging
import yaml
import json
## Logger basic setup.
logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger('sanity')
LOGGER.setLevel(logging.WARNING)
## Make sure we exit in a way that will get Jenkins's attention.
DIED_SCREAMING_P = False
def die_screaming(string):
""" Die and take our toys home. """
global DIED_SCREAMING_P
LOGGER.error(string)
DIED_SCREAMING_P = True
#sys.exit(1)
def main():
## Deal with incoming.
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-v', '--verbose', action='store_true',
help='More verbose output')
parser.add_argument('-u', '--users',
help='The users.yaml file to act on')
parser.add_argument('-g', '--groups',
help='The groups.yaml file to act on')
parser.add_argument("-r", "--repair", action="store_true",
help="Attempt to repair groups and update old permissions")
parser.add_argument("-o", "--output",
help="The file to output internal structure to (if repairing)")
args = parser.parse_args()
if args.verbose:
LOGGER.setLevel(logging.INFO)
LOGGER.info('Verbose: on')
## Ensure targets.
if not args.users:
die_screaming('need a users argument')
LOGGER.info('Will operate on users: ' + args.users)
if not args.groups:
die_screaming('need a groups argument')
LOGGER.info('Will operate on groups: ' + args.groups)
## Read.
users = None
with open(args.users) as mhandle:
users = yaml.safe_load(mhandle.read())
groups_linear = None
with open(args.groups) as mhandle:
groups_linear = yaml.safe_load(mhandle.read())
## Switch linear groups to lookup by URI.
groups_lookup = {}
for group in groups_linear:
groups_lookup[group['id']] = group['label']
violations = {
"uri": [],
"groups": [],
}
## Cycle through users and see if we find any violations.
for index, user in enumerate(users):
nick = user.get('nickname', '???')
## Update old authorizations type.
if args.repair:
if user.get("authorizations", {}).get("noctua-go", False):
print('REPAIR?: Update perms for ' + nick)
auths = user["authorizations"]["noctua-go"]
del user["authorizations"]["noctua-go"] # delete old way
user["authorizations"]["noctua"] = {
"go": auths
}
users[index] = user # save new back into list
## Does the user have noctua perms?
if user.get('authorizations', False):
auth = user.get('authorizations', {})
if auth.get('noctua-go', False) or \
(auth.get('noctua', False) and auth['noctua'].get('go', False)):
#print('Has perms: ' + user.get('nickname', '???'))
## 1: If so, do they have a URI?
if not user.get('uri', False):
die_screaming(user.get('nickname', '???') +\
' has no "uri"')
#print(nick + ' has no "uri"')
violations["uri"].append(nick)
else:
## 2: Is it an ORCID?
if user.get('uri', 'NIL').find('orcid') == -1:
die_screaming(user.get('nickname', '???') +\
' "uri" is not an ORCID.')
#print(nick + ' "uri" is not an ORCID.')
violations["uri"].append(nick)
## 3: If so, do they have a populated groups?
if not user.get('groups', False) or len(user["groups"]) == 0:
die_screaming(user.get('nickname', '???') +\
' has no "groups"')
#print(nick + ' has no "groups"')
if user.get("organization", False):
org = user["organization"]
print(nick + " could try org {}".format(org))
matching_groups = list(filter(lambda g: org == g["label"] or org == g["shorthand"], groups_linear))
if len(matching_groups) > 0:
print("REPAIR?: Use group: {}".format(matching_groups[0]["id"]))
if args.repair:
user["groups"] = [matching_groups[0]["id"]]
users[index] = user
else:
violations["groups"].append(nick)
else:
## 4: If so, are all entries in groups?
for gid in user.get('groups'):
if not groups_lookup.get(gid, False):
die_screaming(user.get('nickname', '???') +\
' has mistaken group entry: ' + gid)
#print(nick + ' has mistaken group entry: ' + gid)
<|fim▁hole|>
## Check privs.
for index, user in enumerate(users):
if user["nickname"] in just_uri or user["nickname"] in just_groups:
# If we have an auth with noctua-go with allow-edit set to True
if user.get("authorizations", {}).get("noctua", {}).get("go", {}).get("allow-edit", False):
print("REPAIR?: Revoke {} noctua-go edit privileges.".format(user["nickname"]))
if args.repair:
del user["authorizations"]
users[index] = user
print("\nNo URI, or no ORCID:")
print("===================")
print("\n".join(just_uri))
print("\nNo Groups:")
print("===================")
print("\n".join(just_groups))
print("\nBoth Bad:")
print("===================")
print("\n".join(violates_both))
#print(json.dumps(users))
#print(yaml.dump(users, default_flow_style=False))
#yaml.dump(data, default_flow_style=False)
if args.output:
with open(args.output, 'w+') as fhandle:
fhandle.write(json.dumps(users, sort_keys=True, indent=4))
## TODO: implement hard checks above later.
if DIED_SCREAMING_P:
print('Errors happened, alert the sheriff.')
sys.exit(1)
else:
print('Non-failing run.')
## You saw it coming...
if __name__ == '__main__':
main()<|fim▁end|>
|
violates_both = set(violations["uri"]).intersection(violations["groups"])
just_uri = set(violations["uri"]).difference(violates_both)
just_groups = set(violations["groups"]).difference(violates_both)
|
<|file_name|>visualization.js<|end_file_name|><|fim▁begin|>var visualization = function() {
var vizData;
var vizApp = this;
var padding = 40; // padding between groups
var max_group_width = 600;
// TODO: this assumes fixed note width and height, potentially handle for importance of notes
var max_note_width = 240;
var max_note_height = 110;
var container_width;
var max_groups_in_row;
var arrayOfNotes = [];
var folderNameH6, vizContainer, colorButtons, layoutButtons, saveLayoutButton, notes;
var init = function() {
folderNameH6 = $('.js-group-name');
vizContainer = $('.js-vizContainer');
colorButtons = $('.coloring li');
layoutButtons = $('.positioning .layout');
saveLayoutButton = $('li[data-action="save_custom"]').hide()
}
var setData = function(data) {
vizData = data;
startViz();
}
var startViz = function() {
console.log(vizData);
vizContainer.fadeIn();
container_width = vizContainer.width();
max_groups_in_row = parseInt(container_width / max_group_width) + 1;
vizData.folderSpecificAnnotations.map(createNote);
setGroupPositions(arrayOfNotes, 'category', true);
saveCustomLayout();
notesEls = vizContainer.find('.ui-draggable');
notesEls.hover(function(){
var maxZ = 0;
notesEls.each(function() {
var index_current = parseInt($(this).css("zIndex"), 10);
if(index_current > maxZ) {
maxZ = index_current;
}
});
$(this).css('zIndex', maxZ+1);
})
colorButtons.on('click', function() {
colorButtons.removeClass('active');
$(this).addClass('active');
var cat = $(this).attr('data-color')
console.log(cat);
colorNotes(cat);
});
layoutButtons.on('click', function() {
layoutButtons.removeClass('active');
$(this).addClass('active');
var cat = $(this).attr('data-layout')
console.log(cat);
rearrangeNotes(cat);
});
saveLayoutButton.on('click', saveCustomLayout);
}
var setGroupPositions = function(notes, type, start) {
// create a map representing a group:
// category:{'notes': [notes], 'height': height of a group based on num notes in group, 'posy': y position}
var groups = {};
var category;
notes.map(function(note) {
if (type == 'category') { // group by topic
category = note.getCategory();
} else { // group by paper
category = note.getPaperId();
}
if (category in groups) {
groups[category]['notes'].push(note);
} else {
groups[category] = {'notes':[note], 'height': 0, 'posy': 0};
}
});
// create grid-positioning for groups
// determine the height of each by the number of notes in the group
// height will be used to offset groups underneath other groups
// width is limited by max_group_width, which currently only fits 2 notes (2 notes in a row within a group)
for (var category in groups) {
var group_notes = groups[category]['notes'];
groups[category]['height'] = Math.ceil(group_notes.length/2) * (max_note_height + padding);
}
// set height for groups in rows in the viz beyond first row
var group_keys = Object.keys(groups);
for (var i = max_groups_in_row; i < group_keys.length; i++) {
var key = group_keys[i];
// get key of the group directly above this group in the grid
var group_above_key = group_keys[i-max_groups_in_row];
groups[key]['posy'] += groups[group_above_key]['height'] + groups[group_above_key]['posy'];
}
console.log(groups);
// set note positions
left_order = 0; // order of groups in a row
for (var category in groups) {
console.log(category);
var group_notes = groups[category]['notes'];
for (var i = 0; i < group_notes.length; i++) {
var note = group_notes[i];
// get left position
var left;
if (i % 2 == 0) {
left = left_order * max_group_width;
} else {
left = left_order * max_group_width + max_note_width;
}
// get top position
var top;
if (i % 2 == 0) {
top = groups[category]['posy'] + ((i/2)*max_note_height)
} else {
top = groups[category]['posy'] + (parseInt(i/2)*max_note_height)
}
note.position([top,left], start);
}
left_order++;
if (left_order >= max_groups_in_row) {
left_order = 0;
}
}
}
var createNote = function(noteObj) {
var newNote = new Note(noteObj, vizApp, arrayOfNotes.length-1);
newNote.setBackground(
colorArray[Math.floor(Math.random()*colorArray.length)]);
arrayOfNotes.push(newNote);<|fim▁hole|> if(arrangement == "custom") {
arrayOfNotes.map(function(note) {
var pos = note.customPosition();
note.position(pos);
});
saveLayoutButton.fadeOut();
} else {
setGroupPositions(arrayOfNotes, arrangement);
}
}
var colorNotes = function(criteria) {
if(criteria != "") {
var arrayOfCriteria = generateArrayOfAttributes(criteria, arrayOfNotes);
arrayOfNotes.map(function(note) {
var attr = note.getNoteAttr(criteria);
note.setBackground(colorArray[arrayOfCriteria.indexOf(attr)])
});
} else {
arrayOfNotes.map(function(note) {
note.setBackground('#eee')
});
}
}
var saveCustomLayout = function() {
arrayOfNotes.map(function(note) {
var pos = note.position();
console.log("custom layout");
console.log(pos);
note.customPosition(pos);
});
saveLayoutButton.fadeOut();
}
this.showSaveLayoutButton = function() {
console.log('save button')
saveLayoutButton.fadeIn();
}
return {
setData : setData,
init : init
}
}
function generateArrayOfAttributes(criteria, arrayOfNotes) {
var rawArray = arrayOfNotes.map(function(note) {
return note.getNoteAttr(criteria);
});
return rawArray.getUnique();
}
//http://stackoverflow.com/questions/1960473/unique-values-in-an-array
Array.prototype.getUnique = function(){
var u = {}, a = [];
for(var i = 0, l = this.length; i < l; ++i){
if(u.hasOwnProperty(this[i])) {
continue;
}
a.push(this[i]);
u[this[i]] = 1;
}
return a;
}
var colorArray =
[
"#E1BEE7",
"#D1C4E9",
"#C5CAE9",
"#BBDEFB",
"#B2EBF2",
"#DCEDC8",
"#FFECB3",
"#D7CCC8",
"#CFD8DC",
"#FFCDD2",
"#F8BBD0"
]<|fim▁end|>
|
}
var rearrangeNotes = function(arrangement) {
|
<|file_name|>menu.rs<|end_file_name|><|fim▁begin|>// Copyright 2016, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use ffi;
use glib_ffi;
use glib::object::Cast;
use glib::translate::*;
use Menu;
use IsA;
use Widget;
use std::boxed::Box as Box_;
use libc::c_int;
use std::ptr;
pub trait GtkMenuExtManual: 'static {
fn popup<T: IsA<Widget>, U: IsA<Widget>,
F: Fn(&Self, &mut i32, &mut i32) -> bool + 'static>(
&self, parent_menu_shell: Option<&T>, parent_menu_item: Option<&U>, f: F,
button: u32, activate_time: u32);
fn popup_easy(&self, button: u32, activate_time: u32);
}
impl<O: IsA<Menu>> GtkMenuExtManual for O {
fn popup<T: IsA<Widget>, U: IsA<Widget>,
F: FnOnce(&Self, &mut i32, &mut i32) -> bool + 'static>(
&self, parent_menu_shell: Option<&T>, parent_menu_item: Option<&U>, f: F,
button: u32, activate_time: u32) {
unsafe {
let f: Box_<Option<F>> = Box_::new(Some(f));
ffi::gtk_menu_popup(self.as_ref().to_glib_none().0, parent_menu_shell.map(|p| p.as_ref()).to_glib_none().0,
parent_menu_item.map(|p| p.as_ref()).to_glib_none().0,
Some(position_callback::<Self, F>),
Box_::into_raw(f) as *mut _, button, activate_time)
}<|fim▁hole|>
fn popup_easy(&self, button: u32, activate_time: u32) {
unsafe {
ffi::gtk_menu_popup(self.as_ref().to_glib_none().0, ptr::null_mut(),
ptr::null_mut(), None, ptr::null_mut(),
button, activate_time)
}
}
}
unsafe extern "C" fn position_callback<T, F: FnOnce(&T, &mut i32, &mut i32) -> bool + 'static>(this: *mut ffi::GtkMenu,
x: *mut c_int,
y: *mut c_int,
push_in: *mut glib_ffi::gboolean,
f: glib_ffi::gpointer)
where T: IsA<Menu> {
let mut f: Box<Option<F>> = Box::from_raw(f as *mut _);
let f = f.take().expect("No callback");
*push_in = f(&Menu::from_glib_none(this).unsafe_cast(), x.as_mut().unwrap(),
y.as_mut().unwrap()).to_glib();
}<|fim▁end|>
|
}
|
<|file_name|>OpenSsl.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import io.netty.buffer.ByteBuf;
import io.netty.util.internal.NativeLibraryLoader;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import org.apache.tomcat.jni.Buffer;
import org.apache.tomcat.jni.Library;
import org.apache.tomcat.jni.Pool;
import org.apache.tomcat.jni.SSL;
import org.apache.tomcat.jni.SSLContext;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Set;
/**
* Tells if <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support
* are available.
*/
public final class OpenSsl {
private static final InternalLogger logger = InternalLoggerFactory.getInstance(OpenSsl.class);
private static final String LINUX = "linux";
private static final String UNKNOWN = "unknown";
private static final Throwable UNAVAILABILITY_CAUSE;
private static final Set<String> AVAILABLE_CIPHER_SUITES;
static {
Throwable cause = null;
// Test if netty-tcnative is in the classpath first.
try {
Class.forName("org.apache.tomcat.jni.SSL", false, OpenSsl.class.getClassLoader());
} catch (ClassNotFoundException t) {
cause = t;
logger.debug(
"netty-tcnative not in the classpath; " +
OpenSslEngine.class.getSimpleName() + " will be unavailable.");
}
// If in the classpath, try to load the native library and initialize netty-tcnative.
if (cause == null) {
try {
// The JNI library was not already loaded. Load it now.
loadTcNative();
} catch (Throwable t) {
cause = t;
logger.debug(
"Failed to load netty-tcnative; " +
OpenSslEngine.class.getSimpleName() + " will be unavailable, unless the " +
"application has already loaded the symbols by some other means. " +
"See http://netty.io/wiki/forked-tomcat-native.html for more information.", t);
}
try {
initializeTcNative();
// The library was initialized successfully. If loading the library failed above,
// reset the cause now since it appears that the library was loaded by some other
// means.
cause = null;
} catch (Throwable t) {
if (cause == null) {
cause = t;
}
logger.debug(
"Failed to initialize netty-tcnative; " +
OpenSslEngine.class.getSimpleName() + " will be unavailable. " +
"See http://netty.io/wiki/forked-tomcat-native.html for more information.", t);
}
}
UNAVAILABILITY_CAUSE = cause;
if (cause == null) {
final Set<String> availableCipherSuites = new LinkedHashSet<String>(128);
final long aprPool = Pool.create(0);
try {
final long sslCtx = SSLContext.make(aprPool, SSL.SSL_PROTOCOL_ALL, SSL.SSL_MODE_SERVER);
try {
SSLContext.setOptions(sslCtx, SSL.SSL_OP_ALL);
SSLContext.setCipherSuite(sslCtx, "ALL");
final long ssl = SSL.newSSL(sslCtx, true);
try {
for (String c: SSL.getCiphers(ssl)) {
// Filter out bad input.
if (c == null || c.length() == 0 || availableCipherSuites.contains(c)) {
continue;
}
availableCipherSuites.add(c);
}
} finally {
SSL.freeSSL(ssl);
}
} finally {
SSLContext.free(sslCtx);
}
} catch (Exception e) {
logger.warn("Failed to get the list of available OpenSSL cipher suites.", e);
} finally {
Pool.destroy(aprPool);
}
AVAILABLE_CIPHER_SUITES = Collections.unmodifiableSet(availableCipherSuites);
} else {
AVAILABLE_CIPHER_SUITES = Collections.emptySet();
}
}
/**
* Returns {@code true} if and only if
* <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support
* are available.
*/
public static boolean isAvailable() {
return UNAVAILABILITY_CAUSE == null;
}
/**
* Returns {@code true} if the used version of openssl supports
* <a href="https://tools.ietf.org/html/rfc7301">ALPN</a>.
*/
public static boolean isAlpnSupported() {
return version() >= 0x10002000L;
}
/**
* Returns the version of the used available OpenSSL library or {@code -1} if {@link #isAvailable()}
* returns {@code false}.
*/
public static int version() {
if (isAvailable()) {
return SSL.version();
}
return -1;
}
/**
* Returns the version string of the used available OpenSSL library or {@code null} if {@link #isAvailable()}
* returns {@code false}.
*/
public static String versionString() {
if (isAvailable()) {
return SSL.versionString();
}
return null;
}
/**
* Ensure that <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and
* its OpenSSL support are available.
*
* @throws UnsatisfiedLinkError if unavailable
*/
public static void ensureAvailability() {
if (UNAVAILABILITY_CAUSE != null) {
throw (Error) new UnsatisfiedLinkError(
"failed to load the required native library").initCause(UNAVAILABILITY_CAUSE);
}
}
/**
* Returns the cause of unavailability of<|fim▁hole|> public static Throwable unavailabilityCause() {
return UNAVAILABILITY_CAUSE;
}
/**
* Returns all the available OpenSSL cipher suites.
* Please note that the returned array may include the cipher suites that are insecure or non-functional.
*/
public static Set<String> availableCipherSuites() {
return AVAILABLE_CIPHER_SUITES;
}
/**
* Returns {@code true} if and only if the specified cipher suite is available in OpenSSL.
* Both Java-style cipher suite and OpenSSL-style cipher suite are accepted.
*/
public static boolean isCipherSuiteAvailable(String cipherSuite) {
String converted = CipherSuiteConverter.toOpenSsl(cipherSuite);
if (converted != null) {
cipherSuite = converted;
}
return AVAILABLE_CIPHER_SUITES.contains(cipherSuite);
}
static boolean isError(long errorCode) {
return errorCode != SSL.SSL_ERROR_NONE;
}
static long memoryAddress(ByteBuf buf) {
assert buf.isDirect();
return buf.hasMemoryAddress() ? buf.memoryAddress() : Buffer.address(buf.nioBuffer());
}
private OpenSsl() { }
private static void loadTcNative() throws Exception {
String os = normalizeOs(SystemPropertyUtil.get("os.name", ""));
String arch = normalizeArch(SystemPropertyUtil.get("os.arch", ""));
Set<String> libNames = new LinkedHashSet<String>(3);
// First, try loading the platform-specific library. Platform-specific
// libraries will be available if using a tcnative uber jar.
libNames.add("netty-tcnative-" + os + '-' + arch);
if (LINUX.equalsIgnoreCase(os)) {
// Fedora SSL lib so naming (libssl.so.10 vs libssl.so.1.0.0)..
libNames.add("netty-tcnative-" + os + '-' + arch + "-fedora");
}
// finally the default library.
libNames.add("netty-tcnative");
NativeLibraryLoader.loadFirstAvailable(SSL.class.getClassLoader(),
libNames.toArray(new String[libNames.size()]));
}
private static void initializeTcNative() throws Exception {
Library.initialize("provided");
SSL.initialize(null);
}
private static String normalizeOs(String value) {
value = normalize(value);
if (value.startsWith("aix")) {
return "aix";
}
if (value.startsWith("hpux")) {
return "hpux";
}
if (value.startsWith("os400")) {
// Avoid the names such as os4000
if (value.length() <= 5 || !Character.isDigit(value.charAt(5))) {
return "os400";
}
}
if (value.startsWith(LINUX)) {
return LINUX;
}
if (value.startsWith("macosx") || value.startsWith("osx")) {
return "osx";
}
if (value.startsWith("freebsd")) {
return "freebsd";
}
if (value.startsWith("openbsd")) {
return "openbsd";
}
if (value.startsWith("netbsd")) {
return "netbsd";
}
if (value.startsWith("solaris") || value.startsWith("sunos")) {
return "sunos";
}
if (value.startsWith("windows")) {
return "windows";
}
return UNKNOWN;
}
private static String normalizeArch(String value) {
value = normalize(value);
if (value.matches("^(x8664|amd64|ia32e|em64t|x64)$")) {
return "x86_64";
}
if (value.matches("^(x8632|x86|i[3-6]86|ia32|x32)$")) {
return "x86_32";
}
if (value.matches("^(ia64|itanium64)$")) {
return "itanium_64";
}
if (value.matches("^(sparc|sparc32)$")) {
return "sparc_32";
}
if (value.matches("^(sparcv9|sparc64)$")) {
return "sparc_64";
}
if (value.matches("^(arm|arm32)$")) {
return "arm_32";
}
if ("aarch64".equals(value)) {
return "aarch_64";
}
if (value.matches("^(ppc|ppc32)$")) {
return "ppc_32";
}
if ("ppc64".equals(value)) {
return "ppc_64";
}
if ("ppc64le".equals(value)) {
return "ppcle_64";
}
if ("s390".equals(value)) {
return "s390_32";
}
if ("s390x".equals(value)) {
return "s390_64";
}
return UNKNOWN;
}
private static String normalize(String value) {
return value.toLowerCase(Locale.US).replaceAll("[^a-z0-9]+", "");
}
}<|fim▁end|>
|
* <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support.
*
* @return the cause if unavailable. {@code null} if available.
*/
|
<|file_name|>pagination.constants.js<|end_file_name|><|fim▁begin|>(function() {
'use strict';
angular
.module('siteApp')
.constant('paginationConstants', {
'itemsPerPage': 20
});<|fim▁hole|><|fim▁end|>
|
})();
|
<|file_name|>ActivePowerLimit.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.OperationalLimits.OperationalLimit import OperationalLimit
class ActivePowerLimit(OperationalLimit):
"""Limit on active power flow.
"""
def __init__(self, value=0.0, *args, **kw_args):
"""Initialises a new 'ActivePowerLimit' instance.<|fim▁hole|> #: Value of active power limit.
self.value = value
super(ActivePowerLimit, self).__init__(*args, **kw_args)
_attrs = ["value"]
_attr_types = {"value": float}
_defaults = {"value": 0.0}
_enums = {}
_refs = []
_many_refs = []<|fim▁end|>
|
@param value: Value of active power limit.
"""
|
<|file_name|>test1.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
window.test = 1;
|
<|file_name|>many_many_relation.py<|end_file_name|><|fim▁begin|>from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, Table
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
# demo many to many relationship
# http://docs.sqlalchemy.org/en/rel_0_9/orm/basic_relationships.html#many-to-many
engine = create_engine('sqlite:///manymany.db')
Base = declarative_base()
# Association table linking the two tables
# Also see: http://docs.sqlalchemy.org/en/rel_0_9/orm/basic_relationships.html#association-object
member_club_mapping = Table('member_club_mapping', Base.metadata,
Column('member_id', Integer, ForeignKey('member.id')),
Column('club_id', Integer, ForeignKey('club.id')))
class Member(Base):
__tablename__ = 'member'
id = Column(Integer, primary_key=True)
first_name = Column(String)
last_name = Column(String)
clubs = relationship('Club', back_populates='members',
secondary=member_club_mapping)
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
class Club(Base):
__tablename__ = 'club'
id = Column(Integer, primary_key=True)
name = Column(String)
members = relationship('Member', back_populates='clubs',
secondary=member_club_mapping)
def __init__(self, name):
self.name = name
# create tables
Base.metadata.create_all(engine)
# create a Session
Session = sessionmaker(bind=engine)
session = Session()
# Populate
member1 = Member('John', 'Doe')
club1 = Club('Club dub')
club1.members.append(member1)
session.add(club1)
club2 = Club('Club dub dub')
club2.members.append(member1)
session.add(club2)
club3 = Club('Club dub step')
session.add(club3)
member2 = Member('Jane', 'Allen')
member2.clubs.extend([club1, club2])
session.add(member2)
session.commit()
# query and print Member
res = session.query(Member).all()
for member in res:
print member.first_name, member.last_name , [club.name for club in member.clubs]
# query and print Club<|fim▁hole|>
print 'After removing members with first name: Jane'
# Remove a record
record = session.query(Member).filter(Member.first_name == 'Jane').all()
for r in record:
session.delete(r)
session.commit()
# query and print Member
res = session.query(Member).all()
for member in res:
print member.first_name, member.last_name , [club.name for club in member.clubs]
# query and print
res = session.query(Club).all()
for club in res:
print club.name, [(member.first_name, member.last_name) for member in club.members]
print 'After removing the club, Club dub'
# Remove a record
record = session.query(Club).filter(Club.name == 'Club dub').all()
for r in record:
session.delete(r)
session.commit()
# query and print Member
res = session.query(Member).all()
for member in res:
print member.first_name, member.last_name , [club.name for club in member.clubs]
# query and print
res = session.query(Club).all()
for club in res:
print club.name, [(member.first_name, member.last_name) for member in club.members]<|fim▁end|>
|
res = session.query(Club).all()
for club in res:
print club.name, [(member.first_name, member.last_name) for member in club.members]
|
<|file_name|>StatusCreated.java<|end_file_name|><|fim▁begin|>package co.edu.uniandes.csw.marketplace.providers;
<|fim▁hole|>@NameBinding
@Retention(RetentionPolicy.RUNTIME)
public @interface StatusCreated {}<|fim▁end|>
|
import javax.ws.rs.NameBinding;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
|
<|file_name|>resource_aws_directory_service_directory_test.go<|end_file_name|><|fim▁begin|>package aws
import (
"fmt"
"testing"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/directoryservice"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
func TestAccAWSDirectoryServiceDirectory_basic(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDirectoryServiceDirectoryDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccDirectoryServiceDirectoryConfig,
Check: resource.ComposeTestCheckFunc(
testAccCheckServiceDirectoryExists("aws_directory_service_directory.bar"),
),
},
},
})
}
func TestAccAWSDirectoryServiceDirectory_withAliasAndSso(t *testing.T) {
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckDirectoryServiceDirectoryDestroy,
Steps: []resource.TestStep{
resource.TestStep{
Config: testAccDirectoryServiceDirectoryConfig_withAlias,
Check: resource.ComposeTestCheckFunc(
testAccCheckServiceDirectoryExists("aws_directory_service_directory.bar_a"),
testAccCheckServiceDirectoryAlias("aws_directory_service_directory.bar_a",
fmt.Sprintf("tf-d-%d", randomInteger)),
testAccCheckServiceDirectorySso("aws_directory_service_directory.bar_a", false),
),<|fim▁hole|> Check: resource.ComposeTestCheckFunc(
testAccCheckServiceDirectoryExists("aws_directory_service_directory.bar_a"),
testAccCheckServiceDirectoryAlias("aws_directory_service_directory.bar_a",
fmt.Sprintf("tf-d-%d", randomInteger)),
testAccCheckServiceDirectorySso("aws_directory_service_directory.bar_a", true),
),
},
resource.TestStep{
Config: testAccDirectoryServiceDirectoryConfig_withSso_modified,
Check: resource.ComposeTestCheckFunc(
testAccCheckServiceDirectoryExists("aws_directory_service_directory.bar_a"),
testAccCheckServiceDirectoryAlias("aws_directory_service_directory.bar_a",
fmt.Sprintf("tf-d-%d", randomInteger)),
testAccCheckServiceDirectorySso("aws_directory_service_directory.bar_a", false),
),
},
},
})
}
func testAccCheckDirectoryServiceDirectoryDestroy(s *terraform.State) error {
if len(s.RootModule().Resources) > 0 {
return fmt.Errorf("Expected all resources to be gone, but found: %#v",
s.RootModule().Resources)
}
return nil
}
func testAccCheckServiceDirectoryExists(name string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[name]
if !ok {
return fmt.Errorf("Not found: %s", name)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No ID is set")
}
dsconn := testAccProvider.Meta().(*AWSClient).dsconn
out, err := dsconn.DescribeDirectories(&directoryservice.DescribeDirectoriesInput{
DirectoryIds: []*string{aws.String(rs.Primary.ID)},
})
if err != nil {
return err
}
if len(out.DirectoryDescriptions) < 1 {
return fmt.Errorf("No DS directory found")
}
if *out.DirectoryDescriptions[0].DirectoryId != rs.Primary.ID {
return fmt.Errorf("DS directory ID mismatch - existing: %q, state: %q",
*out.DirectoryDescriptions[0].DirectoryId, rs.Primary.ID)
}
return nil
}
}
func testAccCheckServiceDirectoryAlias(name, alias string) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[name]
if !ok {
return fmt.Errorf("Not found: %s", name)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No ID is set")
}
dsconn := testAccProvider.Meta().(*AWSClient).dsconn
out, err := dsconn.DescribeDirectories(&directoryservice.DescribeDirectoriesInput{
DirectoryIds: []*string{aws.String(rs.Primary.ID)},
})
if err != nil {
return err
}
if *out.DirectoryDescriptions[0].Alias != alias {
return fmt.Errorf("DS directory Alias mismatch - actual: %q, expected: %q",
*out.DirectoryDescriptions[0].Alias, alias)
}
return nil
}
}
func testAccCheckServiceDirectorySso(name string, ssoEnabled bool) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[name]
if !ok {
return fmt.Errorf("Not found: %s", name)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No ID is set")
}
dsconn := testAccProvider.Meta().(*AWSClient).dsconn
out, err := dsconn.DescribeDirectories(&directoryservice.DescribeDirectoriesInput{
DirectoryIds: []*string{aws.String(rs.Primary.ID)},
})
if err != nil {
return err
}
if *out.DirectoryDescriptions[0].SsoEnabled != ssoEnabled {
return fmt.Errorf("DS directory SSO mismatch - actual: %t, expected: %t",
*out.DirectoryDescriptions[0].SsoEnabled, ssoEnabled)
}
return nil
}
}
const testAccDirectoryServiceDirectoryConfig = `
resource "aws_directory_service_directory" "bar" {
name = "corp.notexample.com"
password = "SuperSecretPassw0rd"
size = "Small"
vpc_settings {
vpc_id = "${aws_vpc.main.id}"
subnet_ids = ["${aws_subnet.foo.id}", "${aws_subnet.bar.id}"]
}
}
resource "aws_vpc" "main" {
cidr_block = "10.0.0.0/16"
}
resource "aws_subnet" "foo" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2a"
cidr_block = "10.0.1.0/24"
}
resource "aws_subnet" "bar" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2b"
cidr_block = "10.0.2.0/24"
}
`
var randomInteger = genRandInt()
var testAccDirectoryServiceDirectoryConfig_withAlias = fmt.Sprintf(`
resource "aws_directory_service_directory" "bar_a" {
name = "corp.notexample.com"
password = "SuperSecretPassw0rd"
size = "Small"
alias = "tf-d-%d"
vpc_settings {
vpc_id = "${aws_vpc.main.id}"
subnet_ids = ["${aws_subnet.foo.id}", "${aws_subnet.bar.id}"]
}
}
resource "aws_vpc" "main" {
cidr_block = "10.0.0.0/16"
}
resource "aws_subnet" "foo" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2a"
cidr_block = "10.0.1.0/24"
}
resource "aws_subnet" "bar" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2b"
cidr_block = "10.0.2.0/24"
}
`, randomInteger)
var testAccDirectoryServiceDirectoryConfig_withSso = fmt.Sprintf(`
resource "aws_directory_service_directory" "bar_a" {
name = "corp.notexample.com"
password = "SuperSecretPassw0rd"
size = "Small"
alias = "tf-d-%d"
enable_sso = true
vpc_settings {
vpc_id = "${aws_vpc.main.id}"
subnet_ids = ["${aws_subnet.foo.id}", "${aws_subnet.bar.id}"]
}
}
resource "aws_vpc" "main" {
cidr_block = "10.0.0.0/16"
}
resource "aws_subnet" "foo" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2a"
cidr_block = "10.0.1.0/24"
}
resource "aws_subnet" "bar" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2b"
cidr_block = "10.0.2.0/24"
}
`, randomInteger)
var testAccDirectoryServiceDirectoryConfig_withSso_modified = fmt.Sprintf(`
resource "aws_directory_service_directory" "bar_a" {
name = "corp.notexample.com"
password = "SuperSecretPassw0rd"
size = "Small"
alias = "tf-d-%d"
enable_sso = false
vpc_settings {
vpc_id = "${aws_vpc.main.id}"
subnet_ids = ["${aws_subnet.foo.id}", "${aws_subnet.bar.id}"]
}
}
resource "aws_vpc" "main" {
cidr_block = "10.0.0.0/16"
}
resource "aws_subnet" "foo" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2a"
cidr_block = "10.0.1.0/24"
}
resource "aws_subnet" "bar" {
vpc_id = "${aws_vpc.main.id}"
availability_zone = "us-west-2b"
cidr_block = "10.0.2.0/24"
}
`, randomInteger)<|fim▁end|>
|
},
resource.TestStep{
Config: testAccDirectoryServiceDirectoryConfig_withSso,
|
<|file_name|>non-exhaustive-pattern-witness.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(advanced_slice_patterns)]
struct Foo {
first: bool,
second: Option<[uint; 4]>
}
enum Color {
Red,
Green,
CustomRGBA { a: bool, r: u8, g: u8, b: u8 }
}
fn struct_with_a_nested_enum_and_vector() {
match (Foo { first: true, second: None }) {
//~^ ERROR non-exhaustive patterns: `Foo { first: false, second: Some([_, _, _, _]) }` not covered
Foo { first: true, second: None } => (),
Foo { first: true, second: Some(_) } => (),
Foo { first: false, second: None } => (),
Foo { first: false, second: Some([1u, 2u, 3u, 4u]) } => ()
}
}
fn enum_with_multiple_missing_variants() {
match Color::Red {
//~^ ERROR non-exhaustive patterns: `Red` not covered
Color::CustomRGBA { .. } => ()
}
}
fn enum_struct_variant() {
match Color::Red {
//~^ ERROR non-exhaustive patterns: `CustomRGBA { a: true, .. }` not covered
Color::Red => (),
Color::Green => (),
Color::CustomRGBA { a: false, r: _, g: _, b: 0 } => (),
Color::CustomRGBA { a: false, r: _, g: _, b: _ } => ()
}
}
enum Enum {
First,
Second(bool)
}
fn vectors_with_nested_enums() {
let x: &'static [Enum] = &[Enum::First, Enum::Second(false)];
match x {
//~^ ERROR non-exhaustive patterns: `[Second(true), Second(false)]` not covered
[] => (),
[_] => (),
[Enum::First, _] => (),
[Enum::Second(true), Enum::First] => (),
[Enum::Second(true), Enum::Second(true)] => (),
[Enum::Second(false), _] => (),
[_, _, tail.., _] => ()
}
}
fn missing_nil() {
match ((), false) {
//~^ ERROR non-exhaustive patterns: `((), false)` not covered
((), true) => ()
}
}
fn main() {}<|fim▁end|>
|
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
|
<|file_name|>vendor-list.js<|end_file_name|><|fim▁begin|>/*global angular*/
(function () {
angular
.module('simplAdmin.vendors')<|fim▁hole|>
function VendorListCtrl(vendorService, translateService) {
var vm = this;
vm.tableStateRef = {};
vm.vendors = [];
vm.translate = translateService;
vm.getVendors = function getVendors(tableState) {
vm.tableStateRef = tableState;
vm.isLoading = true;
vendorService.getVendors(tableState).then(function (result) {
vm.vendors = result.data.items;
tableState.pagination.numberOfPages = result.data.numberOfPages;
tableState.pagination.totalItemCount = result.data.totalRecord;
vm.isLoading = false;
});
};
vm.deleteVendor = function deleteVendor(vendor) {
bootbox.confirm('Are you sure you want to delete this vendor: ' + simplUtil.escapeHtml(vendor.name), function (result) {
if (result) {
vendorService.deleteVendor(vendor)
.then(function (result) {
vm.getVendors(vm.tableStateRef);
toastr.success(vendor.name + ' has been deleted');
})
.catch(function (response) {
toastr.error(response.data.error);
});
}
});
};
}
})();<|fim▁end|>
|
.controller('VendorListCtrl', ['vendorService', 'translateService', VendorListCtrl]);
|
<|file_name|>OnPreparedListener.java<|end_file_name|><|fim▁begin|>package com.arges.sepan.argmusicplayer.Callbacks;<|fim▁hole|>import com.arges.sepan.argmusicplayer.Models.ArgAudio;
//Interfaces
public interface OnPreparedListener {
void onPrepared(ArgAudio audio, int duration);
}<|fim▁end|>
| |
<|file_name|>day10.rs<|end_file_name|><|fim▁begin|>use itertools::Itertools;
fn day10(input: &str) -> (usize, usize) {<|fim▁hole|> let mut part1 = 0;
let mut part2 = 0;
loop {
// worst case size is double the digits
let mut buf = String::with_capacity(next.len() * 2);
for (number, list) in next.chars().group_by(|&x| x) {
buf.push_str(&list.len().to_string());
buf.push(number);
}
next = buf;
if i == 40 {
part1 = next.len();
}
if i == 50 {
part2 = next.len();
}
i += 1;
if i == 51 {
break;
}
}
return (part1, part2);
}
pub fn main() {
let (part1, part2) = day10("3113322113");
println!("Part 1: {}\nPart 2: {}", part1, part2);
}
#[test]
fn test() {
let (part1, part2) = day10("3113322113");
assert_eq!(part1, 329356);
assert_eq!(part2, 4666278);
}<|fim▁end|>
|
let mut i = 1;
let mut next = String::from(input);
|
<|file_name|>cmath.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![cfg(not(test))]
use libc::{c_float, c_double};
#[link_name = "m"]
extern {
pub fn acos(n: c_double) -> c_double;
pub fn asin(n: c_double) -> c_double;
pub fn atan(n: c_double) -> c_double;
pub fn atan2(a: c_double, b: c_double) -> c_double;
pub fn cbrt(n: c_double) -> c_double;
pub fn cbrtf(n: c_float) -> c_float;
pub fn cosh(n: c_double) -> c_double;
pub fn expm1(n: c_double) -> c_double;
pub fn expm1f(n: c_float) -> c_float;
pub fn fdim(a: c_double, b: c_double) -> c_double;
pub fn fdimf(a: c_float, b: c_float) -> c_float;
#[cfg_attr(target_env = "msvc", link_name = "_hypot")]
pub fn hypot(x: c_double, y: c_double) -> c_double;
#[cfg_attr(target_env = "msvc", link_name = "_hypotf")]
pub fn hypotf(x: c_float, y: c_float) -> c_float;
pub fn log1p(n: c_double) -> c_double;
pub fn log1pf(n: c_float) -> c_float;
pub fn sinh(n: c_double) -> c_double;
pub fn tan(n: c_double) -> c_double;
pub fn tanh(n: c_double) -> c_double;
}
pub use self::shims::*;
#[cfg(not(target_env = "msvc"))]
mod shims {<|fim▁hole|> pub fn acosf(n: c_float) -> c_float;
pub fn asinf(n: c_float) -> c_float;
pub fn atan2f(a: c_float, b: c_float) -> c_float;
pub fn atanf(n: c_float) -> c_float;
pub fn coshf(n: c_float) -> c_float;
pub fn sinhf(n: c_float) -> c_float;
pub fn tanf(n: c_float) -> c_float;
pub fn tanhf(n: c_float) -> c_float;
}
}
// On MSVC these functions aren't defined, so we just define shims which promote
// everything fo f64, perform the calculation, and then demote back to f32.
// While not precisely correct should be "correct enough" for now.
#[cfg(target_env = "msvc")]
mod shims {
use libc::c_float;
#[inline]
pub unsafe fn acosf(n: c_float) -> c_float {
f64::acos(n as f64) as c_float
}
#[inline]
pub unsafe fn asinf(n: c_float) -> c_float {
f64::asin(n as f64) as c_float
}
#[inline]
pub unsafe fn atan2f(n: c_float, b: c_float) -> c_float {
f64::atan2(n as f64, b as f64) as c_float
}
#[inline]
pub unsafe fn atanf(n: c_float) -> c_float {
f64::atan(n as f64) as c_float
}
#[inline]
pub unsafe fn coshf(n: c_float) -> c_float {
f64::cosh(n as f64) as c_float
}
#[inline]
pub unsafe fn sinhf(n: c_float) -> c_float {
f64::sinh(n as f64) as c_float
}
#[inline]
pub unsafe fn tanf(n: c_float) -> c_float {
f64::tan(n as f64) as c_float
}
#[inline]
pub unsafe fn tanhf(n: c_float) -> c_float {
f64::tanh(n as f64) as c_float
}
}<|fim▁end|>
|
use libc::c_float;
extern {
|
<|file_name|>filter_library.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from future import standard_library
standard_library.install_aliases()
from builtins import object
import pandas as pd
import os
import yaml
import astropy.io.votable as votable
import astropy.units as u
import urllib.request, urllib.error, urllib.parse
import xml.etree.ElementTree as ET
import re
from collections import defaultdict
import numpy as np
import warnings
import speclite.filters as spec_filter
from threeML.io.configuration import get_user_data_path
from threeML.io.file_utils import (
if_directory_not_existing_then_make,
file_existing_and_readable,
)
from threeML.io.network import internet_connection_is_active
from threeML.io.package_data import get_path_of_data_dir
def get_speclite_filter_path():
return os.path.join(get_path_of_data_dir(), "optical_filters")
def to_valid_python_name(name):
new_name = name.replace("-", "_")
try:
int(new_name[0])
new_name = "f_%s" % new_name
return new_name
except (ValueError):
return new_name
class ObservatoryNode(object):
def __init__(self, sub_dict):
self._sub_dict = sub_dict
def __repr__(self):
return yaml.dump(self._sub_dict, default_flow_style=False)
class FilterLibrary(object):
def __init__(self, library_file):
"""
holds all the observatories/instruments/filters
:param library_file:
"""
# get the filter file
with open(library_file) as f:
self._library = yaml.load(f, Loader=yaml.SafeLoader)
self._instruments = []
# create attributes which are lib.observatory.instrument
# and the instrument attributes are speclite FilterResponse objects
with warnings.catch_warnings():
warnings.simplefilter("ignore")
print("Loading optical filters")
for observatory, value in self._library.items():
# create a node for the observatory
this_node = ObservatoryNode(value)
# attach it to the object
setattr(self, observatory, this_node)
# now get the instruments
for instrument, value2 in value.items():
# update the instruments
self._instruments.append(instrument)
# create the filter response via speclite
filter_path = os.path.join(
get_speclite_filter_path(), observatory, instrument
)
filters_to_load = [
"%s-%s.ecsv" % (filter_path, filter) for filter in value2
]
this_filter = spec_filter.load_filters(*filters_to_load)
# attach the filters to the observatory
setattr(this_node, instrument, this_filter)
self._instruments.sort()
@property
def instruments(self):
return self._instruments
def __repr__(self):
return yaml.dump(self._library, default_flow_style=False)
def add_svo_filter_to_speclite(observatory, instrument, ffilter, update=False):
"""
download an SVO filter file and then add it to the user library
:param observatory:
:param instrument:
:param ffilter:
:return:
"""
# make a directory for this observatory and instrument
filter_path = os.path.join(
get_speclite_filter_path(), to_valid_python_name(observatory)
)
if_directory_not_existing_then_make(filter_path)
# grab the filter file from SVO
# reconvert 2MASS so we can grab it
if observatory == "TwoMASS":
observatory = "2MASS"
if (
not file_existing_and_readable(
os.path.join(
filter_path,
"%s-%s.ecsv"
% (to_valid_python_name(instrument), to_valid_python_name(ffilter)),
)
)
or update
):
url_response = urllib.request.urlopen(
"http://svo2.cab.inta-csic.es/svo/theory/fps/fps.php?PhotCalID=%s/%s.%s/AB"
% (observatory, instrument, ffilter)
)
# now parse it
data = votable.parse_single_table(url_response).to_table()
# save the waveunit
waveunit = data["Wavelength"].unit
# the filter files are masked arrays, which do not go to zero on
# the boundaries. This confuses speclite and will throw an error.
# so we add a zero on the boundaries
if data["Transmission"][0] != 0.0:
w1 = data["Wavelength"][0] * 0.9
data.insert_row(0, [w1, 0])
if data["Transmission"][-1] != 0.0:
w2 = data["Wavelength"][-1] * 1.1
data.add_row([w2, 0])
# filter any negative values
idx = data["Transmission"] < 0
data["Transmission"][idx] = 0
# build the transmission. # we will force all the wavelengths
# to Angstroms because sometimes AA is misunderstood
try:
transmission = spec_filter.FilterResponse(
wavelength=data["Wavelength"] * waveunit.to("Angstrom") * u.Angstrom,
response=data["Transmission"],
meta=dict(
group_name=to_valid_python_name(instrument),
band_name=to_valid_python_name(ffilter),
),
)
# save the filter
transmission.save(filter_path)
success = True
except (ValueError):
success = False
print(
"%s:%s:%s has an invalid wave table, SKIPPING"
% (observatory, instrument, ffilter)
)
return success
else:
return True
def download_SVO_filters(filter_dict, update=False):
"""
download the filters sets from the SVO repository
:return:
"""
# to group the observatory / instrument / filters
search_name = re.compile("^(.*)\/(.*)\.(.*)$")
# load the SVO meta XML file
svo_url = "http://svo2.cab.inta-csic.es/svo/theory/fps/fps.php?"
url_response = urllib.request.urlopen(svo_url)
# the normal VO parser cannot read the XML table
# so we manually do it to obtain all the instrument names
tree = ET.parse(url_response)
observatories = []
for elem in tree.iter(tag="PARAM"):
if elem.attrib["name"] == "INPUT:Facility":
for child in list(elem):
if child.tag == "VALUES":
for child2 in list(child):
val = child2.attrib["value"]
if val != "":
observatories.append(val)
# now we are going to build a multi-layer dictionary
# observatory:instrument:filter
for obs in observatories:
# fix 2MASS to a valid name
if obs == "2MASS":
<|fim▁hole|> )
try:
# parse the VO table
v = votable.parse(url_response)
instrument_dict = defaultdict(list)
# get the filter names for this observatory
instruments = v.get_first_table().to_table()["filterID"].tolist()
print("Downloading %s filters" % (obs))
for x in instruments:
_, instrument, subfilter = search_name.match(x).groups()
success = add_svo_filter_to_speclite(obs, instrument, subfilter, update)
if success:
instrument_dict[to_valid_python_name(instrument)].append(
to_valid_python_name(subfilter)
)
# attach this to the big dictionary
filter_dict[to_valid_python_name(obs)] = dict(instrument_dict)
except (IndexError):
pass
return filter_dict
def download_grond(filter_dict):
save_path = os.path.join(get_speclite_filter_path(), "ESO")
if_directory_not_existing_then_make(save_path)
grond_filter_url = "http://www.mpe.mpg.de/~jcg/GROND/GROND_filtercurves.txt"
url_response = urllib.request.urlopen(grond_filter_url)
grond_table = pd.read_table(url_response)
wave = grond_table["A"].as_matrix()
bands = ["g", "r", "i", "z", "H", "J", "K"]
for band in bands:
curve = np.array(grond_table["%sBand" % band])
curve[curve < 0] = 0
curve[0] = 0
curve[-1] = 0
grond_spec = spec_filter.FilterResponse(
wavelength=wave * u.nm,
response=curve,
meta=dict(group_name="GROND", band_name=band),
)
grond_spec.save(directory_name=save_path)
filter_dict["ESO"] = {"GROND": bands}
return filter_dict
def build_filter_library():
if not file_existing_and_readable(
os.path.join(get_speclite_filter_path(), "filter_lib.yml")
):
print("Downloading optical filters. This will take a while.\n")
if internet_connection_is_active():
filter_dict = {}
filter_dict = download_SVO_filters(filter_dict)
filter_dict = download_grond(filter_dict)
# Ok, finally, we want to keep track of the SVO filters we have
# so we will save this to a YAML file for future reference
with open(
os.path.join(get_speclite_filter_path(), "filter_lib.yml"), "w"
) as f:
yaml.safe_dump(filter_dict, f, default_flow_style=False)
return True
else:
print(
"You do not have the 3ML filter library and you do not have an active internet connection."
)
print("Please connect to the internet to use the 3ML filter library.")
print("pyspeclite filter library is still available.")
return False
else:
return True
with warnings.catch_warnings():
warnings.simplefilter("ignore")
lib_exists = build_filter_library()
if lib_exists:
threeML_filter_library = FilterLibrary(
os.path.join(get_speclite_filter_path(), "filter_lib.yml")
)
__all__ = ["threeML_filter_library"]
else:
raise RuntimeError("The threeML filter library does not exist!")<|fim▁end|>
|
obs = "TwoMASS"
url_response = urllib.request.urlopen(
"http://svo2.cab.inta-csic.es/svo/theory/fps/fps.php?Facility=%s" % obs
|
<|file_name|>regress-100199.js<|end_file_name|><|fim▁begin|>/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is mozilla.org code.
*
* The Initial Developer of the Original Code is
* Netscape Communications Corporation.
* Portions created by the Initial Developer are Copyright (C) 1998
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* [email protected]
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
/*
* Date: 17 September 2001
*
* SUMMARY: Regression test for Bugzilla bug 100199
* See http://bugzilla.mozilla.org/show_bug.cgi?id=100199
*
* The empty character class [] is a valid RegExp construct: the condition
* that a given character belong to a set containing no characters. As such,
* it can never be met and is always FALSE. Similarly, [^] is a condition
* that matches any given character and is always TRUE.
*
* Neither one of these conditions should cause syntax errors in a RegExp.
*/
//-----------------------------------------------------------------------------
var gTestfile = 'regress-100199.js';
var i = 0;
var BUGNUMBER = 100199;
var summary = '[], [^] are valid RegExp conditions. Should not cause errors -';
var status = '';
var statusmessages = new Array();
var pattern = '';
var patterns = new Array();
var string = '';
var strings = new Array();
var actualmatch = '';
var actualmatches = new Array();
var expectedmatch = '';
var expectedmatches = new Array();
pattern = /[]/;
string = 'abc';
status = inSection(1);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '';
status = inSection(2);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '[';
status = inSection(3);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '/';
status = inSection(4);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '[';
status = inSection(5);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = ']';
status = inSection(6);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '[]';
status = inSection(7);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '[ ]';
status = inSection(8);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '][';
status = inSection(9);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
pattern = /a[]/;
string = 'abc';
status = inSection(10);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '';
status = inSection(11);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = 'a[';
status = inSection(12);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = 'a[]';
status = inSection(13);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '[';
status = inSection(14);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = ']';
status = inSection(15);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '[]';
status = inSection(16);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '[ ]';
status = inSection(17);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
string = '][';
status = inSection(18);
actualmatch = string.match(pattern);
expectedmatch = null;
addThis();
pattern = /[^]/;
string = 'abc';
status = inSection(19);
actualmatch = string.match(pattern);
expectedmatch = Array('a');
addThis();
string = '';
status = inSection(20);
actualmatch = string.match(pattern);
expectedmatch = null; //there are no characters to test against the condition
addThis();
string = '\/';
status = inSection(21);
actualmatch = string.match(pattern);
expectedmatch = Array('/');
addThis();
string = '\[';
status = inSection(22);
actualmatch = string.match(pattern);
expectedmatch = Array('[');
addThis();
string = '[';
status = inSection(23);
actualmatch = string.match(pattern);
expectedmatch = Array('[');
addThis();
string = ']';
status = inSection(24);
actualmatch = string.match(pattern);
expectedmatch = Array(']');
addThis();
string = '[]';
status = inSection(25);
actualmatch = string.match(pattern);
expectedmatch = Array('[');
addThis();<|fim▁hole|>status = inSection(26);
actualmatch = string.match(pattern);
expectedmatch = Array('[');
addThis();
string = '][';
status = inSection(27);
actualmatch = string.match(pattern);
expectedmatch = Array(']');
addThis();
pattern = /a[^]/;
string = 'abc';
status = inSection(28);
actualmatch = string.match(pattern);
expectedmatch = Array('ab');
addThis();
string = '';
status = inSection(29);
actualmatch = string.match(pattern);
expectedmatch = null; //there are no characters to test against the condition
addThis();
string = 'a[';
status = inSection(30);
actualmatch = string.match(pattern);
expectedmatch = Array('a[');
addThis();
string = 'a]';
status = inSection(31);
actualmatch = string.match(pattern);
expectedmatch = Array('a]');
addThis();
string = 'a[]';
status = inSection(32);
actualmatch = string.match(pattern);
expectedmatch = Array('a[');
addThis();
string = 'a[ ]';
status = inSection(33);
actualmatch = string.match(pattern);
expectedmatch = Array('a[');
addThis();
string = 'a][';
status = inSection(34);
actualmatch = string.match(pattern);
expectedmatch = Array('a]');
addThis();
//-----------------------------------------------------------------------------
test();
//-----------------------------------------------------------------------------
function addThis()
{
statusmessages[i] = status;
patterns[i] = pattern;
strings[i] = string;
actualmatches[i] = actualmatch;
expectedmatches[i] = expectedmatch;
i++;
}
function test()
{
enterFunc ('test');
printBugNumber(BUGNUMBER);
printStatus (summary);
testRegExp(statusmessages, patterns, strings, actualmatches, expectedmatches);
exitFunc ('test');
}<|fim▁end|>
|
string = '[ ]';
|
<|file_name|>wid_anim.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2011-2020 [email protected]
*
* See the LICENSE file for license.
*/
#include "my_main.h"
#include "my_thing_tile.h"
#include "my_time_util.h"
#include "my_wid.h"
void wid_animate (widp w)<|fim▁hole|> }
tpp tp = wid_get_thing_template(w);
if (!tp) {
return;
}
if (!tp_is_animated(tp)) {
return;
}
thing_tilep tile;
tile = w->current_tile;
if (tile) {
/*
* If within the animate time of this frame, keep with it.
*/
if (w->timestamp_change_to_next_frame > time_get_time_ms()) {
return;
}
/*
* Stop the animation here?
*/
if (thing_tile_is_end_of_anim(tile)) {
return;
}
}
auto tiles = tp_get_tiles(tp);
/*
* Get the next tile.
*/
if (tile) {
tile = thing_tile_next(tiles, tile);
}
/*
* Find a tile that matches the things current mode.
*/
uint32_t size = tiles.size();
uint32_t tries = 0;
while (tries < size) {
tries++;
/*
* Cater for wraps.
*/
if (!tile) {
tile = thing_tile_first(tiles);
}
{
if (thing_tile_is_dead(tile)) {
tile = thing_tile_next(tiles, tile);
continue;
}
if (thing_tile_is_open(tile)) {
tile = thing_tile_next(tiles, tile);
continue;
}
}
break;
}
if (!tile) {
return;
}
/*
* Use this tile!
*/
w->current_tile = tile;
wid_set_tilename(w, thing_tile_name(tile));
/*
* When does this tile expire ?
*/
uint32_t delay = thing_tile_delay_ms(tile);
if (delay) {
delay = myrand() % delay;
}
w->timestamp_change_to_next_frame = time_get_time_ms() + delay;
}<|fim▁end|>
|
{_
if (!w->animate) {
return;
|
<|file_name|>make_loaddata.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
make_loaddata.py
Convert ken_all.csv to loaddata
"""
import argparse
import csv
def merge_separated_line(args):
"""
yields line
yields a line.
if two (or more) lines has same postalcode,
merge them.
"""
def is_dup(line, buff):
""" lines is duplicated or not """
# same postalcode
if line[2] != buff[2]:
return False
# include choume and not
if line[11] != buff[11]:
return False
# line contains touten(kana)
if line[5].count(u'、') != 0:
return True
if buff[5].count(u'、') != 0:
return True
# line contains touten(kanji)
if line[8].count(u'、') != 0:
return True
if buff[8].count(u'、') != 0:
return True
return False
def merge(line, buff):
""" merge address of two lines """
new_buff = []
idx = 0
for element in line:<|fim▁hole|> new_buff.append(u''.join([buff[idx], element]))
else:
new_buff.append(buff[idx])
idx += 1
return new_buff
line_buffer = []
ken_all = csv.reader(open(args.source))
for line in ken_all:
unicode_line = [unicode(s, 'utf8') for s in line]
if not(line_buffer):
line_buffer = unicode_line
continue
if is_dup(unicode_line, line_buffer):
line_buffer = merge(unicode_line, line_buffer)
else:
yield line_buffer
line_buffer = unicode_line
yield line_buffer
def parse_args():
# parse aruguments
Parser = argparse.ArgumentParser(description='Make loaddata of postalcode.')
Parser.add_argument('source', help='input file of converting')
Parser.add_argument('area', help='data file for area-code')
Parser.add_argument('net', help='data file of net-code')
return Parser.parse_args()
def main(args):
# converting main
Areadata = csv.writer(open(args.area, 'w'),
delimiter=',',
quoting=csv.QUOTE_NONE)
Netdata = csv.writer(open(args.net, 'w'),
delimiter=',',
quoting=csv.QUOTE_NONE)
for line in merge_separated_line(args):
zipcode = line[2]
if zipcode[5:7] != '00':
Areadata.writerow([s.encode('utf8') for s in line])
else:
Netdata.writerow([s.encode('utf8') for s in line])
if __name__ == '__main__':
args = parse_args()
main(args)<|fim▁end|>
|
if element[:len(buff[idx])] != buff[idx]:
|
<|file_name|>draw.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
import { PersonService } from '../../service/person.service';
@Component({<|fim▁hole|> providers: [
PersonService
]
})
export class DrawComponent implements OnInit {
public mrecord;
public itemsPerPage: number = 12;
public totalRecords: number = 10;
public currentPage: number = 1;
public offset: number = 0;
public end: number = 0;
constructor(
public getPerson: PersonService
) { }
ngOnInit() {
this.getCommission(this.currentPage, this.itemsPerPage )
}
getCommission(page, pagesize) {
this.getPerson.myIncomeDetail(page, pagesize, 2).subscribe(data => {
this.mrecord = data.d;
this.totalRecords = data.d.total;
})
}
pageChanged(event) {
this.getCommission(event.page + 1 ,this.itemsPerPage)
}
}<|fim▁end|>
|
selector: 'app-draw',
templateUrl: './draw.component.html',
styleUrls: ['./draw.component.scss'],
|
<|file_name|>router_5_5_4.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE<|fim▁hole|># ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
from autothreadharness.harness_case import HarnessCase
import unittest
class Router_5_5_4(HarnessCase):
role = HarnessCase.ROLE_ROUTER
case = '5 5 4'
golden_devices_required = 5
def on_dialog(self, dialog, title):
pass
if __name__ == '__main__':
unittest.main()<|fim▁end|>
| |
<|file_name|>tag-align-shape.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[derive(Debug)]
enum a_tag {
a_tag_var(u64)
}
#[derive(Debug)]
struct t_rec {
c8: u8,
t: a_tag
}
pub fn main() {
let x = t_rec {c8: 22u8, t: a_tag::a_tag_var(44u64)};
let y = format!("{:?}", x);
println!("y = {:?}", y);<|fim▁hole|><|fim▁end|>
|
assert_eq!(y, "t_rec { c8: 22, t: a_tag_var(44) }".to_string());
}
|
<|file_name|>hash_location_strategy_spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {describe, it, iit, ddescribe, expect, inject, beforeEach, beforeEachProviders,} from '@angular/core/testing/testing_internal';
import {Injector, provide} from '@angular/core';
import {PlatformLocation, APP_BASE_HREF, HashLocationStrategy} from '@angular/common';
import {SpyPlatformLocation} from '../spies';
export function main() {
describe('HashLocationStrategy', () => {
var platformLocation: SpyPlatformLocation;
var locationStrategy: HashLocationStrategy;
beforeEachProviders(
() => [HashLocationStrategy, {provide: PlatformLocation, useClass: SpyPlatformLocation}]);
describe('without APP_BASE_HREF', () => {
beforeEach(inject(
[PlatformLocation, HashLocationStrategy],
(pl: any /** TODO #9100 */, ls: any /** TODO #9100 */) => {
platformLocation = pl;
locationStrategy = ls;
platformLocation.spy('pushState');
platformLocation.pathname = '';
}));
it('should prepend urls with a hash for non-empty URLs', () => {
expect(locationStrategy.prepareExternalUrl('foo')).toEqual('#foo');
locationStrategy.pushState(null, 'Title', 'foo', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#foo');
});
it('should prepend urls with a hash for URLs with query params', () => {
expect(locationStrategy.prepareExternalUrl('foo?bar')).toEqual('#foo?bar');
locationStrategy.pushState(null, 'Title', 'foo', 'bar=baz');
expect(platformLocation.spy('pushState'))
.toHaveBeenCalledWith(null, 'Title', '#foo?bar=baz');
});
it('should prepend urls with a hash for URLs with just query params', () => {
expect(locationStrategy.prepareExternalUrl('?bar')).toEqual('#?bar');
locationStrategy.pushState(null, 'Title', '', 'bar=baz');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#?bar=baz');
});
it('should not prepend a hash to external urls for an empty internal URL', () => {
expect(locationStrategy.prepareExternalUrl('')).toEqual('');
locationStrategy.pushState(null, 'Title', '', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '');
});
});
describe('with APP_BASE_HREF with neither leading nor trailing slash', () => {
beforeEachProviders(() => [{provide: APP_BASE_HREF, useValue: 'app'}]);
beforeEach(inject(
[PlatformLocation, HashLocationStrategy],
(pl: any /** TODO #9100 */, ls: any /** TODO #9100 */) => {
platformLocation = pl;
locationStrategy = ls;
platformLocation.spy('pushState');
platformLocation.pathname = '';
}));
it('should prepend urls with a hash for non-empty URLs', () => {
expect(locationStrategy.prepareExternalUrl('foo')).toEqual('#app/foo');
locationStrategy.pushState(null, 'Title', 'foo', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#app/foo');
});
it('should prepend urls with a hash for URLs with query params', () => {
expect(locationStrategy.prepareExternalUrl('foo?bar')).toEqual('#app/foo?bar');
locationStrategy.pushState(null, 'Title', 'foo', 'bar=baz');
expect(platformLocation.spy('pushState'))
.toHaveBeenCalledWith(null, 'Title', '#app/foo?bar=baz');
});
it('should not prepend a hash to external urls for an empty internal URL', () => {
expect(locationStrategy.prepareExternalUrl('')).toEqual('#app');
locationStrategy.pushState(null, 'Title', '', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#app');
});
});
describe('with APP_BASE_HREF with leading slash', () => {
beforeEachProviders(() => [{provide: APP_BASE_HREF, useValue: '/app'}]);
beforeEach(inject(
[PlatformLocation, HashLocationStrategy],
(pl: any /** TODO #9100 */, ls: any /** TODO #9100 */) => {
platformLocation = pl;
locationStrategy = ls;
platformLocation.spy('pushState');
platformLocation.pathname = '';
}));
it('should prepend urls with a hash for non-empty URLs', () => {
expect(locationStrategy.prepareExternalUrl('foo')).toEqual('#/app/foo');
locationStrategy.pushState(null, 'Title', 'foo', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#/app/foo');
});
it('should prepend urls with a hash for URLs with query params', () => {
expect(locationStrategy.prepareExternalUrl('foo?bar')).toEqual('#/app/foo?bar');
locationStrategy.pushState(null, 'Title', 'foo', 'bar=baz');
expect(platformLocation.spy('pushState'))
.toHaveBeenCalledWith(null, 'Title', '#/app/foo?bar=baz');
});
it('should not prepend a hash to external urls for an empty internal URL', () => {
expect(locationStrategy.prepareExternalUrl('')).toEqual('#/app');
locationStrategy.pushState(null, 'Title', '', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#/app');
});
});
describe('with APP_BASE_HREF with both leading and trailing slash', () => {
beforeEachProviders(() => [{provide: APP_BASE_HREF, useValue: '/app/'}]);
beforeEach(inject(
[PlatformLocation, HashLocationStrategy],
(pl: any /** TODO #9100 */, ls: any /** TODO #9100 */) => {
platformLocation = pl;
locationStrategy = ls;
platformLocation.spy('pushState');
platformLocation.pathname = '';
}));
it('should prepend urls with a hash for non-empty URLs', () => {
expect(locationStrategy.prepareExternalUrl('foo')).toEqual('#/app/foo');
locationStrategy.pushState(null, 'Title', 'foo', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#/app/foo');
});
it('should prepend urls with a hash for URLs with query params', () => {
expect(locationStrategy.prepareExternalUrl('foo?bar')).toEqual('#/app/foo?bar');
locationStrategy.pushState(null, 'Title', 'foo', 'bar=baz');
expect(platformLocation.spy('pushState'))
.toHaveBeenCalledWith(null, 'Title', '#/app/foo?bar=baz');
});
it('should not prepend a hash to external urls for an empty internal URL', () => {
expect(locationStrategy.prepareExternalUrl('')).toEqual('#/app/');
locationStrategy.pushState(null, 'Title', '', '');
expect(platformLocation.spy('pushState')).toHaveBeenCalledWith(null, 'Title', '#/app/');
});
});
describe('hashLocationStrategy bugs', () => {
beforeEach(inject(
[PlatformLocation, HashLocationStrategy],
(pl: any /** TODO #9100 */, ls: any /** TODO #9100 */) => {
platformLocation = pl;
locationStrategy = ls;
platformLocation.spy('pushState');
platformLocation.pathname = '';
}));
it('should not include platform search', () => {
platformLocation.search = '?donotinclude';
expect(locationStrategy.path()).toEqual('');
});<|fim▁hole|> expect(locationStrategy.path()).toEqual('hashPath');
});
});
});
}<|fim▁end|>
|
it('should not include platform search even with hash', () => {
platformLocation.hash = '#hashPath';
platformLocation.search = '?donotinclude';
|
<|file_name|>IntermediateRepository.java<|end_file_name|><|fim▁begin|>package io.quarkus.it.spring.data.jpa;
import java.io.Serializable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.repository.NoRepositoryBean;
@NoRepositoryBean
public interface IntermediateRepository<T, ID extends Serializable> extends JpaRepository<T, ID> {
default public void doNothing() {
}
default public T findMandatoryById(ID id) {
return findById(id).orElseThrow(() -> new IllegalStateException("not found: " + id));
}
<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>install.js<|end_file_name|><|fim▁begin|>// increment the number below each time you push a new model version
// (forces rebuild on phone)
var current_migration_version = 1;
// create the tables if required<|fim▁hole|><|fim▁end|>
|
joli.models.migrate(current_migration_version);
joli.models.initialize();
|
<|file_name|>339NestedListWeightSum.py<|end_file_name|><|fim▁begin|># """
# This is the interface that allows for creating nested lists.
# You should not implement it, or speculate about its implementation
# """
#class NestedInteger:
# def __init__(self, value=None):
# """<|fim▁hole|># If value is not specified, initializes an empty list.
# Otherwise initializes a single integer equal to value.
# """
#
# def isInteger(self):
# """
# @return True if this NestedInteger holds a single integer, rather than a nested list.
# :rtype bool
# """
#
# def add(self, elem):
# """
# Set this NestedInteger to hold a nested list and adds a nested integer elem to it.
# :rtype void
# """
#
# def setInteger(self, value):
# """
# Set this NestedInteger to hold a single integer equal to value.
# :rtype void
# """
#
# def getInteger(self):
# """
# @return the single integer that this NestedInteger holds, if it holds a single integer
# Return None if this NestedInteger holds a nested list
# :rtype int
# """
#
# def getList(self):
# """
# @return the nested list that this NestedInteger holds, if it holds a nested list
# Return None if this NestedInteger holds a single integer
# :rtype List[NestedInteger]
# """
class Solution:
def depthSum(self, nestedList, d = 1):
"""
:type nestedList: List[NestedInteger]
:rtype: int
"""
if not nestedList: return 0
ans = 0
for nested_integer in nestedList:
if nested_integer.isInteger():
ans += nested_integer.getInteger() * d
else:
ans += self.depthSum(nested_integer.getList(), d + 1)
return ans<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>VERSION = (0, 0, 1, 'dev')
# Dynamically calculate the version based on VERSION tuple
if len(VERSION) > 2 and VERSION[2] is not None:
if isinstance(VERSION[2], int):
str_version = "%s.%s.%s" % VERSION[:3]
else:<|fim▁hole|>__version__ = str_version<|fim▁end|>
|
str_version = "%s.%s_%s" % VERSION[:3]
else:
str_version = "%s.%s" % VERSION[:2]
|
<|file_name|>tips_goroutine.go<|end_file_name|><|fim▁begin|>/*
goroutine
*/
package tips_dir
import (
"fmt"
"github.com/tlorens/go-ibgetkey"
"io/ioutil"
"math"
"os"
"runtime/pprof"
"strconv"
"sync"
"time"
)
//---------------------------------------------------
// goroutineを生成する
//---------------------------------------------------
/*
goroutineはスレッドのようなものです。
joinはないので、終了を待つ場合はchannelを使います。
```
<-quit
```
とすると、quitというchannelに値が書き込まれるまで待ちます。
私の環境では、
```
Create goroutine
Waiting for the goroutine to complete
Start goroutine
End goroutine
Test compleated
```
という順番で表示されました。WaitingとStartの順番が直感と合いませんが、
goroutineの起動にオーバヘッドがあるのでしょうかね。
*/
// import "time"
func goroutine_Create() {
fmt.Println("Create goroutine")
quit := make(chan bool)
go func() {
fmt.Println("Start goroutine")
time.Sleep(3 * time.Second)
fmt.Println("End goroutine")
quit <- true
}()
fmt.Println("Waiting for the goroutine to complete")
<-quit
fmt.Println("Test compleated")
}
//---------------------------------------------------
// goroutineに引数を渡す
//---------------------------------------------------
/*
goroutineは外部宣言したfuncでも起動できます。
channelが必要な場合は引数にchannelを渡します。
もちろん、channel以外の引数も渡すことができます。
```
Start goroutine - Apple 10
```
と表示されます。
*/
//import "time"
func goroutine_Argument() {
fmt.Println("Test start")
fmt.Println("Create goroutine")
quit := make(chan bool)
go appleGoroutine("Apple", 10, quit)
fmt.Println("Waiting for the goroutine to complete")
<-quit
fmt.Println("Test compleated")
}
func appleGoroutine(fruit string, a int, quit chan bool) {
fmt.Printf("Start goroutine - %s %d\n", fruit, a)
time.Sleep(3 * time.Second)
fmt.Println("End goroutine")
quit <- true
}
//---------------------------------------------------
// goroutineの終了を待つ
//---------------------------------------------------
/*
終了待ちはchannelの受信で行います。[goroutineを生成する](#goroutine_Create)
を参照してください。
*/
//---------------------------------------------------
// goroutineの実行を終了させる
//---------------------------------------------------
/*
スレッドと違ってgoroutineは外部から終了させることは出来ません。
チャネルをつかって無限ループを抜けるようにするのが一案です。
無限ループを抜ける際に、breakにラベルを与えてどの階層まで抜けるかを指示できるようです。
下記のプログラムはGorutine内で"."を100msec置きに50回表示して終了しますが、
"."を押すとGorutineを途中で終了します。終了の意思を伝えるためにkillというチャネルで
通信を行っています。
なお、一文字入力を受け付けるのに
[go-ibgetkey](https://github.com/tlorens/go-ibgetkey)という
ライブラリを使いました。
*/
// import "github.com/tlorens/go-ibgetkey"
// import "time"
func goroutine_Kill() {
kill := make(chan bool)
finished := make(chan bool)
go killableGoroutine(kill, finished)
targetkey := "."
t := int(targetkey[0])
loop:
for {
input := keyboard.ReadKey()
select {
case <-finished:
break loop
default:
if input == t {
kill <- true
break loop
}
}
}
}
func killableGoroutine(kill, finished chan bool) {
fmt.Println("Started goroutine. Push \".\" to kill me.")
for i := 0; i < 50; i++ {
select {
case <-kill:
fmt.Println()
fmt.Println("Killed")
finished <- true
return
default:
fmt.Print(".")
time.Sleep(100 * time.Millisecond)
}
}
fmt.Println()
fmt.Println("Finished..push any key to abort.")
finished <- true
return
}
//---------------------------------------------------
// goroutineを停止する
//---------------------------------------------------
/*
[goroutineの実行を終了させる](#goroutine_)と同様にchannelで通信を行います。
下記のプログラムはGorutine内で"."を100msec置きに50回表示して終了しますが、
"."を入力するとGorutineを途中で停止・再開します。
*/
// import "github.com/tlorens/go-ibgetkey"
// import "time"
func goroutine_Stop() {
com := make(chan string)
finished := make(chan bool)
go stoppableGoroutine(com, finished)
targetkey := "."
t := int(targetkey[0])
running := true
loop:
for {
input := keyboard.ReadKey()
select {
case <-finished:
break loop
default:
if input == t {
if running == true {
com <- "stop"
running = false
} else {
com <- "start"
running = true
}
}
}
}
}
func stoppableGoroutine(command chan string, finished chan bool) {
fmt.Println("Started goroutine. Push \".\" to stop/start me.")
running := true
i := 0
for i < 50 {
select {
case com := <-command:
if com == "stop" {
running = false
} else {
running = true
}
default:
}
if running == true {
fmt.Print(".")
time.Sleep(100 * time.Millisecond)
i++
}
}
fmt.Println()
fmt.Println("Finished..push any key to abort.")
finished <- true
return
}
//---------------------------------------------------
// 実行中のgoroutine一覧を取得する
//---------------------------------------------------
/*
Ruby版ではリストを出してそれぞれをkillするというプログラムでしたが、
そもそもkillを簡単に実装する方法がないので、リスト表示だけにとどめます。
goroutineをリスト化する関数自体やmain関数自身もgoroutineなので
多数のgoroutineが表示されます。
*/
//import "os"
//import "runtime/pprof"
//import "time"
func goroutine_ListGoroutines() {
go goroutine1()
go goroutine2()
time.Sleep(1 * time.Second) // goroutineの起動のオーバヘッド待ち
pprof.Lookup("goroutine").WriteTo(os.Stdout, 2) // 2はデバッグレベル。goroutineだけリストする、の意味。
}
func goroutine1() {
time.Sleep(5 * time.Second)
fmt.Println("Goroutine1 finished")
}
func goroutine2() {
time.Sleep(5 * time.Second)
fmt.Println("Goroutine2 finished")
}
//---------------------------------------------------
// goroutine間で通信する
//---------------------------------------------------
/*
Ruby版ではQueueを使っているところを、channelに変更します。
makeでchannelの深さを指定しますが、深さ以上に値を突っ込もうとすると
その時点でロックします。[こちら](http://rosylilly.hatenablog.com/entry/2013/09/26/124801)をご参考。
下記プログラムはキー入力を受付け、平方根を返します。-1を入力すると終了します。
*/
// import "math"
func goroutine_Com() {
queue := make(chan int, 3) // 3はキューの深さ
go sqrtGoroutine(queue)
<|fim▁hole|> line := 0
loop:
for {
fmt.Scanln(&line)
if line == -1 {
break loop
} else {
queue <- line
}
}
}
func sqrtGoroutine(queue chan int) {
for {
n := <-queue
if int(n) >= 0 {
val := math.Sqrt(float64(n))
fmt.Printf("Square(%d) = %f\n", int(n), val)
} else {
fmt.Println("?")
}
}
}
//---------------------------------------------------
// goroutine間の競合を回避する(Mutex)
//---------------------------------------------------
/*
syncパッケージにMutexがあります。複数のgoroutine終了を待つにはWaitGroupを使います。
[こちら](http://mattn.kaoriya.net/software/lang/go/20140625223125.htm)がよくまとまっています。
ファイルcount.txtを作って0を書き込み、gotoutine内ではファイルの数値を読んでインクリメントして書き戻します。
終了後は、goroutineが10個走るのでcount.txtの値は10になっています。
*/
//import "sync"
//import "io/ioutil"
func goroutine_Mutex() {
wg := new(sync.WaitGroup)
m := new(sync.Mutex)
write(0)
for i := 0; i < 10; i++ {
wg.Add(1)
go countupGoroutine(wg, m)
}
wg.Wait()
}
func countupGoroutine(wg *sync.WaitGroup, m *sync.Mutex) {
m.Lock()
defer m.Unlock()
counter := read() + 1
write(counter)
wg.Done()
}
func write(i int) {
s := strconv.Itoa(i)
ioutil.WriteFile("count.txt", []byte(s), os.ModePerm)
}
func read() int {
t, _ := ioutil.ReadFile("count.txt")
i, _ := strconv.Atoi(string(t))
return i
}
//---------------------------------------------------
// goroutine
//---------------------------------------------------
func Tips_goroutine() {
goroutine_Create() // goroutineを生成する
goroutine_Argument() // goroutineに引数を渡す
// goroutineの終了を待つ
goroutine_Kill() // goroutineの実行を終了させる
goroutine_Stop() // goroutineを停止する
goroutine_ListGoroutines() // 実行中のgoroutine一覧を取得する
goroutine_Com() // goroutine間で通信する
goroutine_Mutex() // goroutine間の競合を回避する(Mutex)
}<|fim▁end|>
| |
<|file_name|>job_run_controller.test.js<|end_file_name|><|fim▁begin|>const $ = require('jquery');
const { BagItProfile } = require('../../bagit/bagit_profile');
const { Job } = require('../../core/job');
const { JobRunController } = require('./job_run_controller');
const { PackageOperation } = require('../../core/package_operation');
const path = require('path');
const { StorageService } = require('../../core/storage_service');
const { TestUtil } = require('../../core/test_util');
const { UITestUtil } = require('../common/ui_test_util');
const { UploadOperation } = require('../../core/upload_operation');
const { Util } = require('../../core/util');
beforeEach(() => {
TestUtil.deleteJsonFile('Job');
TestUtil.deleteJsonFile('StorageService');
});
afterAll(() => {
TestUtil.deleteJsonFile('Job');
TestUtil.deleteJsonFile('StorageService');
});
<|fim▁hole|>function getStorageService(name, proto, host) {
let ss = new StorageService({
name: name,
protocol: proto,
host: host
});
ss.save();
return ss;
}
function getUploadOp(name, proto, host) {
let ss = getStorageService(name, proto, host);
let op = new UploadOperation();
op.sourceFiles = ['/dev/null'];
op.storageServiceId = ss.id;
return op;
}
function getJob() {
var job = new Job();
job.packageOp = new PackageOperation('TestBag', '/dev/null');
job.packageOp.packageFormat = 'BagIt';
job.packageOp._trimLeadingPaths = false;
job.packageOp.sourceFiles = [
__dirname,
path.join(__dirname, '..', 'forms')
];
job.dirCount = 2;
job.fileCount = 12;
job.byteCount = 237174;
job.uploadOps = [
getUploadOp('target1', 's3', 'target1.com'),
getUploadOp('target2', 's3', 'target2.com')
];
job.bagItProfile = BagItProfile.load(path.join(__dirname, '..', '..', 'test', 'profiles', 'multi_manifest.json'));
job.save();
return job;
}
function getController() {
let job = getJob();
let params = new URLSearchParams({ id: job.id });
return new JobRunController(params);
}
test('constructor', () => {
let controller = getController();
expect(controller.model).toEqual(Job);
expect(controller.job).not.toBeNull();
});
test('show', () => {
let controller = getController();
let response = controller.show()
expect(response.container).toMatch(controller.job.packageOp.packageName);
expect(response.container).toMatch(controller.job.packageOp.outputPath);
expect(response.container).toMatch(controller.job.bagItProfile.name);
expect(response.container).toMatch(controller.job.bagItProfile.description);
expect(response.container).toMatch('2 Directories');
expect(response.container).toMatch('12 Files');
expect(response.container).toMatch('231.62 KB');
expect(response.container).toMatch(controller.job.packageOp.sourceFiles[0]);
expect(response.container).toMatch(controller.job.packageOp.sourceFiles[1]);
});<|fim▁end|>
| |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models.pluginmodel import CMSPlugin
from djangocms_text_ckeditor.fields import HTMLField
from easy_thumbnails.alias import aliases
from easy_thumbnails.signals import saved_file
from easy_thumbnails.signal_handlers import generate_aliases_global
# Define aliases for easy_thumbnail
# See http://easy-thumbnails.readthedocs.org/en/latest/usage/#thumbnail-aliases
if not aliases.get('badge'):
aliases.set('badge', {'size': (150, 80), 'crop': True})
class PersonBiography(models.Model):
"""Stores biographical information about a Person."""
first_name = models.CharField(max_length=200)
last_name = models.CharField(max_length=250)
suffix = models.CharField(max_length=40, blank=True)
title = models.CharField(max_length=250, blank=True)
employer = models.CharField(max_length=250, blank=True)
description = HTMLField()
image = models.ImageField(upload_to='biography_person', blank=True)
active = models.BooleanField(default=True,
help_text=_('If checked, this biography will be available in the plugin list.'))
class Meta:
ordering = ('last_name', 'first_name', )
verbose_name = 'Person biography'
verbose_name_plural = 'Person biographies'
def __unicode__(self):
return '%s, %s' % (self.last_name, self.first_name)
class PersonBiographyPluginModel(CMSPlugin):
"""
Stores a reference to a PersonBiography. This is used so a given
PersonBiography can be referenced by 0 or more PersonBiographyPlugins.
"""
person = models.ForeignKey(PersonBiography)
short_description = HTMLField(blank=True, help_text="If specified, this text will replace the person's normal description.")
event_description = HTMLField(blank=True, help_text="If specified, this text will appear after the person's normal description.")
class Meta:
ordering = ('person', )<|fim▁hole|> def copy_relations(self, oldinstance):
self.person = oldinstance.person
# Generate thumbnails when an image is uploaded.
saved_file.connect(generate_aliases_global)<|fim▁end|>
|
def __unicode__(self):
return unicode(self.person)
|
<|file_name|>cm_placement_group.py<|end_file_name|><|fim▁begin|>###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from starthinker.util.bigquery import table_create
from starthinker.util.data import get_rows
from starthinker.util.data import put_rows
from starthinker.util.google_api import API_DCM
from starthinker.util.cm import get_profile_for_api
from starthinker.util.discovery_to_bigquery import Discovery_To_BigQuery
from starthinker.util.regexp import lookup_id
def cm_placement_group_clear(config, task):
table_create(
config,
task['auth_bigquery'],
config.project,
task['dataset'],
'CM_PlacementGroups',
Discovery_To_BigQuery(
'dfareporting',
'v3.4'
).method_schema(
'placementGroups.list',
iterate=True
)
)
def cm_placement_group_load(config, task):
# load multiple partners from user defined sheet
def load_multiple():
campaigns = [str(lookup_id(r)) for r in set(get_rows(<|fim▁hole|> config,
task['auth_cm'],
{ 'sheets': {
'sheet': task['sheet'],
'tab': 'CM Campaigns',
'header':False,
'range': 'A2:A'
}},
unnest=True
))]
for row in get_rows(
config,
task['auth_sheets'],
{ 'sheets': {
'sheet': task['sheet'],
'tab': 'CM Accounts',
'header':False,
'range': 'A2:A'
}}
):
if row:
account_id = lookup_id(row[0])
is_superuser, profile_id = get_profile_for_api(config, task['auth_cm'], account_id)
kwargs = { 'profileId': profile_id, 'campaignIds':campaigns, 'archived':False }
if is_superuser:
kwargs['accountId'] = account_id
yield from API_DCM(
config,
task['auth_cm'],
iterate=True,
internal=is_superuser
).placementGroups().list( **kwargs).execute()
cm_placement_group_clear(config, task)
# write placement_groups to database
put_rows(
config,
task['auth_bigquery'],
{ 'bigquery': {
'dataset': task['dataset'],
'table': 'CM_PlacementGroups',
'schema': Discovery_To_BigQuery(
'dfareporting',
'v3.4'
).method_schema(
'placementGroups.list',
iterate=True
),
'format':'JSON'
}},
load_multiple()
)<|fim▁end|>
| |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># coding=utf-8
from ..base import BitbucketBase
class BitbucketCloudBase(BitbucketBase):
def __init__(self, url, *args, **kwargs):
"""
Init the rest api wrapper
:param url: string: The base url used for the rest api.
:param *args: list: The fixed arguments for the AtlassianRestApi.
:param **kwargs: dict: The keyword arguments for the AtlassianRestApi.
:return: nothing
"""
expected_type = kwargs.pop("expected_type", None)
super(BitbucketCloudBase, self).__init__(url, *args, **kwargs)
if expected_type is not None and not expected_type == self.get_data("type"):
raise ValueError("Expected type of data is [{}], got [{}].".format(expected_type, self.get_data("type")))
def get_link(self, link):
"""
Get a link from the data.
:param link: string: The link identifier
:return: The requested link or None if it isn't present
"""
links = self.get_data("links")
if links is None or link not in links:
return None
return links[link]["href"]
def _get_paged(
self, url, params=None, data=None, flags=None, trailing=None, absolute=False, paging_workaround=False
):
"""
Used to get the paged data
:param url: string: The url to retrieve
:param params: dict (default is None): The parameters
:param data: dict (default is None): The data
:param flags: string[] (default is None): The flags
:param trailing: bool (default is None): If True, a trailing slash is added to the url
:param absolute: bool (default is False): If True, the url is used absolute and not relative to the root<|fim▁hole|> :param paging_workaround: bool (default is False): If True, the paging is done on our own because
of https://jira.atlassian.com/browse/BCLOUD-13806
:return: A generator object for the data elements
"""
if params is None:
params = {}
if paging_workaround:
params["page"] = 1
while True:
response = super(BitbucketCloudBase, self).get(
url,
trailing=trailing,
params=params,
data=data,
flags=flags,
absolute=absolute,
)
if len(response.get("values", [])) == 0:
return
for value in response["values"]:
yield value
if paging_workaround:
params["page"] += 1
else:
url = response.get("next")
if url is None:
break
# From now on we have absolute URLs with parameters
absolute = True
# Params are now provided by the url
params = {}
# Trailing should not be added as it is already part of the url
trailing = False
return<|fim▁end|>
| |
<|file_name|>report_errors_service_client_config.py<|end_file_name|><|fim▁begin|>config = {
"interfaces": {
"google.devtools.clouderrorreporting.v1beta1.ReportErrorsService": {
"retry_codes": {
"idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
"non_idempotent": []
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,<|fim▁hole|> "rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 20000,
"total_timeout_millis": 600000
}
},
"methods": {
"ReportErrorEvent": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
}
}
}
}
}<|fim▁end|>
|
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 20000,
|
<|file_name|>loggedInUser.js<|end_file_name|><|fim▁begin|>exports.definition = {
config: {
adapter: {
type: "properties",<|fim▁hole|> },
extendModel: function(Model) {
_.extend(Model.prototype, {
/**
* Backbone의 경우 idAttribute는 model에 extend를 통해 바로 넣는다.
* Alloy의 경우 adapter에 넣고 adapter가 model에 넣어준다.
* 그런데 properties 아답터는 model에 idAttribute를 넣는 코드가 없다. (3.2.0.GA)
*
*/
idAttribute : 'bogoyoLoggedInUser',
defaults :{
bogoyoLoggedInUser : 'staticId'
},
clearWithoutId : function(options) {
this.clear();
return this.set('bogoyoLoggedInUser','staticId');
},
getProfileImageUrl : function(){
return String.format("https://graph.facebook.com/%s/picture?width=%d&height=%d",
this.get('external_accounts')[0].external_id,
80,
80);
}
// extended functions and properties go here
});
return Model;
},
extendCollection: function(Collection) {
_.extend(Collection.prototype, {
// extended functions and properties go here
});
return Collection;
}
};<|fim▁end|>
|
collection_name: "loggedInUser"
}
|
<|file_name|>upload.py<|end_file_name|><|fim▁begin|>import sys
from craystack import cf
if len(sys.argv) < 4:
print "Usage: %s <key> <subkey> <path>" % sys.argv[0]
sys.exit(2)
_, key, subkey, filename = sys.argv
with open(filename) as f:<|fim▁hole|><|fim▁end|>
|
content = f.read()
cf.insert(key, {subkey: content})
print "Uploaded %s to %s/%s (%s bytes)" % (filename, key, subkey, len(content))
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>import copy
from decimal import Decimal
import datetime
from celery_formtask.tasks import processform, PROGRESS_STATE<|fim▁hole|>class FormTaskMixin:
def __init__(self, *args, task=None, ignored_kwargs=[], **kwargs):
self._task = task
self._args_bak = args
self._kwargs_bak = copy.copy(kwargs)
for kwarg in ignored_kwargs:
kwargs.pop(kwarg)
super().__init__(*args, **kwargs)
def set_progress(self, current=None, total=None, description=""):
if self._task:
meta_total = total or getattr(self, "formtask_total_steps", None)
meta_description = description or getattr(
self, "formtask_description", None
)
if (
meta_total is not None
and meta_total > 0
and current is not None
and current >= 0
):
percent = (Decimal(current) / Decimal(meta_total)) * Decimal(100)
meta_percent = float(round(percent, 2))
else:
meta_percent = None
self._task.update_state(
state=PROGRESS_STATE,
meta={
"current": current,
"total": meta_total,
"percent": meta_percent,
"description": meta_description,
"time": datetime.datetime.utcnow(),
},
)
def enqueue(self, name=None):
opts = {"shadow": name} if name else {}
async_result = processform.apply_async(
args=(self.__class__.__module__, self.__class__.__name__) + self._args_bak,
kwargs=self._kwargs_bak,
**opts
)
return async_result<|fim▁end|>
| |
<|file_name|>issue-19190-2.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>use std::ops::Deref;
pub struct Bar;
impl Deref for Bar {
type Target = String;
fn deref(&self) -> &String { loop {} }
}
// @has issue_19190_2/struct.Bar.html
// @!has - '//*[@id="method.new"]' 'fn new() -> String'
// @has - '//*[@id="method.as_str"]' 'fn as_str(&self) -> &str'<|fim▁end|>
| |
<|file_name|>test_registration.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" Registration tests """
import os
from shutil import copy
import pytest
from tempfile import TemporaryDirectory
from nipype.pipeline import engine as pe
from ..interfaces.reportlets.registration import (
FLIRTRPT,
SpatialNormalizationRPT,
ANTSRegistrationRPT,
BBRegisterRPT,
MRICoregRPT,
ApplyXFMRPT,
SimpleBeforeAfterRPT,
)
from .conftest import _run_interface_mock, datadir, has_fsl, has_freesurfer
def _smoke_test_report(report_interface, artifact_name):
with TemporaryDirectory() as tmpdir:
res = pe.Node(report_interface, name="smoke_test", base_dir=tmpdir).run()
out_report = res.outputs.out_report
save_artifacts = os.getenv("SAVE_CIRCLE_ARTIFACTS", False)
if save_artifacts:
copy(out_report, os.path.join(save_artifacts, artifact_name))
assert os.path.isfile(out_report), "Report does not exist"
@pytest.mark.skipif(not has_fsl, reason="No FSL")
def test_FLIRTRPT(reference, moving):
""" the FLIRT report capable test """
flirt_rpt = FLIRTRPT(generate_report=True, in_file=moving, reference=reference)
_smoke_test_report(flirt_rpt, "testFLIRT.svg")
@pytest.mark.skipif(not has_freesurfer, reason="No FreeSurfer")
def test_MRICoregRPT(monkeypatch, reference, moving, nthreads):
""" the MRICoreg report capable test """
def _agg(objekt, runtime):
outputs = objekt.output_spec()
outputs.out_lta_file = os.path.join(datadir, "testMRICoregRPT-out_lta_file.lta")
outputs.out_report = os.path.join(runtime.cwd, objekt.inputs.out_report)
return outputs
# Patch the _run_interface method
monkeypatch.setattr(MRICoregRPT, "_run_interface", _run_interface_mock)
monkeypatch.setattr(MRICoregRPT, "aggregate_outputs", _agg)
mri_coreg_rpt = MRICoregRPT(
generate_report=True,
source_file=moving,
reference_file=reference,
num_threads=nthreads,
)
_smoke_test_report(mri_coreg_rpt, "testMRICoreg.svg")
@pytest.mark.skipif(not has_fsl, reason="No FSL")
def test_ApplyXFMRPT(reference, moving):
    """Smoke-test the report-capable ApplyXFM interface."""
    # A plain (report-less) FLIRT run supplies the transform to apply.
    matrix_file = (
        FLIRTRPT(generate_report=False, in_file=moving, reference=reference)
        .run()
        .outputs.out_matrix_file
    )
    interface = ApplyXFMRPT(
        generate_report=True,
        in_file=moving,
        in_matrix_file=matrix_file,
        reference=reference,
        apply_xfm=True,
    )
    _smoke_test_report(interface, "testApplyXFM.svg")
@pytest.mark.skipif(not has_fsl, reason="No FSL")
def test_SimpleBeforeAfterRPT(reference, moving):
    """Smoke-test the before/after comparison report."""
    # Register first so the "after" image is aligned to the "before" one.
    registered = (
        FLIRTRPT(generate_report=False, in_file=moving, reference=reference)
        .run()
        .outputs.out_file
    )
    interface = SimpleBeforeAfterRPT(
        generate_report=True, before=reference, after=registered
    )
    _smoke_test_report(interface, "test_SimpleBeforeAfterRPT.svg")
@pytest.mark.skipif(not has_fsl, reason="No FSL")
def test_FLIRTRPT_w_BBR(reference, reference_mask, moving):
    """Smoke-test FLIRT with ``wm_seg`` set (BBR cost path).

    For the sake of testing ONLY, ``wm_seg`` is just a brain mask rather
    than a real white-matter segmentation.
    """
    interface = FLIRTRPT(
        generate_report=True, in_file=moving, reference=reference, wm_seg=reference_mask
    )
    _smoke_test_report(interface, "testFLIRTRPTBBR.svg")
@pytest.mark.skipif(not has_freesurfer, reason="No FreeSurfer")
def test_BBRegisterRPT(monkeypatch, moving):
""" the BBRegister report capable test """
def _agg(objekt, runtime):
outputs = objekt.output_spec()
outputs.out_lta_file = os.path.join(
datadir, "testBBRegisterRPT-out_lta_file.lta"<|fim▁hole|>
# Patch the _run_interface method
monkeypatch.setattr(BBRegisterRPT, "_run_interface", _run_interface_mock)
monkeypatch.setattr(BBRegisterRPT, "aggregate_outputs", _agg)
subject_id = "fsaverage"
bbregister_rpt = BBRegisterRPT(
generate_report=True,
contrast_type="t1",
init="fsl",
source_file=moving,
subject_id=subject_id,
registered_file=True,
)
_smoke_test_report(bbregister_rpt, "testBBRegister.svg")
def test_SpatialNormalizationRPT(monkeypatch, moving):
    """Smoke-test the report-capable spatial normalization (execution mocked)."""

    def _agg(objekt, runtime):
        # Return canned outputs instead of running a real normalization.
        outputs = objekt.output_spec()
        outputs.warped_image = os.path.join(
            datadir, "testSpatialNormalizationRPTMovingWarpedImage.nii.gz"
        )
        outputs.out_report = os.path.join(runtime.cwd, objekt.inputs.out_report)
        return outputs

    # Short-circuit the actual registration.
    monkeypatch.setattr(SpatialNormalizationRPT, "_run_interface", _run_interface_mock)
    monkeypatch.setattr(SpatialNormalizationRPT, "aggregate_outputs", _agg)

    interface = SpatialNormalizationRPT(
        generate_report=True, moving_image=moving, flavor="testing"
    )
    _smoke_test_report(interface, "testSpatialNormalizationRPT.svg")
def test_SpatialNormalizationRPT_masked(monkeypatch, moving, reference_mask):
    """Smoke-test spatial normalization with a reference mask (execution mocked)."""

    def _agg(objekt, runtime):
        # Return canned outputs instead of running a real normalization.
        outputs = objekt.output_spec()
        outputs.warped_image = os.path.join(
            datadir, "testSpatialNormalizationRPTMovingWarpedImage.nii.gz"
        )
        outputs.out_report = os.path.join(runtime.cwd, objekt.inputs.out_report)
        return outputs

    # Short-circuit the actual registration.
    monkeypatch.setattr(SpatialNormalizationRPT, "_run_interface", _run_interface_mock)
    monkeypatch.setattr(SpatialNormalizationRPT, "aggregate_outputs", _agg)

    interface = SpatialNormalizationRPT(
        generate_report=True,
        moving_image=moving,
        reference_mask=reference_mask,
        flavor="testing",
    )
    _smoke_test_report(interface, "testSpatialNormalizationRPT_masked.svg")
def test_ANTSRegistrationRPT(monkeypatch, reference, moving):
    """Smoke-test the report-capable ANTs registration (execution mocked)."""
    import pkg_resources as pkgr

    def _agg(objekt, runtime):
        # Return canned outputs instead of running a real registration.
        outputs = objekt.output_spec()
        outputs.warped_image = os.path.join(
            datadir, "testANTSRegistrationRPT-warped_image.nii.gz"
        )
        outputs.out_report = os.path.join(runtime.cwd, objekt.inputs.out_report)
        return outputs

    # Short-circuit the actual ANTs run.
    monkeypatch.setattr(ANTSRegistrationRPT, "_run_interface", _run_interface_mock)
    monkeypatch.setattr(ANTSRegistrationRPT, "aggregate_outputs", _agg)

    interface = ANTSRegistrationRPT(
        generate_report=True,
        moving_image=moving,
        fixed_image=reference,
        from_file=pkgr.resource_filename(
            "niworkflows.data", "t1w-mni_registration_testing_000.json"
        ),
    )
    _smoke_test_report(interface, "testANTSRegistrationRPT.svg")
|
)
outputs.out_report = os.path.join(runtime.cwd, objekt.inputs.out_report)
return outputs
|
<|file_name|>constructors.rs<|end_file_name|><|fim▁begin|>#![doc="Provides functions to create different types of matrices.
"]
// std imports
// external imports
use num::{Num, One, Zero};
// local imports
use algebra::structure::{MagmaBase, CommutativeMonoidAddPartial, FieldPartial};
use matrix::matrix::{Matrix,
MatrixI8, MatrixI16, MatrixI32, MatrixI64,
MatrixU8, MatrixU16, MatrixU32, MatrixU64,
MatrixF32, MatrixF64,
MatrixC32, MatrixC64};
use matrix::traits::{Shape};
use error::SRError;
// complex numbers
use num::complex::{Complex32, Complex64};
#[doc="Returns a Hadamard matrix of size n x n
n must be a power of 2.
"]
/// Builds the n x n Hadamard matrix via Sylvester's recursive
/// construction: H_{2k} = [H_k  H_k; H_k  -H_k].
///
/// Returns `SRError::IsNotPowerOfTwo` when `n` is not a power of two.
pub fn hadamard(n : usize) -> Result<MatrixF64, SRError>{
    if !n.is_power_of_two(){
        return Err(SRError::IsNotPowerOfTwo);
    }
    let mut m : MatrixF64 = Matrix::new(n, n);
    // Take the log of n with respect to 2.
    let order = n.trailing_zeros();
    // We are going to use Sylvester's construction
    // http://en.wikipedia.org/wiki/Hadamard_matrix
    // Let's fill the first level Hadamard matrix
    m.set(0, 0, 1.0);
    // Each pass doubles the filled top-left block from `size` to 2*size.
    for o in 0..order{
        // We will construct four views.
        // NOTE(review): size is computed via i32::pow, which assumes
        // o < 31 (i.e. n < 2^31) — confirm that is acceptable here.
        let size : usize = 2i32.pow(o) as usize;
        // top left block
        let tl = m.view(0, 0, size, size);
        // top right block
        let mut tr = m.view(0, size, size, size);
        // bottom left block
        let mut bl = m.view(size, 0, size, size);
        // bottom right block
        let mut br = m.view(size, size, size, size);
        // Replicate the current block into the other three quadrants,
        // negating the bottom-right copy.
        tr.copy_from(&tl);
        bl.copy_from(&tl);
        br.copy_scaled_from(&tl, -1.0);
    }
    Ok(m)
}
#[doc="Returns a Hilbert matrix.
"]
pub fn hilbert(n : usize) -> MatrixF64{
    // Hilbert matrix: entry (r, c) is 1 / (r + c + 1) with 0-based indices.
    let mut result : MatrixF64 = Matrix::new(n, n);
    for row in 0..n{
        for col in 0..n{
            let denominator = (row + col + 1) as f64;
            result.set(row, col, 1.0 / denominator);
        }
    }
    result
}
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
#[doc="Returns a matrix whose entries are picked up from
a range in column wise order.
# Remarks
Say you wish to create a matrix of 100 elements. If you
provide a range of 80 numbers, the first 80 entries in the
matrix will be filled by the numbers from the range. The
remaining 20 entries will be filled with zeros. On the
other hand, if you provide a range with more than 100 numbers,
then only the first 100 numbers will be used to fill the matrix
(off course in column major order). The remaining numbers
in the range will not be used. They will also not be generated.
# Examples
Constructing a 4x4 matrix of floating point numbers:
use scirust::api::{MatrixF64, from_range_cw, Shape};
let start = 0.0;
let stop = 16.0;
let m : MatrixF64 = from_range_cw(4, 4, start, stop);
for i in 0..16{
let c = i >> 2;
let r = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as f64);<|fim▁hole|>
"]
pub fn from_range_cw<T: MagmaBase + Num>(rows: usize, cols: usize,
                                         start: T, stop: T) -> Matrix<T> {
    // TODO: switch to Matrix::from_iter_cw once that constructor works.
    let mut result: Matrix<T> = Matrix::new(rows, cols);
    let mut next = start;
    // Walk the cells column by column, writing consecutive values and
    // stopping as soon as the (exclusive) `stop` value is reached.
    'fill: for col in 0..cols {
        for row in 0..rows {
            result.set(row, col, next);
            next = next + One::one();
            if next == stop {
                break 'fill;
            }
        }
    }
    result
}
// ------------------------------------------------------------------
// Typed convenience wrappers over `from_range_cw`.
// ------------------------------------------------------------------

#[doc="Returns a 64-bit floating point matrix whose entries are
picked up from a range in column wise order.
"]
#[inline]
pub fn from_range_cw_f64(rows: usize, cols: usize, start: f64, stop: f64) -> MatrixF64 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns an 8-bit signed integer matrix whose entries are
picked up from a range in column wise order.
"]
#[inline]
pub fn from_range_cw_i8(rows: usize, cols: usize, start: i8, stop: i8) -> MatrixI8 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns a 16-bit signed integer matrix whose entries are
picked up from a range in column wise order.
"]
#[inline]
pub fn from_range_cw_i16(rows: usize, cols: usize, start: i16, stop: i16) -> MatrixI16 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns a 32-bit signed integer matrix whose entries are
picked up from a range in column wise order.
"]
#[inline]
pub fn from_range_cw_i32(rows: usize, cols: usize, start: i32, stop: i32) -> MatrixI32 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns a 64-bit signed integer matrix whose entries are
picked up from a range in column wise order.
See from_range_cw function for further discussion.

# Examples

use scirust::api::{from_range_cw_i64, Shape};
let m = from_range_cw_i64(4, 4, 0, 16);
for i in 0..16{
    let c = i >> 2;
    let r = i & 3;
    assert_eq!(m.get(r, c).unwrap(), i as i64);
}
"]
#[inline]
pub fn from_range_cw_i64(rows: usize, cols: usize, start: i64, stop: i64) -> MatrixI64 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns an 8-bit unsigned integer matrix whose entries are
picked up from a range in column wise order.
"]
#[inline]
pub fn from_range_cw_u8(rows: usize, cols: usize, start: u8, stop: u8) -> MatrixU8 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns a 16-bit unsigned integer matrix whose entries are
picked up from a range in column wise order.
"]
#[inline]
pub fn from_range_cw_u16(rows: usize, cols: usize, start: u16, stop: u16) -> MatrixU16 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns a 32-bit unsigned integer matrix whose entries are
picked up from a range in column wise order.
"]
#[inline]
pub fn from_range_cw_u32(rows: usize, cols: usize, start: u32, stop: u32) -> MatrixU32 {
    from_range_cw(rows, cols, start, stop)
}

#[doc="Returns a 64-bit unsigned integer matrix whose entries are
picked up from a range in column wise order.
See from_range_cw function for further discussion.

# Examples

use scirust::api::{from_range_cw_u64, Shape};
let m = from_range_cw_u64(4, 4, 0, 16);
for i in 0..16{
    let c = i >> 2;
    let r = i & 3;
    assert_eq!(m.get(r, c).unwrap(), i as u64);
}
"]
#[inline]
pub fn from_range_cw_u64(rows: usize, cols: usize, start: u64, stop: u64) -> MatrixU64 {
    from_range_cw(rows, cols, start, stop)
}
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
#[doc="Returns a matrix whose entries are picked up from
a range in row wise order.
"]
pub fn from_range_rw<T: MagmaBase + Num>(rows: usize, cols: usize,
                                         start: T, stop: T) -> Matrix<T> {
    // TODO: switch to Matrix::from_iter_rw(rows, cols, start..stop) once
    // that constructor works.
    let mut result: Matrix<T> = Matrix::new(rows, cols);
    let mut next = start;
    // Walk the cells row by row, writing consecutive values and stopping
    // as soon as the (exclusive) `stop` value is reached.
    'fill: for row in 0..rows {
        for col in 0..cols {
            result.set(row, col, next);
            next = next + One::one();
            if next == stop {
                break 'fill;
            }
        }
    }
    result
}
// ------------------------------------------------------------------
// Typed convenience wrappers over `from_range_rw`.
// ------------------------------------------------------------------

#[doc="Returns an 8-bit signed integer matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_i8(rows: usize, cols: usize, start: i8, stop: i8) -> MatrixI8 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 16-bit signed integer matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_i16(rows: usize, cols: usize, start: i16, stop: i16) -> MatrixI16 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 32-bit signed integer matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_i32(rows: usize, cols: usize, start: i32, stop: i32) -> MatrixI32 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 64-bit signed integer matrix whose entries are
picked up from a range in row wise order.
See from_range_rw function for further discussion.

# Examples

use scirust::api::{from_range_rw_i64, Shape};
let m = from_range_rw_i64(4, 4, 0, 16);
for i in 0..16{
    let r = i >> 2;
    let c = i & 3;
    assert_eq!(m.get(r, c).unwrap(), i as i64);
}
"]
#[inline]
pub fn from_range_rw_i64(rows: usize, cols: usize, start: i64, stop: i64) -> MatrixI64 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns an 8-bit unsigned integer matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_u8(rows: usize, cols: usize, start: u8, stop: u8) -> MatrixU8 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 16-bit unsigned integer matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_u16(rows: usize, cols: usize, start: u16, stop: u16) -> MatrixU16 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 32-bit unsigned integer matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_u32(rows: usize, cols: usize, start: u32, stop: u32) -> MatrixU32 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 64-bit unsigned integer matrix whose entries are
picked up from a range in row wise order.
See from_range_rw function for further discussion.

# Examples

use scirust::api::{from_range_rw_u64, Shape};
let m = from_range_rw_u64(4, 4, 0, 16);
for i in 0..16{
    let r = i >> 2;
    let c = i & 3;
    assert_eq!(m.get(r, c).unwrap(), i as u64);
}
"]
#[inline]
pub fn from_range_rw_u64(rows: usize, cols: usize, start: u64, stop: u64) -> MatrixU64 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 64-bit floating point matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_f64(rows: usize, cols: usize, start: f64, stop: f64) -> MatrixF64 {
    from_range_rw(rows, cols, start, stop)
}

#[doc="Returns a 32-bit floating point matrix whose entries are
picked up from a range in row wise order.
"]
#[inline]
pub fn from_range_rw_f32(rows: usize, cols: usize, start: f32, stop: f32) -> MatrixF32 {
    from_range_rw(rows, cols, start, stop)
}
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
// ------------------------------------------------------------------
// Typed wrappers over `Matrix::from_slice_cw` (column-wise fill).
// ------------------------------------------------------------------

#[doc="Returns an 8-bit unsigned integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_u8(rows: usize, cols: usize, values: &[u8]) -> MatrixU8 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 16-bit unsigned integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_u16(rows: usize, cols: usize, values: &[u16]) -> MatrixU16 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 32-bit unsigned integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_u32(rows: usize, cols: usize, values: &[u32]) -> MatrixU32 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 64-bit unsigned integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_u64(rows: usize, cols: usize, values: &[u64]) -> MatrixU64 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns an 8-bit signed integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_i8(rows: usize, cols: usize, values: &[i8]) -> MatrixI8 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 16-bit signed integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_i16(rows: usize, cols: usize, values: &[i16]) -> MatrixI16 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 32-bit signed integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_i32(rows: usize, cols: usize, values: &[i32]) -> MatrixI32 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 64-bit signed integer matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_i64(rows: usize, cols: usize, values: &[i64]) -> MatrixI64 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 32-bit float matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_f32(rows: usize, cols: usize, values: &[f32]) -> MatrixF32 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 64-bit float matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_f64(rows: usize, cols: usize, values: &[f64]) -> MatrixF64 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 32-bit complex matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_c32(rows: usize, cols: usize, values: &[Complex32]) -> MatrixC32 {
    Matrix::from_slice_cw(rows, cols, values)
}

#[doc="Returns a 64-bit complex matrix whose entries are
picked up from a slice in column wise order.
"]
#[inline]
pub fn matrix_cw_c64(rows: usize, cols: usize, values: &[Complex64]) -> MatrixC64 {
    Matrix::from_slice_cw(rows, cols, values)
}
// ------------------------------------------------------------------
// Typed wrappers over `Matrix::from_slice_rw` (row-wise fill).
// ------------------------------------------------------------------

#[doc="Returns an 8-bit unsigned integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_u8(rows: usize, cols: usize, values: &[u8]) -> MatrixU8 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 16-bit unsigned integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_u16(rows: usize, cols: usize, values: &[u16]) -> MatrixU16 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 32-bit unsigned integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_u32(rows: usize, cols: usize, values: &[u32]) -> MatrixU32 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 64-bit unsigned integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_u64(rows: usize, cols: usize, values: &[u64]) -> MatrixU64 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns an 8-bit signed integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_i8(rows: usize, cols: usize, values: &[i8]) -> MatrixI8 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 16-bit signed integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_i16(rows: usize, cols: usize, values: &[i16]) -> MatrixI16 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 32-bit signed integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_i32(rows: usize, cols: usize, values: &[i32]) -> MatrixI32 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 64-bit signed integer matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_i64(rows: usize, cols: usize, values: &[i64]) -> MatrixI64 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 32-bit float matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_f32(rows: usize, cols: usize, values: &[f32]) -> MatrixF32 {
    Matrix::from_slice_rw(rows, cols, values)
}

#[doc="Returns a 64-bit float matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_f64(rows: usize, cols: usize, values: &[f64]) -> MatrixF64 {
    Matrix::from_slice_rw(rows, cols, values)
}
/*
#[doc="Returns a 32-bit complex matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_c32(rows : usize, cols : usize, values: &[Complex32])->MatrixC32 {
Matrix::from_slice_rw(rows, cols, values)
}
#[doc="Returns a 64-bit complex matrix whose entries are
picked up from a slice in row wise order.
"]
#[inline]
pub fn matrix_rw_c64(rows : usize, cols : usize, values: &[Complex64])->MatrixC64 {
Matrix::from_slice_rw(rows, cols, values)
}
*/
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
#[doc="Returns a column vector with entries from a slice.
"]
#[inline]
pub fn col_vector<T: CommutativeMonoidAddPartial>(values: &[T]) -> Matrix<T> {
    // An n x 1 matrix filled column-wise from the slice.
    Matrix::from_slice_cw(values.len(), 1, values)
}

#[doc="Returns a column vector with entries from an iterator.
"]
#[inline]
pub fn col_vector_from_iter<T: CommutativeMonoidAddPartial, A: Iterator<Item = T>>(
    values: A,
    len: usize,
) -> Matrix<T> {
    // For a single-column matrix, row-wise and column-wise fill coincide.
    Matrix::from_iter_rw(len, 1, values)
}
// ------------------------------------------------------------------
// Typed convenience wrappers over `col_vector`.
// ------------------------------------------------------------------

#[doc="Returns an 8-bit unsigned int column vector with entries from a slice.
"]
#[inline]
pub fn vector_u8(values: &[u8]) -> MatrixU8 {
    col_vector(values)
}

#[doc="Returns a 16-bit unsigned int column vector with entries from a slice.
"]
#[inline]
pub fn vector_u16(values: &[u16]) -> MatrixU16 {
    col_vector(values)
}

#[doc="Returns a 32-bit unsigned int column vector with entries from a slice.
"]
#[inline]
pub fn vector_u32(values: &[u32]) -> MatrixU32 {
    col_vector(values)
}

#[doc="Returns a 64-bit unsigned int column vector with entries from a slice.
"]
#[inline]
pub fn vector_u64(values: &[u64]) -> MatrixU64 {
    col_vector(values)
}

#[doc="Returns an 8-bit signed int column vector with entries from a slice.
"]
#[inline]
pub fn vector_i8(values: &[i8]) -> MatrixI8 {
    col_vector(values)
}

#[doc="Returns a 16-bit signed int column vector with entries from a slice.
"]
#[inline]
pub fn vector_i16(values: &[i16]) -> MatrixI16 {
    col_vector(values)
}

#[doc="Returns a 32-bit signed int column vector with entries from a slice.
"]
#[inline]
pub fn vector_i32(values: &[i32]) -> MatrixI32 {
    col_vector(values)
}

#[doc="Returns a 64-bit signed int column vector with entries from a slice.
"]
#[inline]
pub fn vector_i64(values: &[i64]) -> MatrixI64 {
    col_vector(values)
}

#[doc="Returns a 32-bit float column vector with entries from a slice.
"]
#[inline]
pub fn vector_f32(values: &[f32]) -> MatrixF32 {
    col_vector(values)
}

#[doc="Returns a 64-bit float column vector with entries from a slice.
"]
#[inline]
pub fn vector_f64(values: &[f64]) -> MatrixF64 {
    col_vector(values)
}
//pub fn vec_f64<T:Number+ToPrimitive>(values: &[T])->MatrixF64 {
// let n = values.len();
// let iter = values.iter().map(|&x| x.to_f64().unwrap());
// col_vector_from_iter(iter, n)
//}
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
//
// Elementary row operation matrices
//
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
#[doc="Returns elementary matrix which can
exchange rows i and j
on left multiplication.
"]
pub fn ero_switch<T: FieldPartial>(n: usize,
                                   i: usize,
                                   j: usize) -> Matrix<T> {
    debug_assert!(i < n);
    debug_assert!(j < n);
    // Start from the identity, then rewrite the four entries that differ
    // for a row-exchange matrix.
    let mut result: Matrix<T> = Matrix::identity(n, n);
    let zero: T = Zero::zero();
    let one: T = One::one();
    result.set(i, i, zero);
    result.set(j, j, zero);
    result.set(i, j, one);
    result.set(j, i, one);
    result
}
#[doc="Returns elementary matrix which can scale
a particular row by a factor on left multiplication.
"]
pub fn ero_scale<T:FieldPartial>(n : usize,
    r : usize,
    scale : T)-> Matrix<T> {
    // Guard against an out-of-range row index, consistent with the
    // debug_assert! bounds checks in `ero_switch`.
    debug_assert!(r < n);
    // Identity with the (r, r) entry replaced by the scale factor.
    let mut m : Matrix<T> = Matrix::identity(n, n);
    m.set(r,r, scale);
    m
}
#[doc="Returns elementary matrix which can scale
a particular row by a factor and add it to
another row
on left multiplication.
r_i = r_i + k * r_j
"]
pub fn ero_scale_add<T:FieldPartial>(n : usize,
    i : usize,
    j : usize,
    scale : T)-> Matrix<T> {
    // Bounds checks, consistent with `ero_switch`.
    debug_assert!(i < n);
    debug_assert!(j < n);
    // i == j would overwrite the diagonal 1 with `scale`, producing a plain
    // row-scaling matrix rather than the documented r_i += k * r_j.
    debug_assert!(i != j);
    // Identity with the off-diagonal (i, j) entry set to the scale factor.
    let mut m : Matrix<T> = Matrix::identity(n, n);
    m.set(i, j, scale);
    m
}
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
//
// Unit tests follow
//
/////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////
#[cfg(test)]
mod test{
use api::*;
#[test]
fn test_hadamard(){
let m = hadamard(1).unwrap();
assert_eq!(m.num_cells(), 1);
assert_eq!(m.get(0,0).unwrap(), 1.0);
let m = hadamard(2).unwrap();
assert_eq!(m.num_cells(), 4);
assert_eq!(m.get(0,0).unwrap(), 1.0);
assert_eq!(m.get(0,1).unwrap(), 1.0);
assert_eq!(m.get(1,0).unwrap(), 1.0);
assert_eq!(m.get(1,1).unwrap(), -1.0);
let m = hadamard(4).unwrap();
assert!(m.is_square());
}
#[test]
fn test_range_cw_functions(){
let m = from_range_cw_i64(4, 4, 0, 16);
for i in 0..16{
let c = i >> 2;
let r = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as i64);
}
let start = 0.0;
let stop = 16.0;
let m : MatrixF64 = from_range_cw(4, 4, start, stop);
for i in 0..16{
let c = i >> 2;
let r = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as f64);
}
let m = from_range_cw_f64(4, 4, start, stop);
for i in 0..16{
let c = i >> 2;
let r = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as f64);
}
}
#[test]
fn test_range_rw_functions(){
let start = 0.0;
let stop = 16.0;
let m : MatrixF64 = from_range_rw(4, 4, start, stop);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as f64);
}
let m = from_range_rw_i8(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as i8);
}
let m = from_range_rw_i16(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as i16);
}
let m = from_range_rw_i32(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as i32);
}
let m = from_range_rw_i64(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as i64);
}
let m = from_range_rw_u8(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as u8);
}
let m = from_range_rw_u16(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as u16);
}
let m = from_range_rw_u32(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as u32);
}
let m = from_range_rw_u64(4, 4, 0, 16);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as u64);
}
let m = from_range_rw_f64(4, 4, start, stop);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as f64);
}
let m = from_range_rw_f32(4, 4, 0.0, 16.0);
for i in 0..16{
let r = i >> 2;
let c = i & 3;
assert_eq!(m.get(r, c).unwrap(), i as f32);
}
}
#[test]
fn test_matrix_type_functions(){
let m = matrix_cw_u8(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u8);
}
let m = matrix_cw_u16(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u16);
}
let m = matrix_cw_u32(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u32);
}
let m = matrix_cw_u64(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u64);
}
let m = matrix_cw_i8(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i8);
}
let m = matrix_cw_i16(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i16);
}
let m = matrix_cw_i32(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i32);
}
let m = matrix_cw_i64(2,2, &[1,2,3,4]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i64);
}
let m = matrix_cw_f64(2,2, &[1.0,2.0,3.0,4.0]);
for i in 0..4{
let c = i >> 1;
let r = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as f64);
}
// We will now test row wise construction functions.
let m = matrix_rw_u8(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u8);
}
let m = matrix_rw_u16(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u16);
}
let m = matrix_rw_u32(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u32);
}
let m = matrix_rw_u64(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as u64);
}
let m = matrix_rw_i8(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i8);
}
let m = matrix_rw_i16(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i16);
}
let m = matrix_rw_i32(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i32);
}
let m = matrix_rw_i64(2,2, &[1,2,3,4]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as i64);
}
let m = matrix_rw_f64(2,2, &[1.0,2.0,3.0,4.0]);
for i in 0..4{
let r = i >> 1;
let c = i & 1;
assert_eq!(m.get(r, c).unwrap(), (i + 1) as f64);
}
}
#[test]
fn test_vector_type_functions(){
let v = vector_u8(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as u8);
}
let v = vector_u16(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as u16);
}
let v = vector_u32(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as u32);
}
let v = vector_u64(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as u64);
}
let v = vector_i8(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as i8);
}
let v = vector_i16(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as i16);
}
let v = vector_i32(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as i32);
}
let v = vector_i64(&[1,2,3,4]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as i64);
}
let v = vector_f32(&[1.,2.,3.,4.]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as f32);
}
let v = vector_f64(&[1.,2.,3.,4.]);
assert!(v.is_vector());
assert!(v.is_col());
for i in 0..4{
assert_eq!(v.get(i, 0).unwrap(), (i + 1) as f64);
}
}
#[test]
fn test_ero_switch_scale(){
let eswitch : MatrixF64 = ero_switch(4, 1, 3);
let escale : MatrixF64 = ero_scale(4, 2, 2.0);
let mut m = matrix_rw_f64(4,4, &[0., 1., 2., 3.,
4., 5., 6., 7.,
8., 9., 10., 11.,
12., 13., 14., 15.]);
// Carry out transformation through multiplying
// elementary matrices
let m2 = &eswitch * &m;
let m3 = &escale * &m2;
// Do ERO operations directly.
m.ero_switch(1, 3);
m.ero_scale(2, 2.0);
println!("eswitch: {:?}", eswitch);
println!("escale: {:?}", escale);
println!("m2: {:?}", m2);
println!("m3: {:?}", m3);
assert_eq!(m3, m);
}
#[test]
fn test_ero_scale_add(){
let mut m = matrix_rw_f64(4,4, &[0., 1., 2., 3.,
4., 5., 6., 7.,
8., 9., 10., 11.,
12., 13., 14., 15.]);
let esa : MatrixF64 = ero_scale_add(4, 1, 2, 3.0);
println!("esa: {:?}", esa);
let m2 = &esa * &m;
println!("m2: {:?}", m2);
m.ero_scale_add(1, 2, 3.);
assert_eq!(m2, m);
}
}<|fim▁end|>
|
}
|
<|file_name|>os_release.rs<|end_file_name|><|fim▁begin|>use std::fs;
/// Linux distribution family, as identified by the `ID=` field of
/// `/etc/os-release`.
pub enum OsReleaseId {
    /// `ID=amzn`
    Amazon,
    /// `ID=centos`
    CentOs,
    /// `ID=debian`
    Debian,
    /// `ID=ubuntu`
    Ubuntu,
}
const OS_RELEASE_PATH: &str = "/etc/os-release";
impl OsReleaseId {
fn from_os_release_str(s: &str) -> Option<Self> {<|fim▁hole|> "centos" => Some(OsReleaseId::CentOs),
"debian" => Some(OsReleaseId::Debian),
"ubuntu" => Some(OsReleaseId::Ubuntu),
_ => None,
}
}
pub fn parse_os_release() -> Option<Self> {
fs::read_to_string(OS_RELEASE_PATH)
.ok()
.as_deref()
.and_then(Self::from_os_release_str)
}
}
// Exercises the ID= parser against captured /etc/os-release fixture files.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_parse_from_os_release() {
        // Each include_str! fixture is a verbatim os-release file for the
        // distribution named in the file name.
        let actual =
            OsReleaseId::from_os_release_str(include_str!("os-release-data/amazonlinux-2"));
        assert!(matches!(actual, Some(OsReleaseId::Amazon)));
        let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/centos-7.8"));
        assert!(matches!(actual, Some(OsReleaseId::CentOs)));
        let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/debian-8"));
        assert!(matches!(actual, Some(OsReleaseId::Debian)));
        let actual = OsReleaseId::from_os_release_str(include_str!("os-release-data/ubuntu-14.04"));
        assert!(matches!(actual, Some(OsReleaseId::Ubuntu)));
    }
}
|
let id_line = s.lines().find(|l| l.starts_with("ID="))?;
let id = id_line.trim_start_matches("ID=").trim_matches('"');
match id {
"amzn" => Some(OsReleaseId::Amazon),
|
<|file_name|>xcode_emulation.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import copy
import gyp.common
import os
import os.path
import re
import shlex
import subprocess
import sys
import tempfile
from gyp.common import GypError
class XcodeSettings(object):
  """A class that understands the gyp 'xcode_settings' object."""
  # Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
  # at class-level for efficiency.
  # Maps an SDKROOT name (e.g. 'macosx10.8') to its absolute path, and the
  # reverse (see _XcodeSdkPath).
  _sdk_path_cache = {}
  _sdk_root_cache = {}
  # Populated lazily by GetExtraPlistItems(). Shared by all XcodeSettings, so
  # cached at class-level for efficiency.
  _plist_cache = {}
  # Populated lazily by GetIOSPostbuilds. Shared by all XcodeSettings, so
  # cached at class-level for efficiency.
  # Maps CODE_SIGN_IDENTITY values to certificate fingerprints.
  _codesigning_key_cache = {}
  # Populated lazily by _XcodeVersion. Shared by all XcodeSettings, so cached
  # at class-level for efficiency.
  # A (version, build) tuple, e.g. ('0463', '4H1503').
  _xcode_version_cache = ()
  def __init__(self, spec):
    # spec: the gyp target dict this object describes.
    self.spec = spec

    # Set to True below if any configuration carries an
    # IPHONEOS_DEPLOYMENT_TARGET setting.
    self.isIOS = False

    # Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
    # This means self.xcode_settings[config] always contains all settings
    # for that config -- the per-target settings as well. Settings that are
    # the same for all configs are implicitly per-target settings.
    self.xcode_settings = {}
    configs = spec['configurations']
    for configname, config in configs.iteritems():
      self.xcode_settings[configname] = config.get('xcode_settings', {})
      self._ConvertConditionalKeys(configname)
      if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
                                             None):
        self.isIOS = True

    # This is only non-None temporarily during the execution of some methods.
    self.configname = None

    # Used by _AdjustLibrary to match .a and .dylib entries in libraries.
    self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')

  def _ConvertConditionalKeys(self, configname):
    """Converts or warns on conditional keys. Xcode supports conditional keys,
    such as CODE_SIGN_IDENTITY[sdk=iphoneos*]. This is a partial implementation
    with some keys converted while the rest force a warning."""
    settings = self.xcode_settings[configname]
    conditional_keys = [key for key in settings if key.endswith(']')]
    for key in conditional_keys:
      # If you need more, speak up at http://crbug.com/122592
      if key.endswith("[sdk=iphoneos*]"):
        # Honor the conditional value only for iphoneos configurations.
        if configname.endswith("iphoneos"):
          new_key = key.split("[")[0]
          settings[new_key] = settings[key]
      else:
        print 'Warning: Conditional keys not implemented, ignoring:', \
            ' '.join(conditional_keys)
      # The conditional key itself is always removed, honored or not.
      del settings[key]
  def _Settings(self):
    # Settings dict of the currently active configuration. Callers must set
    # self.configname before using any helper built on this.
    assert self.configname
    return self.xcode_settings[self.configname]

  def _Test(self, test_key, cond_key, default):
    # True iff setting |test_key| (or |default| when unset) equals |cond_key|.
    return self._Settings().get(test_key, default) == cond_key

  def _Appendf(self, lst, test_key, format_str, default=None):
    # Appends format_str % value to |lst| when |test_key| is set, or when a
    # truthy |default| is supplied.
    if test_key in self._Settings():
      lst.append(format_str % str(self._Settings()[test_key]))
    elif default:
      lst.append(format_str % str(default))

  def _WarnUnimplemented(self, test_key):
    # Warns once per call site when an unsupported xcode setting is used.
    if test_key in self._Settings():
      print 'Warning: Ignoring not yet implemented key "%s".' % test_key

  def _IsBundle(self):
    # A target is a bundle when gyp's 'mac_bundle' flag is truthy.
    return int(self.spec.get('mac_bundle', 0)) != 0
  def GetFrameworkVersion(self):
    """Returns the framework version of the current target. Only valid for
    bundles."""
    assert self._IsBundle()
    return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')

  def GetWrapperExtension(self):
    """Returns the bundle extension (.app, .framework, .plugin, etc). Only
    valid for bundles."""
    assert self._IsBundle()
    if self.spec['type'] in ('loadable_module', 'shared_library'):
      default_wrapper_extension = {
        'loadable_module': 'bundle',
        'shared_library': 'framework',
      }[self.spec['type']]
      wrapper_extension = self.GetPerTargetSetting(
          'WRAPPER_EXTENSION', default=default_wrapper_extension)
      return '.' + self.spec.get('product_extension', wrapper_extension)
    elif self.spec['type'] == 'executable':
      # Bundled executables are .app bundles by default.
      return '.' + self.spec.get('product_extension', 'app')
    else:
      assert False, "Don't know extension for '%s', target '%s'" % (
          self.spec['type'], self.spec['target_name'])

  def GetProductName(self):
    """Returns PRODUCT_NAME."""
    # Falls back to the target name when no explicit product_name is set.
    return self.spec.get('product_name', self.spec['target_name'])

  def GetFullProductName(self):
    """Returns FULL_PRODUCT_NAME."""
    if self._IsBundle():
      return self.GetWrapperName()
    else:
      return self._GetStandaloneBinaryPath()

  def GetWrapperName(self):
    """Returns the directory name of the bundle represented by this target.
    Only valid for bundles."""
    assert self._IsBundle()
    return self.GetProductName() + self.GetWrapperExtension()

  def GetBundleContentsFolderPath(self):
    """Returns the qualified path to the bundle's contents folder. E.g.
    Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
    if self.isIOS:
      # iOS bundles are flat: the wrapper directory is the contents folder.
      return self.GetWrapperName()
    assert self._IsBundle()
    if self.spec['type'] == 'shared_library':
      return os.path.join(
          self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
    else:
      # loadable_modules have a 'Contents' folder like executables.
      return os.path.join(self.GetWrapperName(), 'Contents')

  def GetBundleResourceFolder(self):
    """Returns the qualified path to the bundle's resource folder. E.g.
    Chromium.app/Contents/Resources. Only valid for bundles."""
    assert self._IsBundle()
    if self.isIOS:
      return self.GetBundleContentsFolderPath()
    return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')

  def GetBundlePlistPath(self):
    """Returns the qualified path to the bundle's plist file. E.g.
    Chromium.app/Contents/Info.plist. Only valid for bundles."""
    assert self._IsBundle()
    if self.spec['type'] in ('executable', 'loadable_module'):
      return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
    else:
      # Frameworks keep Info.plist under Resources.
      return os.path.join(self.GetBundleContentsFolderPath(),
                          'Resources', 'Info.plist')

  def GetProductType(self):
    """Returns the PRODUCT_TYPE of this target."""
    if self._IsBundle():
      return {
        'executable': 'com.apple.product-type.application',
        'loadable_module': 'com.apple.product-type.bundle',
        'shared_library': 'com.apple.product-type.framework',
      }[self.spec['type']]
    else:
      return {
        'executable': 'com.apple.product-type.tool',
        'loadable_module': 'com.apple.product-type.library.dynamic',
        'shared_library': 'com.apple.product-type.library.dynamic',
        'static_library': 'com.apple.product-type.library.static',
      }[self.spec['type']]

  def GetMachOType(self):
    """Returns the MACH_O_TYPE of this target."""
    # Weird, but matches Xcode.
    if not self._IsBundle() and self.spec['type'] == 'executable':
      return ''
    return {
      'executable': 'mh_execute',
      'static_library': 'staticlib',
      'shared_library': 'mh_dylib',
      'loadable_module': 'mh_bundle',
    }[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('shared_library') or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
  def _GetStandaloneExecutableSuffix(self):
    # File extension for non-bundle products, by target type; an explicit
    # product_extension wins.
    if 'product_extension' in self.spec:
      return '.' + self.spec['product_extension']
    return {
      'executable': '',
      'static_library': '.a',
      'shared_library': '.dylib',
      'loadable_module': '.so',
    }[self.spec['type']]

  def _GetStandaloneExecutablePrefix(self):
    # File name prefix for non-bundle products, by target type; an explicit
    # product_prefix wins.
    return self.spec.get('product_prefix', {
      'executable': '',
      'static_library': 'lib',
      'shared_library': 'lib',
      # Non-bundled loadable_modules are called foo.so for some reason
      # (that is, .so and no prefix) with the xcode build -- match that.
      'loadable_module': '',
    }[self.spec['type']])

  def _GetStandaloneBinaryPath(self):
    """Returns the name of the non-bundle binary represented by this target.
    E.g. hello_world. Only valid for non-bundles."""
    assert not self._IsBundle()
    assert self.spec['type'] in (
        'executable', 'shared_library', 'static_library', 'loadable_module'), (
        'Unexpected type %s' % self.spec['type'])
    target = self.spec['target_name']
    # Strip a leading 'lib' from the target name for library types; the
    # proper prefix is re-added via _GetStandaloneExecutablePrefix below.
    if self.spec['type'] == 'static_library':
      if target[:3] == 'lib':
        target = target[3:]
    elif self.spec['type'] in ('loadable_module', 'shared_library'):
      if target[:3] == 'lib':
        target = target[3:]

    target_prefix = self._GetStandaloneExecutablePrefix()
    # An explicit product_name overrides the derived name entirely.
    target = self.spec.get('product_name', target)
    target_ext = self._GetStandaloneExecutableSuffix()
    return target_prefix + target + target_ext

  def GetExecutableName(self):
    """Returns the executable name of the bundle represented by this target.
    E.g. Chromium."""
    if self._IsBundle():
      return self.spec.get('product_name', self.spec['target_name'])
    else:
      return self._GetStandaloneBinaryPath()

  def GetExecutablePath(self):
    """Returns the directory name of the bundle represented by this target. E.g.
    Chromium.app/Contents/MacOS/Chromium."""
    if self._IsBundle():
      return self._GetBundleBinaryPath()
    else:
      return self._GetStandaloneBinaryPath()

  def GetActiveArchs(self, configname):
    """Returns the architectures this target should be built for."""
    # TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
    # CURRENT_ARCH / NATIVE_ARCH env vars?
    return self.xcode_settings[configname].get('ARCHS', [self._DefaultArch()])
  def _GetStdout(self, cmdlist):
    # Runs |cmdlist| and returns its stdout with the trailing newline
    # stripped. Raises GypError on a nonzero exit code.
    job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
    out = job.communicate()[0]
    if job.returncode != 0:
      sys.stderr.write(out + '\n')
      raise GypError('Error %d running %s' % (job.returncode, cmdlist[0]))
    return out.rstrip('\n')

  def _GetSdkVersionInfoItem(self, sdk, infoitem):
    # Queries `xcodebuild -version` for a single field (e.g. 'Path',
    # 'ProductVersion') of the given SDK.
    return self._GetStdout(['xcodebuild', '-version', '-sdk', sdk, infoitem])

  def _SdkRoot(self, configname):
    # Returns the SDKROOT setting (an SDK name or absolute path), or ''.
    if configname is None:
      configname = self.configname
    return self.GetPerConfigSetting('SDKROOT', configname, default='')

  def _SdkPath(self, configname=None):
    # Returns the absolute filesystem path of the configured SDK.
    sdk_root = self._SdkRoot(configname)
    if sdk_root.startswith('/'):
      # SDKROOT is already an absolute path.
      return sdk_root
    return self._XcodeSdkPath(sdk_root)

  def _XcodeSdkPath(self, sdk_root):
    # Resolves an SDK name to its path via xcodebuild, caching both
    # directions (name -> path, path -> name) at class level.
    if sdk_root not in XcodeSettings._sdk_path_cache:
      sdk_path = self._GetSdkVersionInfoItem(sdk_root, 'Path')
      XcodeSettings._sdk_path_cache[sdk_root] = sdk_path
      if sdk_root:
        XcodeSettings._sdk_root_cache[sdk_path] = sdk_root
    return XcodeSettings._sdk_path_cache[sdk_root]

  def _AppendPlatformVersionMinFlags(self, lst):
    # Appends -m*-version-min compiler/linker flags for the configured macOS
    # and iOS deployment targets.
    self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
    if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
      # TODO: Implement this better?
      sdk_path_basename = os.path.basename(self._SdkPath())
      if sdk_path_basename.lower().startswith('iphonesimulator'):
        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
                      '-mios-simulator-version-min=%s')
      else:
        self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
                      '-miphoneos-version-min=%s')
  def GetCflags(self, configname, arch=None):
    """Returns flags that need to be added to .c, .cc, .m, and .mm
    compilations."""
    # This functions (and the similar ones below) do not offer complete
    # emulation of all xcode_settings keys. They're implemented on demand.

    self.configname = configname
    cflags = []

    sdk_root = self._SdkPath()
    if 'SDKROOT' in self._Settings():
      cflags.append('-isysroot %s' % sdk_root)

    if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
      cflags.append('-Wconstant-conversion')

    if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
      cflags.append('-funsigned-char')

    if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
      cflags.append('-fasm-blocks')

    if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
      if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
        cflags.append('-mdynamic-no-pic')
    else:
      pass
      # TODO: In this case, it depends on the target. xcode passes
      # mdynamic-no-pic by default for executable and possibly static lib
      # according to mento

    if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
      cflags.append('-mpascal-strings')

    self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')

    if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
      dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
      if dbg_format == 'dwarf':
        cflags.append('-gdwarf-2')
      elif dbg_format == 'stabs':
        raise NotImplementedError('stabs debug format is not supported yet.')
      elif dbg_format == 'dwarf-with-dsym':
        # The dSYM bundle itself is produced by _GetDebugInfoPostbuilds.
        cflags.append('-gdwarf-2')
      else:
        raise NotImplementedError('Unknown debug format %s' % dbg_format)

    if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
      cflags.append('-fstrict-aliasing')
    elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
      cflags.append('-fno-strict-aliasing')

    if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
      cflags.append('-fvisibility=hidden')

    if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
      cflags.append('-Werror')

    if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
      cflags.append('-Wnewline-eof')

    self._AppendPlatformVersionMinFlags(cflags)

    # TODO:
    if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
      self._WarnUnimplemented('COPY_PHASE_STRIP')
    self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
    self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')

    # TODO: This is exported correctly, but assigning to it is not supported.
    self._WarnUnimplemented('MACH_O_TYPE')
    self._WarnUnimplemented('PRODUCT_TYPE')

    # An explicit |arch| argument overrides the ARCHS setting.
    if arch is not None:
      archs = [arch]
    else:
      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
    if len(archs) != 1:
      # TODO: Supporting fat binaries will be annoying.
      self._WarnUnimplemented('ARCHS')
      archs = ['i386']
    cflags.append('-arch ' + archs[0])

    if archs[0] in ('i386', 'x86_64'):
      if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
        cflags.append('-msse3')
      if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
                    default='NO'):
        cflags.append('-mssse3')  # Note 3rd 's'.
      if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
        cflags.append('-msse4.1')
      if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
        cflags.append('-msse4.2')

    cflags += self._Settings().get('WARNING_CFLAGS', [])

    # Framework search paths; $(SDKROOT) placeholders resolve to the SDK path.
    config = self.spec['configurations'][self.configname]
    framework_dirs = config.get('mac_framework_dirs', [])
    for directory in framework_dirs:
      cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))

    self.configname = None
    return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
  def GetCflagsCC(self, configname):
    """Returns flags that need to be added to .cc, and .mm compilations."""
    self.configname = configname
    cflags_cc = []

    clang_cxx_language_standard = self._Settings().get(
        'CLANG_CXX_LANGUAGE_STANDARD')
    # Note: Don't make c++0x to c++11 so that c++0x can be used with older
    # clangs that don't understand c++11 yet (like Xcode 4.2's).
    if clang_cxx_language_standard:
      cflags_cc.append('-std=%s' % clang_cxx_language_standard)

    self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')

    if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
      cflags_cc.append('-fno-rtti')
    if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
      cflags_cc.append('-fno-exceptions')
    if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
      cflags_cc.append('-fvisibility-inlines-hidden')
    if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
      cflags_cc.append('-fno-threadsafe-statics')
    # Note: This flag is a no-op for clang, it only has an effect for gcc.
    if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
      cflags_cc.append('-Wno-invalid-offsetof')

    other_ccflags = []

    # Expand $(inherited)-style references into OTHER_CFLAGS.
    for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
      # TODO: More general variable expansion. Missing in many other places too.
      if flag in ('$inherited', '$(inherited)', '${inherited}'):
        flag = '$OTHER_CFLAGS'
      if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
        other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
      else:
        other_ccflags.append(flag)
    cflags_cc += other_ccflags

    self.configname = None
    return cflags_cc

  def _AddObjectiveCGarbageCollectionFlags(self, flags):
    # GCC_ENABLE_OBJC_GC: 'supported' allows GC, 'required' mandates it;
    # anything else adds no flag.
    gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
    if gc_policy == 'supported':
      flags.append('-fobjc-gc')
    elif gc_policy == 'required':
      flags.append('-fobjc-gc-only')

  def _AddObjectiveCARCFlags(self, flags):
    # Enables automatic reference counting when CLANG_ENABLE_OBJC_ARC is YES.
    if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
      flags.append('-fobjc-arc')

  def _AddObjectiveCMissingPropertySynthesisFlags(self, flags):
    if self._Test('CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS',
                  'YES', default='NO'):
      flags.append('-Wobjc-missing-property-synthesis')

  def GetCflagsObjC(self, configname):
    """Returns flags that need to be added to .m compilations."""
    self.configname = configname
    cflags_objc = []
    self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
    self._AddObjectiveCARCFlags(cflags_objc)
    self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objc)
    self.configname = None
    return cflags_objc

  def GetCflagsObjCC(self, configname):
    """Returns flags that need to be added to .mm compilations."""
    self.configname = configname
    cflags_objcc = []
    self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
    self._AddObjectiveCARCFlags(cflags_objcc)
    self._AddObjectiveCMissingPropertySynthesisFlags(cflags_objcc)
    if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
      cflags_objcc.append('-fobjc-call-cxx-cdtors')
    self.configname = None
    return cflags_objcc

  def GetInstallNameBase(self):
    """Return DYLIB_INSTALL_NAME_BASE for this target."""
    # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
    if (self.spec['type'] != 'shared_library' and
        (self.spec['type'] != 'loadable_module' or self._IsBundle())):
      return None
    install_base = self.GetPerTargetSetting(
        'DYLIB_INSTALL_NAME_BASE',
        default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
    return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
  def GetInstallName(self):
    """Return LD_DYLIB_INSTALL_NAME for this target."""
    # Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
    if (self.spec['type'] != 'shared_library' and
        (self.spec['type'] != 'loadable_module' or self._IsBundle())):
      return None

    default_install_name = \
        '$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
    install_name = self.GetPerTargetSetting(
        'LD_DYLIB_INSTALL_NAME', default=default_install_name)

    # Hardcode support for the variables used in chromium for now, to
    # unblock people using the make build.
    if '$' in install_name:
      # Only the two known variable forms are supported; anything else is an
      # error rather than being silently mis-expanded.
      assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
          '$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
          'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
          'yet in target \'%s\' (got \'%s\')' %
              (self.spec['target_name'], install_name))

      install_name = install_name.replace(
          '$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
          self._StandardizePath(self.GetInstallNameBase()))
      if self._IsBundle():
        # These are only valid for bundles, hence the |if|.
        install_name = install_name.replace(
            '$(WRAPPER_NAME)', self.GetWrapperName())
        install_name = install_name.replace(
            '$(PRODUCT_NAME)', self.GetProductName())
      else:
        assert '$(WRAPPER_NAME)' not in install_name
        assert '$(PRODUCT_NAME)' not in install_name

      install_name = install_name.replace(
          '$(EXECUTABLE_PATH)', self.GetExecutablePath())
    return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
LINKER_FILE = '(\S+)'
WORD = '\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
  def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
    """Returns flags that need to be passed to the linker.

    Args:
        configname: The name of the configuration to get ld flags for.
        product_dir: The directory where products such static and dynamic
            libraries are placed. This is added to the library search path.
        gyp_to_build_path: A function that converts paths relative to the
            current gyp file to paths relative to the build direcotry.
    """
    self.configname = configname
    ldflags = []

    # The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
    # can contain entries that depend on this. Explicitly absolutify these.
    for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
      ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))

    if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
      ldflags.append('-Wl,-dead_strip')

    if self._Test('PREBINDING', 'YES', default='NO'):
      ldflags.append('-Wl,-prebind')

    self._Appendf(
        ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
    self._Appendf(
        ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')

    self._AppendPlatformVersionMinFlags(ldflags)

    if 'SDKROOT' in self._Settings():
      ldflags.append('-isysroot ' + self._SdkPath())

    for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
      ldflags.append('-L' + gyp_to_build_path(library_path))

    if 'ORDER_FILE' in self._Settings():
      ldflags.append('-Wl,-order_file ' +
                     '-Wl,' + gyp_to_build_path(
                                  self._Settings()['ORDER_FILE']))

    # An explicit |arch| argument overrides the ARCHS setting.
    if arch is not None:
      archs = [arch]
    else:
      archs = self._Settings().get('ARCHS', [self._DefaultArch()])
    if len(archs) != 1:
      # TODO: Supporting fat binaries will be annoying.
      self._WarnUnimplemented('ARCHS')
      archs = ['i386']
    ldflags.append('-arch ' + archs[0])

    # Xcode adds the product directory by default.
    ldflags.append('-L' + product_dir)

    install_name = self.GetInstallName()
    if install_name and self.spec['type'] != 'loadable_module':
      # Spaces in the install name must be escaped for the shell.
      ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))

    for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
      ldflags.append('-Wl,-rpath,' + rpath)

    # Framework search paths; $(SDKROOT) placeholders resolve to the SDK path.
    config = self.spec['configurations'][self.configname]
    framework_dirs = config.get('mac_framework_dirs', [])
    for directory in framework_dirs:
      ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath()))

    self.configname = None
    return ldflags
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
Args:
configname: The name of the configuration to get ld flags for.
"""
self.configname = configname
libtoolflags = []
for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
libtoolflags.append(libtoolflag)
# TODO(thakis): ARCHS?
self.configname = None
return libtoolflags
  def GetPerTargetSettings(self):
    """Gets a list of all the per-target settings. This will only fetch keys
    whose values are the same across all configurations."""
    first_pass = True
    result = {}
    for configname in sorted(self.xcode_settings.keys()):
      if first_pass:
        # Start from a copy of the first configuration's settings.
        result = dict(self.xcode_settings[configname])
        first_pass = False
      else:
        # Drop keys whose value differs in a later configuration.
        # NOTE(review): a key present in the first config but entirely absent
        # from a later one is kept -- confirm this asymmetry is intended.
        for key, value in self.xcode_settings[configname].iteritems():
          if key not in result:
            continue
          elif result[key] != value:
            del result[key]
    return result

  def GetPerConfigSetting(self, setting, configname, default=None):
    # Per-configuration lookup, falling back to the per-target value when
    # the configuration is unknown.
    if configname in self.xcode_settings:
      return self.xcode_settings[configname].get(setting, default)
    else:
      return self.GetPerTargetSetting(setting, default)

  def GetPerTargetSetting(self, setting, default=None):
    """Tries to get xcode_settings.setting from spec. Assumes that the setting
       has the same value in all configurations and throws otherwise."""
    is_first_pass = True
    result = None
    for configname in sorted(self.xcode_settings.keys()):
      if is_first_pass:
        result = self.xcode_settings[configname].get(setting, None)
        is_first_pass = False
      else:
        assert result == self.xcode_settings[configname].get(setting, None), (
            "Expected per-target setting for '%s', got per-config setting "
            "(target %s)" % (setting, self.spec['target_name']))
    if result is None:
      return default
    return result
  def _GetStripPostbuilds(self, configname, output_binary, quiet):
    """Returns a list of shell commands that contain the shell commands
    neccessary to strip this target's binary. These should be run as postbuilds
    before the actual postbuilds run."""
    self.configname = configname

    result = []
    # Stripping only happens when both deployment postprocessing and product
    # stripping are enabled.
    if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
        self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):

      default_strip_style = 'debugging'
      if self.spec['type'] == 'loadable_module' and self._IsBundle():
        default_strip_style = 'non-global'
      elif self.spec['type'] == 'executable':
        default_strip_style = 'all'

      strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
      strip_flags = {
        'all': '',
        'non-global': '-x',
        'debugging': '-S',
      }[strip_style]

      explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
      if explicit_strip_flags:
        strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)

      if not quiet:
        result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
      result.append('strip %s %s' % (strip_flags, output_binary))

    self.configname = None
    return result

  def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
    """Returns a list of shell commands that contain the shell commands
    neccessary to massage this target's debug information. These should be run
    as postbuilds before the actual postbuilds run."""
    self.configname = configname

    # For static libraries, no dSYMs are created.
    result = []
    if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
        self._Test(
            'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
        self.spec['type'] != 'static_library'):
      if not quiet:
        result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
      result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))

    self.configname = None
    return result

  def _GetTargetPostbuilds(self, configname, output, output_binary,
                           quiet=False):
    """Returns a list of shell commands that contain the shell commands
    to run as postbuilds for this target, before the actual postbuilds."""
    # dSYMs need to build before stripping happens.
    return (
        self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
        self._GetStripPostbuilds(configname, output_binary, quiet))

  def _GetIOSPostbuilds(self, configname, output_binary):
    """Return a shell command to codesign the iOS output binary so it can
    be deployed to a device. This should be run as the very last step of the
    build."""
    # Only iOS executables get codesigned.
    if not (self.isIOS and self.spec['type'] == "executable"):
      return []

    settings = self.xcode_settings[configname]
    key = self._GetIOSCodeSignIdentityKey(settings)
    if not key:
      return []

    # Warn for any unimplemented signing xcode keys.
    unimpl = ['OTHER_CODE_SIGN_FLAGS']
    unimpl = set(unimpl) & set(self.xcode_settings[configname].keys())
    if unimpl:
      print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
          ', '.join(sorted(unimpl)))

    # Delegates the actual signing to the gyp-mac-tool helper script.
    return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
        os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
        settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
        settings.get('CODE_SIGN_ENTITLEMENTS', ''),
        settings.get('PROVISIONING_PROFILE', ''))
    ]

  def _GetIOSCodeSignIdentityKey(self, settings):
    # Resolves CODE_SIGN_IDENTITY to a certificate fingerprint via the
    # `security` command-line tool, with class-level caching.
    identity = settings.get('CODE_SIGN_IDENTITY')
    if not identity:
      return None
    if identity not in XcodeSettings._codesigning_key_cache:
      output = subprocess.check_output(
          ['security', 'find-identity', '-p', 'codesigning', '-v'])
      for line in output.splitlines():
        if identity in line:
          fingerprint = line.split()[1]
          cache = XcodeSettings._codesigning_key_cache
          # Refuse ambiguous identities rather than signing with the wrong key.
          assert identity not in cache or fingerprint == cache[identity], (
              "Multiple codesigning fingerprints for identity: %s" % identity)
          XcodeSettings._codesigning_key_cache[identity] = fingerprint
    return XcodeSettings._codesigning_key_cache.get(identity, '')
def AddImplicitPostbuilds(self, configname, output, output_binary,
postbuilds=[], quiet=False):
"""Returns a list of shell commands that should run before and after
|postbuilds|."""
assert output_binary is not None
pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
post = self._GetIOSPostbuilds(configname, output_binary)
return pre + postbuilds + post
  def _AdjustLibrary(self, library, config_name=None):
    # Maps one gyp 'libraries' entry to a linker argument: 'Foo.framework' ->
    # '-framework Foo'; 'libfoo.a'/'libfoo.dylib' -> '-lfoo'; anything else
    # passes through verbatim.
    if library.endswith('.framework'):
      l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
    else:
      m = self.library_re.match(library)
      if m:
        l = '-l' + m.group(1)
      else:
        l = library
    # Resolve $(SDKROOT) placeholders against the configured SDK path.
    return l.replace('$(SDKROOT)', self._SdkPath(config_name))
def AdjustLibraries(self, libraries, config_name=None):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
libraries = [self._AdjustLibrary(library, config_name)
for library in libraries]
return libraries
  def _BuildMachineOSBuild(self):
    # OS build number of the machine running the build, via `sw_vers`.
    return self._GetStdout(['sw_vers', '-buildVersion'])

  def _XcodeVersion(self):
    # Returns a (version, build) tuple.
    # `xcodebuild -version` output looks like
    #    Xcode 4.6.3
    #    Build version 4H1503
    # or like
    #    Xcode 3.2.6
    #    Component versions: DevToolsCore-1809.0; DevToolsSupport-1806.0
    #    BuildVersion: 10M2518
    # Convert that to '0463', '4H1503'.
    if len(XcodeSettings._xcode_version_cache) == 0:
      version_list = self._GetStdout(['xcodebuild', '-version']).splitlines()
      version = version_list[0]
      build = version_list[-1]
      # Be careful to convert "4.2" to "0420":
      version = version.split()[-1].replace('.', '')
      version = (version + '0' * (3 - len(version))).zfill(4)
      build = build.split()[-1]
      XcodeSettings._xcode_version_cache = (version, build)
    return XcodeSettings._xcode_version_cache
def _XcodeIOSDeviceFamily(self, configname):
family = self.xcode_settings[configname].get('TARGETED_DEVICE_FAMILY', '1')
return [int(x) for x in family.split(',')]
  def GetExtraPlistItems(self, configname=None):
    """Returns a dictionary with extra items to insert into Info.plist."""
    # Most values are identical for every target, so they are computed once
    # per configuration and memoized on the class.
    if configname not in XcodeSettings._plist_cache:
      cache = {}
      cache['BuildMachineOSBuild'] = self._BuildMachineOSBuild()
      xcode, xcode_build = self._XcodeVersion()
      cache['DTXcode'] = xcode
      cache['DTXcodeBuild'] = xcode_build
      sdk_root = self._SdkRoot(configname)
      if not sdk_root:
        sdk_root = self._DefaultSdkRoot()
      cache['DTSDKName'] = sdk_root
      # Xcode 4.3+ exposes a per-SDK build version; older Xcodes fall back
      # to the build machine's OS build number.
      if xcode >= '0430':
        cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
            sdk_root, 'ProductBuildVersion')
      else:
        cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
      if self.isIOS:
        cache['DTPlatformName'] = cache['DTSDKName']
        # '-iphoneos' configurations target devices; everything else is the
        # simulator.
        if configname.endswith("iphoneos"):
          cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
              sdk_root, 'ProductVersion')
          cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
        else:
          cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
      XcodeSettings._plist_cache[configname] = cache
    # Include extra plist items that are per-target, not per global
    # XcodeSettings.
    items = dict(XcodeSettings._plist_cache[configname])
    if self.isIOS:
      items['UIDeviceFamily'] = self._XcodeIOSDeviceFamily(configname)
    return items
def _DefaultSdkRoot(self):
"""Returns the default SDKROOT to use.
Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
project, then the environment variable was empty. Starting with this
version, Xcode uses the name of the newest SDK installed.
"""
if self._XcodeVersion() < '0500':
return ''
default_sdk_path = self._XcodeSdkPath('')
default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
if default_sdk_root:
return default_sdk_root
all_sdks = self._GetStdout(['xcodebuild', '-showsdks'])
for line in all_sdks.splitlines():
items = line.split()
if len(items) >= 3 and items[-2] == '-sdk':
sdk_root = items[-1]
sdk_path = self._XcodeSdkPath(sdk_root)
if sdk_path == default_sdk_path:
return sdk_root
return ''
def _DefaultArch(self):
# For Mac projects, Xcode changed the default value used when ARCHS is not
# set from "i386" to "x86_64".
#
# For iOS projects, if ARCHS is unset, it defaults to "armv7 armv7s" when
# building for a device, and the simulator binaries are always build for
# "i386".
#
# For new projects, ARCHS is set to $(ARCHS_STANDARD_INCLUDING_64_BIT),
# which correspond to "armv7 armv7s arm64", and when building the simulator
# the architecture is either "i386" or "x86_64" depending on the simulated
# device (respectively 32-bit or 64-bit device).
#
# Since the value returned by this function is only used when ARCHS is not
# set, then on iOS we return "i386", as the default xcode project generator
# does not set ARCHS if it is not set in the .gyp file.
if self.isIOS:
return 'i386'
version, build = self._XcodeVersion()
if version >= '0500':
return 'x86_64'
return 'i386'
class MacPrefixHeader(object):
  """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.

  This feature consists of several pieces:
  * If GCC_PREFIX_HEADER is present, all compilations in that project get an
    additional |-include path_to_prefix_header| cflag.
  * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
    instead compiled, and all other compilations in the project get an
    additional |-include path_to_compiled_header| instead.
    + Compiled prefix headers have the extension gch. There is one gch file
      for every language used in the project (c, cc, m, mm), since gch files
      for different languages aren't compatible.
    + gch files themselves are built with the target's normal cflags, but
      they obviously don't get the |-include| flag. Instead, they need a -x
      flag that describes their language.
    + All o files in the target need to depend on the gch file, to make sure
      it's built before any o file is built.

  This class helps with some of these tasks, but it needs help from the build
  system for writing dependencies to the gch files, for writing build commands
  for the gch files, and for figuring out the location of the gch files.
  """

  def __init__(self, xcode_settings,
               gyp_path_to_build_path, gyp_path_to_build_output):
    """If xcode_settings is None, all methods on this class are no-ops.

    Args:
        gyp_path_to_build_path: A function that takes a gyp-relative path,
            and returns a path relative to the build directory.
        gyp_path_to_build_output: A function that takes a gyp-relative path
            and a language code ('c', 'cc', 'm', or 'mm'), and that returns
            a path to where the output of precompiling that path for that
            language should be placed (without the trailing '.gch').
    """
    # This doesn't support per-configuration prefix headers. Good enough
    # for now.
    self.header = None
    self.compile_headers = False
    if xcode_settings:
      self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
      self.compile_headers = xcode_settings.GetPerTargetSetting(
          'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
    self.compiled_headers = {}
    if self.header:
      if self.compile_headers:
        # One precompiled output per language, since gch files for different
        # languages aren't compatible.
        for lang in ['c', 'cc', 'm', 'mm']:
          self.compiled_headers[lang] = gyp_path_to_build_output(
              self.header, lang)
      self.header = gyp_path_to_build_path(self.header)

  def _CompiledHeader(self, lang, arch):
    # Build-directory-relative path of the precompiled header for |lang|
    # (suffixed with |arch| for multi-arch builds), without '.gch'.
    assert self.compile_headers
    h = self.compiled_headers[lang]
    if arch:
      h += '.' + arch
    return h

  def GetInclude(self, lang, arch=None):
    """Gets the cflags to include the prefix header for language |lang|."""
    if self.compile_headers and lang in self.compiled_headers:
      return '-include %s' % self._CompiledHeader(lang, arch)
    elif self.header:
      return '-include %s' % self.header
    else:
      return ''

  def _Gch(self, lang, arch):
    """Returns the actual file name of the prefix header for language |lang|."""
    assert self.compile_headers
    return self._CompiledHeader(lang, arch) + '.gch'

  def GetObjDependencies(self, sources, objs, arch=None):
    """Given a list of source files and the corresponding object files, returns
    a list of (source, object, gch) tuples, where |gch| is the build-directory
    relative path to the gch file each object file depends on. |sources[i]|
    has to be the source file belonging to |objs[i]|."""
    if not self.header or not self.compile_headers:
      return []
    result = []
    for source, obj in zip(sources, objs):
      ext = os.path.splitext(source)[1]
      # Map the source extension to its precompiled-header language; sources
      # with any other extension have no gch dependency.
      lang = {
        '.c': 'c',
        '.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
        '.m': 'm',
        '.mm': 'mm',
      }.get(ext, None)
      if lang:
        result.append((source, obj, self._Gch(lang, arch)))
    return result

  def GetPchBuildCommands(self, arch=None):
    """Returns [(path_to_gch, language_flag, language, header)].
    |path_to_gch| and |header| are relative to the build directory.
    """
    if not self.header or not self.compile_headers:
      return []
    return [
      (self._Gch('c', arch), '-x c-header', 'c', self.header),
      (self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
      (self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
      (self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
    ]
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
  """Merges the global xcode_settings dictionary into each configuration of the
  target represented by spec. For keys that are both in the global and the
  local xcode_settings dict, the local key gets precedence.
  """
  # The xcode generator special-cases global xcode_settings and effectively
  # folds the global dict into each configuration's local xcode_settings
  # dict; emulate that behavior here.
  defaults = global_dict.get('xcode_settings', {})
  for config in spec['configurations'].values():
    if 'xcode_settings' in config:
      merged = dict(defaults)
      merged.update(config['xcode_settings'])
      config['xcode_settings'] = merged
def IsMacBundle(flavor, spec):
  """Returns if |spec| should be treated as a bundle.

  Bundles are directories with a certain subdirectory structure, instead of
  just a single file. Bundle rules do not produce a binary but also package
  resources into that directory."""
  bundle = int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac'
  if not bundle:
    return False
  # 'none' targets produce nothing to put a bundle around.
  assert spec['type'] != 'none', (
      'mac_bundle targets cannot have type none (target "%s")' %
      spec['target_name'])
  return True
def GetMacBundleResources(product_dir, xcode_settings, resources):
  """Yields (output, resource) pairs for every resource in |resources|.
  Only call this for mac bundle targets.

  Args:
      product_dir: Path to the directory containing the output bundle,
          relative to the build directory.
      xcode_settings: The XcodeSettings of the current target.
      resources: A list of bundle resources, relative to the build directory.
  """
  resource_root = os.path.join(product_dir,
                               xcode_settings.GetBundleResourceFolder())
  for resource in resources:
    # The make generator doesn't support spaces in filenames, so forbid them
    # everywhere to keep the generators more interchangable.
    assert ' ' not in resource, (
        "Spaces in resource filenames not supported (%s)" % resource)

    # Split off the file name, then check whether its parent directory is a
    # localization (.lproj) folder that must be kept in the bundle layout.
    directory, filename = os.path.split(resource)
    parent = os.path.split(directory)[1]
    destination = resource_root
    if parent.endswith('.lproj'):
      destination = os.path.join(destination, parent)
    destination = os.path.join(destination, filename)

    # Compiled XIB files are referred to by .nib.
    if destination.endswith('.xib'):
      destination = os.path.splitext(destination)[0] + '.nib'
    # Compiled storyboard files are referred to by .storyboardc.
    if destination.endswith('.storyboard'):
      destination = os.path.splitext(destination)[0] + '.storyboardc'

    yield destination, resource
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
  """Returns (info_plist, dest_plist, defines, extra_env), where:
  * |info_plist| is the source plist path, relative to the
    build directory,
  * |dest_plist| is the destination plist path, relative to the
    build directory,
  * |defines| is a list of preprocessor defines (empty if the plist
    shouldn't be preprocessed,
  * |extra_env| is a dict of env variables that should be exported when
    invoking |mac_tool copy-info-plist|.

  Only call this for mac bundle targets.

  Args:
      product_dir: Path to the directory containing the output bundle,
          relative to the build directory.
      xcode_settings: The XcodeSettings of the current target.
      gyp_path_to_build_path: A function that converts paths relative to the
          current gyp file to paths relative to the build directory.
  """
  source_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
  if not source_plist:
    return None, None, [], {}

  # The make generator doesn't support spaces in filenames, so forbid them
  # everywhere to keep the generators more interchangable.
  assert ' ' not in source_plist, (
      "Spaces in Info.plist filenames not supported (%s)" % source_plist)

  source_plist = gyp_path_to_build_path(source_plist)

  # When the plist is explicitly marked for preprocessing, the configured
  # preprocessor definitions are handed to the C preprocessor as -D flags.
  if xcode_settings.GetPerTargetSetting(
      'INFOPLIST_PREPROCESS', default='NO') == 'YES':
    defines = shlex.split(xcode_settings.GetPerTargetSetting(
        'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
  else:
    defines = []

  dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
  extra_env = xcode_settings.GetPerTargetSettings()

  return source_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
                 additional_settings=None):
  """Return the environment variables that Xcode would set. See
  http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
  for a full list.

  Args:
      xcode_settings: An XcodeSettings object. If this is None, this function
          returns an empty dict.
      built_products_dir: Absolute path to the built products dir.
      srcroot: Absolute path to the source root.
      configuration: The build configuration name.
      additional_settings: An optional dict with more values to add to the
          result. NOTE: when given, this dict is mutated in place and is
          also the returned object.
  """
  if not xcode_settings: return {}

  # This function is considered a friend of XcodeSettings, so let it reach
  # into its implementation details.
  spec = xcode_settings.spec

  # These are filled in on a as-needed basis.
  env = {
    'BUILT_PRODUCTS_DIR' : built_products_dir,
    'CONFIGURATION' : configuration,
    'PRODUCT_NAME' : xcode_settings.GetProductName(),
    # See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
    'SRCROOT' : srcroot,
    'SOURCE_ROOT': '${SRCROOT}',
    # This is not true for static libraries, but currently the env is only
    # written for bundles:
    'TARGET_BUILD_DIR' : built_products_dir,
    'TEMP_DIR' : '${TMPDIR}',
  }
  if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
    env['SDKROOT'] = xcode_settings._SdkPath(configuration)
  else:
    env['SDKROOT'] = ''

  # Executable-like targets additionally expose binary naming/type info.
  if spec['type'] in (
      'executable', 'static_library', 'shared_library', 'loadable_module'):
    env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
    env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
    env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
    mach_o_type = xcode_settings.GetMachOType()
    if mach_o_type:
      env['MACH_O_TYPE'] = mach_o_type
    env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
  # Bundle targets expose the bundle's internal directory layout.
  if xcode_settings._IsBundle():
    env['CONTENTS_FOLDER_PATH'] = \
      xcode_settings.GetBundleContentsFolderPath()
    env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
      xcode_settings.GetBundleResourceFolder()
    env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
    env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()

  install_name = xcode_settings.GetInstallName()
  if install_name:
    env['LD_DYLIB_INSTALL_NAME'] = install_name
  install_name_base = xcode_settings.GetInstallNameBase()
  if install_name_base:
    env['DYLIB_INSTALL_NAME_BASE'] = install_name_base

  if not additional_settings:
    additional_settings = {}
  else:
    # Flatten lists to strings.
    for k in additional_settings:
      if not isinstance(additional_settings[k], str):
        additional_settings[k] = ' '.join(additional_settings[k])
  # Values computed here win over caller-provided ones.
  additional_settings.update(env)

  # Canonicalize all $FOO / $(FOO) references to the ${FOO} form.
  for k in additional_settings:
    additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])

  return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
assert '$(' not in match, '$($(FOO)) variables not supported: ' + match
str = str.replace(to_replace, '${' + variable + '}')
return str
def ExpandEnvVars(string, expansions):
  """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
  expansions list. If the variable expands to something that references
  another variable, this variable is expanded as well if it's in env --
  until no variables present in env are left."""
  for key, value in reversed(expansions):
    # Replace all three reference spellings of |key| with its value.
    for pattern in ('${' + key + '}', '$(' + key + ')', '$' + key):
      string = string.replace(pattern, value)
  return string
def _TopologicallySortedEnvVarKeys(env):
  """Takes a dict |env| whose values are strings that can refer to other keys,
  for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
  env such that key2 is after key1 in L if env[key2] refers to env[key1].

  Throws an Exception in case of dependency cycles.
  """
  # Since environment variables can refer to other variables, the evaluation
  # order is important. Below is the logic to compute the dependency graph
  # and sort it.
  regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
  def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_variable.
    # This happens to be easier in this case, since a variable's
    # definition contains all variables it references in a single string.
    # We can then reverse the result of the topological sort at the end.
    # Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
    matches = set([v for v in regex.findall(env[node]) if v in env])
    for dependee in matches:
      assert '${' not in dependee, 'Nested variables not supported: ' + dependee
    return matches

  try:
    # Topologically sort, and then reverse, because we used an edge definition
    # that's inverted from the expected result of this function (see comment
    # above).
    order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
    order.reverse()
    return order
  except gyp.common.CycleError as e:
    # `except ... as e` (rather than the legacy `except ..., e`) keeps this
    # module parseable by Python 3 as well as Python 2.6+.
    raise GypError(
        'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
configuration, additional_settings=None):
env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,<|fim▁hole|>
def GetSpecPostbuildCommands(spec, quiet=False):
  """Returns the list of postbuilds explicitly defined on |spec|, in a form
  executable by a shell."""
  commands = []
  for postbuild in spec.get('postbuilds', []):
    # Unless silenced, announce each postbuild before it runs.
    if not quiet:
      commands.append('echo POSTBUILD\\(%s\\) %s' % (
          spec['target_name'], postbuild['postbuild_name']))
    commands.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
  return commands
def _HasIOSTarget(targets):
"""Returns true if any target contains the iOS specific key
IPHONEOS_DEPLOYMENT_TARGET."""
for target_dict in targets.values():
for config in target_dict['configurations'].values():
if config.get('xcode_settings', {}).get('IPHONEOS_DEPLOYMENT_TARGET'):
return True
return False
def _AddIOSDeviceConfigurations(targets):
"""Clone all targets and append -iphoneos to the name. Configure these targets
to build for iOS devices."""
for target_dict in targets.values():
for config_name in target_dict['configurations'].keys():
config = target_dict['configurations'][config_name]
new_config_name = config_name + '-iphoneos'
new_config_dict = copy.deepcopy(config)
if target_dict['toolset'] == 'target':
new_config_dict['xcode_settings']['ARCHS'] = ['armv7']
new_config_dict['xcode_settings']['SDKROOT'] = 'iphoneos'
target_dict['configurations'][new_config_name] = new_config_dict
return targets
def CloneConfigurationForDeviceAndEmulator(target_dicts):
  """If |target_dicts| contains any iOS targets, automatically create -iphoneos
  targets for iOS device builds."""
  if not _HasIOSTarget(target_dicts):
    return target_dicts
  return _AddIOSDeviceConfigurations(target_dicts)
|
additional_settings)
return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
|
<|file_name|>viewhelpers.py<|end_file_name|><|fim▁begin|>from django.core.urlresolvers import reverse
import django.http
import django.utils.simplejson as json
import functools<|fim▁hole|>def make_url(request, reversible):
return request.build_absolute_uri(reverse(reversible))
def json_output(func):
    """Decorator: serialize the wrapped view's return value as a JSON
    HTTP response with the application/json content type."""
    @functools.wraps(func)
    def inner(*args, **kwargs):
        payload = func(*args, **kwargs)
        return django.http.HttpResponse(json.dumps(payload),
                                        content_type="application/json")
    return inner
| |
<|file_name|>follow.js<|end_file_name|><|fim▁begin|>/* This is the basic linkfollowing script.
* Its pretty stable, and you configure which keys to use for hinting
*
* TODO: Some pages mess around a lot with the zIndex which
* lets some hints in the background.
* TODO: Some positions are not calculated correctly (mostly
* because of uber-fancy-designed-webpages. Basic HTML and CSS
* works good
* TODO: Still some links can't be followed/unexpected things
* happen. Blame some freaky webdesigners ;)
*/
//Just some shortcuts and globals
var uzblid = 'uzbl_link_hint';
var uzbldivid = uzblid + '_div_container';
var doc = document;
var win = window;
var links = document.links;
var forms = document.forms;
//Make onlick-links "clickable"
try {
HTMLElement.prototype.click = function() {
if (typeof this.onclick == 'function') {
this.onclick({
type: 'click'
});
}
};
} catch(e) {}
//Stop link hinting as soon as the user presses ESC.
function keyPressHandler(e) {
    var pressed = window.event ? event.keyCode : e.keyCode;
    var escCode = window.event ? 27 : e.DOM_VK_ESCAPE;
    if (pressed == escCode) {
        removeAllHints();
    }
}
//Calculate element position to draw the hint.
//Pretty accurate, but fails on some very fancy pages.
function elementPosition(el) {
    var top = el.offsetTop;
    var left = el.offsetLeft;
    var width = el.offsetWidth;
    var height = el.offsetHeight;
    // Accumulate the offsets of every positioned ancestor.
    var node = el;
    while (node.offsetParent) {
        node = node.offsetParent;
        top += node.offsetTop;
        left += node.offsetLeft;
    }
    return [top, left, width, height];
}
//Calculate if an element is visible (walks up to the document root,
//checking inline display/visibility on the way).
function isVisible(el) {
    if (el == doc) {
        return true;
    }
    if (!el || !el.parentNode) {
        return false;
    }
    if (el.style) {
        if (el.style.display == 'none' || el.style.visibility == 'hidden') {
            return false;
        }
    }
    return isVisible(el.parentNode);
}
//Calculate if an element is on the viewport.
function elementInViewport(el) {
    // Declare with `var`: the original assignment leaked `offset` into the
    // global scope.
    var offset = elementPosition(el);
    var up = offset[0];
    var left = offset[1];
    var width = offset[2];
    var height = offset[3];
    return up < window.pageYOffset + window.innerHeight &&
        left < window.pageXOffset + window.innerWidth &&
        (up + height) > window.pageYOffset &&
        (left + width) > window.pageXOffset;
}
//Removes all hints/leftovers that might be generated
//by this script.
function removeAllHints() {
    var container = doc.getElementById(uzbldivid);
    if (container) {
        container.parentNode.removeChild(container);
    }
}
//Generate a hint for an element with the given label.
//Here you can play around with the style of the hints!
function generateHint(el, label) {
    // elementPosition returns [top, left, width, height].
    var pos = elementPosition(el);
    var hint = doc.createElement('div');
    // The shared name lets the cleanup code find every hint element.
    hint.setAttribute('name', uzblid);
    hint.innerText = label;
    hint.style.display = 'inline';
    hint.style.backgroundColor = '#B9FF00';
    hint.style.border = '2px solid #4A6600';
    hint.style.color = 'black';
    hint.style.fontSize = '9px';
    hint.style.fontWeight = 'bold';
    hint.style.lineHeight = '9px';
    hint.style.margin = '0px';
    hint.style.width = 'auto'; // fix broken rendering on w3schools.com
    hint.style.padding = '1px';
    hint.style.position = 'absolute';
    hint.style.zIndex = '1000';
    // hint.style.textTransform = 'uppercase';
    // Place the hint at the element's top-left corner.
    hint.style.left = pos[1] + 'px';
    hint.style.top = pos[0] + 'px';
    // var img = el.getElementsByTagName('img');
    // if (img.length > 0) {
    //     hint.style.top = pos[1] + img[0].height / 2 - 6 + 'px';
    // }
    hint.style.textDecoration = 'none';
    // hint.style.webkitBorderRadius = '6px'; // slow
    // Play around with this, pretty funny things to do :)
    // hint.style.webkitTransform = 'scale(1) rotate(0deg) translate(-6px,-5px)';
    return hint;
}
//Here we choose what to do with an element if we
//want to "follow" it. On form elements we "select"
//or pass the focus, on links we try to perform a click,
//but at least set the href of the link. (needs some improvements)
function clickElem(item) {
    // Following always ends the hinting session.
    removeAllHints();
    if (item) {
        var name = item.tagName;
        if (name == 'A') {
            // Click first (fires onclick handlers), then force navigation
            // in case the click was swallowed.
            item.click();
            window.location = item.href;
        } else if (name == 'INPUT') {
            var type = item.getAttribute('type').toUpperCase();
            // Text-like inputs get focus + selection instead of a click.
            if (type == 'TEXT' || type == 'FILE' || type == 'PASSWORD') {
                item.focus();
                item.select();
            } else {
                item.click();
            }
        } else if (name == 'TEXTAREA' || name == 'SELECT') {
            item.focus();
            item.select();
        } else {
            // Unknown element: best effort — click and navigate to its href
            // if it has one.
            item.click();
            window.location = item.href;
        }
    }
}
//Returns a list of all links (in this version
//just the elements itself, but in other versions, we
//add the label here. Only links that are visible and inside
//the viewport are included.
function addLinks() {
    // `var` keeps `res` local; the original assignment leaked it into the
    // global scope.
    var res = [[], []];
    for (var l = 0; l < links.length; l++) {
        var li = links[l];
        if (isVisible(li) && elementInViewport(li)) {
            res[0].push(li);
        }
    }
    return res;
}
//Same as above, just for the form elements
function addFormElems() {
    // `var` keeps `res` local; the original assignment leaked it into the
    // global scope.
    var res = [[], []];
    for (var f = 0; f < forms.length; f++) {
        for (var e = 0; e < forms[f].elements.length; e++) {
            var el = forms[f].elements[e];
            // `indexOf(...) + 1` is truthy when the tag is in the list.
            if (el && ['INPUT', 'TEXTAREA', 'SELECT'].indexOf(el.tagName) + 1 && isVisible(el) && elementInViewport(el)) {
                res[0].push(el);
            }
        }
    }
    return res;
}
//Draw all hints for all elements passed. "chars" is the
//number of already-typed label characters, which are stripped from
//each displayed label so only the remaining keys are shown.
//elems has the shape [elements, labels].
function reDrawHints(elems, chars) {
    removeAllHints();
    var hintdiv = doc.createElement('div');
    hintdiv.setAttribute('id', uzbldivid);
    for (var i = 0; i < elems[0].length; i++) {
        if (elems[0][i]) {
            var label = elems[1][i].substring(chars);
            var h = generateHint(elems[0][i], label);
            hintdiv.appendChild(h);
        }
    }
    // document.body can be null before the page body exists.
    if (document.body) {
        document.body.appendChild(hintdiv);
    }
}
// pass: number of keys
// returns: key length
// Number of hint-charset characters needed to give each of n
// elements a distinct label.
function labelLength(n) {
    var keylen = 0;
    if (n < 2) {
        return 1;
    }
    n -= 1; // our highest key will be n-1
    while (n) {
        keylen += 1;
        n = Math.floor(n / charset.length);
    }
    return keylen;
}
// pass: number
// returns: label
// Converts n to its base-charset.length representation; n == 0 still
// yields one character.
function intToLabel(n) {
    var label = '';
    while (true) {
        label = charset.charAt(n % charset.length) + label;
        n = Math.floor(n / charset.length);
        if (!n) {
            break;
        }
    }
    return label;
}
// pass: label
// returns: number
// Inverse of intToLabel: interprets the label as a base-charset.length
// number.
function labelToInt(label) {
    var value = 0;
    for (var pos = 0; pos < label.length; pos++) {
        value = value * charset.length + charset.indexOf(label[pos]);
    }
    return value;
}
//Put it all together
function followLinks(follow) {
// if(follow.charAt(0) == 'l') {
// follow = follow.substr(1);
// charset = 'thsnlrcgfdbmwvz-/';
// }
var s = follow.split('');
var linknr = labelToInt(follow);
if (document.body) document.body.setAttribute('onkeyup', 'keyPressHandler(event)');
var linkelems = addLinks();
var formelems = addFormElems();
var elems = [linkelems[0].concat(formelems[0]), linkelems[1].concat(formelems[1])];
var len = labelLength(elems[0].length);
var oldDiv = doc.getElementById(uzbldivid);
var leftover = [[], []];
if (s.length == len && linknr < elems[0].length && linknr >= 0) {
clickElem(elems[0][linknr]);<|fim▁hole|> var b = true;
var label = intToLabel(j);
var n = label.length;
for (n; n < len; n++) {
label = charset.charAt(0) + label;
}
for (var k = 0; k < s.length; k++) {
b = b && label.charAt(k) == s[k];
}
if (b) {
leftover[0].push(elems[0][j]);
leftover[1].push(label);
}
}
reDrawHints(leftover, s.length);
}
}
//Parse input: first argument is follow keys, second is user input.
// NOTE(review): '%s' appears to be a template placeholder substituted with
// the real arguments before this script is injected — confirm against the
// caller.
var args = '%s'.split(' ');
var charset = args[0];
followLinks(args[1]);
|
} else {
for (var j = 0; j < elems[0].length; j++) {
|
<|file_name|>test_index_handler.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
<|fim▁hole|>import sys
sys.path.append('.')
from tornado.testing import AsyncHTTPSTestCase
from application import APP
class TestSomeHandler(AsyncHTTPSTestCase):
'''
Test
'''
def get_app(self):
'''
Test
'''
return APP
def test_index(self):
'''
Test index.
'''
response = self.fetch('/')
self.assertEqual(response.code, 200)<|fim▁end|>
|
'''
Test
'''
|
<|file_name|>Solution4.java<|end_file_name|><|fim▁begin|>package com.hyh.arithmetic.skills;
import android.annotation.SuppressLint;
import java.util.ArrayList;
import java.util.List;
/**
* 规划了一份需求的技能清单 req_skills,并打算从备选人员名单 people 中选出些人组成一个「必要团队」
* ( 编号为 i 的备选人员 people[i] 含有一份该备选人员掌握的技能列表)。
* 所谓「必要团队」,就是在这个团队中,对于所需求的技能列表 req_skills 中列出的每项技能,团队中至少有一名成员已经掌握。
* 我们可以用每个人的编号来表示团队中的成员:例如,团队 team = [0, 1, 3] 表示掌握技能分别为 people[0],people[1],和 people[3] 的备选人员。
* 请你返回 任一 规模最小的必要团队,团队成员用人员编号表示。你可以按任意顺序返回答案,本题保证答案存在。
* <p>
* 示例 1:
* 输入:req_skills = ["java","nodejs","reactjs"],
* people = [["java"],["nodejs"],["nodejs","reactjs"]]
* 输出:[0,2]
* <p>
* 示例 2:
* 输入:req_skills = ["algorithms","math","java","reactjs","csharp","aws"],
* people = [["algorithms","math","java"],["algorithms","math","reactjs"],["java","csharp","aws"],["reactjs","csharp"],["csharp","math"],["aws","java"]]
* 输出:[1,2]
* <p>
* <p>
* 1 <= req_skills.length <= 16
* 1 <= people.length <= 60
* 1 <= people[i].length, req_skills[i].length, people[i][j].length <= 16
* req_skills 和 people[i] 中的元素分别各不相同
* req_skills[i][j], people[i][j][k] 都由小写英文字母组成
* 本题保证「必要团队」一定存在
*/
public class Solution4 {
@SuppressLint("UseSparseArrays")
public int[] smallestSufficientTeam(String[] req_skills, List<List<String>> people) {
int req_skills_code = (int) (Math.pow(2, req_skills.length) - 1);
List<Integer> people_code = new ArrayList<>();
for (int i = 0; i < people.size(); i++) {
List<String> person_skills = people.get(i);
int person_code = 0;
for (int j = 0; j < person_skills.size(); j++) {
String skill = person_skills.get(j);
int index = indexOf(req_skills, skill);
if (index >= 0) {<|fim▁hole|> people_code.add(person_code);
}
for (int i = 0; i < people_code.size(); i++) {
Integer i_person_code = people_code.get(i);
if (i_person_code == 0) continue;
if (i == people_code.size() - 1) break;
for (int j = i + 1; j < people_code.size(); j++) {
Integer j_person_code = people_code.get(j);
if ((i_person_code | j_person_code) == j_person_code) {
people_code.set(i, 0);
} else if ((i_person_code | j_person_code) == i_person_code) {
people_code.set(j, 0);
}
}
}
Object[] preResult = new Object[req_skills.length];
Object[] result = new Object[req_skills.length];
/*Integer person_code = people_code.get(0);
for (int i = 0; i < req_skills.length; i++) {
int skills_code = (int) (Math.pow(2, i + 1) - 1);
if ((person_code | skills_code) == person_code) {
preResult[i] = new int[]{0};
} else {
break;
}
}*/
int person_code = 0;
for (int i = 0; i < people_code.size(); i++) {
person_code |= people_code.get(i);
for (int j = 0; j < req_skills.length; j++) {
int skills_code = (int) (Math.pow(2, j + 1) - 1);
if ((person_code | skills_code) == person_code) {
//result[i] = new int[]{0};
} else {
}
}
}
/*for (int i = 0; i < req_skills.length; i++) {
int skills_code = (int) (Math.pow(2, i + 1) - 1);
int people_code_temp = 0;
for (int j = 0; j < people_code.size(); j++) {
people_code_temp |= people_code.get(j);
if () {
}
}
preResult = result;
}*/
return null;
}
private int indexOf(String[] req_skills, String skill) {
for (int index = 0; index < req_skills.length; index++) {
String req_skill = req_skills[index];
if (req_skill.equals(skill)) return index;
}
return -1;
}
}<|fim▁end|>
|
person_code += Math.pow(2, index);
}
}
|
<|file_name|>JBossCliXmlValidationTestCase.java<|end_file_name|><|fim▁begin|>/*
* JBoss, Home of Professional Open Source.
* Copyright 2014, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.test.integration.management.cli;
import java.io.File;
import java.net.URL;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.jboss.as.test.shared.TestSuiteEnvironment;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import org.junit.Test;
import org.w3c.dom.Document;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
<|fim▁hole|>/**
*
* @author Dominik Pospisil <[email protected]>
*/
public class JBossCliXmlValidationTestCase {

    /**
     * Parses {@code bin/jboss-cli.xml} from the distribution named by the
     * {@code jboss.dist} system property and validates it against the bundled
     * wildfly-cli 3.4 XSD. Any validation error or fatal error fails the test
     * via {@link ErrorHandlerImpl}.
     */
    @Test
    public void validateJBossCliXmlTestCase() throws Exception {
        DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
        // Namespace awareness is required for XML Schema validation.
        factory.setNamespaceAware(true);
        DocumentBuilder parser = factory.newDocumentBuilder();
        final String jbossDist = TestSuiteEnvironment.getSystemProperty("jboss.dist");
        Document document = parser.parse(new File(jbossDist, "bin/jboss-cli.xml"));
        SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
        schemaFactory.setErrorHandler(new ErrorHandlerImpl());
        //schemaFactory.setResourceResolver(new XMLResourceResolver());
        Schema schema = schemaFactory.newSchema(resourceToURL("schema/wildfly-cli_3_4.xsd"));
        Validator validator = schema.newValidator();
        validator.validate(new DOMSource(document));
    }

    /**
     * SAX error handler that fails the test on errors and fatal errors and
     * prints warnings to stdout.
     */
    protected static final class ErrorHandlerImpl implements ErrorHandler {

        @Override
        public void error(SAXParseException e) throws SAXException {
            fail(formatMessage(e));
        }

        @Override
        public void fatalError(SAXParseException e) throws SAXException {
            fail(formatMessage(e));
        }

        @Override
        public void warning(SAXParseException e) throws SAXException {
            System.out.println(formatMessage(e));
        }

        // Builds a "line:column publicId systemId message" string used as the
        // assertion/diagnostic message.
        private String formatMessage(SAXParseException e) {
            StringBuffer sb = new StringBuffer();
            sb.append(e.getLineNumber()).append(':').append(e.getColumnNumber());
            if (e.getPublicId() != null)
                sb.append(" publicId='").append(e.getPublicId()).append('\'');
            if (e.getSystemId() != null)
                sb.append(" systemId='").append(e.getSystemId()).append('\'');
            sb.append(' ').append(e.getLocalizedMessage());
            sb.append(" a possible cause may be that a subsystem is not using the most up to date schema.");
            return sb.toString();
        }
    }

    // Resolves a classpath resource to a URL, failing the test if it is absent.
    private URL resourceToURL(final String name) {
        final ClassLoader classLoader = getClass().getClassLoader();
        final URL resource = classLoader.getResource(name);
        assertNotNull("Can't locate resource " + name + " on " + classLoader, resource);
        return resource;
    }
}
| |
<|file_name|>submit_batch.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# coding: utf-8
import BRT
from collections import namedtuple
import configparser
import os
import logging
from os.path import expanduser
import argparse
# Command-line interface: by default the script performs a dry run; -s makes
# it actually submit jobs.
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--submit', help='Execute the submission', action='store_true')
# Fixed typo in the help string: "Jast" -> "Just".
parser.add_argument('-q', '--quiet', help='Just do the job. Stay quiet', action='store_true')
parser.add_argument('-v', '--verbose', help='Print more status info', action='store_true')
parser.add_argument('-d', '--debug', help='Print debugging info', action='store_true')
args = parser.parse_args()

# Configure logging verbosity; the LOGLEVEL environment variable, when set,
# overrides the level implied by the flags.
if args.verbose:
    logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
if args.debug:
    # NOTE: if -v was also given, basicConfig has already run and this second
    # call is a no-op; pass -d alone to get DEBUG output.
    logging.basicConfig(level=os.environ.get("LOGLEVEL", "DEBUG"))
log = logging.getLogger(__name__)
# A variable-star observation request: target name, free-form comment and
# exposure time in seconds.
VStar=namedtuple('VStar', 'name comm expos')

# Credentials for telescope.org and astrometry.net are read from the user's
# config file.
config = configparser.ConfigParser()
config.read(expanduser('~/.config/telescope.ini'))

log.info('Log in to telescope.org ...')
brt=BRT.Telescope(config['telescope.org']['user'], config['telescope.org']['password'])
# API key consumed by the BRT module for plate solving.
BRT.astrometryAPIkey=config['astrometry.net']['apikey']
def qprint(*ar, **kwar):
    """Print unless quiet mode (-q) was requested."""
    if args.quiet:
        return
    print(*ar, **kwar)
def vprint(*ar, **kwar):
    """Print only in verbose mode (-v), and never when quiet (-q) is set."""
    wanted = args.verbose and not args.quiet
    if wanted:
        print(*ar, **kwar)
# Observing programme: the variable stars that should always have a job
# queued on the telescope.
obslst=[
    VStar('S Ori', comm='Mira AAVSO', expos=120),
    VStar('CH Cyg', comm='Symbiotic AAVSO', expos=60),
    VStar('SS Cyg', comm='Mira', expos=180),
    VStar('EU Cyg', comm='Mira', expos=180),
    VStar('IP Cyg', comm='Mira', expos=180),
    VStar('V686 Cyg', comm='Mira', expos=180),
    #VStar('AS Lac', comm='Mira', expos=120),
    VStar('BI Her', comm='Mira', expos=180),
    VStar('DX Vul', comm='Mira', expos=180),
    VStar('DQ Vul', comm='Mira', expos=180),
    VStar('EQ Lyr', comm='Mira', expos=180),
    VStar('LX Cyg', comm='AAVSO', expos=180),
]

log.info('Getting observing queue ...')
reqlst=brt.get_user_requests(sort='completion')
# Status codes below 8 appear to mark jobs still pending/active in the
# queue -- TODO confirm against the telescope.org API docs.
q=[r for r in reqlst if int(r['status'])<8]
qn=[r['objectname'] for r in q]
# Programme stars that currently have no queued job.
missing = [vs for vs in obslst if vs.name not in qn]
# Submit (or, in a dry run, just list) each programme star that is absent
# from the observing queue. (Cleaned a stray fill-in-the-middle marker that
# had been fused onto the `if` line, and normalized `if missing :` spacing.)
if missing:
    if args.submit:
        qprint('Submitting missing jobs:')
    else:
        qprint('Dry run. Add -s to the command line to do actual submissions.')
    for vs in missing:
        # One line per star: padded catalogue prefix, designation, exposure
        # and comment; submission result is appended on the same line.
        qprint(f'{vs.name.split()[0]:>8} {vs.name.split()[1]} exp:{vs.expos:3.1f}s {vs.comm}', end='')
        if args.submit:
            r, i = brt.submitVarStar(vs.name, expos=vs.expos, comm=vs.comm)
            if r:
                qprint(f' => id: {i}', end='')
            else:
                qprint(f' Failure:{i}', end='')
        qprint()
else:
    qprint('No missing jobs. Nothing to do!')
log.info('Done.')
| |
<|file_name|>policy.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common Policy Engine Implementation
Policies can be expressed in one of two forms: A list of lists, or a
string written in the new policy language.
In the list-of-lists representation, each check inside the innermost
list is combined as with an "and" conjunction--for that check to pass,
all the specified checks must pass. These innermost lists are then
combined as with an "or" conjunction. This is the original way of
expressing policies, but there now exists a new way: the policy
language.
In the policy language, each check is specified the same way as in the
list-of-lists representation: a simple "a:b" pair that is matched to
the correct code to perform that check. However, conjunction
operators are available, allowing for more expressiveness in crafting
policies.
As an example, take the following rule, expressed in the list-of-lists
representation::
[["role:admin"], ["project_id:%(project_id)s", "role:projectadmin"]]
In the policy language, this becomes::
role:admin or (project_id:%(project_id)s and role:projectadmin)
The policy language also has the "not" operator, allowing a richer
policy rule::
project_id:%(project_id)s and not role:dunce
It is possible to perform policy checks on the following user
attributes (obtained through the token): user_id, domain_id or
project_id::
domain_id:<some_value>
Attributes sent along with API calls can be used by the policy engine
(on the right side of the expression), by using the following syntax::
<some_value>:user.id
Contextual attributes of objects identified by their IDs are loaded
from the database. They are also available to the policy engine and
can be checked through the `target` keyword::
<some_value>:target.role.name
All these attributes (related to users, API calls, and context) can be
checked against each other or against constants, be it literals (True,
<a_number>) or strings.
Finally, two special policy checks should be mentioned; the policy
check "@" will always accept an access, and the policy check "!" will
always reject an access. (Note that if a rule is either the empty
list ("[]") or the empty string, this is equivalent to the "@" policy
check.) Of these, the "!" policy check is probably the most useful,
as it allows particular rules to be explicitly disabled.
"""
import abc
import ast
import copy
import os
import re
from oslo.config import cfg
from oslo.serialization import jsonutils
import six
import six.moves.urllib.parse as urlparse
import six.moves.urllib.request as urlrequest
from murano.openstack.common import fileutils
from murano.openstack.common._i18n import _, _LE, _LI
from murano.openstack.common import log as logging
# Options controlling where policy files are found and which rule applies
# when a requested rule is not defined.
policy_opts = [
    cfg.StrOpt('policy_file',
               default='policy.json',
               help=_('The JSON file that defines policies.')),
    cfg.StrOpt('policy_default_rule',
               default='default',
               help=_('Default rule. Enforced when a requested rule is not '
                      'found.')),
    cfg.MultiStrOpt('policy_dirs',
                    default=['policy.d'],
                    help=_('Directories where policy configuration files are '
                           'stored. They can be relative to any directory '
                           'in the search path defined by the config_dir '
                           'option, or absolute paths. The file defined by '
                           'policy_file must exist for these directories to '
                           'be searched.')),
]

CONF = cfg.CONF
CONF.register_opts(policy_opts)

LOG = logging.getLogger(__name__)

# Registry of check implementations keyed by kind (e.g. 'rule', 'role',
# 'http'); the None key is the fallback handler. Populated via register().
_checks = {}
def list_opts():
    """Entry point for oslo.config-generator."""
    opts = copy.deepcopy(policy_opts)
    return [(None, opts)]
class PolicyNotAuthorized(Exception):
    """Raised when policy enforcement denies the requested action."""

    def __init__(self, rule):
        message = _("Policy doesn't allow %s to be performed.") % rule
        super(PolicyNotAuthorized, self).__init__(message)
class Rules(dict):
    """A store for rules. Handles the default_rule setting directly."""

    @classmethod
    def load_json(cls, data, default_rule=None):
        """Allow loading of JSON rule data."""

        # Suck in the JSON data and parse the rules
        rules = dict((k, parse_rule(v)) for k, v in
                     jsonutils.loads(data).items())

        return cls(rules, default_rule)

    def __init__(self, rules=None, default_rule=None):
        """Initialize the Rules store."""

        super(Rules, self).__init__(rules or {})
        # Name (or Check) used when a looked-up rule does not exist.
        self.default_rule = default_rule

    def __missing__(self, key):
        """Implements the default rule handling."""
        if isinstance(self.default_rule, dict):
            raise KeyError(key)

        # If the default rule isn't actually defined, do something
        # reasonably intelligent
        if not self.default_rule:
            raise KeyError(key)

        if isinstance(self.default_rule, BaseCheck):
            return self.default_rule

        # We need to check this or we can get infinite recursion
        if self.default_rule not in self:
            raise KeyError(key)

        # NOTE(review): if default_rule is present in the store but is neither
        # a BaseCheck nor a string, control falls through and None is returned
        # implicitly -- presumably unreachable in practice; confirm.
        elif isinstance(self.default_rule, six.string_types):
            return self[self.default_rule]

    def __str__(self):
        """Dumps a string representation of the rules."""

        # Start by building the canonical strings for the rules
        out_rules = {}
        for key, value in self.items():
            # Use empty string for singleton TrueCheck instances
            if isinstance(value, TrueCheck):
                out_rules[key] = ''
            else:
                out_rules[key] = str(value)

        # Dump a pretty-printed JSON representation
        return jsonutils.dumps(out_rules, indent=4)
class Enforcer(object):
    """Responsible for loading and enforcing rules.

    :param policy_file: Custom policy file to use, if none is
                        specified, `CONF.policy_file` will be
                        used.
    :param rules: Default dictionary / Rules to use. It will be
                  considered just in the first instantiation. If
                  `load_rules(True)`, `clear()` or `set_rules(True)`
                  is called this will be overwritten.
    :param default_rule: Default rule to use, CONF.default_rule will
                         be used if none is specified.
    :param use_conf: Whether to load rules from cache or config file.
    :param overwrite: Whether to overwrite existing rules when reload rules
                      from config file.
    """

    def __init__(self, policy_file=None, rules=None,
                 default_rule=None, use_conf=True, overwrite=True):
        self.default_rule = default_rule or CONF.policy_default_rule
        self.rules = Rules(rules, self.default_rule)

        # Resolved filesystem path of the policy file; filled lazily by
        # load_rules().
        self.policy_path = None
        self.policy_file = policy_file or CONF.policy_file
        self.use_conf = use_conf
        self.overwrite = overwrite

    def set_rules(self, rules, overwrite=True, use_conf=False):
        """Create a new Rules object based on the provided dict of rules.

        :param rules: New rules to use. It should be an instance of dict.
        :param overwrite: Whether to overwrite current rules or update them
                          with the new rules.
        :param use_conf: Whether to reload rules from cache or config file.
        """

        if not isinstance(rules, dict):
            raise TypeError(_("Rules must be an instance of dict or Rules, "
                              "got %s instead") % type(rules))
        self.use_conf = use_conf
        if overwrite:
            self.rules = Rules(rules, self.default_rule)
        else:
            self.rules.update(rules)

    def clear(self):
        """Clears Enforcer rules, policy's cache and policy's path."""
        self.set_rules({})
        # Drop the cached file contents so the next load re-reads from disk.
        fileutils.delete_cached_file(self.policy_path)
        self.default_rule = None
        self.policy_path = None

    def load_rules(self, force_reload=False):
        """Loads policy_path's rules.

        Policy file is cached and will be reloaded if modified.

        :param force_reload: Whether to reload rules from config file.
        """

        if force_reload:
            self.use_conf = force_reload

        if self.use_conf:
            if not self.policy_path:
                self.policy_path = self._get_policy_path(self.policy_file)

            self._load_policy_file(self.policy_path, force_reload,
                                   overwrite=self.overwrite)
            # Additionally merge any rule files found in the configured
            # policy directories (missing directories are skipped).
            for path in CONF.policy_dirs:
                try:
                    path = self._get_policy_path(path)
                except cfg.ConfigFilesNotFoundError:
                    LOG.info(_LI("Can not find policy directory: %s"), path)
                    continue
                self._walk_through_policy_directory(path,
                                                    self._load_policy_file,
                                                    force_reload, False)

    @staticmethod
    def _walk_through_policy_directory(path, func, *args):
        # We do not iterate over sub-directories.
        policy_files = next(os.walk(path))[2]
        policy_files.sort()
        for policy_file in [p for p in policy_files if not p.startswith('.')]:
            func(os.path.join(path, policy_file), *args)

    def _load_policy_file(self, path, force_reload, overwrite=True):
        # read_cached_file only re-reads when the file's mtime changed or a
        # reload is forced.
        reloaded, data = fileutils.read_cached_file(
            path, force_reload=force_reload)
        if reloaded or not self.rules or not overwrite:
            rules = Rules.load_json(data, self.default_rule)
            self.set_rules(rules, overwrite=overwrite, use_conf=True)
            LOG.debug("Rules successfully reloaded")

    def _get_policy_path(self, path):
        """Locate the policy json data file/path.

        :param path: It's value can be a full path or related path. When
                     full path specified, this function just returns the full
                     path. When related path specified, this function will
                     search configuration directories to find one that exists.

        :returns: The policy path

        :raises: ConfigFilesNotFoundError if the file/path couldn't
                 be located.
        """
        policy_path = CONF.find_file(path)

        if policy_path:
            return policy_path

        raise cfg.ConfigFilesNotFoundError((path,))

    def enforce(self, rule, target, creds, do_raise=False,
                exc=None, *args, **kwargs):
        """Checks authorization of a rule against the target and credentials.

        :param rule: A string or BaseCheck instance specifying the rule
                    to evaluate.
        :param target: As much information about the object being operated
                    on as possible, as a dictionary.
        :param creds: As much information about the user performing the
                    action as possible, as a dictionary.
        :param do_raise: Whether to raise an exception or not if check
                        fails.
        :param exc: Class of the exception to raise if the check fails.
                    Any remaining arguments passed to enforce() (both
                    positional and keyword arguments) will be passed to
                    the exception class. If not specified, PolicyNotAuthorized
                    will be used.

        :return: Returns False if the policy does not allow the action and
                exc is not provided; otherwise, returns a value that
                evaluates to True.  Note: for rules using the "case"
                expression, this True value will be the specified string
                from the expression.
        """

        self.load_rules()

        # Allow the rule to be a Check tree
        if isinstance(rule, BaseCheck):
            result = rule(target, creds, self)
        elif not self.rules:
            # No rules to reference means we're going to fail closed
            result = False
        else:
            try:
                # Evaluate the rule
                result = self.rules[rule](target, creds, self)
            except KeyError:
                LOG.debug("Rule [%s] doesn't exist" % rule)
                # If the rule doesn't exist, fail closed
                result = False

        # If it is False, raise the exception if requested
        if do_raise and not result:
            if exc:
                raise exc(*args, **kwargs)

            raise PolicyNotAuthorized(rule)

        return result
@six.add_metaclass(abc.ABCMeta)
class BaseCheck(object):
    """Abstract base class for Check classes.

    Subclasses implement one node of the policy Check tree: calling the
    instance evaluates the node against (target, cred, enforcer).
    """

    @abc.abstractmethod
    def __str__(self):
        """String representation of the Check tree rooted at this node."""

        pass

    @abc.abstractmethod
    def __call__(self, target, cred, enforcer):
        """Triggers if instance of the class is called.

        Performs the check. Returns False to reject the access or a
        true value (not necessary True) to accept the access.
        """

        pass
class FalseCheck(BaseCheck):
    """A policy check that unconditionally denies access."""

    def __str__(self):
        """Return the canonical "!" spelling of this check."""
        return "!"

    def __call__(self, target, cred, enforcer):
        """Reject the access, regardless of inputs."""
        return False
class TrueCheck(BaseCheck):
    """A policy check that unconditionally allows access."""

    def __str__(self):
        """Return the canonical "@" spelling of this check."""
        return "@"

    def __call__(self, target, cred, enforcer):
        """Accept the access, regardless of inputs."""
        return True
class Check(BaseCheck):
    """A base class to allow for user-defined policy checks."""

    def __init__(self, kind, match):
        """Store the two halves of a "kind:match" check string.

        :param kind: the field before the ':'
        :param match: the field after the ':'
        """
        self.kind = kind
        self.match = match

    def __str__(self):
        """Return the "kind:match" form of this check."""
        return "%s:%s" % (self.kind, self.match)
class NotCheck(BaseCheck):
    """Implements the "not" logical operator.

    A policy check that inverts the result of another policy check.
    """

    def __init__(self, rule):
        """Wrap *rule*, whose result will be negated.

        :param rule: The rule to negate. Must be a Check.
        """
        self.rule = rule

    def __str__(self):
        """Return the "not <rule>" form of this check."""
        return "not %s" % self.rule

    def __call__(self, target, cred, enforcer):
        """Return the logical inverse of the wrapped check."""
        inner = self.rule(target, cred, enforcer)
        return not inner
class AndCheck(BaseCheck):
    """Implements the "and" logical operator.

    A policy check that requires that a list of other checks all return True.
    """

    def __init__(self, rules):
        """Initialize with the list of sub-rules that must all pass."""
        self.rules = rules

    def __str__(self):
        """Return the "(a and b and ...)" form of this check."""
        return "(%s)" % ' and '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Accept only if every sub-rule accepts (short-circuits)."""
        return all(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append one more sub-rule; returns self for chaining."""
        self.rules.append(rule)
        return self
class OrCheck(BaseCheck):
    """Implements the "or" operator.

    A policy check that requires that at least one of a list of other
    checks returns True.
    """

    def __init__(self, rules):
        """Initialize with the list of alternative sub-rules."""
        self.rules = rules

    def __str__(self):
        """Return the "(a or b or ...)" form of this check."""
        return "(%s)" % ' or '.join(str(r) for r in self.rules)

    def __call__(self, target, cred, enforcer):
        """Accept if any sub-rule accepts (short-circuits)."""
        return any(rule(target, cred, enforcer) for rule in self.rules)

    def add_check(self, rule):
        """Append one more sub-rule; returns self for chaining."""
        self.rules.append(rule)
        return self
def _parse_check(rule):
    """Parse a single base check rule into an appropriate Check object."""

    # The two special check strings short-circuit everything else.
    if rule == '!':
        return FalseCheck()
    if rule == '@':
        return TrueCheck()

    try:
        kind, match = rule.split(':', 1)
    except Exception:
        LOG.exception(_LE("Failed to understand rule %s") % rule)
        # If the rule is invalid, we'll fail closed
        return FalseCheck()

    # Dispatch on the registered kind, falling back to the generic handler
    # registered under None.
    handler = _checks.get(kind, _checks.get(None))
    if handler is None:
        LOG.error(_LE("No handler for matches of kind %s") % kind)
        return FalseCheck()
    return handler(kind, match)
def _parse_list_rule(rule):
    """Translates the old list-of-lists syntax into a tree of Check objects.

    Provided for backwards compatibility.
    """

    # An empty rule always accepts.
    if not rule:
        return TrueCheck()

    # Outer list is joined by "or"; inner list by "and".
    or_terms = []
    for inner in rule:
        # Empty inner lists are elided entirely.
        if not inner:
            continue

        # A bare string behaves like a one-element inner list.
        if isinstance(inner, six.string_types):
            inner = [inner]

        and_terms = [_parse_check(check) for check in inner]
        if len(and_terms) == 1:
            or_terms.append(and_terms[0])
        else:
            or_terms.append(AndCheck(and_terms))

    # Collapse degenerate results: nothing left fails closed; a single
    # alternative needs no OrCheck wrapper.
    if not or_terms:
        return FalseCheck()
    if len(or_terms) == 1:
        return or_terms[0]
    return OrCheck(or_terms)
# Used for tokenizing the policy language: tokens are separated by runs of
# whitespace; parentheses are peeled off separately in _parse_tokenize.
_tokenize_re = re.compile(r'\s+')
def _parse_tokenize(rule):
    """Tokenizer for the policy language.

    Most of the single-character tokens are specified in the
    _tokenize_re; however, parentheses need to be handled specially,
    because they can appear inside a check string. Thankfully, those
    parentheses that appear inside a check string can never occur at
    the very beginning or end ("%(variable)s" is the correct syntax).

    Yields (token_kind, value) pairs consumed by ParseState.shift().
    """

    for tok in _tokenize_re.split(rule):
        # Skip empty tokens
        if not tok or tok.isspace():
            continue

        # Handle leading parens on the token
        clean = tok.lstrip('(')
        for i in range(len(tok) - len(clean)):
            yield '(', '('

        # If it was only parentheses, continue
        if not clean:
            continue
        else:
            tok = clean

        # Handle trailing parens on the token; they are emitted *after* the
        # token itself, at the bottom of the loop.
        clean = tok.rstrip(')')
        trail = len(tok) - len(clean)

        # Yield the cleaned token
        lowered = clean.lower()
        if lowered in ('and', 'or', 'not'):
            # Special tokens
            yield lowered, clean
        elif clean:
            # Not a special token, but not composed solely of ')'
            if len(tok) >= 2 and ((tok[0], tok[-1]) in
                                  [('"', '"'), ("'", "'")]):
                # It's a quoted string
                yield 'string', tok[1:-1]
            else:
                yield 'check', _parse_check(clean)

        # Yield the trailing parens
        for i in range(trail):
            yield ')', ')'
class ParseStateMeta(type):
    """Metaclass for the ParseState class.

    Facilitates identifying reduction methods.
    """

    def __new__(mcs, name, bases, cls_dict):
        """Create the class.

        Injects the 'reducers' list, a list of tuples matching token sequences
        to the names of the corresponding reduction methods.
        """

        reducers = []

        # Collect every method that the @reducer decorator has tagged with a
        # 'reducers' attribute.
        for key, value in cls_dict.items():
            if not hasattr(value, 'reducers'):
                continue
            for reduction in value.reducers:
                reducers.append((reduction, key))

        cls_dict['reducers'] = reducers

        return super(ParseStateMeta, mcs).__new__(mcs, name, bases, cls_dict)
def reducer(*tokens):
    """Decorator for reduction methods.

    Arguments are a sequence of tokens, in order, which should trigger running
    this reduction method.
    """

    def decorator(func):
        # Create the sequence list lazily so one method can carry several
        # token sequences (stacked decorators).
        sequences = getattr(func, 'reducers', None)
        if sequences is None:
            sequences = []
            func.reducers = sequences
        sequences.append(list(tokens))
        return func

    return decorator
@six.add_metaclass(ParseStateMeta)
class ParseState(object):
    """Implement the core of parsing the policy language.

    Uses a greedy reduction algorithm to reduce a sequence of tokens into
    a single terminal, the value of which will be the root of the Check tree.

    Note: error reporting is rather lacking. The best we can get with
    this parser formulation is an overall "parse failed" error.
    Fortunately, the policy language is simple enough that this
    shouldn't be that big a problem.
    """

    def __init__(self):
        """Initialize the ParseState."""

        # Parallel stacks: token kinds and their associated values.
        self.tokens = []
        self.values = []

    def reduce(self):
        """Perform a greedy reduction of the token stream.

        If a reducer method matches, it will be executed, then the
        reduce() method will be called recursively to search for any more
        possible reductions.
        """

        # self.reducers is injected by ParseStateMeta from the @reducer
        # decorations on the methods below.
        for reduction, methname in self.reducers:
            if (len(self.tokens) >= len(reduction) and
                    self.tokens[-len(reduction):] == reduction):
                # Get the reduction method
                meth = getattr(self, methname)

                # Reduce the token stream
                results = meth(*self.values[-len(reduction):])

                # Update the tokens and values
                self.tokens[-len(reduction):] = [r[0] for r in results]
                self.values[-len(reduction):] = [r[1] for r in results]

                # Check for any more reductions
                return self.reduce()

    def shift(self, tok, value):
        """Adds one more token to the state. Calls reduce()."""

        self.tokens.append(tok)
        self.values.append(value)

        # Do a greedy reduce...
        self.reduce()

    @property
    def result(self):
        """Obtain the final result of the parse.

        Raises ValueError if the parse failed to reduce to a single result.
        """

        if len(self.values) != 1:
            raise ValueError("Could not parse rule")
        return self.values[0]

    @reducer('(', 'check', ')')
    @reducer('(', 'and_expr', ')')
    @reducer('(', 'or_expr', ')')
    def _wrap_check(self, _p1, check, _p2):
        """Turn parenthesized expressions into a 'check' token."""

        return [('check', check)]

    @reducer('check', 'and', 'check')
    def _make_and_expr(self, check1, _and, check2):
        """Create an 'and_expr'.

        Join two checks by the 'and' operator.
        """

        return [('and_expr', AndCheck([check1, check2]))]

    @reducer('and_expr', 'and', 'check')
    def _extend_and_expr(self, and_expr, _and, check):
        """Extend an 'and_expr' by adding one more check."""

        return [('and_expr', and_expr.add_check(check))]

    @reducer('check', 'or', 'check')
    def _make_or_expr(self, check1, _or, check2):
        """Create an 'or_expr'.

        Join two checks by the 'or' operator.
        """

        return [('or_expr', OrCheck([check1, check2]))]

    @reducer('or_expr', 'or', 'check')
    def _extend_or_expr(self, or_expr, _or, check):
        """Extend an 'or_expr' by adding one more check."""

        return [('or_expr', or_expr.add_check(check))]

    @reducer('not', 'check')
    def _make_not_expr(self, _not, check):
        """Invert the result of another check."""

        return [('check', NotCheck(check))]
def _parse_text_rule(rule):
    """Parses policy to the tree.

    Translates a policy written in the policy language into a tree of
    Check objects.
    """

    # An empty rule always accepts.
    if not rule:
        return TrueCheck()

    # Shift every token into the parser; it reduces greedily as it goes.
    state = ParseState()
    for token, value in _parse_tokenize(rule):
        state.shift(token, value)

    try:
        return state.result
    except ValueError:
        # Couldn't parse the rule
        LOG.exception(_LE("Failed to understand rule %s") % rule)

        # Fail closed
        return FalseCheck()
def parse_rule(rule):
    """Parses a policy rule into a tree of Check objects."""

    # Strings are written in the policy language; anything else is the
    # legacy list-of-lists form.
    if isinstance(rule, six.string_types):
        return _parse_text_rule(rule)
    return _parse_list_rule(rule)
def register(name, func=None):
    """Register a function or Check class as a policy check.

    :param name: Gives the name of the check type, e.g., 'rule',
                 'role', etc. If name is None, a default check type
                 will be registered.
    :param func: If given, provides the function or class to register.
                 If not given, returns a function taking one argument
                 to specify the function or class to register,
                 allowing use as a decorator.
    """

    def decorator(check_impl):
        _checks[name] = check_impl
        return check_impl

    # Direct-call form register(name, func); otherwise act as a decorator.
    return decorator(func) if func else decorator
<|fim▁hole|>
try:
return enforcer.rules[self.match](target, creds, enforcer)
except KeyError:
# We don't have any matching rule; fail closed
return False
@register("role")
class RoleCheck(Check):
def __call__(self, target, creds, enforcer):
"""Check that there is a matching role in the cred dict."""
return self.match.lower() in [x.lower() for x in creds['roles']]
@register('http')
class HttpCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check http: rules by calling to a remote server.

        This example implementation simply verifies that the response
        is exactly 'True'.
        """

        # The match is %-interpolated with the target dict, e.g.
        # "//host/%(project_id)s" becomes "http://host/<id>".
        url = ('http:' + self.match) % target

        # Convert instances of object() in target temporarily to
        # empty dict to avoid circular reference detection
        # errors in jsonutils.dumps().
        temp_target = copy.deepcopy(target)
        for key in target.keys():
            element = target.get(key)
            if type(element) is object:
                temp_target[key] = {}

        data = {'target': jsonutils.dumps(temp_target),
                'credentials': jsonutils.dumps(creds)}
        post_data = urlparse.urlencode(data)
        f = urlrequest.urlopen(url, post_data)
        # NOTE(review): on Python 3, f.read() returns bytes, so comparing with
        # the str "True" would always be False -- confirm py2-only usage.
        return f.read() == "True"
@register(None)
class GenericCheck(Check):
    def __call__(self, target, creds, enforcer):
        """Check an individual match.

        Matches look like:

            tenant:%(tenant_id)s
            role:compute:admin
            True:%(user.enabled)s
            'Member':%(role.name)s
        """

        try:
            # Right-hand side: interpolate the match against the target dict.
            match = self.match % target
        except KeyError:
            # While doing GenericCheck if key not
            # present in Target return false
            return False
        try:
            # Try to interpret self.kind as a literal
            leftval = ast.literal_eval(self.kind)
        except ValueError:
            try:
                # Otherwise treat it as a dotted path into creds,
                # e.g. "user.enabled" -> creds['user']['enabled'].
                kind_parts = self.kind.split('.')
                leftval = creds
                for kind_part in kind_parts:
                    leftval = leftval[kind_part]
            except KeyError:
                return False
        # Compare textually; the left value is stringified first.
        return match == six.text_type(leftval)
|
@register("rule")
class RuleCheck(Check):
def __call__(self, target, creds, enforcer):
"""Recursively checks credentials based on the defined rules."""
|
<|file_name|>names.py<|end_file_name|><|fim▁begin|>names = [ # NOQA
'Aan',
'Aalia',
'Aaliah',
'Aaliyah',
'Aaron',
'Aaryanna',
'Aavree',
'Abbie',
'Abbott',
'Abbra',
'Abby',
'Abe',
'Abel',
'Abelardo',
'Abeni',
'Abia',
'Abiba',
'Abie',
'Abigail',
'Abner',
'Abraham',
'Abram',
'Abrial',
'Abrianna',
'Abrienda',
'Abril',
'Abryl',
'Absolom',
'Abu',
'Acacia',
'Acadia',
'Ace',
'Achika',
'Acsah; achsah',
'Ada',
'Adabeel',
'Adah',
'Adair',
'Adalia',
'Adam',
'Adamina',
'Adamma',
'Adara',
'Addison',
'Ade',
'Adela',
'Adelaide',
'Adele',
'Adeline',
'Adelio',
'Adelle',
'Adem',
'Aden',
'Aderes',
'Adian',
'Adie',
'Adiel',
'Adil',
'Adila',
'Adina',
'Adir',
'Adita',
'Adkins',
'Adlai',
'Adler',
'Adli',
'Adolfo',
'Adolph',
'Adonai',
'Adonia',
'Adonijah',
'Adora',
'Adra',
'Adrian',
'Adriana',
'Adriano',
'Adriel',
'Adrienne',
'Adrina',
'Ady',
'Aelan',
'Aeyesha',
'Affrica',
'Afra',
'Afric',
'Africa',
'Afton',
'Agamemnon',
'Agatha',
'Aggie',
'Agnes',
'Ah cy',
'Ahava',
'Ai',
'Aida',
'Aidan',
'Aiko',
'Aileen',
'Ailis',
'Ailish',
'Ailo',
'Aimee',
'Aine',
'Ainsley',
'Aisha',
'Aisleigh',
'Aisling',
'Aislinn',
'Aiyan',
'Aizza',
'Aja',
'Ajani',
'Ajay',
'Ajel',
'Akeel',
'Akeem',
'Akili',
'Akira',
'Akoya',
'Akuji',
'Al',
'Alaina',
'Alair',
'Alake',
'Alan',
'Alana',
'Alanna',
'Alara',
'Alastair',
'Alaura',
'Alazne',
'Alban',
'Albany',
'Albert',
'Alberta',
'Alberto',
'Albin',
'Albina',
'Alda',
'Aldan',
'Alden',
'Alder',
'Aldina',
'Aldo',
'Aldon',
'Aldona',
'Alec',
'Aleda',
'Alejandra',
'Alejandro',
'Alem',
'Alena',
'Alesia',
'Alessa',
'Aleta',
'Aletha',
'Alethea',
'Aletta',
'Alex',
'Alexa',
'Alexander',
'Alexandra',
'Alexandria',
'Alexei',
'Alexia',
'Alexis',
'Alexsandra',
'Alfonso',
'Alfred',
'Algeron',
'Ali',
'Alia',
'Alice',
'Alicia',
'Alida',
'Alijah',
'Alika',
'Alima',
'Alina',
'Alisa',
'Alisha',
'Alison',
'Alissa',
'Alitza',
'Alivia',
'Aliya',
'Aliyah',
'Aliza',
'Alize',
'Alka',
'Allegra',
'Allen',
'Allene',
'Allie',
'Allison',
'Allyson',
'Alma',
'Almeda',
'Alohilani',
'Alonzo',
'Aloysius',
'Alphonse',
'Alsatia',
'Alta',
'Altagrace',
'Althea',
'Alva',
'Alvin',
'Alyanah',
'Alyn',
'Alyse & alysse',
'Alyson',
'Alyssa',
'Amadeus',
'Amador',
'Amalia',
'Amalie',
'Aman',
'Amana',
'Amanda',
'Amandla',
'Amara',
'Amaranta',
'Amarante',
'Amaranth',
'Amaris',
'Amaryllis',
'Amaya',
'Amber',
'Ambrose',
'Ambrosia',
'Ame',
'Amelia',
'Amena',
'America',
'Amerigo',
'Ami',
'Amia',
'Amie',
'Amiel',
'Amilynne',
'Amina',
'Amir',
'Amiri',
'Amity',
'Amma',
'Amorina',
'Amos',
'Amy',
'An',
'Ana',
'Anabelle',
'Anahi',
'Anais',
'Anaiya',
'Analiese',
'Analise',
'Anana',
'Anando',
'Anastasia',
'Anatola',
'Anatole',
'Ande',
'Andra',
'Andralyn',
'Andre',
'Andrea',
'Andreas',
'Andres',
'Andrew',
'Andy',
'Anecia',
'Aneesa',
'Anel',
'Anemone',
'Anevay',
'Angel',
'Angela',
'Angelica',
'Angelina',
'Angelo',
'Angie',
'Angus',
'Anh',
'Ani',
'Ania',
'Anibal',
'Anika',
'Anila',
'Anisa',
'Anita',
'Anitra',
'Anja',
'Anlon',
'Ann',
'Anna',
'Annakin',
'Annalise',
'Annamae',
'Annamika',
'Anne',
'Anneke',
'Annette',
'Annice',
'Annick',
'Annika (anika, aneka)',
'Annis',
'Annissa',
'Anniyyah',
'Annora',
'Annot',
'Ansley',
'Anthea',
'Anthony',
'Anthy',
'Antigone',
'Antionette',
'Antipholus',
'Antoine',
'Antoinette',
'Antonia',
'Antonie',
'Antonio',
'Antony',
'Anu',
'Anwar',
'Anya',
'Aoko',
'Aolani',
'Aowyn',
'Aphrodite',
'Apollo',
'Appollo',
'Apria',
'April',
'Aquila',
'Arabela',
'Arabella',
'Araceli',
'Aram',
'Aran',
'Aravis',
'Arch',
'Archibald',
'Archie',
'Ardith',
'Aren',
'Aretha',
'Argus',
'Ari',
'Aria',
'Ariana',
'Ariel',
'Ariella',
'Arielle',
'Arien',
'Aries',
'Arissa',
'Arista',
'Ariza',
'Arkadiy',
'Arland',
'Arlen',
'Arlene',
'Arlo',
'Arlynda',
'Armand',
'Armande',
'Armando',
'Armelle',
'Armetha',
'Armina',
'Armon',
'Arnaud',
'Arne',
'Arnie',
'Arnold',
'Aron',
'Aroq',
'Arpan',
'Art',
'Artemis',
'Arthur',
'Artie',
'Arty',
'Arvid',
'Arvin',
'Aryiah',
'Aryka',
'Asa',
'Asabi',
'Asalie',
'Asasia',
'Ash',
'Asha',
'Ashai',
'Ashby',
'Asher',
'Ashlea',
'Ashlee',
'Ashleigh',
'Ashley',
'Ashlie',
'Ashling',
'Ashlyn',
'Ashtin',
'Ashton',
'Ashtyn',
'Asia',
'Asis',
'Asli',
'Asnee',
'Aspen',
'Asta',
'Asthore',
'Astin',
'Astra',
'Astrid',
'Atalo',
'Athalia',
'Athena',
'Atira',
'Atlas',
'Atreyu',
'Atticus',
'Attylaka',
'Auberta',
'Aubrey',
'Aubrianna',
'Audi',
'Audra',
'Audrey',
'August',
'Augustin',
'Augustus',
'Auhna',
'Aulii',
'Aure',
'Aurelia',
'Aurora',
'Aurorette',
'Austin',
'Autumn',
'Ava',
'Avalie',
'Avalon',
'Avel',
'Aveline',
'Avery',
'Avi',
'Avianna',
'Avis',
'Avital',
'Aviv',
'Aviva',
'Avongara',
'Axel',
'Axelle',
'Aya',
'Ayame',
'Ayanna',
'Ayeka',
'Ayla',
'Aylieah',
'Aylira',
'Ayoka',
'Aysha',
'Azana',
'Aziza',
'Azize',
'Azra',
'Azriel',
'Azuka',
'Azura',
'Azza',
'Baba',
'Babette',
'Bagley',
'Bahari',
'Bailey',
'Baird',
'Bairn',
'Bakula',
'Ballard',
'Balthasar',
'Balu',
'Bambi',
'Banji',
'Barake',
'Barb',
'Barbara',
'Barbie',
'Barclay',
'Bari',
'Barke',
'Barnabas',
'Barnard',
'Barney',
'Barny',
'Barr',
'Barran',
'Barrett',
'Barretta',
'Barry',
'Bart',
'Barth',
'Bartholemew',
'Barto',
'Barton',
'Baruch',
'Bary',
'Bash',
'Basil',
'Basiliso',
'Bast',
'Bastien',
'Baxter',
'Bayard',
'Bayen',
'Baylee',
'Bayo',
'Bea',
'Beata',
'Beate',
'Beatrice',
'Beatriz',
'Beau',
'Beauregard',
'Bebe',
'Bebhin',
'Becca',
'Beck',
'Becka',
'Becky',
'Bel',
'Bela',
'Belay',
'Belden',
'Belen',
'Belinda',
'Belita',
'Bell',
'Bella',
'Belle',
'Bellini',
'Ben',
'Bena',
'Benard',
'Benedict & benedicta',
'Benen',
'Benita',
'Benjamin',
'Benjy',
'Bennett',
'Benny',
'Benson',
'Berdine',
'Berg',
'Berke',
'Bern',
'Bernadette',
'Bernadine',
'Bernard',
'Berne',
'Bernice',
'Bernie',
'Berny',
'Bert',
'Bertha',
'Bertille',
'Beryl',
'Bess',
'Bessie',
'Beth',
'Bethan',
'Bethany',
'Betsy',
'Bette',
'Bettina',
'Betty',
'Beulah',
'Bevan',
'Beverly',
'Bevis',
'Beyla',
'Biana',
'Bianca',
'Bibiane',
'Bidelia',
'Bikita',
'Bilen',
'Bill',
'Billy',
'Bin',
'Bina',
'Bing',
'Bingham',
'Birch',
'Bisbee',
'Bishop',
'Biton',
'Bjorn',
'Blade',
'Blaine',
'Blair',
'Blaise',
'Blake',
'Blanche',
'Blaze',
'Blenda',
'Blinda',
'Bliss',
'Blithe',
'Blodwyn',
'Blossom',
'Blum',
'Bluma',
'Bly',
'Blythe',
'Bo',
'Boaz',
'Bob',
'Bodee',
'Bona',
'Bonaventure',
'Bond',
'Bonita',
'Bonna',
'Bonnie',
'Bono',
'Boone',
'Boris',
'Botarea',
'Bowen',
'Bowie',
'Boyd',
'Bracha',
'Brad',
'Bradden',
'Braden',
'Bradford',
'Bradley',
'Brady',
'Braeden',
'Braima',
'Bran',
'Brand',
'Brandee',
'Branden',
'Brandi',
'Brandie',
'Brandon',
'Brandy',
'Branellan',
'Brant',
'Braxton',
'Brayden',
'Brazil',
'Breanna',
'Breckin',
'Brede',
'Bree',
'Brein',
'Brend',
'Brenda',
'Brendan',
'Brenden',
'Brendon',
'Brenna',
'Brennan',
'Brennon',
'Brent',
'Brett',
'Brewster',
'Brian',
'Briana',
'Brianna',
'Brianne',
'Briar',
'Brice',
'Brick',
'Bridget',
'Bridgit',
'Brie',
'Brielle',
'Brier',
'Brigham',
'Brighton',
'Brigit',
'Brigitte',
'Brilane',
'Brilliant',
'Brin',
'Brina',
'Brinkley',
'Brinly',
'Brit',
'Brita',
'Britain',
'Britannia',
'Britany',
'Britt',
'Britt-marie',
'Brittani',
'Britannia',
'Brittany',
'Brittnee & brittney',
'Brock',
'Brody',
'Bron',
'Brondyn',
'Brone',
'Bronson',
'Bronwen',
'Brooke',
'Brooklyn',
'Brooks',
'Bruce',
'Bruno',
'Bryan',
'Bryanne',
'Bryant',
'Bryce',
'Brygid',
'Brynn',
'Bryony',
'Bryton',
'Buck',
'Bud',
'Buddy',
'Buffi',
'Buffy',
'Buford',
'Bunny',
'Burdette',
'Burke',
'Burlak',
'Burt',
'Burton',
'Butterfly',
'Buzz',
'Byrd',
'Byron',
'Cade',
'Cadee',
'Caden',
'Cadence',
'Cady',
'Cael',
'Caelan',
'Caeley',
'Caesar',
'Cai',
'Cailean',
'Caimile',
'Cain',
'Caine',
'Caique',
'Cairbre',
'Cairo',
'Cais',
'Caitlin',
'Caitlyn',
'Cal',
'Cala',
'Calais',
'Calandra',
'Calantha',
'Calder',
'Cale',
'Caleah',
'Caleb',
'Caley',
'Calhoun',
'Calix',
'Calixte',
'Calla',
'Callia',
'Calliope',
'Callista',
'Callum',
'Calvin',
'Calvine',
'Calypso',
'Cam',
'Cambria',
'Camden',
'Camdyn',
'Cameron',
'Camilla',
'Camille',
'Camilo',
'Camlin',
'Cana',
'Canaan',
'Candace',
'Candice',
'Candida',
'Candide',
'Candie',
'Candy',
'Cannon',
'Capri',
'Caprice',
'Caquise',
'Cara',
'Caralee',
'Caresse',
'Carey',
'Carha',
'Cari',
'Carina',
'Carissa',
'Carl',
'Carla',
'Carleton',
'Carley',
'Carlie',
'Carlisle',
'Carlos',
'Carlota',
'Carlotta',
'Carlton',
'Carly',
'Carmel',
'Carmela',
'Carmelita',
'Carmen',
'Carmine',
'Carol',
'Carolena',
'Carolina',
'Caroline',
'Carolyn',
'Caron',
'Carra',
'Carr',
'Carrick',
'Carrie',
'Carrieann',
'Carson',
'Carsyn',
'Carter',
'Carver',
'Cary',
'Casandra',
'Casey',
'Cashlin',
'Casimir',
'Casondra',
'Caspar',
'Casper',
'Cassandra',
'Cassee',
'Cassia',
'Cassidy',
'Cassie',
'Cassius',
'Castel',
'Catalin',
'Catherine',
'Cathy',
'Catrin',
'Caven',
'Cayla',
'Ceana',
'Cearo',
'Cece',
'Cecil',
'Cecile',
'Cecilia',
'Cecily',
'Cedric',
'Celeste',
'Celestyn',
'Celia',
'Celina',
'Celine',
'Cera',
'Cerise',
'Cesar',
'Ceylan',
'Chad',
'Chaela',
'Chaeli',
'Chailyn',
'Chaim',
'Chakotay',
'Chalina',
'Chalsie',
'Chana',
'Chance',
'Chancellor',
'Chandler',
'Chandra',
'Chanel',
'Chanell',
'Chanelle',
'Chaney',
'Channing',
'Channon',
'Chantal',
'Chantel',
'Chaplin',
'Chardonnay',
'Charis',
'Charisse',
'Charity',
'Charla',
'Charlee',
'Charleigh',
'Charlene',
'Charles',
'Charlet',
'Charlin',
'Charlot',
'Charlotte',
'Charmaine',
'Charo',
'Chars',
'Charu',
'Chas',
'Chase',
'Chastity',
'Chauncey',
'Chava',
'Chavi',
'Chay',
'Chaya',
'Chaylse',
'Chayza',
'Cheche',
'Chelsa',
'Chelsea',
'Chelsey',
'Chelsi',
'Chelsia',
'Chen',
'Cheney',
'Chenoa',
'Cher',
'Cheri',
'Cherie',
'Cherish',
'Cherlin',
'Cherry',
'Cheryl',
'Chesna',
'Chester',
'Cheston',
'Chet',
'Cheyenne',
'Cheyne',
'Chezarina',
'Chhaya',
'Chia',
'Chick',
'Chico',
'Chika',
'Chill',
'Chilton',
'Chimelu',
'Chimon',
'China',
'Chip',
'Chipo',
'Chirag',
'Chloe',
'Chloris',
'Chris',
'Chrissy',
'Christa',
'Christena',
'Christian',
'Christiana',
'Christie',
'Christina',
'Christine',
'Christopher',
'Christy',
'Chuck',
'Chumani',
'Chun',
'Chyna',
'Chynna',
'Cian',
'Cianna',
'Ciara',
'Cicely',
'Cicero',
'Cicily',
'Cid',
'Ciel',
'Cindy',
'Cira',
'Cirila',
'Ciro',
'Cirocco',
'Cissy',
'Citlalli',
'Claire',
'Clancy',
'Clara',
'Claral',
'Clare',
'Clarence',
'Clarissa',
'Clark',
'Clarke',
'Claude',
'Claudia',
'Claudine',
'Clay',
'Clayland',
'Clayton',
'Clea',
'Cleantha',
'Cleatus',
'Cleavant',
'Cleave',
'Cleavon',
'Clem',
'Clemens',
'Clement',
'Clementine',
'Cleo',
'Cleta',
'Cleveland',
'Cliff',
'Clifford',
'Clifton',
'Clint',
'Clinton',
'Clio',
'Clitus',
'Clive',
'Clodagh',
'Clodia',
'Cloris',
'Clove',
'Clover',
'Coby',
'Cocheta',
'Cody',
'Colby',
'Cole',
'Colebrook',
'Colette',
'Coligny',
'Colin',
'Colista',
'Colleen',
'Collice',
'Collin',
'Colm',
'Colman',
'Colton',
'Columbia',
'Comfort',
'Conan',
'Conlan',
'Conley',
'Conner',
'Connie',
'Connley',
'Connor',
'Conor',
'Conrad',
'Constance',
'Constantine',
'Consuela',
'Consuelo',
'Contavious',
'Content',
'Contessa',
'Conway',
'Conyers',
'Cooper',
'Cora',
'Coral',
'Coralia',
'Coralie',
'Corban',
'Corbin',
'Corby',
'Cordelia',
'Corentine',
'Corey',
'Corin',
'Corina',
'Corine',
'Corinna',
'Corinne',
'Corky',
'Corliss',
'Corlista',
'Cornelia',
'Cornelius',
'Cornell',
'Corretta',
'Corrine',
'Cort',
'Cory',
'Cosette',
'Cosima',
'Cosmo',
'Coty',
'Courtney',
'Coy',
'Coye',
'Craig',
'Cray',
'Creighton',
'Creola',
'Crescent',
'Crete',
'Crevan',
'Crispian',
'Crispin',
'Crissa',
'Cristee',
'Cristiana',
'Cristy',
'Crystal',
'Crysti',
'Cullen',
'Curry',
'Curt',
'Curtis',
'Cuthbert',
'Cutler',
'Cutter',
'Cy',
'Cybele',
'Cybil',
'Cybill',
'Cyd',
'Cyle',
'Cyma',
'Cyndi',
'Cynthia',
'Cypress',
'Cypriss',
'Cyrah',
'Cyril',
'Cyrus',
'D''lorah',
'Da-xia',
'Dabrisha',
'Dacey',
'Dafydd',
'Dagan',
'Dagmar',
'Dagobert',
'Dahlia',
'Dairne',
'Daisy',
'Dakir',
'Dakota',
'Dale',
'Dalene',
'Dalena',
'Dalia',
'Dalila',
'Dalit',
'Dallas',
'Dallin',
'Dalton',
'Dalva',
'Damaris',
'Dameion',
'Damian',
'Damiana',
'Damita',
'Damon',
'Dan',
'Dana',
'Danae',
'Dane',
'Danette',
'Dani',
'Danica',
'Daniel',
'Daniela',
'Danielle',
'Danika',
'Danil',
'Danitra',
'Dannie',
'Danniell',
'Danny',
'Dantae',
'Dante',
'Danton',
'Danyl',
'Daphne',
'Dara',
'Daray',
'Darby',
'Darcey',
'Darcie',
'Darcy',
'Dard',
'Daria',
'Darian',
'Darin',
'Dario',
'Daris',
'Darla',
'Darlene',
'Darnell',
'Darrell',
'Darren',
'Darrin',
'Darrion',
'Darrius',
'Darryl',
'Darshan',
'Darwin',
'Daryl',
'Dasan',
'Dasani',
'Dasha',
'Davan',
'Dave',
'Davi',
'David',
'Davida',
'Davin',
'Davina',
'Davis',
'Davonna',
'Davu',
'Dawn',
'Dawson',
'Dax',
'Daxton',
'Daylin',
'Dayna',
'Dayne',
'Dayton',
'Dea',
'Dean',
'Deandra',
'Deanna',
'Deanne',
'D''ante',
'Debbie',
'Debby',
'Deborah',
'Debra',
'Declan',
'Deidra',
'Deiondre',
'Deirdra',
'Deirdre',
'Deiter',
'Deja',
'Dejah',
'Dejalysse',
'Dejaun',
'Deka',
'Del',
'Delaine',
'Delaney',
'Delbert',
'Delfina',
'Delia',
'Delila',
'Delilah',
'Deliz',
'Della',
'Delling',
'Delores',
'Delphine',
'Delta',
'Delu',
'Demario',
'Dembe',
'Demetria',
'Demetrius',
'Demi',
'Demitrius',
'Demonio',
'Demoryea',
'Dempster',
'Den''e',
'Dena',
'Denali',
'Deniro',
'Denis',
'Denisa',
'Denise',
'Denna',
'Dennis',
'Dennise',
'Denver',
'Denyce',
'Denyne',
'Denyse',
'Denzil',
'Denzyl',
'Deo',
'Deon',
'Derby',
'Derek',
'Derex',
'Derica',
'Dermot',
'Derora',
'Derrick',
'Derron',
'Derry',
'Des',
'Desana',
'Desdemona',
'Deserae',
'Desi',
'Desiderio',
'Desiree',
'Desmond',
'Dessa',
'Dessie',
'Destiny',
'Deva',
'Devaki',
'Devereaux',
'Devi',
'Devin',
'Devon',
'Devorah',
'Devorit',
'Dewey',
'Dewitt',
'Dexter',
'Dextra',
'Deyana',
'Dezarae',
'Diallo',
'Diamond',
'Diana',
'Diane',
'Dianne',
'Diantha',
'Dianthe',
'Diata',
'Diavion',
'Dick',
'Didier',
'Didrika',
'Diego',
'Dijon',
'Diliza',
'Dillan',
'Dillian',
'Dillon',
'Dina',
'Dinah',
'Dino',
'Dion',
'Diondra',
'Dionna',
'Dionne',
'Dionysius',
'Dionysus',
'Dior',
'Dirk',
'Dixie',
'Dixon',
'Dmitri',
'Doane',
'Doctor',
'Doda',
'Doi',
'Dolly',
'Dolores',
'Dolph',
'Dom',
'Domani',
'Dominic',
'Dominick',
'Dominique',
'Dominy',
'Don',
'Donagh',
'Donahi',
'Donal',
'Donald',
'Donat',
'Donato',
'Donelle',
'Donna',
'Donnel',
'Donnica',
'Donny',
'Donovan',
'Dora',
'Doran',
'Dorcas',
'Dore',
'Dori',
'Doria',
'Dorian',
'Dorie',
'Dorinda',
'Doris',
'Dorit',
'Dorothea',
'Dorothy',
'Dorrance',
'Dorset',
'Dorsey',
'Dory',
'Dot',
'Dotty',
'Doug',
'Dougal',
'Douglas',
'Douglass',
'Dove',
'Doyle',
'Doyt',
'Drake',
'Dreama',
'Drew',
'Dru',
'Dryden',
'Duane',
'Duc',
'Dudley',
'Duena',
'Duff',
'Dugan',
'Duka',
'Duke',
'Dulce',
'Dulcea',
'Dulcina',
'Dulcinea',
'Dumi',
'Duncan',
'Dunixi',
'Dunja',
'Dunn',
'Dunne',
'Durlie',
'Duscha',
'Dustin',
'Dusty',
'Duvon',
'Duwane',
'Dwayne',
'Dwight',
'Dyan',
'Dyani',
'Dyanne',
'Dylan',
'Dyllis',
'Dyre',
'Dysis',
'Eadoin',
'Eamon',
'Earl',
'Earlene',
'Earnest',
'Easter',
'Easton',
'Eavan',
'Ebony',
'Echo',
'Ed',
'Edalene',
'Edaline',
'Edana',
'Edda',
'Eddie',
'Eddy',
'Edeline',
'Eden',
'Edena',
'Edgar',
'Edie',
'Edison',
'Edita',
'Edith',
'Edmund',
'Edna',
'Edric',
'Edward',
'Edwardo',
'Edwin',
'Edwina',
'Edwiygh',
'Edythe',
'Effie',
'Efrat',
'Efrem',
'Egan',
'Ehren',
'Eileen',
'Eilis',
'Eiman',
'Eitan',
'Ejlicey',
'Ela',
'Elaina',
'Elaine',
'Elan',
'Elana',
'Elani',
'Elata',
'Elda',
'Elden',
'Eldon',
'Eldora',
'Eleanor',
'Electra',
'Elena',
'Eleni',
'Elephteria',
'Elgin',
'Eli',
'Elia',
'Eliana',
'Elias',
'Elie',
'Elijah',
'Elin',
'Eliora',
'Eliot',
'Elisabeth',
'Elise',
'Elisha',
'Elita',
'Eliza',
'Elizabeth',
'Eljah',
'Elkan',
'Elke',
'Ella',
'Ellard',<|fim▁hole|>'Ellema',
'Ellen',
'Ellery',
'Ellie',
'Elliot',
'Elliott',
'Ellis',
'Ellisa',
'Elmo',
'Elodie',
'Eloise',
'Elsa',
'Elsie',
'Elspeth',
'Elton',
'Elu',
'Elva',
'Elvin',
'Elvina',
'Elvira',
'Elvis',
'Ely',
'Elysia',
'Elyssa',
'Elza',
'Emaline',
'Emani',
'Emanuel',
'Emanuele',
'Emele',
'Emene',
'Emera',
'Emerald',
'Emery',
'Emese',
'Emil',
'Emilia',
'Emilie',
'Emiliee',
'Emilio',
'Emily',
'Emira',
'Emma',
'Emmagin',
'Emmanuel',
'Emmet',
'Emmett',
'Emmly',
'Emory',
'Enid',
'Ennis',
'Enos',
'Enrico',
'Envy',
'Eolande',
'Ephraim',
'Epifanio',
'Er',
'Erasmus',
'Eri',
'Eric',
'Erica',
'Erik',
'Erika',
'Erimentha',
'Erin',
'Eris',
'Erland',
'Erma',
'Erme',
'Ermin',
'Erna',
'Ernest',
'Ernie',
'Erno',
'Eron',
'Eros',
'Errin',
'Errol',
'Erv',
'Ervin',
'Erwin',
'Eryk',
'Esben',
'Eshe',
'Esma',
'Esmerelda',
'Essie',
'Esteban',
'Estefania',
'Estelle',
'Ester',
'Esther',
'Estralita',
'Etan',
'Etana',
'Eternity',
'Ethan',
'Ethel',
'Ethelda',
'Etta',
'Eudora',
'Eugene',
'Eulalia',
'Eulalie',
'Eupemia',
'Euphemia',
'Euridice',
'Eva',
'Evalina',
'Evan',
'Evane',
'Evangeline',
'Evania',
'Eve',
'Evelia',
'Evelien',
'Evelyn',
'Everett',
'Evette',
'Evi',
'Evie',
'Evita',
'Evonne',
'Ewa',
'Eyal',
'Eydie',
'Ezekiel',
'Ezra',
'Fabian',
'Fabienne',
'Fabiola',
'Fabricio',
'Fabrizio',
'Fabunni',
'Fahaad',
'Fahd',
'Faire',
'Fairfax',
'Fairly',
'Faith',
'Fala',
'Fale',
'Fallon',
'Falona',
'Fanchon',
'Fane',
'Farah',
'Farica',
'Faris',
'Farley',
'Farrah',
'Farrell',
'Farren',
'Farrest',
'Fatima',
'Fatmira',
'Fausta',
'Faustine',
'Favian',
'Fawn',
'Fay',
'Faye',
'Faylinn',
'Faymatu',
'Fedora',
'Feivel',
'Feleti',
'Felice',
'Felicia',
'Felicity',
'Felimy',
'Felina',
'Felix',
'Fell',
'Felton',
'Fennella',
'Feoras',
'Ferdinand',
'Fergal',
'Fergus',
'Ferguson',
'Fern',
'Fernandez',
'Fernando',
'Ferris',
'Ferrol',
'Fiachra',
'Fico',
'Fidel',
'Fidelia',
'Fidelio',
'Fidella',
'Field',
'Filbert',
'Filia',
'Filipina',
'Fineen',
'Finley',
'Finn',
'Finna',
'Finola',
'Fiona',
'Fionan',
'Fionn',
'Fionnula',
'Fiorenza',
'Fisk',
'Fisseha',
'Flan',
'Flannery',
'Flavia',
'Flavian',
'Fletcher',
'Fleur',
'Flint',
'Flo',
'Flora',
'Floramaria',
'Florence',
'Floria',
'Floriane',
'Florida',
'Florrie',
'Flower',
'Floyd',
'Flynn',
'Fola',
'Fonda',
'Fondea',
'Forbes',
'Ford',
'Fordon',
'Forrest',
'Forrester',
'Forster',
'Fortune',
'Foster',
'Fotini',
'Fountain',
'Fox',
'Foy',
'Fraley',
'Fran',
'Frances',
'Francesca',
'Francis',
'Francois',
'Frank',
'Franklin',
'Franz',
'Frasier',
'Frayne',
'Fred',
'Freddy',
'Frederica',
'Frederick',
'Fredrica',
'Freed',
'Freeman',
'Freja',
'Fremont',
'Freya',
'Frieda',
'Fritz',
'Fritzi',
'Frode',
'Fronde',
'Fruma',
'Frye',
'Fulbright',
'Fuller',
'Fynn',
'Gabby',
'Gabe',
'Gabi',
'Gabriel',
'Gabriela',
'Gabriella',
'Gabrielle',
'Gaby',
'Gaetan',
'Gaetane',
'Gafna',
'Gage',
'Gail',
'Gailia',
'Gaille',
'Gainell',
'Gaius',
'Gale',
'Galen',
'Galeno',
'Gali',
'Gallagher',
'Gallia',
'Galvin',
'Gamada',
'Gamal',
'Gamaliel',
'Ganaya',
'Ganit',
'Gannon',
'Ganya',
'Gardner',
'Gareth',
'Garfield',
'Garland',
'Garren',
'Garret',
'Garrett',
'Garrick',
'Garrison',
'Garron',
'Garry',
'Garson',
'Garth',
'Garvey',
'Gary',
'Gates',
'Gaurav',
'Gautier',
'Gavan',
'Gavin',
'Gavivi',
'Gavril',
'Gawain',
'Gay',
'Gaye',
'Gayle',
'Gaylord',
'Gaynell',
'Gazali',
'Gazelle',
'Gazit',
'Gella',
'Gelsey',
'Gemma',
'Gene',
'Genell',
'Genesis',
'Genet',
'Geneva',
'Genevieve',
'Genna',
'Gent',
'Geoff',
'Geoffrey',
'Geordi',
'George',
'Georgette',
'Georgia',
'Georgina',
'Gerald',
'Geraldene',
'Geraldine',
'Geraldo',
'Gerard',
'Gerardo',
'Gerene',
'Gerda',
'Geri',
'Gerik',
'Germain',
'Germaine',
'Gerodi',
'Gerry',
'Gershom',
'Gertrude',
'Gethan',
'Ghita',
'Giacomo',
'Gian',
'Gianina',
'Gianna',
'Giavanna',
'Gibson',
'Gideon',
'Gigi',
'Gil',
'Gilbert',
'Gilda',
'Giles',
'Gili',
'Gillespie',
'Gillian',
'Gin',
'Gina',
'Ginacarlo',
'Ginata',
'Ginger',
'Ginny',
'Gino',
'Giolla',
'Giorgio',
'Giovanett',
'Giovanni',
'Gira',
'Gisela',
'Giselle',
'Gita',
'Gitano',
'Gitel',
'Gittel',
'Giulio',
'Giuseppe',
'Giva',
'Giza',
'Gladys',
'Glen',
'Glenda',
'Glenn',
'Glenna',
'Glennis',
'Glenys',
'Glinora',
'Glora',
'Gloria',
'Glory',
'Glyn',
'Glynis',
'Glynnis',
'Godana',
'Godfrey',
'Golda',
'Goldie',
'Goldy',
'Gomer',
'Gordon',
'Gordy',
'Grace',
'Gracie',
'Grady',
'Graham',
'Gram',
'Grania',
'Grant',
'Granville',
'Gratia',
'Gratiana',
'Grayce',
'Grayson',
'Grazia',
'Greer',
'Greg',
'Gregg',
'Gregory',
'Greta',
'Gretchen',
'Gretel',
'Grier',
'Griffin',
'Griselda',
'Grizelda',
'Grover',
'Guadalupe',
'Gualtier',
'Guban',
'Gudrun',
'Guenevere',
'Guido',
'Guinevere',
'Gunda',
'Gunnar',
'Gunther',
'Gur',
'Gure',
'Guri',
'Gurit',
'Gusanthony',
'Gustav',
'Guy',
'Gwen',
'Gwendolyn',
'Gwyn',
'Gwyneth',
'Gypsy',
'Haben',
'Habib',
'Hachi',
'Hada',
'Hadar',
'Hadassah',
'Hadley',
'Hafiz',
'Haile',
'Haines',
'Hajari',
'Hal',
'Halen',
'Haley',
'Hali',
'Halim',
'Halley',
'Halona',
'Ham',
'Hamal',
'Hamdia',
'Hamilton',
'Hamlet',
'Hamlin',
'Hampton',
'Hana',
'Hanan',
'Hanibal',
'Hanifa',
'Hank',
'Hanley',
'Hanna',
'Hannah',
'Hannelore',
'Hannibal',
'Hans',
'Hanzila',
'Hao',
'Haracha',
'Harel or harrell',
'Harlan',
'Harley',
'Harlow',
'Harmon',
'Harmony',
'Harold',
'Haroun',
'Harper',
'Harriet',
'Harrison',
'Harry',
'Hart',
'Hartwell',
'Haru',
'Haruki',
'Haruko',
'Haruni',
'Harva',
'Harvey',
'Hasad',
'Hasan',
'Hasana',
'Hastin',
'Hateya',
'Haven',
'Hawa',
'Hayden',
'Haylee',
'Hayleigh',
'Hayley',
'Hayward',
'Hazeka',
'Hazel',
'Hazelle',
'Hazina',
'Heath',
'Heather',
'Heaven',
'Heavynne',
'Hector',
'Hedda',
'Hedia',
'Hedva',
'Hedwig',
'Hedy',
'Hedya',
'Heidi',
'Heinz',
'Helaina',
'Helaine',
'Helen',
'Helena',
'Helene',
'Helga',
'Helia',
'Heller',
'Heloise',
'Henri',
'Henrietta',
'Henrik',
'Henry',
'Hera',
'Herb',
'Herbert',
'Herbst',
'Heremon',
'Herman',
'Herschel',
'Hertz',
'Hesper',
'Hester',
'Hestia',
'Hewitt',
'Hidalgo',
'Hidi',
'Hiero',
'Hija',
'Hila',
'Hilaire',
'Hilary',
'Hilda',
'Hilde',
'Hillary',
'Hilzarie',
'Hina',
'Hinda',
'Hiroko',
'Hirsi',
'Holden',
'Holiday',
'Hollace',
'Holli',
'Hollie',
'Hollis',
'Holly',
'Hollye',
'Holt',
'Homer',
'Honey',
'Honora',
'Honoria',
'Hope',
'Horace',
'Horst',
'Horus',
'Hosea',
'Hosein',
'Hoshi',
'Hoshiko',
'Houston',
'Howard',
'Howe',
'Howell',
'Howie',
'Hoyt',
'Hubert',
'Hue',
'Huela',
'Huey',
'Hugh',
'Hugo',
'Humphrey',
'Hunter',
'Hurley',
'Huslu',
'Huso',
'Hussein',
'Huxley',
'Hy',
'Hyacinth',
'Hyman',
'Hyroniemus',
'Ian',
'Ianna',
'Ianthe',
'Ida',
'Idalee',
'Idalia',
'Idana',
'Idande',
'Idania',
'Idra',
'Iesha',
'Ife',
'Ifeoma',
'Igball',
'Ige',
'Iggi',
'Iggy',
'Ignacio',
'Ignatius',
'Ike',
'Ikechukwa',
'Ikenna',
'Ikennachukwa',
'Ilana',
'Ilario',
'Ileana',
'Ilia',
'Iliana',
'Ilit',
'Ilo',
'Ilom',
'Ilori',
'Ilse',
'Ilyssa',
'Iman',
'Imogene',
'Ina',
'Inari',
'Inci',
'Independence',
'India',
'Indira',
'Indra',
'Inez',
'Infinity',
'Inga',
'Inge',
'Ingrid',
'Inoke',
'Iola',
'Iolani',
'Ion',
'Iona',
'Ipo',
'Ira',
'Iram',
'Irene',
'Iria',
'Irida',
'Irina',
'Iris',
'Irisa',
'Irma',
'Irving',
'Iryl',
'Isaac',
'Isabel',
'Isabis',
'Isadora',
'Isaiah',
'Isanne',
'Isao',
'Isha',
'Isi',
'Isidro',
'Isis',
'Isleen',
'Ismaela',
'Ismail',
'Ismet',
'Isolde',
'Isra',
'Israel',
'Issay',
'Ita',
'Italia',
'Iuliana',
'Iulianna',
'Ivan',
'Ivet',
'Ivi',
'Ivie',
'Ivo',
'Ivria',
'Ivrit',
'Ivy',
'Iyana',
'Iyende',
'Iyindi',
'Izefia',
'Izegbe',
'Izellah',
'Ja',
'Jaala',
'Jaali',
'Jabari',
'Jabilo',
'Jabir',
'Jabulani',
'Jace',
'Jacinda',
'Jacinta',
'Jack',
'Jackie',
'Jackson',
'Jaclyn',
'Jacob',
'Jacoba',
'Jacqueline',
'Jacquelyn',
'Jacques',
'Jacquetta',
'Jacqui',
'Jacquleyn',
'Jada',
'Jade',
'Jaden',
'Jadon',
'Jadyn',
'Jael',
'Jafaru',
'Jahazel',
'Jai',
'Jaime',
'Jaimie',
'Jake',
'Jaleel',
'Jalen',
'Jalene',
'Jalil',
'Jalila',
'Jamal',
'Jamar',
'James',
'Jamesa',
'Jamese',
'Jami',
'Jamie',
'Jamila',
'Jan',
'Jana',
'Janae',
'Janai',
'Jancy',
'Jane',
'Janel',
'Janelis',
'Janelle',
'Janet',
'Janette',
'Jania',
'Janiah',
'Janice',
'Janina',
'Janine',
'Jantz',
'Japheth',
'Jara',
'Jarah',
'Jared',
'Jariath',
'Jarod',
'Jarrett',
'Jarvis',
'Jasa',
'Jasalynn',
'Jasmine',
'Jason',
'Jasper',
'Jatupol',
'Jaurene',
'Javen',
'Javier',
'Jay',
'Jayce',
'Jayden',
'Jaymar',
'Jayme',
'Jazel',
'Jazlynn',
'Jealexiz',
'Jean',
'Jeanette',
'Jeanine',
'Jeanne',
'Jeb',
'Jebediah',
'Jedidiah',
'Jeff',
'Jefferson',
'Jeffrey',
'Jemima',
'Jena',
'Jenelle',
'Jenesis',
'Jengo',
'Jenike',
'Jenis',
'Jenna',
'Jennelle',
'Jennessa',
'Jenni',
'Jennie',
'Jennifer',
'Jennika',
'Jenny',
'Jens',
'Jensen',
'Jered',
'Jeremiah',
'Jeremy',
'Jeri',
'Jerica',
'Jericho',
'Jermaine',
'Jermica',
'Jerod',
'Jeroen',
'Jerold',
'Jerom',
'Jerome',
'Jerommeke',
'Jerrell',
'Jerrick',
'Jerry',
'Jerusha',
'Jess',
'Jessalyn',
'Jesse',
'Jessica',
'Jessie',
'Jesup',
'Jesus',
'Jethro',
'Jett',
'Jewel',
'Jewelysa',
'Jewell',
'Jewl',
'Jewlana',
'Jezebel',
'Jianna',
'Jihan',
'Jill',
'Jillian',
'Jim',
'Jimi',
'Jimmy',
'Jin',
'Jina',
'Jinda',
'Jira',
'Jiro',
'Joan',
'Joann',
'Joanna',
'Joanne',
'Job',
'Jocasta',
'Jocelyn',
'Jock',
'Joda',
'Jodi',
'Jodie',
'Jody',
'Joe',
'Joel',
'Joelle',
'Joey',
'Johann',
'Johanna',
'John',
'Johnny',
'Joi',
'Joie',
'Jola',
'Jolene',
'Jolie',
'Jolina',
'Jon',
'Jonah',
'Jonathan',
'Jonny',
'Jordan',
'Joren',
'Jorge',
'Jorn',
'Jorrin',
'Jorunn',
'Jorryn',
'Jory',
'Jose',
'Josef',
'Joseph',
'Josephine',
'Joselyn',
'Josh',
'Joshua',
'Joshwa',
'Josiah',
'Josie',
'Joslyn',
'Josue',
'Jovan',
'Jovana',
'Jovianne',
'Jovita',
'Joy',
'Joyce',
'Joylyn',
'Juan',
'Juana',
'Juandalynn',
'Juani',
'Juanita',
'Jubal',
'Jud',
'Judah',
'Judd',
'Jude',
'Judith',
'Judson',
'Judy',
'Juji',
'Jules',
'Julia',
'Julian',
'Juliana',
'Julianna',
'Julianne',
'Julliatte',
'Julie',
'Juliet',
'Julieta',
'Juliette',
'Julio',
'Julisa',
'Julissa',
'Julisha',
'July',
'Jumoke',
'Jun',
'June',
'Junior',
'Juniper',
'Justin',
'Justina',
'Justine',
'Justise',
'Justyn',
'Kabibe',
'Kabili',
'Kabira',
'Kacela',
'Kacey',
'Kachina',
'Kacy',
'Kadeem',
'Kadin',
'Kael',
'Kaeley',
'Kaelin',
'Kaethe',
'Kahlia',
'Kahlilia',
'Kai',
'Kaikura',
'Kailey',
'Kaitlin',
'Kaitlyn',
'Kaiya',
'Kalani',
'Kalb',
'Kalea',
'Kaleanne',
'Kaleb',
'Kaleena',
'Kaleeyse',
'Kalena',
'Kalei',
'Kaleigh',
'Kaley',
'Kali',
'Kalila',
'Kalilah',
'Kalin',
'Kalinda',
'Kalista',
'Kalli',
'Kamal',
'Kamali',
'Kamari',
'Kamau',
'Kambriea',
'Kame',
'Kamella',
'Kameryn',
'Kamil',
'Kamilia',
'Kande',
'Kandice',
'Kane',
'Kapi',
'Kara',
'Karan',
'Karana',
'Kare',
'Kareem',
'Karen',
'Karena',
'Kari',
'Karia',
'Karie',
'Karik',
'Karim',
'Karimah',
'Karina',
'Karis',
'Karissa',
'Karl',
'Karla',
'Karli',
'Karma',
'Karmael',
'Karmina',
'Karna',
'Karston',
'Kasi',
'Kasim',
'Kaspar',
'Kassandra',
'Kassel',
'Kassia',
'Kat',
'Kata',
'Kate',
'Katelin',
'Katharine',
'Katherine',
'Kathie',
'Kathleen',
'Kathryn',
'Kathy',
'Katie',
'Katina',
'Katiryn',
'Kato',
'Kator',
'Katrina',
'Katy',
'Kaula',
'Kawena',
'Kay',
'Kaya',
'Kaycee',
'Kaydee',
'Kayden',
'Kayin',
'Kayla',
'Kaylana',
'Kaylee',
'Kaylee-ann',
'Kaylen',
'Kaylia',
'Kayo',
'Kayonga',
'Kaz',
'Kazi',
'Kazimierz',
'Kazu',
'Keagan',
'Keaira',
'Keanu',
'Keara',
'Keb',
'Kedem',
'Kedma',
'Keefe',
'Keefer',
'Keegan',
'Keelan',
'Keelia',
'Keely',
'Keena',
'Keenan',
'Keene',
'Keeya',
'Kefira',
'Kei',
'Keiji',
'Keiki',
'Keiko',
'Keir',
'Keira',
'Keiran',
'Keisha',
'Keita',
'Keitaro',
'Keith',
'Kelby',
'Kelda',
'Kele',
'Kelea',
'Kelii',
'Kelila',
'Kellan',
'Kellee',
'Kellen',
'Kelley',
'Kelli',
'Kellie',
'Kellsie',
'Kelly',
'Kelsey',
'Kelsi',
'Kelsie-lynn',
'Kelton',
'Kelvin',
'Kemmora',
'Ken',
'Kenadia',
'Kenadie',
'Kenda',
'Kendall',
'Kendi',
'Kendis',
'Kendra',
'Keneisha',
'Kenisha',
'Kenley',
'Kenna',
'Kennan',
'Kennedi',
'Kennedie',
'Kennedy',
'Kenneth',
'Kenny',
'Kent',
'Kenton',
'Kenvee',
'Kenyi',
'Kenyon',
'Kenzie',
'Keola',
'Keon',
'Keosha',
'Kera',
'Kerda',
'Keren',
'Kerica',
'Kermit',
'Kern',
'Kerr',
'Kerri',
'Kerry',
'Kerstiana',
'Kesin',
'Kessler',
'Ketara',
'Keturah',
'Kevin',
'Kevina',
'Key',
'Keyanna',
'Keyon',
'Keytanna',
'Keziah',
'Khalida',
'Khalil',
'Khalipha',
'Khiry',
'Khrystin',
'Khursten',
'Kia',
'Kiah',
'Kiaira',
'Kiana',
'Kiandra',
'Kiara',
'Kibibe',
'Kiden',
'Kieran',
'Kiersten',
'Kiho',
'Kiki',
'Kiley',
'Killian',
'Kim',
'Kimball',
'Kimberly',
'Kimi',
'Kimmy',
'Kin',
'Kina',
'Kindra',
'Kinfe',
'King',
'Kingston',
'Kinipela',
'Kioko',
'Kione',
'Kiora',
'Kipling',
'Kiplyn',
'Kipp',
'Kira',
'Kirabo',
'Kiral',
'Kirby',
'Kiri',
'Kiril',
'Kirk',
'Kiros',
'Kirra',
'Kirsi',
'Kirsten',
'Kirstie',
'Kirstin',
'Kirstyn',
'Kisha',
'Kishi',
'Kita',
'Kitoko',
'Kitra',
'Kitty',
'Kiyo',
'Kiyone',
'Kiyoshi',
'Kizzy',
'Kjiristen',
'Klania',
'Klaus',
'Klitos',
'Knut',
'Koda',
'Koen',
'Kohana',
'Koine',
'Koko',
'Kola',
'Kole',
'Kolton',
'Konane',
'Konrad',
'Kordell',
'Koren',
'Korene',
'Kori',
'Korina',
'Korinne',
'Korrie',
'Kortnie',
'Kory',
'Kostya',
'Koto',
'Kourtney',
'Kozue',
'Kris',
'Krisidian',
'Krista',
'Kristen',
'Kristian',
'Kristin',
'Kristina',
'Kristine',
'Kristopher',
'Kristy',
'Krystal',
'Krystyn',
'Kuma',
'Kumi',
'Kumiko',
'Kuniko',
'Kura',
'Kuri',
'Kuron',
'Kurt',
'Kwanita',
'Kyla',
'Kylan',
'Kyle',
'Kyleigh',
'Kylene',
'Kyler',
'Kyley',
'Kylia',
'Kylie',
'Kymberlee',
'Kyna',
'Kynan',
'Kynthia',
'Kynton',
'Kyra',
'Kyrene',
'Kyria',
'L''pree',
'La don',
'Lacey',
'Lachlan',
'Lacy',
'Laddie',
'Ladona',
'Lael',
'Lahela',
'Lahoma',
'Laila',
'Lailani',
'Laina',
'Laird',
'Lajuan',
'Lajuana',
'Lakeisha',
'Lakin',
'Lala',
'Lale',
'Laleh',
'Lalena',
'Lali',
'Lalita',
'Lalo',
'Lamar',
'Lamesha',
'Lamia',
'Lamont',
'Lan',
'Lana',
'Lanai',
'Lanaya',
'Lance',
'Lancelot',
'Landen',
'Landers',
'Landis',
'Landon',
'Landry',
'Lane',
'Lanelle',
'Lang',
'Langer',
'Langston',
'Lani',
'Lanier',
'Lankston',
'Lanza',
'Laqueta',
'Lara',
'Laree',
'Laraine',
'Lareina',
'Larie',
'Larissa',
'Lark',
'Larkin',
'Larry',
'Lars',
'Larue',
'Larvall',
'Lasca',
'Lashanda',
'Lassie',
'Laszlo',
'Latanya',
'Latasha',
'Lateefa',
'Laterian',
'Latham',
'Lathrop',
'Latika',
'Latimer',
'Latisha',
'Latoya',
'Laura',
'Lauren',
'Laurence',
'Laurie',
'Laval',
'Lavaun',
'Lave',
'Laverne',
'Lavey',
'Lavi',
'Lavonn',
'Lavonne',
'Lawanda',
'Lawrence',
'Lawrencia',
'Layla',
'Layne',
'Lazar',
'Lazarus',
'Lazzaro',
'Le',
'Lea',
'Leaerialle',
'Leah',
'Leal',
'Leala',
'Leander',
'Leane',
'Leanna',
'Leanne',
'Learay',
'Leata',
'Leavitt',
'Lecea',
'Lechelsea',
'Ledarrius',
'Leda',
'Ledell',
'Lee',
'Leeanne',
'Leena',
'Leeto',
'Leevy',
'Legend',
'Lehana',
'Leia',
'Leif',
'Leigh',
'Leigha',
'Leighanna',
'Leila',
'Leilani',
'Leimomi',
'Lekey',
'Lel',
'Lela',
'Leland',
'Lelia',
'Lamanuel',
'Lemuel',
'Lena',
'Lencho',
'Leneva',
'Lenka',
'Lenna',
'Lenora',
'Lenore',
'Lente',
'Leo',
'Leola',
'Leoma',
'Leon',
'Leona',
'Leonard',
'Leone',
'Leoni',
'Leonie',
'Leonora',
'Leonzal',
'Leopold',
'Leora',
'Leota (f)',
'Leotas (m)',
'Laquenna',
'Laqueenie',
'Leigh',
'Lerato',
'Lerin',
'Leroy',
'Les',
'Lesa',
'Lesha',
'Lesley',
'Leslie',
'Less',
'Lester',
'Leticia or letticia',
'Letitia',
'Letoya',
'Lev',
'Levana',
'Leverett',
'Levi',
'Levia',
'Levon',
'Lewa',
'Lewis',
'Lex',
'Lexi',
'Lexine',
'Lexiss',
'Leyva',
'Lia',
'Liam',
'Lian',
'Liana',
'Libba',
'Libby',
'Liberty',
'Lida',
'Lidia',
'Lien',
'Liko',
'Lila',
'Lilac',
'Lilah',
'Lilia',
'Liliauna',
'Liliha',
'Lilith',
'Lilli',
'Lillian',
'Lillion',
'Lilo',
'Lily',
'Lin',
'Lina',
'Lincoln',
'Linda',
'Lindley',
'Lindsay',
'Lindsey',
'Lindy',
'Linette',
'Linna',
'Linus',
'Liona',
'Lionel',
'Lirit',
'Lisa',
'Lisandra',
'Lisbet',
'Lisette',
'Lisimba',
'Lisle',
'Lita',
'Liv',
'Livana',
'Livi',
'Livia',
'Livvy',
'Lixue',
'Liz',
'Liza',
'Lizbeth',
'Lizett',
'Lizina',
'Llewellyn',
'Lloyd',
'Loba',
'Lobo',
'Locke',
'Loe',
'Logan',
'Lois',
'Lola',
'Lolonyo',
'Lolovivi',
'Lolymiya',
'Loman',
'Lona',
'Lonato',
'London',
'Lonna',
'Lonni',
'Lonnie',
'Lonnit',
'Lora',
'Lorelei',
'Lorena',
'Lorenzo',
'Loretta',
'Lori',
'Lorie',
'Lorimer',
'Lorin',
'Loring',
'Lorna',
'Lorne',
'Lorraine',
'Lorretta',
'Lory',
'Lotta',
'Lotte',
'Lotus',
'Lou',
'Louanna',
'Loughlin',
'Louis',
'Louisa',
'Louise',
'Loura',
'Lourana',
'Lourdes',
'Lourine',
'Love',
'Lovette',
'Lovey',
'Lovie',
'Lowell',
'Luam',
'Luana',
'Lucas',
'Luce',
'Lucia',
'Lucian',
'Lucie',
'Lucille',
'Lucinda',
'Lucio',
'Lucius',
'Lucretia',
'Lucus',
'Lucy',
'Ludema',
'Ludlow',
'Ludwig',
'Luigi',
'Luis',
'Luke',
'Lula',
'Lulli',
'Lulu',
'Luna',
'Lundy',
'Lunette',
'Lupe',
'Lupita',
'Luthando',
'Luther',
'Ly',
'Lyannie',
'Lyde',
'Lydette',
'Lydia',
'Lyle',
'Lyn',
'Lynae',
'Lynch',
'Lynda',
'Lynde',
'Lyndel',
'Lyndon',
'Lyndsey',
'Lynelle',
'Lynette',
'Lynley',
'Lynn',
'Lynna',
'Lynne',
'Lynnea',
'Lynton',
'Lyre',
'Lyris',
'Lysa',
'Lysander',
'Lysandra',
'Maarten',
'Maat',
'Mabel',
'Mac',
'Macayle',
'Mace',
'Maceo',
'Macha',
'Mackenzie',
'Mactarr',
'Macy',
'Madaleno',
'Maddox',
'Madeleine',
'Madelia',
'Madeline',
'Madge',
'Madison & madyson',
'Madonna',
'Madra',
'Madrona',
'Mae',
'Maeko',
'Maemi',
'Maeron',
'Maeryn',
'Maeve',
'Magan',
'Magda',
'Magdalena',
'Magdalene',
'Magee',
'Maggie',
'Magnar',
'Magnolia',
'Magua',
'Maha',
'Mahala',
'Mahalia',
'Mahari',
'Mahdi',
'Mahitable',
'Mai',
'Maia',
'Maik',
'Maille',
'Maimun',
'Maire',
'Mairi',
'Maisie',
'Maj',
'Major',
'Makaih',
'Makaila',
'Makalah',
'Makale',
'Makalo',
'Makani',
'Makaveli',
'Makayla',
'Makenna',
'Makenzy',
'Makoto',
'Makya',
'Malachi',
'Malaika',
'Malana',
'Malane',
'Malasy',
'Malaya',
'Malcolm',
'Malia',
'Malik',
'Malin',
'Malina',
'Malise',
'Malissa',
'Malka',
'Mallory',
'Malo',
'Malomo',
'Malone',
'Malory',
'Malyn',
'Mamie',
'Mana',
'Mandel',
'Mandelina',
'Mandell',
'Mandy',
'Manica',
'Manina',
'Manning',
'Manolin',
'Manon',
'Mansa',
'Manuel',
'Manuela',
'Maori',
'Mara',
'Marash',
'Marc',
'Marcel',
'Marcell',
'Marcella',
'Marcello',
'Marcellus',
'Marchelle',
'Marcia',
'Marcie',
'Marco',
'Marcus',
'Marcy',
'Mardell',
'Mardi',
'Mare',
'Maree',
'Marek',
'Maren',
'Marenda',
'Margaret',
'Margarita',
'Marge',
'Margo',
'Margot',
'Marguerite',
'Mari',
'Maria',
'Mariah',
'Mariam',
'Marianne',
'Mariatu',
'Maribel',
'Maribeth',
'Marie',
'Mariel',
'Mariella',
'Marietta',
'Marigold',
'Marijke',
'Marika',
'Marilu',
'Marilyn',
'Marin',
'Marina',
'Marinel',
'Marino',
'Mario',
'Marion',
'Maris',
'Marisa',
'Marisela',
'Marisol',
'Marissa',
'Maritza',
'Marius',
'Marjean',
'Marjorie',
'Mark',
'Marka',
'Marlas',
'Marlena',
'Marlene',
'Marli',
'Marlie',
'Marlin',
'Marlo',
'Marlon',
'Marlow',
'Marly',
'Marnie',
'Marnin',
'Marnina',
'Maro',
'Marquette',
'Marquis',
'Marrim',
'Marsha',
'Marshall',
'Marta',
'Martha',
'Martin',
'Martina',
'Marty',
'Marv',
'Marva',
'Marvel',
'Marvela',
'Marvene',
'Marvin',
'Mary',
'Maryjane',
'Masada',
'Mashaka',
'Mason',
'Massimo',
'Matana',
'Mateo',
'Mathilda',
'Mathilde',
'Matia',
'Matias',
'Matilda',
'Matilde',
'Matisse',
'Matrika',
'Matsu',
'Matt',
'Matteo',
'Matthew',
'Matthias',
'Mattox',
'Matty',
'Matusio',
'Maude',
'Mauli',
'Maura',
'Maureen',
'Maurice',
'Maurilio',
'Maurizio',
'Mauro',
'Mauve',
'Maverick',
'Mavis',
'Max',
'Maxim',
'Maxima',
'Maxime',
'Maximilian',
'Maximos',
'Maxine',
'Maxwell',
'May',
'Maya',
'Mayan',
'Mayda',
'Mayes',
'Maylin',
'Maymay',
'Maynard',
'Mayra',
'Mazi',
'Mazya',
'Mazzy',
'Mcdade',
'Mckale',
'Mckayla',
'Mckenna',
'Mckenzie',
'Mckile',
'Mcnamara',
'Mea',
'Mead',
'Meagan',
'Meaghan',
'Meara',
'Meda',
'Medard',
'Medea',
'Meg',
'Megan',
'Meged',
'Meghan',
'Mehalia',
'Mei',
'Meinako',
'Meir',
'Mekayla',
'Mekelle',
'Mel',
'Mela',
'Melania',
'Melanie',
'Melantha',
'Melba',
'Melchior',
'Mele',
'Meli',
'Melia',
'Melina',
'Melinda',
'Meliora',
'Melisande',
'Melissa',
'Melita',
'Melody',
'Melora',
'Melosa',
'Melva',
'Melvin',
'Melvina',
'Melvyn',
'Mendel',
'Menora',
'Mercedes',
'Mercer',
'Mercia',
'Mercy',
'Meredith',
'Merethe',
'Meria',
'Meris',
'Merita',
'Merle',
'Merlin',
'Merlot',
'Merrick',
'Merrill',
'Merritt',
'Merry',
'Mersendies',
'Merton',
'Merv',
'Mervin',
'Mervyn',
'Meryl',
'Meryle',
'Meshal',
'Messina',
'Metea',
'Mettabel',
'Mia',
'Mialyn',
'Micaella',
'Micah',
'Micaiah',
'Michael',
'Michaela',
'Michal',
'Michel',
'Michele',
'Micheline',
'Michelle',
'Michiko',
'Michila',
'Michon',
'Mick',
'Mickey',
'Micol',
'Mieko',
'Miette',
'Migdana',
'Mignon',
'Mihoshi',
'Mika',
'Mikaili',
'Mikal',
'Mike',
'Mike''aha',
'Mikey',
'Mikhail',
'Miki',
'Mikinea',
'Mikkel',
'Milan',
'Milandu',
'Mildred',
'Milena',
'Miles',
'Mili',
'Milia',
'Miliani',
'Miller',
'Millicent',
'Millie',
'Mills',
'Milly',
'Milo',
'Milt',
'Milton',
'Mimi',
'Mina',
'Minako',
'Minda',
'Mindy',
'Minerva',
'Miniya',
'Minna',
'Minnie',
'Minor',
'Minty',
'Mio',
'Mira',
'Mirabel',
'Mirabelle',
'Miracle',
'Miranda',
'Mircea',
'Mireille',
'Mirella',
'Miriam',
'Mirit',
'Miroslav',
'Mirra',
'Misae',
'Misha',
'Misty',
'Misu',
'Mitch',
'Mitchel',
'Mitchell',
'Mitsu',
'Miya',
'Miyana',
'Miyanda',
'Miyoko',
'Mizell',
'Moa',
'Moana',
'Moanna',
'Modesta',
'Modesty',
'Mohammed',
'Mohan',
'Moin',
'Moina',
'Moinuddin',
'Moira',
'Moji',
'Mojtaba',
'Moke',
'Molly',
'Mona',
'Monae',
'Monahan',
'Monica',
'Moniqua',
'Monique',
'Monita',
'Monroe',
'Montague',
'Montana',
'Monte',
'Montego',
'Montgomery',
'Monty',
'Moon',
'Moon-unit',
'Mora',
'Morag',
'Moral',
'Morathi',
'Mordecai',
'More',
'Morela',
'Morey',
'Morgan',
'Morgana',
'Moriah',
'Moriba',
'Morley',
'Morna',
'Morrie',
'Morrigan',
'Morris',
'Morrison',
'Morse',
'Mort',
'Mortimer',
'Morton',
'Morty',
'Morwenna',
'Moses',
'Moshe',
'Moss',
'Mostapha',
'Mostyn',
'Moya',
'Moyna',
'Mrena',
'Muhammad',
'Mulan',
'Muliya',
'Muna',
'Mura',
'Muriel',
'Murphy',
'Murray',
'Murron',
'Musoke',
'Mustafa',
'Mutia',
'Mya',
'Mykel',
'Myles',
'Myra',
'Myrilla',
'Myrladis',
'Myrna',
'Myron',
'Myrtle',
'Myson',
'Myte',
'Naal',
'Nada',
'Nadia',
'Nadie',
'Nadina',
'Nadine',
'Naeco',
'Nafis',
'Nafuna',
'Naghmeh',
'Naila',
'Naiser',
'Najee',
'Najla',
'Najmeh',
'Nakeisha',
'Nakima',
'Nalo',
'Nalonnie',
'Namir',
'Nan',
'Nancy',
'Nanette',
'Nani',
'Naoise',
'Naolin',
'Naoll',
'Naomi',
'Napoleon',
'Nara',
'Narain',
'Narcisse',
'Nardo',
'Narelle',
'Nariah',
'Nariko',
'Narma',
'Nascha',
'Naseem',
'Nasha',
'Nasia',
'Nasser',
'Nat',
'Natala',
'Natalia',
'Natalie',
'Nataly & natalya',
'Natane',
'Natasha',
'Nate',
'Natesa',
'Nathalie',
'Nathan',
'Nathanael or nathaniel',
'Natine',
'Natividad',
'Natori',
'Natsu',
'Nature',
'Nav',
'Nava',
'Navarro',
'Naveen',
'Navid',
'Navora',
'Nawal',
'Nayati',
'Nayelis',
'Nayer',
'Naysa',
'Nazli',
'N''dea',
'Neal',
'Nealon',
'Necia',
'Neda',
'Nedim',
'Nedra',
'Neely',
'Neena',
'Neetee',
'Nefertiti',
'Neil',
'Nelia',
'Nell',
'Nellie',
'Nelson',
'Nemesis',
'Nen',
'Nenet',
'Neola',
'Nephtalie',
'Nerina',
'Nerine',
'Nerissa',
'Nerita',
'Nero',
'Nessa',
'Nessan',
'Nestor',
'Netanya',
'Neva',
'Nevada',
'Nevan',
'Neville',
'Newman',
'Neydi',
'Neylan',
'Nia',
'Niabi',
'Niall',
'Niamh',
'Nichelle',
'Nicholai',
'Nicholas',
'Nichole',
'Nick',
'Nicki',
'Nicodemus',
'Nicola',
'Nicole',
'Nicoletta',
'Nicolette',
'Nidia',
'Nieca',
'Niel',
'Nieves',
'Nigel',
'Nijole',
'Nika',
'Nikhil',
'Nikiesha',
'Nikita',
'Nikki',
'Nikkos',
'Nikoi',
'Nikola',
'Nikole',
'Niks',
'Niles',
'Nimeesha',
'Nina',
'Ninfa',
'Ninon',
'Nira',
'Nire',
'Nirel',
'Nishi',
'Nissa',
'Nita',
'Nitin',
'Nitara',
'Nitesh',
'Nitis',
'Niv',
'Nixie',
'Nizana',
'Noah',
'Noam',
'Nodin',
'Noe',
'Noel',
'Noelani',
'Noell',
'Nokomis',
'Nola',
'Nolan',
'Noland',
'Noma',
'Nomar',
'Nomlanga',
'Nona',
'Nonnie',
'Nora',
'Norah',
'Noreen',
'Nori',
'Norina',
'Norm',
'Norma',
'Norman',
'Normandy',
'Norris',
'Norton',
'Norwood',
'Nova',
'Novalee',
'Novia',
'Nowles',
'Noxolo',
'Noya',
'Nuhad',
'Nuncio',
'Nuri',
'Nuru',
'Nya',
'Nyako',
'Nydia',
'Nyeki',
'Nyler',
'Nyoka',
'Nysa',
'Nyx',
'Oafe',
'Oanh',
'Oakes',
'Oakley',
'Obadiah',
'Obedience',
'Oberon',
'Obert',
'Oceana',
'Octavia',
'Octavio',
'Octavious',
'Odele',
'Odelia',
'Odell',
'Odessa',
'Odetta',
'Odette',
'Odile',
'Odina',
'Odysseus',
'Oedipus',
'Ofer',
'Ogden',
'Ogima',
'Ohio',
'Oistin',
'Okal',
'Okalik',
'Okapi',
'Oke',
'Okechuku',
'Okoth',
'Oksana',
'Ola',
'Olaf',
'Olathe',
'Oleg',
'Olesia',
'Olga',
'Oliana',
'Olin',
'Olinda',
'Olive',
'Oliver',
'Olivia',
'Ollie',
'Olympia',
'Oma',
'Omar',
'Ombler',
'Omega',
'Ona',
'Onan',
'Ondette',
'One',
'Oneida',
'Oni',
'Onslow',
'Oona',
'Opa',
'Opal',
'Ophelia',
'Ophira',
'Oprah',
'Ora',
'Oral',
'Oralee',
'Oran',
'Orane',
'Orde',
'Oren',
'Orenda',
'Oria',
'Oriana',
'Oriel',
'Orien',
'Oringo',
'Orino',
'Oriole',
'Orion',
'Orla',
'Orlando',
'Orleans',
'Orlee',
'Orli',
'Orly',
'Orma',
'Ormand',
'Ornice',
'Orrick',
'Orsen',
'Orsin',
'Orson',
'Orton',
'Orville',
'Osanna',
'Osaze',
'Osborn',
'Osborne',
'Oscar',
'Osgood',
'Osias',
'Oskar',
'Osma',
'Osmond',
'Ossian',
'Ossie',
'Oswald',
'Othello',
'Otis',
'Otto',
'Ouray',
'Ova',
'Overton',
'Ovid',
'Owen',
'Ownah',
'Oz',
'Ozzie',
'Pabla',
'Pablo',
'Pace',
'Pacey',
'Packard',
'Paco',
'Paddy',
'Padhraig',
'Padraic',
'Page',
'Paige',
'Paisley',
'Palani',
'Palesa',
'Paley',
'Pallas',
'Palma',
'Palmer',
'Paloma',
'Palti',
'Pamela',
'Pamelalee',
'Pamelia',
'Pammay',
'Pancho',
'Pandora',
'Panfila',
'Paniga',
'Panya',
'Paola',
'Paolo',
'Papina',
'Paris',
'Parisa',
'Parker',
'Parkin',
'Parlan',
'Parley',
'Parrish',
'Parry',
'Parsifal',
'Parson',
'Pascal',
'Pascale',
'Pascha',
'Pasi',
'Patch',
'Patience',
'Patrice',
'Patricia',
'Patrick',
'Patsy',
'Patty',
'Paul',
'Paula',
'Paulette',
'Paulina',
'Pauline',
'Paulo',
'Paulos',
'Pavithra',
'Paxton',
'Payil',
'Payton',
'Paz',
'Peale',
'Pearl',
'Pearlie',
'Pearly',
'Pebbles',
'Pedro',
'Peggy',
'Peivi',
'Pelagia',
'Pelham',
'Pembroke',
'Pena',
'Penelope',
'Penn',
'Penney',
'Pennie',
'Penny',
'Penrod',
'Peony',
'Pepe',
'Pepper',
'Percival',
'Percy',
'Perdita',
'Perdy',
'Peregrine',
'Peri',
'Perrin',
'Perry',
'Pete',
'Peter',
'Petra',
'Petronella',
'Petula',
'Petunia',
'Peyton',
'Phaedra',
'Pharzam',
'Phemia',
'Phenia',
'Phiala',
'Phil',
'Phila',
'Philana',
'Phillia',
'Philo',
'Philopena',
'Philip',
'Phillip',
'Philomena',
'Philyra',
'Phindiwe',
'Phoebe',
'Phoenix',
'Phylicia',
'Phylisia',
'Phyliss',
'Phyllis',
'Phyre',
'Pia',
'Picabo',
'Pier',
'Piera',
'Pierce',
'Pierre',
'Pierrette',
'Pilar',
'Pillan',
'Piper',
'Pirro',
'Piuta',
'Placido',
'Plato',
'Platt',
'Pleasance',
'Plennie',
'Po mya',
'Polly',
'Polo',
'Ponce',
'Poppy',
'Poria',
'Porsha',
'Porter',
'Portia',
'Posy',
'Powa',
'Prentice',
'Prescott',
'Presencia',
'Presley',
'Preston',
'Price',
'Primo',
'Prince',
'Princessa',
'Priscilla',
'Priya',
'Procopia',
'Prudence',
'Prue',
'Prunella',
'Pryderi',
'Psyche',
'Pyralis',
'Qabil',
'Qamar',
'Qiana',
'Qing-jao',
'Quade',
'Quana',
'Quanda',
'Quang',
'Queenie',
'Quella',
'Quennell',
'Quentin',
'Querida',
'Quiana',
'Quilla',
'Quillan',
'Quimby',
'Quin',
'Quincy',
'Quinella',
'Quinlan',
'Quinn',
'Quinta',
'Quintana',
'Quintin',
'Quinto',
'Quinton',
'Quirino',
'Quolan',
'Quon',
'Qwin',
'Rabertina',
'Rabia',
'Rach',
'Rachael',
'Rachel',
'Rachelle',
'Radley',
'Radwan',
'Rae',
'Raeanne',
'Raegan',
'Raemarie',
'Rafael',
'Raffaello',
'Rafi',
'Rai',
'Raimi',
'Rain',
'Raina',
'Raine',
'Rainer',
'Raisa',
'Raja',
'Raleigh',
'Ralph',
'Ram',
'Ramie',
'Ramiro',
'Ramon',
'Ramona',
'Ramses',
'Ranae',
'Randall',
'Randi (alternate forms: randie, randee, randey)',
'Randilyn',
'Randolph',
'Randy',
'Rane',
'Ranee',
'Rania',
'Ranit',
'Raphael',
'Raphaela',
'Raquel',
'Rasha',
'Rashida',
'Rasia',
'Raul',
'Raven',
'Ravi',
'Ray',
'Raymond',
'Raynell',
'Rayya',
'Razi',
'Razvan',
'Rea',
'Read',
'Reagan',
'Reann',
'Reanna',
'Reasha',
'Reba',
'Rebecca',
'Rebekah',
'Red',
'Redell',
'Redford',
'Redina',
'Reed',
'Reent',
'Reese',
'Reeves',
'Regan',
'Regina',
'Reginald',
'Reilly',
'Reina',
'Remedy',
'Rememberance',
'Remi',
'Remick',
'Remington',
'Remy',
'Ren',
'Rena',
'Renata',
'Renate',
'Rene',
'Renee',
'Renny',
'Reth',
'Reuben',
'Reva',
'Revel',
'Revelin',
'Revelpedro',
'Rex',
'Rey',
'Reye',
'Reyna',
'Reynalynn',
'Reynard',
'Reynold',
'Reza',
'Rhasheem',
'Rhea',
'Rhett',
'Rhiannon',
'Rhoda',
'Rhodes',
'Rhona',
'Rhonda',
'Rhoswen',
'Rhylee',
'Rhys',
'Ria',
'Rianna',
'Rianne',
'Riannon',
'Ricardo',
'Rich',
'Richann',
'Richard',
'Ricjunette',
'Rick',
'Rickesha',
'Rico',
'Rider',
'Riene',
'Rigg',
'Riley',
'Rimca',
'Rimona',
'Rin',
'Rina',
'Ringo',
'Riona',
'Riordan',
'Risa',
'Rita',
'Riva',
'River',
'Rivka',
'Rob',
'Robbin',
'Robert',
'Roberta',
'Robin',
'Robyn',
'Rocco',
'Rochelle',
'Rocio',
'Rock',
'Rockne',
'Rockwell',
'Rocky',
'Rod',
'Rodd',
'Roddy',
'Roderick',
'Rodney',
'Roger',
'Roland',
'Rolando',
'Rolf',
'Rollo',
'Romaine',
'Roman',
'Romeo',
'Rona',
'Ronald',
'Ronalee',
'Ronan',
'Ronat',
'Ronda',
'Ronia',
'Ronica',
'Ronisha',
'Ronli',
'Ronna',
'Ronnie',
'Ronny',
'Roosevelt',
'Rori',
'Rory',
'Ros',
'Rosa',
'Rosalba',
'Rosalia',
'Rosalind',
'Rosalita',
'Rosalyn',
'Rosamunde',
'Rose',
'Roseanne',
'Roselani',
'Rosemary',
'Roshaun',
'Rosie',
'Rosine',
'Ross',
'Rossa',
'Rothrock',
'Rowan',
'Rowdy',
'Rowena',
'Roxanne',
'Roy',
'Royce',
'Roz',
'Roza',
'Ruairi',
'Ruana',
'Ruby',
'Rudolph',
'Rudra',
'Rudy',
'Rufina',
'Rufus',
'Ruggiero',
'Rui',
'Rumer',
'Runa',
'Rune',
'Rupert',
'Rupetina',
'Russ',
'Russell',
'Russom',
'Rusti',
'Rusty',
'Ruth',
'Ruza',
'Ryan',
'Rydell',
'Ryder',
'Ryk',
'Ryker',
'Rylan',
'Ryland',
'Rylee',
'Rylie',
'Ryne',
'Ryo',
'Ryoko',
'Saba',
'Sabeeka',
'Sabina',
'Sabine',
'Sabra',
'Sabrina',
'Sachi',
'Sadie',
'Sadiki',
'Sadira',
'Safara',
'Saffron',
'Safina',
'Sage',
'Sahara',
'Saidi',
'Sailor',
'Saja',
'Saku',
'Sakura',
'Sal',
'Salena',
'Salene',
'Sally',
'Salome',
'Salvador',
'Salvatore',
'Sam',
'Samantha',
'Samia',
'Samson',
'Samuel',
'Sana',
'Sandra',
'Sandro',
'Sandy',
'Sanford',
'Sanjay',
'Sanjeet',
'Sanne',
'Santa',
'Santana',
'Santiago',
'Santo',
'Santos',
'Sanyu',
'Sapphire',
'Sara',
'Sarabrynn',
'Sarah',
'Sarahlyn',
'Sarai',
'Saraid',
'Sarama',
'Sarda',
'Sargent',
'Sarissa',
'Sarita',
'Sarki',
'Sarren',
'Sasami',
'Sasha',
'Sasilvia',
'Saskia',
'Satchel',
'Satin',
'Satinka',
'Satori',
'Satu',
'Saul',
'Savanna',
'Savannah',
'Saville',
'Savion',
'Savon',
'Sawyer',
'Saxen',
'Saxon',
'Saxton',
'Sayaan',
'Sayward',
'Scarlet',
'Scarlett',
'Schuyler',
'Schyler',
'Schylor',
'Scot',
'Scott',
'Scout',
'Seamus',
'Sean',
'Seanna',
'Season',
'Sebastian',
'Sebastien',
'Seda',
'Seema',
'Sef',
'Seghen',
'Seiko',
'Sela',
'Selas',
'Selena',
'Selene',
'Selia',
'Selima',
'Selina',
'Selma',
'Sema',
'Semele',
'Semir',
'Semira',
'Senalda',
'Senia',
'Sephora',
'September',
'Sequoia',
'Sera',
'Serafina',
'Serena',
'Serenity',
'Serepta',
'Serge',
'Sergio',
'Serwa',
'Seth',
'Seven',
'Severino',
'Sevinc',
'Seveyn',
'Sevilla',
'Seville',
'Seymour',
'Shacher',
'Shaelynn',
'Shaina',
'Shainah',
'Shakia',
'Shakila',
'Shakir',
'Shakira',
'Shakti',
'Shakur',
'Shakura',
'Shalaidah',
'Shalamar',
'Shalimar',
'Shaman',
'Shamar',
'Shamara',
'Shamira',
'Shamon',
'Shamus',
'Shana',
'Shandi',
'Shandrell',
'Shane',
'Shani',
'Shania',
'Shanity',
'Shanlee',
'Shanna',
'Shannen',
'Shannon',
'Shanon',
'Shante',
'Shantell',
'Shaquille',
'Sharis',
'Sharlene',
'Sharne',
'Sharon',
'Shasa',
'Shaun',
'Shauna',
'Shaunna',
'Shavonda',
'Shavonne',
'Shaw',
'Shawn',
'Shawnda',
'Shawna',
'Shawndell',
'Shay',
'Shea',
'Sheadon',
'Sheba',
'Sheehan',
'Sheena',
'Sheera',
'Sheila',
'Shel',
'Shelby',
'Sheldon',
'Shella',
'Shelley',
'Shelly',
'Shenelle',
'Sheri',
'Sheridan',
'Sherine',
'Sherise',
'Sherisse',
'Sherman',
'Shermel',
'Sherri',
'Sherry',
'Sheryl',
'Shieefera',
'Shiela',
'Shifra',
'Shiloh',
'Shimon',
'Shing',
'Shino',
'Shira',
'Shiran',
'Shiri',
'Shirley',
'Shirlyn',
'Shlomo',
'Shneek',
'Shona',
'Shoshana',
'Shoshanah',
'Shubha',
'Shyan',
'Shyler',
'Sian',
'Sibley',
'Sibyl',
'Sid',
'Sidhartha',
'Sidney',
'Sidonia',
'Sidra',
'Siegfried',
'Sienna',
'Sierra',
'Signa',
'Sigrid',
'Sika',
'Silvain',
'Silvan',
'Silvana',
'Silver',
'Silvio',
'Sim',
'Sima',
'Simba',
'Simeon',
'Simon',
'Simone',
'Sinclair',
'Sine',
'Sinead',
'Sinjin',
'Sinjon',
'Siobhan',
'Sirus',
'Sissy',
'Sivney',
'Skip',
'Skipper',
'Skylar',
'Skyler',
'Slade',
'Sloan',
'Sloane',
'Slone',
'Smedley',
'Smith',
'Snow',
'Snowy',
'Sofia',
'Sohl-bin',
'Sokphorn',
'Sol',
'Solace',
'Solana',
'Solange',
'Solangel',
'Sole',
'Soleil',
'Solomon',
'Son',
'Sondo',
'Sondra',
'Sonel',
'Sonia',
'Sonja',
'Sonnagh',
'Sonora',
'Sonya',
'Sophia',
'Sophie',
'Sora',
'Sorcha',
'Soren',
'Sorley',
'Spence',
'Spencer',
'Speranza',
'Spike',
'Spring',
'Stacey',
'Stacia',
'Stacy',
'Stan',
'Stanislaus',
'Stanislav',
'Stanislaw',
'Stanley',
'Stanton',
'Star',
'Starla',
'Starr',
'Stavros',
'Stefan',
'Stefanie',
'Steffi',
'Steffie',
'Stefon',
'Stella',
'Step',
'Stephan',
'Stephanie',
'Stephen',
'Stephenie',
'Sterling',
'Stesha',
'Steve',
'Steven',
'Stevie',
'Stew',
'Stewart',
'Stillman',
'Stockton',
'Stone',
'Storm',
'Stormy',
'Strom',
'Stu',
'Stuart',
'Studs',
'Sue',
'Sugar',
'Sukey',
'Suki',
'Sulis',
'Sullivan',
'Sully',
'Sumana',
'Summer',
'Sundeep',
'Sunee',
'Sunny',
'Susan',
'Susane',
'Susanna',
'Susannah',
'Susie',
'Sutton',
'Suzanne',
'Suzette',
'Suzy',
'Svein',
'Sveta',
'Sybil',
'Sydnee',
'Sydney',
'Sylest',
'Sylvain',
'Sylvester',
'Sylvia',
'Sylvie',
'Synan',
'Synclair',
'Syshe',
'Ta''ib',
'Tab',
'Taban',
'Taber',
'Tabetha',
'Tabitha',
'Tacita',
'Tacy',
'Tad',
'Tadelesh',
'Tadhg',
'Taffy',
'Tahlib',
'Tai',
'Taifa',
'Tailynn',
'Taima',
'Tait',
'Taja',
'Tajanea',
'Takeshi',
'Tala',
'Talasi',
'Talen',
'Talia',
'Taliesin',
'Taliliikilyit',
'Talisa',
'Talisha',
'Talitha',
'Tallah',
'Tallis',
'Tallulah',
'Talmai',
'Talynn',
'Tam',
'Tama',
'Tamah',
'Tamara',
'Tamasha',
'Tamasine',
'Tamatha',
'Tambre',
'Tamera',
'Tameron',
'Tamika',
'Tamma',
'Tammy',
'Tamra',
'Tamsen',
'Tamsin',
'Tamzin',
'Tana',
'Tandice',
'Tanesia',
'Tania',
'Tanika',
'Tanisha',
'Tanith',
'Tanna',
'Tannar',
'Tanner',
'Tannor',
'Tanya',
'Tao',
'Tara',
'Tarah',
'Taran',
'Tarana',
'Tarek',
'Tarika',
'Tarin',
'Tariq',
'Taru',
'Taryn',
'Tasha',
'Tasida',
'Tasmine',
'Tassos',
'Tate',
'Tatiana',
'Tatum',
'Tauja',
'Taurean',
'Tave',
'Taveon',
'Tavi',
'Tavia',
'Tavish',
'Tavita',
'Tawana',
'Taya or taia',
'Tayla',
'Taylah',
'Taylor',
'Tazara',
'Tea',
'Teagan',
'Teague',
'Teal',
'Tecla',
'Ted',
'Teddy',
'Teenie',
'Tefo',
'Teige',
'Tekevia',
'Teleza',
'Teli',
'Telly',
'Telma',
'Temima',
'Temira',
'Templeton',
'Tenen',
'Tennille',
'Teo',
'Terah',
'Terena',
'Terence',
'Terentia',
'Teresa',
'Terina',
'Termon',
'Terra',
'Terran',
'Terrel',
'Terrence',
'Terris',
'Terry',
'Terryal',
'Tertius',
'Tertullian',
'Teshi',
'Tess',
'Tessa',
'Teva',
'Tevani',
'Tevin',
'Tex',
'Texil',
'Thackery',
'Thad',
'Thaddeus',
'Thadeus',
'Thady',
'Thais',
'Thalassa',
'Thalia',
'Than',
'Thandeka',
'Thane',
'Thanh',
'Thatcher',
'Thayer',
'Thea',
'Thel',
'Thelma',
'Thema',
'Themba',
'Theo',
'Theodora',
'Theodore',
'Theresa',
'Therese',
'Thina',
'Thom',
'Thomas',
'Thomasina',
'Thor',
'Thora',
'Thorin',
'Thornton',
'Thrine',
'Thron',
'Thurman',
'Thuy',
'Thyra',
'Tia',
'Tiana & tiannah',
'Tiara',
'Tiaret',
'Tiassale',
'Tidus',
'Tiere',
'Tierney',
'Tiffany',
'Tilden',
'Tillie',
'Tilly',
'Tim',
'Timothy',
'Timu',
'Tina',
'Tino',
'Tip',
'Tirza',
'Tirzah',
'Tisha',
'Titan',
'Titus',
'Tivona',
'Toan',
'Toben',
'Tobin',
'Tobit',
'Toby',
'Tod',
'Todd',
'Toki',
'Tolla',
'Tom',
'Tomas',
'Tommy',
'Tomo',
'Tonette',
'Toni',
'Tony',
'Tonya',
'Topaz',
'Topaza',
'Topo',
'Topper',
'Tori',
'Torie',
'Torn',
'Torrance',
'Torrin',
'Tory',
'Tosca',
'Tosha',
'Toshi',
'Toshia',
'Totie',
'Tova',
'Tovah',
'Tovi',
'Townsend',
'Toya',
'Toyah',
'Tracey',
'Tracie',
'Tracy',
'Traelic-an',
'Trahern',
'Tranquilla',
'Trapper',
'Trava',
'Travis',
'Traven',
'Trella',
'Trent',
'Trenton',
'Tressa',
'Tresure',
'Trevon',
'Trevor',
'Trey',
'Tricia',
'Trilby',
'Trina',
'Trinady',
'Trini',
'Trinity',
'Trish',
'Trisha',
'Trista',
'Tristan',
'Tristana',
'Tristessa',
'Tristram',
'Trixie',
'Trory',
'Troy',
'Truda',
'Trude',
'Trudy',
'Truitt',
'Trula',
'Truly',
'Truman',
'Tryphena',
'Tucker',
'Tudor',
'Tuesday',
'Tulla',
'Tully',
'Tumo',
'Tuyen',
'Twila',
'Twyla',
'Ty',
'Tyan',
'Tyanne',
'Tybal',
'Tyler',
'Tylynn',
'Tyme',
'Tyne',
'Tynley',
'Tyra',
'Tyree',
'Tyrell',
'Tyrick',
'Tyriq',
'Tyrone',
'Tyrus',
'Tyson',
'Uang',
'Uba',
'Uday',
'Ugo',
'Ujana',
'Ukiah',
'Ula',
'Ulan',
'Ulani',
'Ulema',
'Ulf',
'Ull',
'Ulla',
'Ulric',
'Ulysses',
'Uma',
'Umay',
'Umberto',
'Umeko',
'Umi',
'Ummi',
'Una',
'Unity',
'Upendo',
'Urania',
'Urbain',
'Urban',
'Uri',
'Uriah',
'Uriel',
'Urilla',
'Urit',
'Ursa',
'Ursala',
'Ursula',
'Uta',
'Utana',
'Ute',
'Utina',
'Uzma',
'Vail',
'Val',
'Vala',
'Valarie',
'Valbona',
'Valeda',
'Valencia',
'Valene',
'Valentina',
'Valentine',
'Valeria',
'Valerie',
'Valeska',
'Valiant',
'Vallerie',
'Valtina',
'Valyn',
'Van',
'Vance',
'Vandalin',
'Vanessa',
'Vangie',
'Vanna',
'Varae',
'Varen',
'Vaschel',
'Vashti',
'Vashni',
'Vatusia',
'Vaughan',
'Vaughn',
'Vea',
'Veasna',
'Veda',
'Vega',
'Velaura',
'Velma',
'Venedict',
'Venetia',
'Vera',
'Verda',
'Verdi',
'Vern',
'Verna',
'Verne',
'Verneil',
'Vernon',
'Veronica',
'Vesta',
'Vevay',
'Vevina',
'Vi',
'Vianey',
'Vic',
'Vicki',
'Vicky',
'Victor',
'Victoria',
'Vida',
'Vidal',
'Vidor',
'Vienna',
'Vila',
'Vince',
'Vincent',
'Vine',
'Vinnie',
'Vinny',
'Vinson',
'Viola',
'Violet',
'Virgil',
'Virgina',
'Virginia',
'Visola',
'Vita',
'Vitalis',
'Vito',
'Vittorio',
'Vivek',
'Vivi',
'Vivian',
'Viviana',
'Vivienne',
'Vlad',
'Vladimir',
'Volleny',
'Von',
'Vonda',
'Vondila',
'Vondra',
'Vonette',
'Vonna',
'Vui',
'Wade',
'Wafa',
'Waggoner',
'Walda',
'Waldo',
'Walker',
'Wallace',
'Walt',
'Walta',
'Walter',
'Wanda',
'Waneta',
'Ward',
'Warner',
'Warren',
'Wasaki',
'Washi',
'Washington',
'Watson',
'Waverly',
'Wayne',
'Webster',
'Weldon',
'Wence',
'Wenceslaus',
'Wenda',
'Wendell',
'Wendi',
'Wendy',
'Werner',
'Wes',
'Wesley',
'Weston',
'Wheeler',
'Whitby',
'Whitfield',
'Whitley',
'Whitney',
'Wilbur',
'Wiley',
'Wilford',
'Wilfred',
'Wilfredo',
'Wilhelmina',
'Will',
'Willa',
'Willard',
'Willem',
'William',
'Williams',
'Willis',
'Willow',
'Wilma',
'Wilson',
'Wilton',
'Win',
'Winda',
'Winfred',
'Winifred',
'Winona',
'Winson',
'Winslow',
'Winston',
'Winta',
'Winter',
'Winthrop',
'Wolfgang',
'Wood',
'Woodrow',
'Woods',
'Woody',
'Worden',
'Wrangler',
'Wyanet',
'Wyatt',
'Wyman',
'Wynn',
'Wynne',
'Wynona',
'Wyome',
'Xander',
'Xandy',
'Xanthe',
'Xanthus',
'Xanto',
'Xavier',
'Xaviera',
'Xena',
'Xenia',
'Xenos',
'Xentrie',
'Xerxes',
'Xi-wang',
'Xinavane',
'Xolani',
'Xuxa',
'Xylon',
'Yachi',
'Yadid',
'Yael',
'Yaholo',
'Yahto',
'Yair',
'Yaksh or yakchh',
'Yale',
'Yamal',
'Yamin',
'Yana',
'Yancy',
'Yank',
'Yanka',
'Yanni',
'Yannis',
'Yardan',
'Yardley',
'Yaro',
'Yaron',
'Yaser',
'Yasin',
'Yasmin',
'Yasnery',
'Yasuo',
'Yates',
'Ye',
'Yeardleigh',
'Yehudi',
'Yelena',
'Yen',
'Yenge',
'Yepa',
'Yered',
'Yeriel',
'Yesenia',
'Yestin',
'Yetty',
'Yeva',
'Yihana',
'Yitro',
'Yitta',
'Ymir',
'Yo',
'Yogi',
'Yoko',
'Yoland',
'Yolanda',
'Yomonda',
'Yonah',
'Yoninah',
'Yorick',
'York',
'Yosef',
'Yosefu',
'Yōshi',
'Yoshi',
'Yoshino',
'Ysabel',
'Ysanne',
'Yuk',
'Yuki',
'Yul',
'Yule',
'Yuma',
'Yuri',
'Yuval',
'Yves',
'Yvette',
'Yvon',
'Yvonne',
'Zaccheus',
'Zach',
'Zachariah',
'Zachary',
'Zaci',
'Zada',
'Zafira',
'Zahava',
'Zahur',
'Zaida',
'Zaide',
'Zaido',
'Zaila',
'Zainab',
'Zaira',
'Zaire',
'Zaki',
'Zak''nefein',
'Zalman',
'Zan',
'Zane',
'Zanna',
'Zara',
'Zareb',
'Zared',
'Zareh',
'Zarek',
'Zarifa',
'Zarina',
'Zarren',
'Zavad',
'Zaybian',
'Zaylyn',
'Zayn',
'Zayne',
'Zayo',
'Zaza',
'Zazu',
'Zbigniew',
'Ze''ev',
'Zea',
'Zeb',
'Zebidy',
'Zebulon',
'Zechariah',
'Zechuriah',
'Zed',
'Zef',
'Zeheb',
'Zeke',
'Zeki',
'Zelda',
'Zelia',
'Zelig',
'Zena',
'Zenas',
'Zene',
'Zenia',
'Zenobia',
'Zenon',
'Zephan',
'Zephiniah',
'Zeppelin',
'Zesiro',
'Zev',
'Zia',
'Ziazan',
'Ziggy',
'Zikomo',
'Zili',
'Zilli',
'Zimri',
'Zinna',
'Zinnia',
'Zion',
'Ziraili',
'Zita',
'Ziv',
'Ziva',
'Zivan',
'Ziven',
'Ziya',
'Zizi',
'Zo',
'Zoan',
'Zoe',
'Zoey',
'Zofia',
'Zohar',
'Zoie',
'Zola',
'Zolen',
'Zoltan',
'Zoltin',
'Zona',
'Zontee',
'Zorada',
'Zoraida',
'Zsa zsa',
'Zsuzsanna',
'Zula',
'Zuleika',
'Zulema',
'Zuriel',
'Zyta', ]<|fim▁end|>
|
'Elle',
|
<|file_name|>MeanAndStandardDeviation.java<|end_file_name|><|fim▁begin|>/* ===========================================================
* JFreeChart : a free chart library for the Java(tm) platform
* ===========================================================
*
* (C) Copyright 2000-2016, by Object Refinery Limited and Contributors.
*
* Project Info: http://www.jfree.org/jfreechart/index.html
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation; either version 2.1 of the License, or
* (at your option) any later version.
*
* This library is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
* USA.
*
* [Oracle and Java are registered trademarks of Oracle and/or its affiliates.
* Other names may be trademarks of their respective owners.]
*
* -----------------------------
* MeanAndStandardDeviation.java
* -----------------------------
* (C) Copyright 2003-2008, by Object Refinery Limited.
*
* Original Author: David Gilbert (for Object Refinery Limited);
* Contributor(s): -;
*
* Changes:
* --------
* 05-Feb-2002 : Version 1 (DG);
* 05-Feb-2005 : Added equals() method and implemented Serializable (DG);
* 02-Oct-2007 : Added getMeanValue() and getStandardDeviationValue() methods
* for convenience, and toString() method for debugging (DG);
*
*/
package org.jfree.data.statistics;
import java.io.Serializable;
import org.jfree.util.ObjectUtilities;
/**
* A simple data structure that holds a mean value and a standard deviation
* value. This is used in the
* {@link org.jfree.data.statistics.DefaultStatisticalCategoryDataset} class.
*/
public class MeanAndStandardDeviation implements Serializable {
/** For serialization. */
private static final long serialVersionUID = 7413468697315721515L;
/** The mean. */
private Number mean;
/** The standard deviation. */
private Number standardDeviation;
/**
* Creates a new mean and standard deviation record.
*
* @param mean the mean.
* @param standardDeviation the standard deviation.
*/
public MeanAndStandardDeviation(double mean, double standardDeviation) {
this(new Double(mean), new Double(standardDeviation));
}
/**
* Creates a new mean and standard deviation record.
*
* @param mean the mean ({@code null} permitted).
* @param standardDeviation the standard deviation ({@code null}
* permitted.
*/
    public MeanAndStandardDeviation(Number mean, Number standardDeviation) {
        // Both values are stored as supplied; null is permitted for either.
        this.mean = mean;
        this.standardDeviation = standardDeviation;
    }
/**
* Returns the mean.
*
* @return The mean.
*/
    public Number getMean() {
        // May be null when no mean was supplied to the constructor.
        return this.mean;
    }
/**
<|fim▁hole|> *
* @see #getMean()
*
* @since 1.0.7
*/
public double getMeanValue() {
double result = Double.NaN;
if (this.mean != null) {
result = this.mean.doubleValue();
}
return result;
}
/**
* Returns the standard deviation.
*
* @return The standard deviation.
*/
    public Number getStandardDeviation() {
        // May be null when no standard deviation was supplied.
        return this.standardDeviation;
    }
/**
* Returns the standard deviation as a double primitive. If the underlying
* standard deviation is {@code null}, this method will return
* {@code Double.NaN}.
*
* @return The standard deviation.
*
* @since 1.0.7
*/
public double getStandardDeviationValue() {
double result = Double.NaN;
if (this.standardDeviation != null) {
result = this.standardDeviation.doubleValue();
}
return result;
}
/**
* Tests this instance for equality with an arbitrary object.
*
* @param obj the object ({@code null} permitted).
*
* @return A boolean.
*/
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof MeanAndStandardDeviation)) {
return false;
}
MeanAndStandardDeviation that = (MeanAndStandardDeviation) obj;
if (!ObjectUtilities.equal(this.mean, that.mean)) {
return false;
}
if (!ObjectUtilities.equal(
this.standardDeviation, that.standardDeviation)
) {
return false;
}
return true;
}
/**
* Returns a string representing this instance.
*
* @return A string.
*
* @since 1.0.7
*/
@Override
public String toString() {
return "[" + this.mean + ", " + this.standardDeviation + "]";
}
}<|fim▁end|>
|
* Returns the mean as a double primitive. If the underlying mean is
* {@code null}, this method will return {@code Double.NaN}.
*
* @return The mean.
|
<|file_name|>rbac.py<|end_file_name|><|fim▁begin|>#
# Copyright 2012 New Dream Network, LLC (DreamHost)
# Copyright 2014 Hewlett-Packard Company
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># under the License.
"""Access Control Lists (ACL's) control access the API server."""
import pecan
from ceilometer.openstack.common import policy
_ENFORCER = None
def enforce(policy_name, request):
"""Return the user and project the request should be limited to.
:param request: HTTP request
:param policy_name: the policy name to validate authz against.
"""
global _ENFORCER
if not _ENFORCER:
_ENFORCER = policy.Enforcer()
_ENFORCER.load_rules()
rule_method = "telemetry:" + policy_name
headers = request.headers
policy_dict = dict()
policy_dict['roles'] = headers.get('X-Roles', "").split(",")
policy_dict['target.user_id'] = (headers.get('X-User-Id'))
policy_dict['target.project_id'] = (headers.get('X-Project-Id'))
for rule_name in _ENFORCER.rules.keys():
if rule_method == rule_name:
if not _ENFORCER.enforce(
rule_name,
{},
policy_dict):
pecan.core.abort(status_code=403,
detail='RBAC Authorization Failed')
# TODO(fabiog): these methods are still used because the scoping part is really
# convoluted and difficult to separate out.
def get_limited_to(headers):
"""Return the user and project the request should be limited to.
:param headers: HTTP headers dictionary
:return: A tuple of (user, project), set to None if there's no limit on
one of these.
"""
global _ENFORCER
if not _ENFORCER:
_ENFORCER = policy.Enforcer()
_ENFORCER.load_rules()
policy_dict = dict()
policy_dict['roles'] = headers.get('X-Roles', "").split(",")
policy_dict['target.user_id'] = (headers.get('X-User-Id'))
policy_dict['target.project_id'] = (headers.get('X-Project-Id'))
if not _ENFORCER.enforce('segregation',
{},
policy_dict):
return headers.get('X-User-Id'), headers.get('X-Project-Id')
return None, None
def get_limited_to_project(headers):
    """Return the project the request should be limited to.

    :param headers: HTTP headers dictionary
    :return: A project id, or None if there's no limit on it.
    """
    _user, project = get_limited_to(headers)
    return project
|
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
|
<|file_name|>document.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use devtools_traits::ScriptToDevtoolsControlMsg;
use document_loader::{DocumentLoader, LoadType};
use dom::activation::{ActivationSource, synthetic_click_activation};
use dom::attr::Attr;
use dom::beforeunloadevent::BeforeUnloadEvent;
use dom::bindings::callback::ExceptionHandling;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::DOMRectBinding::DOMRectMethods;
use dom::bindings::codegen::Bindings::DocumentBinding;
use dom::bindings::codegen::Bindings::DocumentBinding::{DocumentMethods, DocumentReadyState};
use dom::bindings::codegen::Bindings::ElementBinding::ElementMethods;
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::Bindings::EventHandlerBinding::OnErrorEventHandlerNonNull;
use dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementBinding::HTMLIFrameElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::codegen::Bindings::NodeFilterBinding::NodeFilter;
use dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceMethods;
use dom::bindings::codegen::Bindings::TouchBinding::TouchMethods;
use dom::bindings::codegen::Bindings::WindowBinding::{FrameRequestCallback, ScrollBehavior, WindowMethods};
use dom::bindings::codegen::UnionTypes::NodeOrString;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::{Castable, ElementTypeId, HTMLElementTypeId, NodeTypeId};
use dom::bindings::js::{JS, LayoutJS, MutNullableJS, Root};
use dom::bindings::js::RootedReference;
use dom::bindings::num::Finite;
use dom::bindings::refcounted::{Trusted, TrustedPromise};
use dom::bindings::reflector::{DomObject, reflect_dom_object};
use dom::bindings::str::{DOMString, USVString};
use dom::bindings::xmlname::{namespace_from_domstring, validate_and_extract, xml_name_type};
use dom::bindings::xmlname::XMLName::InvalidXMLName;
use dom::browsingcontext::BrowsingContext;
use dom::closeevent::CloseEvent;
use dom::comment::Comment;
use dom::customevent::CustomEvent;
use dom::documentfragment::DocumentFragment;
use dom::documenttype::DocumentType;
use dom::domimplementation::DOMImplementation;
use dom::element::{Element, ElementCreator, ElementPerformFullscreenEnter, ElementPerformFullscreenExit};
use dom::errorevent::ErrorEvent;
use dom::event::{Event, EventBubbles, EventCancelable, EventDefault, EventStatus};
use dom::eventtarget::EventTarget;
use dom::focusevent::FocusEvent;
use dom::forcetouchevent::ForceTouchEvent;
use dom::globalscope::GlobalScope;
use dom::hashchangeevent::HashChangeEvent;
use dom::htmlanchorelement::HTMLAnchorElement;
use dom::htmlappletelement::HTMLAppletElement;
use dom::htmlareaelement::HTMLAreaElement;
use dom::htmlbaseelement::HTMLBaseElement;
use dom::htmlbodyelement::HTMLBodyElement;
use dom::htmlcollection::{CollectionFilter, HTMLCollection};
use dom::htmlelement::HTMLElement;
use dom::htmlembedelement::HTMLEmbedElement;
use dom::htmlformelement::HTMLFormElement;
use dom::htmlheadelement::HTMLHeadElement;
use dom::htmlhtmlelement::HTMLHtmlElement;
use dom::htmliframeelement::HTMLIFrameElement;
use dom::htmlimageelement::HTMLImageElement;
use dom::htmlscriptelement::{HTMLScriptElement, ScriptResult};
use dom::htmltitleelement::HTMLTitleElement;
use dom::keyboardevent::KeyboardEvent;
use dom::location::Location;
use dom::messageevent::MessageEvent;
use dom::mouseevent::MouseEvent;
use dom::node::{self, CloneChildrenFlag, Node, NodeDamage, window_from_node, IS_IN_DOC, LayoutNodeHelpers};
use dom::nodeiterator::NodeIterator;
use dom::nodelist::NodeList;
use dom::pagetransitionevent::PageTransitionEvent;
use dom::popstateevent::PopStateEvent;
use dom::processinginstruction::ProcessingInstruction;
use dom::progressevent::ProgressEvent;
use dom::promise::Promise;
use dom::range::Range;
use dom::servoparser::ServoParser;
use dom::storageevent::StorageEvent;
use dom::stylesheetlist::StyleSheetList;
use dom::text::Text;
use dom::touch::Touch;
use dom::touchevent::TouchEvent;
use dom::touchlist::TouchList;
use dom::treewalker::TreeWalker;
use dom::uievent::UIEvent;
use dom::webglcontextevent::WebGLContextEvent;
use dom::window::{ReflowReason, Window};
use encoding::EncodingRef;
use encoding::all::UTF_8;
use euclid::point::Point2D;
use gfx_traits::ScrollRootId;
use html5ever_atoms::{LocalName, QualName};
use hyper::header::{Header, SetCookie};
use hyper_serde::Serde;
use ipc_channel::ipc::{self, IpcSender};
use js::jsapi::{JSContext, JSObject, JSRuntime};
use js::jsapi::JS_GetRuntime;
use msg::constellation_msg::{ALT, CONTROL, SHIFT, SUPER};
use msg::constellation_msg::{FrameId, Key, KeyModifiers, KeyState};
use net_traits::{FetchResponseMsg, IpcSend, ReferrerPolicy};
use net_traits::CookieSource::NonHTTP;
use net_traits::CoreResourceMsg::{GetCookiesForUrl, SetCookiesForUrl};
use net_traits::request::RequestInit;
use net_traits::response::HttpsState;
use num_traits::ToPrimitive;
use origin::Origin;
use script_layout_interface::message::{Msg, ReflowQueryType};
use script_runtime::{CommonScriptMsg, ScriptThreadEventCategory};
use script_thread::{MainThreadScriptMsg, Runnable};
use script_traits::{AnimationState, CompositorEvent, DocumentActivity};
use script_traits::{MouseButton, MouseEventType, MozBrowserEvent};
use script_traits::{ScriptMsg as ConstellationMsg, TouchpadPressurePhase};
use script_traits::{TouchEventType, TouchId};
use script_traits::UntrustedNodeAddress;
use servo_atoms::Atom;
use servo_config::prefs::PREFS;
use servo_url::ServoUrl;
use std::ascii::AsciiExt;
use std::borrow::ToOwned;
use std::cell::{Cell, Ref, RefMut};
use std::collections::{HashMap, VecDeque};
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::default::Default;
use std::iter::once;
use std::mem;
use std::rc::Rc;
use std::sync::Arc;
use std::time::{Duration, Instant};
use style::attr::AttrValue;
use style::context::{QuirksMode, ReflowGoal};
use style::restyle_hints::{RestyleHint, RESTYLE_STYLE_ATTRIBUTE};
use style::selector_parser::{RestyleDamage, Snapshot};
use style::str::{HTML_SPACE_CHARACTERS, split_html_space_chars, str_join};
use style::stylesheets::Stylesheet;
use task_source::TaskSource;
use time;
use url::percent_encoding::percent_decode;
/// Outcome of dispatching a touch event to this document.
pub enum TouchEventResult {
    // Handled in this document; the flag presumably reports whether the
    // event's default action is still allowed — TODO confirm at call sites.
    Processed(bool),
    // Target was an iframe; the event was forwarded to the child pipeline.
    Forwarded,
}
/// Whether a document is an HTML document or a non-HTML (e.g. XML) document.
#[derive(Clone, Copy, Debug, HeapSizeOf, JSTraceable, PartialEq)]
pub enum IsHTMLDocument {
    HTMLDocument,
    NonHTMLDocument,
}
/// A stylesheet together with the document node it is associated with.
#[derive(JSTraceable, HeapSizeOf)]
#[must_root]
pub struct StylesheetInDocument {
    /// The node the stylesheet belongs to.
    pub node: JS<Node>,
    /// The parsed stylesheet itself.
    #[ignore_heap_size_of = "Arc"]
    pub stylesheet: Arc<Stylesheet>,
}
/// Restyle information accumulated for a single element since the last restyle.
#[derive(Debug, HeapSizeOf)]
pub struct PendingRestyle {
    /// If this element had a state or attribute change since the last restyle, track
    /// the original condition of the element.
    pub snapshot: Option<Snapshot>,
    /// Any explicit restyles hints that have been accumulated for this element.
    pub hint: RestyleHint,
    /// Any explicit restyles damage that have been accumulated for this element.
    pub damage: RestyleDamage,
}
impl PendingRestyle {
    /// Create an empty `PendingRestyle`: no snapshot, no hints, no damage.
    pub fn new() -> Self {
        PendingRestyle {
            snapshot: None,
            hint: RestyleHint::empty(),
            damage: RestyleDamage::empty(),
        }
    }
}
/// https://dom.spec.whatwg.org/#document
#[dom_struct]
pub struct Document {
    node: Node,
    /// The window this document belongs to.
    window: JS<Window>,
    implementation: MutNullableJS<DOMImplementation>,
    location: MutNullableJS<Location>,
    /// The document's content type.
    content_type: DOMString,
    /// The document's last-modified string, if any.
    last_modified: Option<String>,
    /// The document's current character encoding.
    encoding: Cell<EncodingRef>,
    /// Whether this document has an associated browsing context.
    has_browsing_context: bool,
    is_html_document: bool,
    /// The document's activity level (fully active / active / inactive).
    activity: Cell<DocumentActivity>,
    /// https://dom.spec.whatwg.org/#concept-document-url
    url: DOMRefCell<ServoUrl>,
    quirks_mode: Cell<QuirksMode>,
    /// Caches for the getElement methods
    id_map: DOMRefCell<HashMap<Atom, Vec<JS<Element>>>>,
    tag_map: DOMRefCell<HashMap<LocalName, JS<HTMLCollection>>>,
    tagns_map: DOMRefCell<HashMap<QualName, JS<HTMLCollection>>>,
    classes_map: DOMRefCell<HashMap<Vec<Atom>, JS<HTMLCollection>>>,
    // Lazily-created collections returned by the corresponding accessors.
    images: MutNullableJS<HTMLCollection>,
    embeds: MutNullableJS<HTMLCollection>,
    links: MutNullableJS<HTMLCollection>,
    forms: MutNullableJS<HTMLCollection>,
    scripts: MutNullableJS<HTMLCollection>,
    anchors: MutNullableJS<HTMLCollection>,
    applets: MutNullableJS<HTMLCollection>,
    /// List of stylesheets associated with nodes in this document. |None| if the list needs to be refreshed.
    stylesheets: DOMRefCell<Option<Vec<StylesheetInDocument>>>,
    /// Whether the list of stylesheets has changed since the last reflow was triggered.
    stylesheets_changed_since_reflow: Cell<bool>,
    stylesheet_list: MutNullableJS<StyleSheetList>,
    /// https://html.spec.whatwg.org/multipage/#current-document-readiness
    ready_state: Cell<DocumentReadyState>,
    /// Whether the DOMContentLoaded event has already been dispatched.
    domcontentloaded_dispatched: Cell<bool>,
    /// The element that has most recently requested focus for itself.
    possibly_focused: MutNullableJS<Element>,
    /// The element that currently has the document focus context.
    focused: MutNullableJS<Element>,
    /// The script element that is currently executing.
    current_script: MutNullableJS<HTMLScriptElement>,
    /// https://html.spec.whatwg.org/multipage/#pending-parsing-blocking-script
    pending_parsing_blocking_script: DOMRefCell<Option<PendingScript>>,
    /// Number of stylesheets that block executing the next parser-inserted script
    script_blocking_stylesheets_count: Cell<u32>,
    /// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-when-the-document-has-finished-parsing
    deferred_scripts: PendingInOrderScriptVec,
    /// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-in-order-as-soon-as-possible
    asap_in_order_scripts_list: PendingInOrderScriptVec,
    /// https://html.spec.whatwg.org/multipage/#set-of-scripts-that-will-execute-as-soon-as-possible
    asap_scripts_set: DOMRefCell<Vec<JS<HTMLScriptElement>>>,
    /// https://html.spec.whatwg.org/multipage/#concept-n-noscript
    /// True if scripting is enabled for all scripts in this document
    scripting_enabled: bool,
    /// https://html.spec.whatwg.org/multipage/#animation-frame-callback-identifier
    /// Current identifier of animation frame callback
    animation_frame_ident: Cell<u32>,
    /// https://html.spec.whatwg.org/multipage/#list-of-animation-frame-callbacks
    /// List of animation frame callbacks
    animation_frame_list: DOMRefCell<Vec<(u32, Option<AnimationFrameCallback>)>>,
    /// Whether we're in the process of running animation callbacks.
    ///
    /// Tracking this is not necessary for correctness. Instead, it is an optimization to avoid
    /// sending needless `ChangeRunningAnimationsState` messages to the compositor.
    running_animation_callbacks: Cell<bool>,
    /// Tracks all outstanding loads related to this document.
    loader: DOMRefCell<DocumentLoader>,
    /// The current active HTML parser, to allow resuming after interruptions.
    current_parser: MutNullableJS<ServoParser>,
    /// When we should kick off a reflow. This happens during parsing.
    reflow_timeout: Cell<Option<u64>>,
    /// The cached first `base` element with an `href` attribute.
    base_element: MutNullableJS<HTMLBaseElement>,
    /// This field is set to the document itself for inert documents.
    /// https://html.spec.whatwg.org/multipage/#appropriate-template-contents-owner-document
    appropriate_template_contents_owner_document: MutNullableJS<Document>,
    /// Information on elements needing restyle to ship over to the layout thread when the
    /// time comes.
    pending_restyles: DOMRefCell<HashMap<JS<Element>, PendingRestyle>>,
    /// This flag will be true if layout suppressed a reflow attempt that was
    /// needed in order for the page to be painted.
    needs_paint: Cell<bool>,
    /// http://w3c.github.io/touch-events/#dfn-active-touch-point
    active_touch_points: DOMRefCell<Vec<JS<Touch>>>,
    /// Navigation Timing properties:
    /// https://w3c.github.io/navigation-timing/#sec-PerformanceNavigationTiming
    dom_loading: Cell<u64>,
    dom_interactive: Cell<u64>,
    dom_content_loaded_event_start: Cell<u64>,
    dom_content_loaded_event_end: Cell<u64>,
    dom_complete: Cell<u64>,
    load_event_start: Cell<u64>,
    load_event_end: Cell<u64>,
    /// https://html.spec.whatwg.org/multipage/#concept-document-https-state
    https_state: Cell<HttpsState>,
    /// Current phase of any touchpad (force-touch) pressure interaction.
    touchpad_pressure_phase: Cell<TouchpadPressurePhase>,
    /// The document's origin.
    origin: Origin,
    /// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-states
    referrer_policy: Cell<Option<ReferrerPolicy>>,
    /// https://html.spec.whatwg.org/multipage/#dom-document-referrer
    referrer: Option<String>,
    /// https://html.spec.whatwg.org/multipage/#target-element
    target_element: MutNullableJS<Element>,
    /// https://w3c.github.io/uievents/#event-type-dblclick
    /// Time and position of the last click, used for double-click detection.
    #[ignore_heap_size_of = "Defined in std"]
    last_click_info: DOMRefCell<Option<(Instant, Point2D<f32>)>>,
    /// https://html.spec.whatwg.org/multipage/#ignore-destructive-writes-counter
    ignore_destructive_writes_counter: Cell<u32>,
    /// Track the total number of elements in this DOM's tree.
    /// This is sent to the layout thread every time a reflow is done;
    /// layout uses this to determine if the gains from parallel layout will be worth the overhead.
    ///
    /// See also: https://github.com/servo/servo/issues/10110
    dom_count: Cell<u32>,
    /// Entry node for fullscreen.
    fullscreen_element: MutNullableJS<Element>,
}
/// Collection filter matching `HTMLImageElement`s (backs `document.images`).
#[derive(JSTraceable, HeapSizeOf)]
struct ImagesFilter;
impl CollectionFilter for ImagesFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLImageElement>()
    }
}
/// Collection filter matching `HTMLEmbedElement`s (backs `document.embeds`).
#[derive(JSTraceable, HeapSizeOf)]
struct EmbedsFilter;
impl CollectionFilter for EmbedsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLEmbedElement>()
    }
}
/// Collection filter matching `a`/`area` elements that carry an `href`
/// attribute (backs `document.links`).
#[derive(JSTraceable, HeapSizeOf)]
struct LinksFilter;
impl CollectionFilter for LinksFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        (elem.is::<HTMLAnchorElement>() || elem.is::<HTMLAreaElement>()) &&
        elem.has_attribute(&local_name!("href"))
    }
}
/// Collection filter matching `HTMLFormElement`s (backs `document.forms`).
#[derive(JSTraceable, HeapSizeOf)]
struct FormsFilter;
impl CollectionFilter for FormsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLFormElement>()
    }
}
/// Collection filter matching `HTMLScriptElement`s (backs `document.scripts`).
#[derive(JSTraceable, HeapSizeOf)]
struct ScriptsFilter;
impl CollectionFilter for ScriptsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLScriptElement>()
    }
}
/// Collection filter matching anchor elements with an `href` attribute
/// (backs `document.anchors`).
#[derive(JSTraceable, HeapSizeOf)]
struct AnchorsFilter;
impl CollectionFilter for AnchorsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLAnchorElement>() && elem.has_attribute(&local_name!("href"))
    }
}
/// Collection filter matching `HTMLAppletElement`s (backs `document.applets`).
#[derive(JSTraceable, HeapSizeOf)]
struct AppletsFilter;
impl CollectionFilter for AppletsFilter {
    fn filter(&self, elem: &Element, _root: &Node) -> bool {
        elem.is::<HTMLAppletElement>()
    }
}
impl Document {
/// Immutable access to this document's `DocumentLoader`.
#[inline]
pub fn loader(&self) -> Ref<DocumentLoader> {
    self.loader.borrow()
}
/// Mutable access to this document's `DocumentLoader`.
#[inline]
pub fn mut_loader(&self) -> RefMut<DocumentLoader> {
    self.loader.borrow_mut()
}
/// https://html.spec.whatwg.org/multipage/#concept-document-bc
/// Returns the window's browsing context, or `None` if this document has
/// no browsing context.
#[inline]
pub fn browsing_context(&self) -> Option<Root<BrowsingContext>> {
    if self.has_browsing_context {
        Some(self.window.browsing_context())
    } else {
        None
    }
}
/// The window this document belongs to.
#[inline]
pub fn window(&self) -> &Window {
    &*self.window
}
/// Whether this is an HTML document (as opposed to e.g. an XML document).
#[inline]
pub fn is_html_document(&self) -> bool {
    self.is_html_document
}
/// Update the document's HTTPS state and fire the mozbrowser
/// security-change event.
pub fn set_https_state(&self, https_state: HttpsState) {
    self.https_state.set(https_state);
    self.trigger_mozbrowser_event(MozBrowserEvent::SecurityChange(https_state));
}
/// Whether the document is fully active.
pub fn is_fully_active(&self) -> bool {
    self.activity.get() == DocumentActivity::FullyActive
}
/// Whether the document is active at all (i.e. not inactive).
pub fn is_active(&self) -> bool {
    self.activity.get() != DocumentActivity::Inactive
}
/// Set the document's activity level. Becoming fully active refreshes the
/// title, dirties all nodes, forces a reflow, and resumes timers; any other
/// transition suspends timers.
pub fn set_activity(&self, activity: DocumentActivity) {
    // This function should only be called on documents with a browsing context
    assert!(self.has_browsing_context);
    // Set the document's activity level, reflow if necessary, and suspend or resume timers.
    if activity != self.activity.get() {
        self.activity.set(activity);
        if activity == DocumentActivity::FullyActive {
            self.title_changed();
            self.dirty_all_nodes();
            self.window().reflow(
                ReflowGoal::ForDisplay,
                ReflowQueryType::NoQuery,
                ReflowReason::CachedPageNeededReflow
            );
            self.window().resume();
        } else {
            self.window().suspend();
        }
    }
}
/// The document's origin.
pub fn origin(&self) -> &Origin {
    &self.origin
}
// https://dom.spec.whatwg.org/#concept-document-url
/// A clone of the document's current URL.
pub fn url(&self) -> ServoUrl {
    self.url.borrow().clone()
}
/// Replace the document's URL.
pub fn set_url(&self, url: ServoUrl) {
    *self.url.borrow_mut() = url;
}
// https://html.spec.whatwg.org/multipage/#fallback-base-url
/// The fallback base URL; currently just the document URL (steps 1-2 are
/// unimplemented, see #4767).
pub fn fallback_base_url(&self) -> ServoUrl {
    // Step 1: iframe srcdoc (#4767).
    // Step 2: about:blank with a creator browsing context.
    // Step 3.
    self.url()
}
// https://html.spec.whatwg.org/multipage/#document-base-url
/// The document's base URL: the frozen base URL of the first `base`
/// element with an `href` (step 2), else the fallback base URL (step 1).
pub fn base_url(&self) -> ServoUrl {
    self.base_element()
        // Step 2.
        .map(|base| base.frozen_base_url())
        // Step 1.
        .unwrap_or_else(|| self.fallback_base_url())
}
/// Whether layout suppressed a reflow that is needed before painting.
pub fn needs_paint(&self) -> bool {
    self.needs_paint.get()
}
/// Whether this document currently needs a reflow: there are dirty
/// descendants, pending restyles, or a suppressed paint. Always false for
/// a document with no document element.
pub fn needs_reflow(&self) -> bool {
    // FIXME: This should check the dirty bit on the document,
    // not the document element. Needs some layout changes to make
    // that workable.
    match self.GetDocumentElement() {
        Some(root) => {
            root.upcast::<Node>().has_dirty_descendants() ||
            !self.pending_restyles.borrow().is_empty() ||
            self.needs_paint()
        }
        None => false,
    }
}
/// Returns the first `base` element in the DOM that has an `href` attribute.
pub fn base_element(&self) -> Option<Root<HTMLBaseElement>> {
    self.base_element.get()
}
/// Refresh the cached first base element in the DOM.
/// https://github.com/w3c/web-platform-tests/issues/2122
pub fn refresh_base_element(&self) {
    // Preorder traversal yields the first matching `base` in tree order.
    let base = self.upcast::<Node>()
        .traverse_preorder()
        .filter_map(Root::downcast::<HTMLBaseElement>)
        .find(|element| element.upcast::<Element>().has_attribute(&local_name!("href")));
    self.base_element.set(base.r());
}
/// Total number of elements currently in this DOM tree.
pub fn dom_count(&self) -> u32 {
    self.dom_count.get()
}
/// This is called by `bind_to_tree` when a node is added to the DOM.
/// The internal count is used by layout to determine whether to be sequential or parallel.
/// (it's sequential for small DOMs)
pub fn increment_dom_count(&self) {
    self.dom_count.set(self.dom_count.get() + 1);
}
/// This is called by `unbind_from_tree` when a node is removed from the DOM.
pub fn decrement_dom_count(&self) {
    self.dom_count.set(self.dom_count.get() - 1);
}
/// The document's current quirks mode.
pub fn quirks_mode(&self) -> QuirksMode {
    self.quirks_mode.get()
}
/// Set the quirks mode, notifying the layout thread when entering quirks mode.
pub fn set_quirks_mode(&self, mode: QuirksMode) {
    self.quirks_mode.set(mode);
    if mode == QuirksMode::Quirks {
        self.window.layout_chan().send(Msg::SetQuirksMode).unwrap();
    }
}
/// The document's current character encoding.
pub fn encoding(&self) -> EncodingRef {
    self.encoding.get()
}
/// Replace the document's character encoding.
pub fn set_encoding(&self, encoding: EncodingRef) {
    self.encoding.set(encoding);
}
/// Mark `node` dirty with the given damage level.
pub fn content_and_heritage_changed(&self, node: &Node, damage: NodeDamage) {
    node.dirty(damage);
}
/// Reflows and disarms the timer if the reflow timer has expired.
pub fn reflow_if_reflow_timer_expired(&self) {
    if let Some(reflow_timeout) = self.reflow_timeout.get() {
        // Timer not yet due: nothing to do.
        if time::precise_time_ns() < reflow_timeout {
            return;
        }
        // Disarm before reflowing so we only fire once per arm.
        self.reflow_timeout.set(None);
        self.window.reflow(ReflowGoal::ForDisplay,
                           ReflowQueryType::NoQuery,
                           ReflowReason::RefreshTick);
    }
}
/// Schedules a reflow to be kicked off at the given `timeout` (in `time::precise_time_ns()`
/// units). This reflow happens even if the event loop is busy. This is used to display initial
/// page content during parsing.
pub fn set_reflow_timeout(&self, timeout: u64) {
    // An already-pending earlier deadline wins; keep it.
    if let Some(existing_timeout) = self.reflow_timeout.get() {
        if existing_timeout < timeout {
            return;
        }
    }
    self.reflow_timeout.set(Some(timeout))
}
/// Remove any existing association between the provided id and any elements in this document.
///
/// Panics if `to_unregister` is not currently registered under `id`.
pub fn unregister_named_element(&self, to_unregister: &Element, id: Atom) {
    debug!("Removing named element from document {:p}: {:p} id={}",
           self,
           to_unregister,
           id);
    let mut id_map = self.id_map.borrow_mut();
    let removed_last = if let Some(elements) = id_map.get_mut(&id) {
        let index = elements.iter()
                            .position(|element| &**element == to_unregister)
                            .expect("This element should be in registered.");
        elements.remove(index);
        elements.is_empty()
    } else {
        false
    };
    // Drop the map entry entirely once no elements remain for this id.
    if removed_last {
        id_map.remove(&id);
    }
}
/// Associate an element present in this document with the provided id.
///
/// Panics if the element is not in the document, if `id` is empty, or if
/// there is no document element.
pub fn register_named_element(&self, element: &Element, id: Atom) {
    debug!("Adding named element to document {:p}: {:p} id={}",
           self,
           element,
           id);
    assert!(element.upcast::<Node>().is_in_doc());
    assert!(!id.is_empty());
    let mut id_map = self.id_map.borrow_mut();
    let root = self.GetDocumentElement()
                   .expect("The element is in the document, so there must be a document \
                            element.");
    match id_map.entry(id) {
        Vacant(entry) => {
            entry.insert(vec![JS::from_ref(element)]);
        }
        Occupied(entry) => {
            let elements = entry.into_mut();
            let new_node = element.upcast::<Node>();
            // Walk the tree in preorder to locate where the new element sits
            // relative to the already-registered ones, so that `elements`
            // remains sorted in tree order.
            let mut head: usize = 0;
            let root = root.upcast::<Node>();
            for node in root.traverse_preorder() {
                if let Some(elem) = node.downcast() {
                    if &*(*elements)[head] == elem {
                        head += 1;
                    }
                    if new_node == &*node || head == elements.len() {
                        break;
                    }
                }
            }
            elements.insert(head, JS::from_ref(element));
        }
    }
}
/// Attempt to find a named element in this page's document.
/// https://html.spec.whatwg.org/multipage/#the-indicated-part-of-the-document
///
/// Looks up by (percent-decoded) id first, then by anchor `name`.
pub fn find_fragment_node(&self, fragid: &str) -> Option<Root<Element>> {
    // Step 1 is not handled here; the fragid is already obtained by the calling function
    // Step 2: Simply use None to indicate the top of the document.
    // Step 3 & 4
    percent_decode(fragid.as_bytes()).decode_utf8().ok()
    // Step 5
        .and_then(|decoded_fragid| self.get_element_by_id(&Atom::from(decoded_fragid)))
    // Step 6
        .or_else(|| self.get_anchor_by_name(fragid))
    // Step 7 & 8
}
/// Scroll to the target element, and when we do not find a target
/// and the fragment is empty or "top", scroll to the top.
/// https://html.spec.whatwg.org/multipage/#scroll-to-the-fragment-identifier
pub fn check_and_scroll_fragment(&self, fragment: &str) {
    let target = self.find_fragment_node(fragment);
    // Step 1
    self.set_target_element(target.r());
    let point = target.r().map(|element| {
        // FIXME(#8275, pcwalton): This is pretty bogus when multiple layers are involved.
        // Really what needs to happen is that this needs to go through layout to ask which
        // layer the element belongs to, and have it send the scroll message to the
        // compositor.
        let rect = element.upcast::<Node>().bounding_content_box_or_zero();
        // In order to align with element edges, we snap to unscaled pixel boundaries, since
        // the paint thread currently does the same for drawing elements. This is important
        // for pages that require pixel perfect scroll positioning for proper display
        // (like Acid2). Since we don't have the device pixel ratio here, this might not be
        // accurate, but should work as long as the ratio is a whole number. Once #8275 is
        // fixed this should actually take into account the real device pixel ratio.
        (rect.origin.x.to_nearest_px() as f32, rect.origin.y.to_nearest_px() as f32)
    }).or_else(|| if fragment.is_empty() || fragment.eq_ignore_ascii_case("top") {
        // FIXME(stshine): this should be the origin of the stacking context space,
        // which may differ under the influence of writing mode.
        Some((0.0, 0.0))
    } else {
        None
    });
    if let Some((x, y)) = point {
        // Step 3
        self.window.perform_a_scroll(x,
                                     y,
                                     ScrollRootId::root(),
                                     ScrollBehavior::Instant,
                                     target.r());
    }
}
/// Find the first anchor element in tree order whose `name` attribute
/// equals `name`.
fn get_anchor_by_name(&self, name: &str) -> Option<Root<Element>> {
    self.upcast::<Node>()
        .traverse_preorder()
        .filter_map(Root::downcast::<HTMLAnchorElement>)
        .find(|anchor| {
            let elem = anchor.upcast::<Element>();
            elem.get_attribute(&ns!(), &local_name!("name"))
                .map_or(false, |attr| &**attr.value() == name)
        })
        .map(Root::upcast)
}
// https://html.spec.whatwg.org/multipage/#current-document-readiness
/// Update the document readiness, record the matching navigation-timing
/// timestamp, and fire `readystatechange`.
pub fn set_ready_state(&self, state: DocumentReadyState) {
    match state {
        DocumentReadyState::Loading => {
            // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserconnected
            self.trigger_mozbrowser_event(MozBrowserEvent::Connected);
            update_with_current_time_ms(&self.dom_loading);
        },
        DocumentReadyState::Complete => {
            // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserloadend
            self.trigger_mozbrowser_event(MozBrowserEvent::LoadEnd);
            update_with_current_time_ms(&self.dom_complete);
        },
        DocumentReadyState::Interactive => update_with_current_time_ms(&self.dom_interactive),
    };
    self.ready_state.set(state);
    self.upcast::<EventTarget>().fire_event(atom!("readystatechange"));
}
/// Return whether scripting is enabled or not
pub fn is_scripting_enabled(&self) -> bool {
    self.scripting_enabled
}
/// Return the element that currently has focus.
// https://w3c.github.io/uievents/#events-focusevent-doc-focus
pub fn get_focused_element(&self) -> Option<Root<Element>> {
self.focused.get()<|fim▁hole|> pub fn begin_focus_transaction(&self) {
self.possibly_focused.set(None);
}
/// Request that the given element receive focus once the current transaction is complete.
pub fn request_focus(&self, elem: &Element) {
    // Non-focusable areas are silently ignored.
    if elem.is_focusable_area() {
        self.possibly_focused.set(Some(elem))
    }
}
/// Reassign the focus context to the element that last requested focus during this
/// transaction, or none if no elements requested it.
///
/// Fires `blur` on the previously focused element and `focus` on the new
/// one, and notifies the constellation for element-type focus changes.
pub fn commit_focus_transaction(&self, focus_type: FocusType) {
    // No change in focus: nothing to do.
    if self.focused == self.possibly_focused.get().r() {
        return
    }
    if let Some(ref elem) = self.focused.get() {
        let node = elem.upcast::<Node>();
        elem.set_focus_state(false);
        // FIXME: pass appropriate relatedTarget
        self.fire_focus_event(FocusEventType::Blur, node, None);
    }
    self.focused.set(self.possibly_focused.get().r());
    if let Some(ref elem) = self.focused.get() {
        elem.set_focus_state(true);
        let node = elem.upcast::<Node>();
        // FIXME: pass appropriate relatedTarget
        self.fire_focus_event(FocusEventType::Focus, node, None);
        // Update the focus state for all elements in the focus chain.
        // https://html.spec.whatwg.org/multipage/#focus-chain
        if focus_type == FocusType::Element {
            let global_scope = self.window.upcast::<GlobalScope>();
            let event = ConstellationMsg::Focus(global_scope.pipeline_id());
            global_scope.constellation_chan().send(event).unwrap();
        }
    }
}
/// Handles any updates when the document's title has changed.
pub fn title_changed(&self) {
    // Only documents with a browsing context propagate title changes.
    if self.browsing_context().is_some() {
        // https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowsertitlechange
        self.trigger_mozbrowser_event(MozBrowserEvent::TitleChange(String::from(self.Title())));
        self.send_title_to_compositor();
    }
}
/// Sends this document's title to the compositor.
pub fn send_title_to_compositor(&self) {
    let window = self.window();
    let global_scope = window.upcast::<GlobalScope>();
    global_scope
        .constellation_chan()
        .send(ConstellationMsg::SetTitle(global_scope.pipeline_id(),
                                         Some(String::from(self.Title()))))
        .unwrap();
}
/// Mark every node in this document's tree as dirty.
pub fn dirty_all_nodes(&self) {
    for node in self.upcast::<Node>().traverse_preorder() {
        node.dirty(NodeDamage::OtherNodeDamage)
    }
}
/// Dispatch a mouse button event (click / mouseup / mousedown) at
/// `client_point`.
///
/// Hit-tests the point to find the target, forwards the event to the child
/// pipeline when the target is an iframe, filters clicks on disabled form
/// controls, fires a trusted DOM `MouseEvent`, runs activation behavior,
/// handles double-click detection for clicks, and triggers a reflow.
pub fn handle_mouse_event(&self,
                          js_runtime: *mut JSRuntime,
                          button: MouseButton,
                          client_point: Point2D<f32>,
                          mouse_event_type: MouseEventType) {
    let mouse_event_type_string = match mouse_event_type {
        MouseEventType::Click => "click".to_owned(),
        MouseEventType::MouseUp => "mouseup".to_owned(),
        MouseEventType::MouseDown => "mousedown".to_owned(),
    };
    debug!("{}: at {:?}", mouse_event_type_string, client_point);
    // Hit-test to find the node under the pointer; bail if there is none.
    let node = match self.window.hit_test_query(client_point, false) {
        Some(node_address) => {
            debug!("node address is {:?}", node_address);
            node::from_untrusted_node_address(js_runtime, node_address)
        },
        None => return,
    };
    // Use the node itself if it is an element, otherwise its parent element.
    let el = match node.downcast::<Element>() {
        Some(el) => Root::from_ref(el),
        None => {
            let parent = node.GetParentNode();
            match parent.and_then(Root::downcast::<Element>) {
                Some(parent) => parent,
                None => return,
            }
        },
    };
    // If the target is an iframe, forward the event to the child document.
    if let Some(iframe) = el.downcast::<HTMLIFrameElement>() {
        if let Some(pipeline_id) = iframe.pipeline_id() {
            let rect = iframe.upcast::<Element>().GetBoundingClientRect();
            let child_origin = Point2D::new(rect.X() as f32, rect.Y() as f32);
            // Translate into the child frame's coordinate space.
            let child_point = client_point - child_origin;
            let event = CompositorEvent::MouseButtonEvent(mouse_event_type, button, child_point);
            let event = ConstellationMsg::ForwardEvent(pipeline_id, event);
            self.window.upcast::<GlobalScope>().constellation_chan().send(event).unwrap();
        }
        return;
    }
    let node = el.upcast::<Node>();
    debug!("{} on {:?}", mouse_event_type_string, node.debug_str());
    // Prevent click event if form control element is disabled.
    if let MouseEventType::Click = mouse_event_type {
        if el.click_event_filter_by_disabled_state() {
            return;
        }
        self.begin_focus_transaction();
    }
    // https://w3c.github.io/uievents/#event-type-click
    let client_x = client_point.x as i32;
    let client_y = client_point.y as i32;
    let click_count = 1;
    let event = MouseEvent::new(&self.window,
                                DOMString::from(mouse_event_type_string),
                                EventBubbles::Bubbles,
                                EventCancelable::Cancelable,
                                Some(&self.window),
                                click_count,
                                client_x,
                                client_y,
                                client_x,
                                client_y, // TODO: Get real screen coordinates?
                                false,
                                false,
                                false,
                                false,
                                0i16,
                                None);
    let event = event.upcast::<Event>();
    // https://w3c.github.io/uievents/#trusted-events
    event.set_trusted(true);
    // https://html.spec.whatwg.org/multipage/#run-authentic-click-activation-steps
    let activatable = el.as_maybe_activatable();
    match mouse_event_type {
        MouseEventType::Click => el.authentic_click_activation(event),
        MouseEventType::MouseDown => {
            if let Some(a) = activatable {
                a.enter_formal_activation_state();
            }
            let target = node.upcast();
            event.fire(target);
        },
        MouseEventType::MouseUp => {
            if let Some(a) = activatable {
                a.exit_formal_activation_state();
            }
            let target = node.upcast();
            event.fire(target);
        },
    }
    if let MouseEventType::Click = mouse_event_type {
        self.commit_focus_transaction(FocusType::Element);
        self.maybe_fire_dblclick(client_point, node);
    }
    self.window.reflow(ReflowGoal::ForDisplay,
                       ReflowQueryType::NoQuery,
                       ReflowReason::MouseEvent);
}
/// Fire a `dblclick` event at `target` if this click follows the previous
/// click closely enough in both time and distance (thresholds come from
/// prefs, with defaults of 300ms and 1px); otherwise record this click as
/// the candidate first click of a future double click.
fn maybe_fire_dblclick(&self, click_pos: Point2D<f32>, target: &Node) {
    // https://w3c.github.io/uievents/#event-type-dblclick
    let now = Instant::now();
    let opt = self.last_click_info.borrow_mut().take();
    if let Some((last_time, last_pos)) = opt {
        // Locals renamed to snake_case: uppercase local bindings are
        // non-idiomatic and trigger `non_snake_case` warnings.
        let dbl_click_timeout = Duration::from_millis(PREFS.get("dom.document.dblclick_timeout").as_u64()
            .unwrap_or(300));
        let dbl_click_dist_threshold = PREFS.get("dom.document.dblclick_dist").as_u64().unwrap_or(1);
        // Calculate distance between this click and the previous click.
        let line = click_pos - last_pos;
        let dist = (line.dot(line) as f64).sqrt();
        if now.duration_since(last_time) < dbl_click_timeout &&
           dist < dbl_click_dist_threshold as f64 {
            // A double click has occurred if this click is within a certain time and dist. of previous click.
            let click_count = 2;
            let client_x = click_pos.x as i32;
            let client_y = click_pos.y as i32;
            let event = MouseEvent::new(&self.window,
                                        DOMString::from("dblclick"),
                                        EventBubbles::Bubbles,
                                        EventCancelable::Cancelable,
                                        Some(&self.window),
                                        click_count,
                                        client_x,
                                        client_y,
                                        client_x,
                                        client_y,
                                        false,
                                        false,
                                        false,
                                        false,
                                        0i16,
                                        None);
            event.upcast::<Event>().fire(target.upcast());
            // When a double click occurs, self.last_click_info is left as None so that a
            // third sequential click will not cause another double click.
            return;
        }
    }
    // Update last_click_info with the time and position of the click.
    *self.last_click_info.borrow_mut() = Some((now, click_pos));
}
/// Handle a touchpad pressure (force touch) event at `client_point`,
/// firing the servo-specific force-touch events as the pressure phase
/// transitions, or forwarding the event when the target is an iframe.
pub fn handle_touchpad_pressure_event(&self,
                                      js_runtime: *mut JSRuntime,
                                      client_point: Point2D<f32>,
                                      pressure: f32,
                                      phase_now: TouchpadPressurePhase) {
    let node = match self.window.hit_test_query(client_point, false) {
        Some(node_address) => node::from_untrusted_node_address(js_runtime, node_address),
        None => return
    };
    // Use the node itself if it is an element, otherwise its parent element.
    let el = match node.downcast::<Element>() {
        Some(el) => Root::from_ref(el),
        None => {
            let parent = node.GetParentNode();
            match parent.and_then(Root::downcast::<Element>) {
                Some(parent) => parent,
                None => return
            }
        },
    };
    // If the target is an iframe, forward the event to the child document.
    if let Some(iframe) = el.downcast::<HTMLIFrameElement>() {
        if let Some(pipeline_id) = iframe.pipeline_id() {
            let rect = iframe.upcast::<Element>().GetBoundingClientRect();
            let child_origin = Point2D::new(rect.X() as f32, rect.Y() as f32);
            let child_point = client_point - child_origin;
            let event = CompositorEvent::TouchpadPressureEvent(child_point,
                                                               pressure,
                                                               phase_now);
            let event = ConstellationMsg::ForwardEvent(pipeline_id, event);
            self.window.upcast::<GlobalScope>().constellation_chan().send(event).unwrap();
        }
        return;
    }
    let phase_before = self.touchpad_pressure_phase.get();
    self.touchpad_pressure_phase.set(phase_now);
    // No phase transition and still before the click threshold: nothing to report.
    if phase_before == TouchpadPressurePhase::BeforeClick &&
       phase_now == TouchpadPressurePhase::BeforeClick {
        return;
    }
    let node = el.upcast::<Node>();
    let target = node.upcast();
    // Encode the phase into the reported force: 0..1 before the click,
    // 1..2 after the first click, 2..3 after the second.
    let force = match phase_now {
        TouchpadPressurePhase::BeforeClick => pressure,
        TouchpadPressurePhase::AfterFirstClick => 1. + pressure,
        TouchpadPressurePhase::AfterSecondClick => 2. + pressure,
    };
    if phase_now != TouchpadPressurePhase::BeforeClick {
        self.fire_forcetouch_event("servomouseforcechanged".to_owned(), target, force);
    }
    if phase_before != TouchpadPressurePhase::AfterSecondClick &&
       phase_now == TouchpadPressurePhase::AfterSecondClick {
        self.fire_forcetouch_event("servomouseforcedown".to_owned(), target, force);
    }
    if phase_before == TouchpadPressurePhase::AfterSecondClick &&
       phase_now != TouchpadPressurePhase::AfterSecondClick {
        self.fire_forcetouch_event("servomouseforceup".to_owned(), target, force);
    }
}
/// Create a `ForceTouchEvent` with the given name and force, and fire it
/// at `target`.
fn fire_forcetouch_event(&self, event_name: String, target: &EventTarget, force: f32) {
    let force_event = ForceTouchEvent::new(&self.window,
                                           DOMString::from(event_name),
                                           force);
    let event = force_event.upcast::<Event>();
    event.fire(target);
}
/// Create and fire a simple synthetic mouse event of the given name at
/// `target`. Coordinates that do not fit in an `i32` fall back to 0.
pub fn fire_mouse_event(&self, client_point: Point2D<f32>, target: &EventTarget, event_name: String) {
    let client_x = client_point.x.to_i32().unwrap_or(0);
    let client_y = client_point.y.to_i32().unwrap_or(0);
    let mouse_event = MouseEvent::new(&self.window,
                                      DOMString::from(event_name),
                                      EventBubbles::Bubbles,
                                      EventCancelable::Cancelable,
                                      Some(&self.window),
                                      0i32,
                                      client_x,
                                      client_y,
                                      client_x,
                                      client_y,
                                      false,
                                      false,
                                      false,
                                      false,
                                      0i16,
                                      None);
    let event = mouse_event.upcast::<Event>();
    event.fire(target);
}
/// Dispatch a mouse-move at `client_point`, maintaining hover state.
///
/// Fires `mousemove` at the topmost target (forwarding to a child pipeline
/// for iframes); when the hovered element changes, clears hover/active
/// state on the old chain, fires `mouseout`/`mouseover`, updates
/// `prev_mouse_over_target`, and triggers a reflow.
pub fn handle_mouse_move_event(&self,
                               js_runtime: *mut JSRuntime,
                               client_point: Option<Point2D<f32>>,
                               prev_mouse_over_target: &MutNullableJS<Element>) {
    let client_point = match client_point {
        None => {
            // If there's no point, there's no target under the mouse
            // FIXME: dispatch mouseout here. We have no point.
            prev_mouse_over_target.set(None);
            return;
        }
        Some(client_point) => client_point,
    };
    // Hit-test and walk up to the nearest element.
    let maybe_new_target = self.window.hit_test_query(client_point, true).and_then(|address| {
        let node = node::from_untrusted_node_address(js_runtime, address);
        node.inclusive_ancestors()
            .filter_map(Root::downcast::<Element>)
            .next()
    });
    // Send mousemove event to topmost target, and forward it if it's an iframe
    if let Some(ref new_target) = maybe_new_target {
        // If the target is an iframe, forward the event to the child document.
        if let Some(iframe) = new_target.downcast::<HTMLIFrameElement>() {
            if let Some(pipeline_id) = iframe.pipeline_id() {
                let rect = iframe.upcast::<Element>().GetBoundingClientRect();
                let child_origin = Point2D::new(rect.X() as f32, rect.Y() as f32);
                let child_point = client_point - child_origin;
                let event = CompositorEvent::MouseMoveEvent(Some(child_point));
                let event = ConstellationMsg::ForwardEvent(pipeline_id, event);
                self.window.upcast::<GlobalScope>().constellation_chan().send(event).unwrap();
            }
            return;
        }
        self.fire_mouse_event(client_point, new_target.upcast(), "mousemove".to_owned());
    }
    // Nothing more to do here, mousemove is sent,
    // and the element under the mouse hasn't changed.
    if maybe_new_target == prev_mouse_over_target.get() {
        return;
    }
    let old_target_is_ancestor_of_new_target = match (prev_mouse_over_target.get(), maybe_new_target.as_ref()) {
        (Some(old_target), Some(new_target))
            => old_target.upcast::<Node>().is_ancestor_of(new_target.upcast::<Node>()),
        _ => false,
    };
    // Here we know the target has changed, so we must update the state,
    // dispatch mouseout to the previous one, mouseover to the new one,
    if let Some(old_target) = prev_mouse_over_target.get() {
        // If the old target is an ancestor of the new target, this can be skipped
        // completely, since the node's hover state will be reseted below.
        if !old_target_is_ancestor_of_new_target {
            for element in old_target.upcast::<Node>()
                                     .inclusive_ancestors()
                                     .filter_map(Root::downcast::<Element>) {
                element.set_hover_state(false);
                element.set_active_state(false);
            }
        }
        // Remove hover state to old target and its parents
        self.fire_mouse_event(client_point, old_target.upcast(), "mouseout".to_owned());
        // TODO: Fire mouseleave here only if the old target is
        // not an ancestor of the new target.
    }
    if let Some(ref new_target) = maybe_new_target {
        for element in new_target.upcast::<Node>()
                                 .inclusive_ancestors()
                                 .filter_map(Root::downcast::<Element>) {
            // Stop at the first ancestor already marked hovered.
            if element.hover_state() {
                break;
            }
            element.set_hover_state(true);
        }
        self.fire_mouse_event(client_point, &new_target.upcast(), "mouseover".to_owned());
        // TODO: Fire mouseenter here.
    }
    // Store the current mouse over target for next frame.
    prev_mouse_over_target.set(maybe_new_target.r());
    self.window.reflow(ReflowGoal::ForDisplay,
                       ReflowQueryType::NoQuery,
                       ReflowReason::MouseEvent);
}
/// Routes a compositor touch event into the DOM: hit-tests `point`,
/// forwards the event to a child pipeline when the target is an iframe,
/// maintains the document's set of active touch points, and fires the
/// corresponding `TouchEvent` at the target element.
///
/// Returns whether the event was forwarded, or (if dispatched here)
/// whether script canceled it.
pub fn handle_touch_event(&self,
js_runtime: *mut JSRuntime,
event_type: TouchEventType,
touch_id: TouchId,
point: Point2D<f32>)
-> TouchEventResult {
let TouchId(identifier) = touch_id;
// Map the compositor event type to the DOM event name.
let event_name = match event_type {
TouchEventType::Down => "touchstart",
TouchEventType::Move => "touchmove",
TouchEventType::Up => "touchend",
TouchEventType::Cancel => "touchcancel",
};
// Hit-test for the node under the touch point; with no node there is
// nothing to dispatch to.
let node = match self.window.hit_test_query(point, false) {
Some(node_address) => node::from_untrusted_node_address(js_runtime, node_address),
None => return TouchEventResult::Processed(false),
};
// Events target elements; for a non-element node (e.g. a text node),
// fall back to its parent element.
let el = match node.downcast::<Element>() {
Some(el) => Root::from_ref(el),
None => {
let parent = node.GetParentNode();
match parent.and_then(Root::downcast::<Element>) {
Some(parent) => parent,
None => return TouchEventResult::Processed(false),
}
},
};
// If the target is an iframe, forward the event to the child document.
if let Some(iframe) = el.downcast::<HTMLIFrameElement>() {
if let Some(pipeline_id) = iframe.pipeline_id() {
// Translate the point into the child frame's coordinate space.
let rect = iframe.upcast::<Element>().GetBoundingClientRect();
let child_origin = Point2D::new(rect.X() as f32, rect.Y() as f32);
let child_point = point - child_origin;
let event = CompositorEvent::TouchEvent(event_type, touch_id, child_point);
let event = ConstellationMsg::ForwardEvent(pipeline_id, event);
self.window.upcast::<GlobalScope>().constellation_chan().send(event).unwrap();
}
return TouchEventResult::Forwarded;
}
let target = Root::upcast::<EventTarget>(el);
let window = &*self.window;
let client_x = Finite::wrap(point.x as f64);
let client_y = Finite::wrap(point.y as f64);
// Page coordinates include the current scroll offsets.
let page_x = Finite::wrap(point.x as f64 + window.PageXOffset() as f64);
let page_y = Finite::wrap(point.y as f64 + window.PageYOffset() as f64);
let touch = Touch::new(window,
identifier,
&target,
client_x,
client_y, // TODO: Get real screen coordinates?
client_x,
client_y,
page_x,
page_y);
// Update the active-touch-point set according to the event type,
// keyed by the platform-supplied touch identifier.
match event_type {
TouchEventType::Down => {
// Add a new touch point
self.active_touch_points.borrow_mut().push(JS::from_ref(&*touch));
}
TouchEventType::Move => {
// Replace an existing touch point
let mut active_touch_points = self.active_touch_points.borrow_mut();
match active_touch_points.iter_mut().find(|t| t.Identifier() == identifier) {
Some(t) => *t = JS::from_ref(&*touch),
None => warn!("Got a touchmove event for a non-active touch point"),
}
}
TouchEventType::Up |
TouchEventType::Cancel => {
// Remove an existing touch point
let mut active_touch_points = self.active_touch_points.borrow_mut();
match active_touch_points.iter().position(|t| t.Identifier() == identifier) {
Some(i) => {
active_touch_points.swap_remove(i);
}
None => warn!("Got a touchend event for a non-active touch point"),
}
}
}
// Build the three touch lists the event exposes: all active points,
// the points on this target, and the point that changed.
rooted_vec!(let mut touches);
touches.extend(self.active_touch_points.borrow().iter().cloned());
rooted_vec!(let mut target_touches);
target_touches.extend(self.active_touch_points
.borrow()
.iter()
.filter(|t| t.Target() == target)
.cloned());
rooted_vec!(let changed_touches <- once(touch));
let event = TouchEvent::new(window,
DOMString::from(event_name),
EventBubbles::Bubbles,
EventCancelable::Cancelable,
Some(window),
0i32,
&TouchList::new(window, touches.r()),
&TouchList::new(window, changed_touches.r()),
&TouchList::new(window, target_touches.r()),
// FIXME: modifier keys
false,
false,
false,
false);
let event = event.upcast::<Event>();
let result = event.fire(&target);
window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::MouseEvent);
// Report back whether script called preventDefault().
match result {
EventStatus::Canceled => TouchEventResult::Processed(false),
EventStatus::NotCanceled => TouchEventResult::Processed(true),
}
}
/// The entry point for all key processing for web content
///
/// Picks the event target (focused element, else body, else window),
/// fires `keydown`/`keyup`, fires `keypress` for printable keys per the
/// UI Events keypress ordering, and — when nothing was canceled —
/// relays the key to the constellation and performs the unspecced
/// Space/Enter activation behavior.
pub fn dispatch_key_event(&self,
ch: Option<char>,
key: Key,
state: KeyState,
modifiers: KeyModifiers,
constellation: &IpcSender<ConstellationMsg>) {
let focused = self.get_focused_element();
let body = self.GetBody();
// Target priority: focused element, then <body>, then the window.
let target = match (&focused, &body) {
(&Some(ref focused), _) => focused.upcast(),
(&None, &Some(ref body)) => body.upcast(),
(&None, &None) => self.window.upcast(),
};
let ctrl = modifiers.contains(CONTROL);
let alt = modifiers.contains(ALT);
let shift = modifiers.contains(SHIFT);
let meta = modifiers.contains(SUPER);
let is_composing = false;
let is_repeating = state == KeyState::Repeated;
// A repeated key still fires "keydown".
let ev_type = DOMString::from(match state {
KeyState::Pressed | KeyState::Repeated => "keydown",
KeyState::Released => "keyup",
}
.to_owned());
let props = KeyboardEvent::key_properties(ch, key, modifiers);
let keyevent = KeyboardEvent::new(&self.window,
ev_type,
true,
true,
Some(&self.window),
0,
ch,
Some(key),
DOMString::from(props.key_string.clone()),
DOMString::from(props.code),
props.location,
is_repeating,
is_composing,
ctrl,
alt,
shift,
meta,
None,
props.key_code);
let event = keyevent.upcast::<Event>();
event.fire(target);
let mut cancel_state = event.get_cancel_state();
// https://w3c.github.io/uievents/#keys-cancelable-keys
if state != KeyState::Released && props.is_printable() && cancel_state != EventDefault::Prevented {
// https://w3c.github.io/uievents/#keypress-event-order
let event = KeyboardEvent::new(&self.window,
DOMString::from("keypress"),
true,
true,
Some(&self.window),
0,
ch,
Some(key),
DOMString::from(props.key_string),
DOMString::from(props.code),
props.location,
is_repeating,
is_composing,
ctrl,
alt,
shift,
meta,
props.char_code,
0);
let ev = event.upcast::<Event>();
ev.fire(target);
// Canceling keypress suppresses the default action too.
cancel_state = ev.get_cancel_state();
}
if cancel_state == EventDefault::Allowed {
constellation.send(ConstellationMsg::SendKeyEvent(ch, key, state, modifiers)).unwrap();
// This behavior is unspecced
// We are supposed to dispatch synthetic click activation for Space and/or Return,
// however *when* we do it is up to us.
// Here, we're dispatching it after the key event so the script has a chance to cancel it
// https://www.w3.org/Bugs/Public/show_bug.cgi?id=27337
match key {
Key::Space if state == KeyState::Released => {
let maybe_elem = target.downcast::<Element>();
if let Some(el) = maybe_elem {
synthetic_click_activation(el,
false,
false,
false,
false,
ActivationSource::NotFromClick)
}
}
Key::Enter if state == KeyState::Released => {
let maybe_elem = target.downcast::<Element>();
if let Some(el) = maybe_elem {
// Enter submits the form the element belongs to, if any.
if let Some(a) = el.as_maybe_activatable() {
a.implicit_submission(ctrl, alt, shift, meta);
}
}
}
_ => (),
}
}
self.window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::KeyEvent);
}
// https://dom.spec.whatwg.org/#converting-nodes-into-a-node
pub fn node_from_nodes_and_strings(&self,
mut nodes: Vec<NodeOrString>)
-> Fallible<Root<Node>> {
// Fast path: a single item is returned (or converted to a text node)
// directly, without allocating a wrapper fragment.
if nodes.len() == 1 {
return Ok(match nodes.pop().unwrap() {
NodeOrString::Node(node) => node,
NodeOrString::String(string) => Root::upcast(self.CreateTextNode(string)),
});
}
// Otherwise collect every item into a fresh document fragment.
let fragment = Root::upcast::<Node>(self.CreateDocumentFragment());
for item in nodes {
match item {
NodeOrString::Node(node) => {
// Appending an arbitrary node can fail (hierarchy
// constraints), so propagate the error.
try!(fragment.AppendChild(&node));
},
NodeOrString::String(string) => {
let text = Root::upcast::<Node>(self.CreateTextNode(string));
// Appending a freshly created text node to a fragment
// cannot fail.
fragment.AppendChild(&text).unwrap();
},
}
}
Ok(fragment)
}
/// Reads `local_name` off the document's HTML `<body>` element,
/// returning the empty string when there is no body.
pub fn get_body_attribute(&self, local_name: &LocalName) -> DOMString {
if let Some(ref body) = self.GetBody().and_then(Root::downcast::<HTMLBodyElement>) {
body.upcast::<Element>().get_string_attribute(local_name)
} else {
DOMString::new()
}
}
/// Sets `local_name` on the document's HTML `<body>` element, parsing
/// `value` through the element first. A no-op when there is no body.
pub fn set_body_attribute(&self, local_name: &LocalName, value: DOMString) {
match self.GetBody().and_then(Root::downcast::<HTMLBodyElement>) {
Some(ref body) => {
let body_el = body.upcast::<Element>();
let parsed = body_el.parse_attribute(&ns!(), &local_name, value);
body_el.set_attribute(local_name, parsed);
},
None => {},
}
}
/// Records the `<script>` element currently being executed
/// (document.currentScript), or clears it with `None`.
pub fn set_current_script(&self, script: Option<&HTMLScriptElement>) {
self.current_script.set(script);
}
/// Number of in-flight stylesheets that block script execution.
pub fn get_script_blocking_stylesheets_count(&self) -> u32 {
self.script_blocking_stylesheets_count.get()
}
/// A script-blocking stylesheet started loading.
pub fn increment_script_blocking_stylesheet_count(&self) {
let count_cell = &self.script_blocking_stylesheets_count;
count_cell.set(count_cell.get() + 1);
}
/// A script-blocking stylesheet finished loading; the counter must
/// never underflow.
pub fn decrement_script_blocking_stylesheet_count(&self) {
let count_cell = &self.script_blocking_stylesheets_count;
assert!(count_cell.get() > 0);
count_cell.set(count_cell.get() - 1);
}
/// Marks the document's stylesheet set as stale: the cached list is
/// dropped (rebuilt lazily on next access) and the root element is
/// dirtied so a reflow picks up the new styles.
pub fn invalidate_stylesheets(&self) {
self.stylesheets_changed_since_reflow.set(true);
*self.stylesheets.borrow_mut() = None;
// Mark the document element dirty so a reflow will be performed.
match self.GetDocumentElement() {
Some(root) => root.upcast::<Node>().dirty(NodeDamage::NodeStyleDamaged),
None => {},
}
}
/// Reads the "stylesheets changed" dirty flag and clears it, so the
/// next reflow sees the change exactly once.
pub fn get_and_reset_stylesheets_changed_since_reflow(&self) -> bool {
let was_dirty = self.stylesheets_changed_since_reflow.get();
self.stylesheets_changed_since_reflow.set(false);
was_dirty
}
/// Reports a mozbrowser event for this document to its parent pipeline
/// via the constellation. A no-op when the mozbrowser pref is off or
/// the document has no parent pipeline.
pub fn trigger_mozbrowser_event(&self, event: MozBrowserEvent) {
if !PREFS.is_mozbrowser_enabled() {
return;
}
if let Some((parent_pipeline_id, _)) = self.window.parent_info() {
let global_scope = self.window.upcast::<GlobalScope>();
let msg = ConstellationMsg::MozBrowserEvent(parent_pipeline_id,
global_scope.pipeline_id(),
event);
global_scope.constellation_chan().send(msg).unwrap();
}
}
/// https://html.spec.whatwg.org/multipage/#dom-window-requestanimationframe
///
/// Registers `callback` for the next animation frame and returns the
/// handle usable with `cancel_animation_frame`. Notifies the
/// constellation that animation callbacks are present — unless we are
/// already inside `run_the_animation_frame_callbacks`, which handles
/// that itself.
pub fn request_animation_frame(&self, callback: AnimationFrameCallback) -> u32 {
// Handles are a monotonically increasing counter; 0 is never issued.
let ident = self.animation_frame_ident.get() + 1;
self.animation_frame_ident.set(ident);
self.animation_frame_list.borrow_mut().push((ident, Some(callback)));
// No need to send a `ChangeRunningAnimationsState` if we're running animation callbacks:
// we're guaranteed to already be in the "animation callbacks present" state.
//
// This reduces CPU usage by avoiding needless thread wakeups in the common case of
// repeated rAF.
//
// TODO: Should tick animation only when document is visible
if !self.running_animation_callbacks.get() {
let global_scope = self.window.upcast::<GlobalScope>();
let event = ConstellationMsg::ChangeRunningAnimationsState(
global_scope.pipeline_id(),
AnimationState::AnimationCallbacksPresent);
global_scope.constellation_chan().send(event).unwrap();
}
ident
}
/// https://html.spec.whatwg.org/multipage/#dom-window-cancelanimationframe
pub fn cancel_animation_frame(&self, ident: u32) {
let mut callbacks = self.animation_frame_list.borrow_mut();
for entry in callbacks.iter_mut() {
if entry.0 == ident {
// Keep the slot but drop the callback, so it is skipped when
// the frame callbacks run. Unknown handles are ignored.
entry.1 = None;
return;
}
}
}
/// https://html.spec.whatwg.org/multipage/#run-the-animation-frame-callbacks
///
/// Takes ownership of the current callback list (so callbacks that call
/// requestAnimationFrame register for the *next* frame), invokes each
/// surviving callback with a single timestamp, then tells the
/// constellation if no callbacks remain.
pub fn run_the_animation_frame_callbacks(&self) {
rooted_vec!(let mut animation_frame_list);
// Swap the pending list out, leaving an empty list for callbacks
// registered re-entrantly during this run.
mem::swap(
&mut *animation_frame_list,
&mut *self.animation_frame_list.borrow_mut());
// Suppresses the AnimationCallbacksPresent message in
// request_animation_frame while callbacks run.
self.running_animation_callbacks.set(true);
// One shared timestamp for every callback in this frame.
let timing = self.window.Performance().Now();
for (_, callback) in animation_frame_list.drain(..) {
// Entries canceled via cancel_animation_frame are None.
if let Some(callback) = callback {
callback.call(self, *timing);
}
}
// Only send the animation change state message after running any callbacks.
// This means that if the animation callback adds a new callback for
// the next frame (which is the common case), we won't send a NoAnimationCallbacksPresent
// message quickly followed by an AnimationCallbacksPresent message.
if self.animation_frame_list.borrow().is_empty() {
mem::swap(&mut *self.animation_frame_list.borrow_mut(),
&mut *animation_frame_list);
let global_scope = self.window.upcast::<GlobalScope>();
let event = ConstellationMsg::ChangeRunningAnimationsState(global_scope.pipeline_id(),
AnimationState::NoAnimationCallbacksPresent);
global_scope.constellation_chan().send(event).unwrap();
}
self.running_animation_callbacks.set(false);
self.window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::RequestAnimationFrame);
}
/// Starts an asynchronous fetch through the document loader, which
/// tracks `load` until `finish_load` is called for it; responses are
/// delivered to `fetch_target`.
pub fn fetch_async(&self, load: LoadType,
request: RequestInit,
fetch_target: IpcSender<FetchResponseMsg>) {
self.loader.borrow_mut().fetch_async(load, request, fetch_target);
}
// https://html.spec.whatwg.org/multipage/#the-end
// https://html.spec.whatwg.org/multipage/#delay-the-load-event
/// Called when one tracked load completes. Unblocks pending scripts as
/// appropriate and, once no loads remain and events are not yet
/// inhibited, queues the document's load-complete processing (once per
/// document).
pub fn finish_load(&self, load: LoadType) {
// This does not delay the load event anymore.
debug!("Document got finish_load: {:?}", load);
self.loader.borrow_mut().finish_load(&load);
match load {
LoadType::Stylesheet(_) => {
// A stylesheet finishing to load may unblock any pending
// parsing-blocking script or deferred script.
self.process_pending_parsing_blocking_script();
// Step 3.
self.process_deferred_scripts();
},
LoadType::PageSource(_) => {
if self.has_browsing_context {
// Disarm the reflow timer and trigger the initial reflow.
self.reflow_timeout.set(None);
self.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
self.window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::FirstLoad);
}
// Deferred scripts have to wait for page to finish loading,
// this is the first opportunity to process them.
// Step 3.
self.process_deferred_scripts();
},
_ => {},
}
// Step 4 is in another castle, namely at the end of
// process_deferred_scripts.
// Step 5 can be found in asap_script_loaded and
// asap_in_order_script_loaded.
if self.loader.borrow().is_blocked() {
// Step 6.
return;
}
// The rest will ever run only once per document.
if self.loader.borrow().events_inhibited() {
return;
}
self.loader.borrow_mut().inhibit_events();
// Step 7.
debug!("Document loads are complete.");
// The load event itself fires from a queued task, not synchronously.
let handler = box DocumentProgressHandler::new(Trusted::new(self));
self.window.dom_manipulation_task_source().queue(handler, self.window.upcast()).unwrap();
// Step 8.
// TODO: pageshow event.
// Step 9.
// TODO: pending application cache download process tasks.
// Step 10.
// TODO: printing steps.
// Step 11.
// TODO: ready for post-load tasks.
// Step 12.
// TODO: completely loaded.
}
// https://html.spec.whatwg.org/multipage/#pending-parsing-blocking-script
/// Installs `script` as the (single) parsing-blocking script; there
/// must not already be one.
pub fn set_pending_parsing_blocking_script(&self,
script: &HTMLScriptElement,
load: Option<ScriptResult>) {
assert!(!self.has_pending_parsing_blocking_script());
*self.pending_parsing_blocking_script.borrow_mut() = Some(PendingScript::new_with_load(script, load));
}
// https://html.spec.whatwg.org/multipage/#pending-parsing-blocking-script
/// Whether a parsing-blocking script is currently installed.
pub fn has_pending_parsing_blocking_script(&self) -> bool {
self.pending_parsing_blocking_script.borrow().is_some()
}
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.d.
/// Records the load result for the pending parsing-blocking script and
/// tries to run it.
pub fn pending_parsing_blocking_script_loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
// Scope the RefCell borrow: process_pending_parsing_blocking_script
// below re-borrows the same cell.
{
let mut blocking_script = self.pending_parsing_blocking_script.borrow_mut();
let entry = blocking_script.as_mut().unwrap();
assert!(&*entry.element == element);
entry.loaded(result);
}
self.process_pending_parsing_blocking_script();
}
/// Executes the pending parsing-blocking script, if there is one, it
/// has a result, and no script-blocking stylesheets remain in flight.
fn process_pending_parsing_blocking_script(&self) {
// Script-blocking stylesheets must finish first.
if self.script_blocking_stylesheets_count.get() > 0 {
return;
}
// Take the (element, result) pair, releasing the borrow before the
// script runs — execution may re-enter the document.
let pair = self.pending_parsing_blocking_script
.borrow_mut()
.as_mut()
.and_then(PendingScript::take_result);
if let Some((element, result)) = pair {
*self.pending_parsing_blocking_script.borrow_mut() = None;
self.get_current_parser().unwrap().resume_with_pending_parsing_blocking_script(&element, result);
}
}
// https://html.spec.whatwg.org/multipage/#set-of-scripts-that-will-execute-as-soon-as-possible
/// Adds `script` to the set of scripts that execute as soon as they
/// finish loading, in any order.
pub fn add_asap_script(&self, script: &HTMLScriptElement) {
self.asap_scripts_set.borrow_mut().push(JS::from_ref(script));
}
/// https://html.spec.whatwg.org/multipage/#the-end step 5.
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.d.
pub fn asap_script_loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
// Locate and remove the element from the set before executing, making
// sure no borrow of the set is held while the script runs (execution
// may re-enter the document).
let idx = {
let scripts = self.asap_scripts_set.borrow();
scripts.iter().position(|entry| &**entry == element).unwrap()
};
self.asap_scripts_set.borrow_mut().swap_remove(idx);
element.execute(result);
}
// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-in-order-as-soon-as-possible
/// Appends `script` to the list of scripts that execute in insertion
/// order as soon as possible.
pub fn push_asap_in_order_script(&self, script: &HTMLScriptElement) {
self.asap_in_order_scripts_list.push(script);
}
/// https://html.spec.whatwg.org/multipage/#the-end step 5.
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.c.
pub fn asap_in_order_script_loaded(&self,
element: &HTMLScriptElement,
result: ScriptResult) {
// Record this load, then drain every script whose turn has come,
// preserving insertion order.
self.asap_in_order_scripts_list.loaded(element, result);
loop {
match self.asap_in_order_scripts_list.take_next_ready_to_be_executed() {
Some((ready_element, ready_result)) => ready_element.execute(ready_result),
None => break,
}
}
}
// https://html.spec.whatwg.org/multipage/#list-of-scripts-that-will-execute-when-the-document-has-finished-parsing
/// Appends `script` to the list of deferred scripts, which run after
/// parsing completes.
pub fn add_deferred_script(&self, script: &HTMLScriptElement) {
self.deferred_scripts.push(script);
}
/// https://html.spec.whatwg.org/multipage/#the-end step 3.
/// https://html.spec.whatwg.org/multipage/#prepare-a-script step 22.d.
/// Records the load result for a deferred script and tries to run any
/// deferred scripts that are now ready.
pub fn deferred_script_loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
self.deferred_scripts.loaded(element, result);
self.process_deferred_scripts();
}
/// https://html.spec.whatwg.org/multipage/#the-end step 3.
///
/// Runs ready deferred scripts in order (only once the document has
/// reached the "interactive" ready state), then fires
/// DOMContentLoaded if the list has been emptied.
fn process_deferred_scripts(&self) {
// Deferred scripts only run after parsing has finished.
if self.ready_state.get() != DocumentReadyState::Interactive {
return;
}
// Part of substep 1.
loop {
// Re-checked every iteration: an executed script may have added
// a script-blocking stylesheet.
if self.script_blocking_stylesheets_count.get() > 0 {
return;
}
if let Some((element, result)) = self.deferred_scripts.take_next_ready_to_be_executed() {
element.execute(result);
} else {
break;
}
}
if self.deferred_scripts.is_empty() {
// https://html.spec.whatwg.org/multipage/#the-end step 4.
self.maybe_dispatch_dom_content_loaded();
}
}
// https://html.spec.whatwg.org/multipage/#the-end step 4.
/// Queues the DOMContentLoaded event (at most once per document) and
/// records its navigation-timing timestamps.
pub fn maybe_dispatch_dom_content_loaded(&self) {
// Idempotent: dispatch at most once.
if self.domcontentloaded_dispatched.get() {
return;
}
self.domcontentloaded_dispatched.set(true);
assert!(self.ReadyState() != DocumentReadyState::Complete,
"Complete before DOMContentLoaded?");
update_with_current_time_ms(&self.dom_content_loaded_event_start);
// Step 4.1.
// Fired asynchronously from the DOM-manipulation task source.
let window = self.window();
window.dom_manipulation_task_source().queue_event(self.upcast(), atom!("DOMContentLoaded"),
EventBubbles::Bubbles, EventCancelable::NotCancelable, window);
window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::DOMContentLoaded);
update_with_current_time_ms(&self.dom_content_loaded_event_end);
// Step 4.2.
// TODO: client message queue.
}
// https://html.spec.whatwg.org/multipage/#abort-a-document
/// Aborts this document: recursively aborts child iframe documents,
/// discards every pending script, cancels queued tasks, and aborts the
/// active parser.
fn abort(&self) {
// We need to inhibit the loader before anything else.
self.loader.borrow_mut().inhibit_events();
// Step 1.
for iframe in self.iter_iframes() {
if let Some(document) = iframe.GetContentDocument() {
// TODO: abort the active documents of every child browsing context.
document.abort();
// TODO: salvageable flag.
}
}
// Step 2.
// Drop every category of pending script so none runs after abort.
self.script_blocking_stylesheets_count.set(0);
*self.pending_parsing_blocking_script.borrow_mut() = None;
*self.asap_scripts_set.borrow_mut() = vec![];
self.asap_in_order_scripts_list.clear();
self.deferred_scripts.clear();
// TODO: https://github.com/servo/servo/issues/15236
self.window.cancel_all_tasks();
// Step 3.
if let Some(parser) = self.get_current_parser() {
parser.abort();
// TODO: salvageable flag.
}
}
/// Tells the constellation that this document's pipeline has finished
/// loading.
pub fn notify_constellation_load(&self) {
let global_scope = self.window.upcast::<GlobalScope>();
let msg = ConstellationMsg::LoadComplete(global_scope.pipeline_id());
global_scope.constellation_chan().send(msg).unwrap();
}
/// Installs `parser` as the document's active parser, or clears it
/// with `None`.
///
/// The parameter was previously named `script`, which was misleading —
/// it is a `ServoParser`, not a script element. Rust call sites are
/// positional, so the rename is interface-compatible.
pub fn set_current_parser(&self, parser: Option<&ServoParser>) {
self.current_parser.set(parser);
}
/// Returns the document's active parser, if any.
pub fn get_current_parser(&self) -> Option<Root<ServoParser>> {
self.current_parser.get()
}
/// Iterate over all iframes in the document.
///
/// Iframes are yielded in tree (preorder) order.
pub fn iter_iframes(&self) -> impl Iterator<Item=Root<HTMLIFrameElement>> {
self.upcast::<Node>()
.traverse_preorder()
.filter_map(Root::downcast::<HTMLIFrameElement>)
}
/// Find an iframe element in the document.
///
/// Returns the first (in tree order) iframe whose frame id matches.
pub fn find_iframe(&self, frame_id: FrameId) -> Option<Root<HTMLIFrameElement>> {
self.iter_iframes()
.find(|node| node.frame_id() == frame_id)
}
// Navigation-timing accessors: each returns the recorded timestamp in
// milliseconds for the corresponding document lifecycle milestone.
pub fn get_dom_loading(&self) -> u64 {
self.dom_loading.get()
}
pub fn get_dom_interactive(&self) -> u64 {
self.dom_interactive.get()
}
pub fn get_dom_content_loaded_event_start(&self) -> u64 {
self.dom_content_loaded_event_start.get()
}
pub fn get_dom_content_loaded_event_end(&self) -> u64 {
self.dom_content_loaded_event_end.get()
}
pub fn get_dom_complete(&self) -> u64 {
self.dom_complete.get()
}
pub fn get_load_event_start(&self) -> u64 {
self.load_event_start.get()
}
pub fn get_load_event_end(&self) -> u64 {
self.load_event_end.get()
}
// https://html.spec.whatwg.org/multipage/#fire-a-focus-event
fn fire_focus_event(&self, focus_event_type: FocusEventType, node: &Node, related_target: Option<&EventTarget>) {
// Both focus and blur are non-bubbling, non-cancelable events; only
// the name differs.
let event_name = match focus_event_type {
FocusEventType::Focus => DOMString::from("focus"),
FocusEventType::Blur => DOMString::from("blur"),
};
let focus_event = FocusEvent::new(&self.window,
event_name,
EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable,
Some(&self.window),
0i32,
related_target);
let event = focus_event.upcast::<Event>();
// This event originates from the user agent, so mark it trusted
// before dispatch.
event.set_trusted(true);
event.fire(node.upcast());
}
/// https://html.spec.whatwg.org/multipage/#cookie-averse-document-object
pub fn is_cookie_averse(&self) -> bool {
// A document can use cookies only when it has a browsing context AND
// its URL uses a network scheme (De Morgan of the spec condition).
!(self.has_browsing_context && url_has_network_scheme(&self.url()))
}
/// Returns the nodes under `client_point` (viewport coordinates),
/// asking layout with both the scrolled page point and the raw client
/// point.
pub fn nodes_from_point(&self, client_point: &Point2D<f32>) -> Vec<UntrustedNodeAddress> {
// Page coordinates = viewport coordinates + current scroll offsets.
let scroll_x = self.window.PageXOffset() as f32;
let scroll_y = self.window.PageYOffset() as f32;
let page_point = Point2D::new(client_point.x + scroll_x,
client_point.y + scroll_y);
self.window.layout().nodes_from_point(page_point, *client_point)
}
}
/// Whether a document was created by the parser (starting in the
/// "loading" ready state) or synthesized via DOM APIs (starting
/// "complete" with DOMContentLoaded considered dispatched).
#[derive(PartialEq, HeapSizeOf)]
pub enum DocumentSource {
FromParser,
NotFromParser,
}
/// Document accessors used from the layout thread. All methods are
/// `unsafe`: they read DOM data without the usual rooting/borrow
/// guarantees and must only be called while layout holds the DOM.
#[allow(unsafe_code)]
pub trait LayoutDocumentHelpers {
unsafe fn is_html_document_for_layout(&self) -> bool;
unsafe fn drain_pending_restyles(&self) -> Vec<(LayoutJS<Element>, PendingRestyle)>;
unsafe fn needs_paint_from_layout(&self);
unsafe fn will_paint(&self);
unsafe fn quirks_mode(&self) -> QuirksMode;
}
#[allow(unsafe_code)]
impl LayoutDocumentHelpers for LayoutJS<Document> {
/// Whether this is an HTML document (vs. XML), read without rooting.
#[inline]
unsafe fn is_html_document_for_layout(&self) -> bool {
(*self.unsafe_get()).is_html_document
}
/// Empties the pending-restyle map and hands the entries to layout,
/// keeping only elements that are still in the document.
#[inline]
#[allow(unrooted_must_root)]
unsafe fn drain_pending_restyles(&self) -> Vec<(LayoutJS<Element>, PendingRestyle)> {
let mut elements = (*self.unsafe_get()).pending_restyles.borrow_mut_for_layout();
// Elements were in a document when they were adding to this list, but that
// may no longer be true when the next layout occurs.
let result = elements.drain()
.map(|(k, v)| (k.to_layout(), v))
.filter(|&(ref k, _)| k.upcast::<Node>().get_flag(IS_IN_DOC))
.collect();
result
}
/// Layout signals that a paint is needed.
#[inline]
unsafe fn needs_paint_from_layout(&self) {
(*self.unsafe_get()).needs_paint.set(true)
}
/// Layout signals that the pending paint is being performed.
#[inline]
unsafe fn will_paint(&self) {
(*self.unsafe_get()).needs_paint.set(false)
}
/// The document's quirks mode, read without rooting.
#[inline]
unsafe fn quirks_mode(&self) -> QuirksMode {
(*self.unsafe_get()).quirks_mode()
}
}
/// https://url.spec.whatwg.org/#network-scheme
fn url_has_network_scheme(url: &ServoUrl) -> bool {
// The network schemes are exactly ftp, http and https.
let scheme = url.scheme();
scheme == "ftp" || scheme == "http" || scheme == "https"
}
/// Whether a document is associated with a browsing context; documents
/// without one (e.g. created via DOMImplementation) have scripting
/// disabled and skip browsing-context-only behavior.
#[derive(Copy, Clone, HeapSizeOf, JSTraceable, PartialEq, Eq)]
pub enum HasBrowsingContext {
No,
Yes,
}
impl Document {
/// Builds the plain (unreflected) `Document` value; `Document::new`
/// wraps it into a JS-reflected object. `url` defaults to about:blank
/// when `None`.
pub fn new_inherited(window: &Window,
has_browsing_context: HasBrowsingContext,
url: Option<ServoUrl>,
origin: Origin,
is_html_document: IsHTMLDocument,
content_type: Option<DOMString>,
last_modified: Option<String>,
activity: DocumentActivity,
source: DocumentSource,
doc_loader: DocumentLoader,
referrer: Option<String>,
referrer_policy: Option<ReferrerPolicy>)
-> Document {
let url = url.unwrap_or_else(|| ServoUrl::parse("about:blank").unwrap());
// Parser-created documents start "loading"; synthesized documents are
// immediately "complete" with DOMContentLoaded considered dispatched.
let (ready_state, domcontentloaded_dispatched) = if source == DocumentSource::FromParser {
(DocumentReadyState::Loading, false)
} else {
(DocumentReadyState::Complete, true)
};
Document {
node: Node::new_document_node(),
window: JS::from_ref(window),
has_browsing_context: has_browsing_context == HasBrowsingContext::Yes,
implementation: Default::default(),
location: Default::default(),
// Default the content type from the document flavor when the
// caller did not supply one.
content_type: match content_type {
Some(string) => string,
None => DOMString::from(match is_html_document {
// https://dom.spec.whatwg.org/#dom-domimplementation-createhtmldocument
IsHTMLDocument::HTMLDocument => "text/html",
// https://dom.spec.whatwg.org/#concept-document-content-type
IsHTMLDocument::NonHTMLDocument => "application/xml",
}),
},
last_modified: last_modified,
url: DOMRefCell::new(url),
// https://dom.spec.whatwg.org/#concept-document-quirks
quirks_mode: Cell::new(QuirksMode::NoQuirks),
// https://dom.spec.whatwg.org/#concept-document-encoding
encoding: Cell::new(UTF_8),
is_html_document: is_html_document == IsHTMLDocument::HTMLDocument,
activity: Cell::new(activity),
id_map: DOMRefCell::new(HashMap::new()),
tag_map: DOMRefCell::new(HashMap::new()),
tagns_map: DOMRefCell::new(HashMap::new()),
classes_map: DOMRefCell::new(HashMap::new()),
images: Default::default(),
embeds: Default::default(),
links: Default::default(),
forms: Default::default(),
scripts: Default::default(),
anchors: Default::default(),
applets: Default::default(),
stylesheets: DOMRefCell::new(None),
stylesheets_changed_since_reflow: Cell::new(false),
stylesheet_list: MutNullableJS::new(None),
ready_state: Cell::new(ready_state),
domcontentloaded_dispatched: Cell::new(domcontentloaded_dispatched),
possibly_focused: Default::default(),
focused: Default::default(),
current_script: Default::default(),
pending_parsing_blocking_script: Default::default(),
script_blocking_stylesheets_count: Cell::new(0u32),
deferred_scripts: Default::default(),
asap_in_order_scripts_list: Default::default(),
asap_scripts_set: Default::default(),
// Scripting requires a browsing context.
scripting_enabled: has_browsing_context == HasBrowsingContext::Yes,
animation_frame_ident: Cell::new(0),
animation_frame_list: DOMRefCell::new(vec![]),
running_animation_callbacks: Cell::new(false),
loader: DOMRefCell::new(doc_loader),
current_parser: Default::default(),
reflow_timeout: Cell::new(None),
base_element: Default::default(),
appropriate_template_contents_owner_document: Default::default(),
pending_restyles: DOMRefCell::new(HashMap::new()),
needs_paint: Cell::new(false),
active_touch_points: DOMRefCell::new(Vec::new()),
dom_loading: Cell::new(Default::default()),
dom_interactive: Cell::new(Default::default()),
dom_content_loaded_event_start: Cell::new(Default::default()),
dom_content_loaded_event_end: Cell::new(Default::default()),
dom_complete: Cell::new(Default::default()),
load_event_start: Cell::new(Default::default()),
load_event_end: Cell::new(Default::default()),
https_state: Cell::new(HttpsState::None),
touchpad_pressure_phase: Cell::new(TouchpadPressurePhase::BeforeClick),
origin: origin,
referrer: referrer,
referrer_policy: Cell::new(referrer_policy),
target_element: MutNullableJS::new(None),
last_click_info: DOMRefCell::new(None),
ignore_destructive_writes_counter: Default::default(),
dom_count: Cell::new(1),
fullscreen_element: MutNullableJS::new(None),
}
}
// https://dom.spec.whatwg.org/#dom-document
/// The `new Document()` constructor exposed to script: an inactive,
/// non-HTML document with no browsing context, sharing the origin of
/// the window's current document.
pub fn Constructor(window: &Window) -> Fallible<Root<Document>> {
let doc = window.Document();
let docloader = DocumentLoader::new(&*doc.loader());
Ok(Document::new(window,
HasBrowsingContext::No,
None,
doc.origin().alias(),
IsHTMLDocument::NonHTMLDocument,
None,
None,
DocumentActivity::Inactive,
DocumentSource::NotFromParser,
docloader,
None,
None))
}
/// Creates a JS-reflected `Document` (see `new_inherited` for the
/// parameter semantics) and wires the node's owner document to itself.
pub fn new(window: &Window,
has_browsing_context: HasBrowsingContext,
url: Option<ServoUrl>,
origin: Origin,
doctype: IsHTMLDocument,
content_type: Option<DOMString>,
last_modified: Option<String>,
activity: DocumentActivity,
source: DocumentSource,
doc_loader: DocumentLoader,
referrer: Option<String>,
referrer_policy: Option<ReferrerPolicy>)
-> Root<Document> {
let document = reflect_dom_object(box Document::new_inherited(window,
has_browsing_context,
url,
origin,
doctype,
content_type,
last_modified,
activity,
source,
doc_loader,
referrer,
referrer_policy),
window,
DocumentBinding::Wrap);
{
// A document node is its own owner document.
let node = document.upcast::<Node>();
node.set_owner_doc(&document);
}
document
}
/// Builds a simple (static) NodeList of every node under the document
/// element, in tree order, for which `callback` returns true.
fn create_node_list<F: Fn(&Node) -> bool>(&self, callback: F) -> Root<NodeList> {
let doc = self.GetDocumentElement();
let maybe_node = doc.r().map(Castable::upcast::<Node>);
// With no document element the iterator is empty.
let iter = maybe_node.iter()
.flat_map(|node| node.traverse_preorder())
.filter(|node| callback(&node));
NodeList::new_simple_list(&self.window, iter)
}
/// The document element, when it is an `<html>` element.
fn get_html_element(&self) -> Option<Root<HTMLHtmlElement>> {
self.GetDocumentElement().and_then(Root::downcast)
}
// Ensure that the stylesheets vector is populated
//
// The cached list is invalidated (set to None) by
// invalidate_stylesheets and rebuilt lazily here by walking the whole
// document in tree order for stylesheet-owning nodes.
fn ensure_stylesheets(&self) {
let mut stylesheets = self.stylesheets.borrow_mut();
if stylesheets.is_none() {
*stylesheets = Some(self.upcast::<Node>()
.traverse_preorder()
.filter_map(|node| {
node.get_stylesheet()
.map(|stylesheet| StylesheetInDocument {
node: JS::from_ref(&*node),
stylesheet: stylesheet,
})
})
.collect());
};
}
/// Returns the list of stylesheets associated with nodes in the document.
pub fn stylesheets(&self) -> Vec<Arc<Stylesheet>> {
self.ensure_stylesheets();
// Cloning Arcs only; the sheets themselves are shared.
self.stylesheets.borrow().as_ref().unwrap().iter()
.map(|s| s.stylesheet.clone())
.collect()
}
/// Runs `f` over the (lazily populated) in-document stylesheet slice
/// without copying it.
pub fn with_style_sheets_in_document<F, T>(&self, mut f: F) -> T
where F: FnMut(&[StylesheetInDocument]) -> T {
self.ensure_stylesheets();
f(&self.stylesheets.borrow().as_ref().unwrap())
}
/// https://html.spec.whatwg.org/multipage/#appropriate-template-contents-owner-document
///
/// Lazily creates (and caches) the inert document that owns the
/// contents of `<template>` elements in this document. The created
/// document is its own template-contents owner.
pub fn appropriate_template_contents_owner_document(&self) -> Root<Document> {
self.appropriate_template_contents_owner_document.or_init(|| {
// The owner document mirrors this document's HTML-ness.
let doctype = if self.is_html_document {
IsHTMLDocument::HTMLDocument
} else {
IsHTMLDocument::NonHTMLDocument
};
let new_doc = Document::new(self.window(),
HasBrowsingContext::No,
None,
// https://github.com/whatwg/html/issues/2109
Origin::opaque_identifier(),
doctype,
None,
None,
DocumentActivity::Inactive,
DocumentSource::NotFromParser,
DocumentLoader::new(&self.loader()),
None,
None);
new_doc.appropriate_template_contents_owner_document.set(Some(&new_doc));
new_doc
})
}
/// Looks up the first element registered under `id` in the document's
/// id map (tree order for duplicates).
pub fn get_element_by_id(&self, id: &Atom) -> Option<Root<Element>> {
self.id_map.borrow().get(&id).map(|ref elements| Root::from_ref(&*(*elements)[0]))
}
/// Returns (creating on first use) the pending restyle entry for `el`,
/// which layout later drains via `drain_pending_restyles`.
pub fn ensure_pending_restyle(&self, el: &Element) -> RefMut<PendingRestyle> {
let map = self.pending_restyles.borrow_mut();
RefMut::map(map, |m| m.entry(JS::from_ref(el)).or_insert_with(PendingRestyle::new))
}
/// Called before an element's state bits change: snapshots the current
/// state (once per restyle cycle) so the style system can diff against
/// it.
pub fn element_state_will_change(&self, el: &Element) {
let mut entry = self.ensure_pending_restyle(el);
if entry.snapshot.is_none() {
entry.snapshot = Some(Snapshot::new(el.html_element_in_html_document()));
}
let mut snapshot = entry.snapshot.as_mut().unwrap();
// Only the first change per cycle records the state; later changes
// keep the original pre-restyle value.
if snapshot.state.is_none() {
snapshot.state = Some(el.state());
}
}
/// Called before `attr` changes on `el`: snapshots the current
/// attribute set (once per restyle cycle) and flags style-attribute
/// changes for restyle.
pub fn element_attr_will_change(&self, el: &Element, attr: &Attr) {
// FIXME(emilio): Kind of a shame we have to duplicate this.
//
// I'm getting rid of the whole hashtable soon anyway, since all it does
// right now is populate the element restyle data in layout, and we
// could in theory do it in the DOM I think.
let mut entry = self.ensure_pending_restyle(el);
if entry.snapshot.is_none() {
entry.snapshot = Some(Snapshot::new(el.html_element_in_html_document()));
}
if attr.local_name() == &local_name!("style") {
entry.hint |= RESTYLE_STYLE_ATTRIBUTE;
}
let mut snapshot = entry.snapshot.as_mut().unwrap();
// Only the first attribute change per cycle records the set; later
// changes keep the original pre-restyle values.
if snapshot.attrs.is_none() {
let attrs = el.attrs()
.iter()
.map(|attr| (attr.identifier().clone(), attr.value().clone()))
.collect();
snapshot.attrs = Some(attrs);
}
}
/// Overrides the document's referrer policy (e.g. from a
/// Referrer-Policy header or a meta tag).
pub fn set_referrer_policy(&self, policy: Option<ReferrerPolicy>) {
self.referrer_policy.set(policy);
}
//TODO - default still at no-referrer
/// The document's current referrer policy, if one has been set.
pub fn get_referrer_policy(&self) -> Option<ReferrerPolicy> {
// Idiomatic Rust: the tail expression is the return value; the
// previous explicit `return ...;` was redundant.
self.referrer_policy.get()
}
/// Moves the `:target` pseudo-class from the previous target element
/// (if any) to `node` (if any) and reflows so the change is rendered.
pub fn set_target_element(&self, node: Option<&Element>) {
// Clear the flag on the outgoing target before switching.
if let Some(ref element) = self.target_element.get() {
element.set_target_state(false);
}
self.target_element.set(node);
if let Some(ref element) = self.target_element.get() {
element.set_target_state(true);
}
self.window.reflow(ReflowGoal::ForDisplay,
ReflowQueryType::NoQuery,
ReflowReason::ElementStateChanged);
}
// Counter controlling whether document.write() calls that would blow
// away the document are ignored; incremented/decremented around script
// execution.
pub fn incr_ignore_destructive_writes_counter(&self) {
self.ignore_destructive_writes_counter.set(
self.ignore_destructive_writes_counter.get() + 1);
}
pub fn decr_ignore_destructive_writes_counter(&self) {
self.ignore_destructive_writes_counter.set(
self.ignore_destructive_writes_counter.get() - 1);
}
// https://fullscreen.spec.whatwg.org/#dom-element-requestfullscreen
/// Begin the `requestFullscreen()` algorithm for `pending`, returning the
/// promise that will be settled once the (asynchronous) transition runs.
/// Validation failures are not reported synchronously: `error` is carried
/// into the queued `ElementPerformFullscreenEnter` task, which rejects the
/// promise on the script thread.
#[allow(unrooted_must_root)]
pub fn enter_fullscreen(&self, pending: &Element) -> Rc<Promise> {
    // Step 1
    let promise = Promise::new(self.global().r());
    let mut error = false;
    // Step 4
    // check namespace: only <math>, <svg>, and HTML elements may go
    // fullscreen per the spec's fullscreen-element-ready check.
    match *pending.namespace() {
        ns!(mathml) => {
            if pending.local_name().as_ref() != "math" {
                error = true;
            }
        }
        ns!(svg) => {
            if pending.local_name().as_ref() != "svg" {
                error = true;
            }
        }
        ns!(html) => (),
        _ => error = true,
    }
    // fullscreen element ready check
    if !pending.fullscreen_element_ready_check() {
        error = true;
    }
    // TODO fullscreen is supported
    // TODO This algorithm is allowed to request fullscreen.
    // Step 5 Parallel start
    let window = self.window();
    // Step 6: tell the constellation to switch the window into fullscreen
    // only when validation passed.
    if !error {
        let event = ConstellationMsg::SetFullscreenState(true);
        window.upcast::<GlobalScope>().constellation_chan().send(event).unwrap();
    }
    // Step 7: queue a task back on the script thread; Trusted/TrustedPromise
    // wrap the GC-managed values so they can cross task boundaries safely.
    let trusted_pending = Trusted::new(pending);
    let trusted_promise = TrustedPromise::new(promise.clone());
    let handler = ElementPerformFullscreenEnter::new(trusted_pending, trusted_promise, error);
    let script_msg = CommonScriptMsg::RunnableMsg(ScriptThreadEventCategory::EnterFullscreen, handler);
    let msg = MainThreadScriptMsg::Common(script_msg);
    window.main_thread_script_chan().send(msg).unwrap();
    promise
}
// https://fullscreen.spec.whatwg.org/#exit-fullscreen
/// Begin the `exitFullscreen()` algorithm. Rejects immediately when no
/// element is fullscreen; otherwise notifies the constellation and queues
/// an `ElementPerformFullscreenExit` task that settles the promise.
#[allow(unrooted_must_root)]
pub fn exit_fullscreen(&self) -> Rc<Promise> {
    let global = self.global();
    // Step 1
    let promise = Promise::new(global.r());
    // Step 2: nothing to exit from — reject synchronously.
    if self.fullscreen_element.get().is_none() {
        promise.reject_error(global.get_cx(), Error::Type(String::from("fullscreen is null")));
        return promise
    }
    // TODO Step 3-6
    let element = self.fullscreen_element.get().unwrap();
    // Step 7 Parallel start
    let window = self.window();
    // Step 8
    let event = ConstellationMsg::SetFullscreenState(false);
    window.upcast::<GlobalScope>().constellation_chan().send(event).unwrap();
    // Step 9: finish on the script thread via a queued task.
    let trusted_element = Trusted::new(element.r());
    let trusted_promise = TrustedPromise::new(promise.clone());
    let handler = ElementPerformFullscreenExit::new(trusted_element, trusted_promise);
    let script_msg = CommonScriptMsg::RunnableMsg(ScriptThreadEventCategory::ExitFullscreen, handler);
    let msg = MainThreadScriptMsg::Common(script_msg);
    window.main_thread_script_chan().send(msg).unwrap();
    promise
}
/// Record which element (if any) is currently fullscreen for this document.
pub fn set_fullscreen_element(&self, element: Option<&Element>) {
    self.fullscreen_element.set(element);
}
/// Whether this document is allowed to use the fullscreen feature.
// https://html.spec.whatwg.org/multipage/#allowed-to-use
pub fn get_allow_fullscreen(&self) -> bool {
    // Step 1: no browsing context means the feature is never allowed.
    if self.browsing_context().is_none() {
        return false;
    }
    // Step 2: top-level browsing contexts are always allowed.
    let window = self.window();
    if window.is_top_level() {
        return true;
    }
    // Step 3: nested contexts need `allowfullscreen` on their frame element.
    window.GetFrameElement()
          .map_or(false, |el| el.has_attribute(&local_name!("allowfullscreen")))
}
}
impl Element {
    /// True when a click event targeting this element should be suppressed
    /// because the element is a disabled form-associated control.
    fn click_event_filter_by_disabled_state(&self) -> bool {
        let node = self.upcast::<Node>();
        // The guard applies to the whole OR-pattern: only these control
        // types, *and* only while actually in the disabled state.
        match node.type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLButtonElement)) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLInputElement)) |
            // NodeTypeId::Element(ElementTypeId::HTMLKeygenElement) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLOptionElement)) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLSelectElement)) |
            NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLTextAreaElement))
                if self.disabled_state() => true,
            _ => false,
        }
    }
}
impl DocumentMethods for Document {
// https://drafts.csswg.org/cssom/#dom-document-stylesheets
/// Lazily-created `document.styleSheets` collection.
fn StyleSheets(&self) -> Root<StyleSheetList> {
    self.stylesheet_list.or_init(|| StyleSheetList::new(&self.window, JS::from_ref(&self)))
}

// https://dom.spec.whatwg.org/#dom-document-implementation
/// Lazily-created `document.implementation` object.
fn Implementation(&self) -> Root<DOMImplementation> {
    self.implementation.or_init(|| DOMImplementation::new(self))
}

// https://dom.spec.whatwg.org/#dom-document-url
/// `document.URL` — the document's URL serialized as a USVString.
fn URL(&self) -> USVString {
    USVString(String::from(self.url().as_str()))
}
// https://html.spec.whatwg.org/multipage/#dom-document-activeelement
/// `document.activeElement`: the focused element, falling back to the
/// body, then the document element.
fn GetActiveElement(&self) -> Option<Root<Element>> {
    // TODO: Step 2.
    match self.get_focused_element() {
        Some(element) => Some(element), // Step 3. and 4.
        None => match self.GetBody() { // Step 5.
            Some(body) => Some(Root::upcast(body)),
            None => self.GetDocumentElement(),
        },
    }
}

// https://html.spec.whatwg.org/multipage/#dom-document-hasfocus
/// `document.hasFocus()`: true for a fully-active top-level document.
fn HasFocus(&self) -> bool {
    // Step 1-2. (no parent info == top-level browsing context)
    if self.window().parent_info().is_none() && self.is_fully_active() {
        return true;
    }
    // TODO Step 3.
    false
}
// https://html.spec.whatwg.org/multipage/#relaxing-the-same-origin-restriction
/// `document.domain` getter: the host of the document's origin, or the
/// empty string for documents without a browsing context or host.
fn Domain(&self) -> DOMString {
    // Step 1.
    if self.browsing_context().is_none() {
        return DOMString::new();
    }
    if let Some(host) = self.origin.host() {
        // Step 4.
        DOMString::from(host.to_string())
    } else {
        // Step 3.
        DOMString::new()
    }
}

// https://html.spec.whatwg.org/multipage/#dom-document-referrer
/// `document.referrer`, or the empty string when none was recorded.
fn Referrer(&self) -> DOMString {
    match self.referrer {
        Some(ref referrer) => DOMString::from(referrer.to_string()),
        None => DOMString::new()
    }
}

// https://dom.spec.whatwg.org/#dom-document-documenturi
/// `document.documentURI` is defined to be the same value as `URL`.
fn DocumentURI(&self) -> USVString {
    self.URL()
}

// https://dom.spec.whatwg.org/#dom-document-compatmode
/// `document.compatMode`: "BackCompat" in quirks mode, else "CSS1Compat".
fn CompatMode(&self) -> DOMString {
    DOMString::from(match self.quirks_mode.get() {
        QuirksMode::LimitedQuirks | QuirksMode::NoQuirks => "CSS1Compat",
        QuirksMode::Quirks => "BackCompat",
    })
}
// https://dom.spec.whatwg.org/#dom-document-characterset
/// `document.characterSet`: the document encoding's name mapped to the
/// canonical capitalization the DOM spec requires; unknown names pass
/// through unchanged.
fn CharacterSet(&self) -> DOMString {
    DOMString::from(match self.encoding.get().name() {
        "utf-8" => "UTF-8",
        "ibm866" => "IBM866",
        "iso-8859-2" => "ISO-8859-2",
        "iso-8859-3" => "ISO-8859-3",
        "iso-8859-4" => "ISO-8859-4",
        "iso-8859-5" => "ISO-8859-5",
        "iso-8859-6" => "ISO-8859-6",
        "iso-8859-7" => "ISO-8859-7",
        "iso-8859-8" => "ISO-8859-8",
        "iso-8859-8-i" => "ISO-8859-8-I",
        "iso-8859-10" => "ISO-8859-10",
        "iso-8859-13" => "ISO-8859-13",
        "iso-8859-14" => "ISO-8859-14",
        "iso-8859-15" => "ISO-8859-15",
        "iso-8859-16" => "ISO-8859-16",
        "koi8-r" => "KOI8-R",
        "koi8-u" => "KOI8-U",
        "gbk" => "GBK",
        "big5" => "Big5",
        "euc-jp" => "EUC-JP",
        "iso-2022-jp" => "ISO-2022-JP",
        "shift_jis" => "Shift_JIS",
        "euc-kr" => "EUC-KR",
        "utf-16be" => "UTF-16BE",
        "utf-16le" => "UTF-16LE",
        name => name
    })
}

// https://dom.spec.whatwg.org/#dom-document-charset
/// Legacy alias of `characterSet`.
fn Charset(&self) -> DOMString {
    self.CharacterSet()
}

// https://dom.spec.whatwg.org/#dom-document-inputencoding
/// Legacy alias of `characterSet`.
fn InputEncoding(&self) -> DOMString {
    self.CharacterSet()
}

// https://dom.spec.whatwg.org/#dom-document-content_type
/// `document.contentType`, e.g. "text/html".
fn ContentType(&self) -> DOMString {
    self.content_type.clone()
}
// https://dom.spec.whatwg.org/#dom-document-doctype
/// `document.doctype`: the first DocumentType child, if any.
fn GetDoctype(&self) -> Option<Root<DocumentType>> {
    // filter_map(Root::downcast) keeps only DocumentType children.
    self.upcast::<Node>().children().filter_map(Root::downcast).next()
}

// https://dom.spec.whatwg.org/#dom-document-documentelement
/// `document.documentElement`: the first element child of the document.
fn GetDocumentElement(&self) -> Option<Root<Element>> {
    self.upcast::<Node>().child_elements().next()
}
// https://dom.spec.whatwg.org/#dom-document-getelementsbytagname
/// `getElementsByTagName`: returns a live collection, cached per qualified
/// name so repeated calls hand back the same HTMLCollection object.
fn GetElementsByTagName(&self, qualified_name: DOMString) -> Root<HTMLCollection> {
    let qualified_name = LocalName::from(&*qualified_name);
    match self.tag_map.borrow_mut().entry(qualified_name.clone()) {
        Occupied(entry) => Root::from_ref(entry.get()),
        Vacant(entry) => {
            let result = HTMLCollection::by_qualified_name(
                &self.window, self.upcast(), qualified_name);
            entry.insert(JS::from_ref(&*result));
            result
        }
    }
}

// https://dom.spec.whatwg.org/#dom-document-getelementsbytagnamens
/// Namespaced variant of `getElementsByTagName`, cached per (ns, name).
fn GetElementsByTagNameNS(&self,
                          maybe_ns: Option<DOMString>,
                          tag_name: DOMString)
                          -> Root<HTMLCollection> {
    let ns = namespace_from_domstring(maybe_ns);
    let local = LocalName::from(tag_name);
    let qname = QualName::new(ns, local);
    match self.tagns_map.borrow_mut().entry(qname.clone()) {
        Occupied(entry) => Root::from_ref(entry.get()),
        Vacant(entry) => {
            let result = HTMLCollection::by_qual_tag_name(&self.window, self.upcast(), qname);
            entry.insert(JS::from_ref(&*result));
            result
        }
    }
}

// https://dom.spec.whatwg.org/#dom-document-getelementsbyclassname
/// `getElementsByClassName`: cached per the split list of class atoms.
fn GetElementsByClassName(&self, classes: DOMString) -> Root<HTMLCollection> {
    let class_atoms: Vec<Atom> = split_html_space_chars(&classes)
        .map(Atom::from)
        .collect();
    match self.classes_map.borrow_mut().entry(class_atoms.clone()) {
        Occupied(entry) => Root::from_ref(entry.get()),
        Vacant(entry) => {
            let result = HTMLCollection::by_atomic_class_name(&self.window,
                                                              self.upcast(),
                                                              class_atoms);
            entry.insert(JS::from_ref(&*result));
            result
        }
    }
}

// https://dom.spec.whatwg.org/#dom-nonelementparentnode-getelementbyid
/// `getElementById`: delegates to the document's id→element map.
fn GetElementById(&self, id: DOMString) -> Option<Root<Element>> {
    self.get_element_by_id(&Atom::from(id))
}
// https://dom.spec.whatwg.org/#dom-document-createelement
/// `createElement`: validates the name, lowercases it for HTML documents,
/// and creates the element in the HTML namespace.
fn CreateElement(&self, mut local_name: DOMString) -> Fallible<Root<Element>> {
    if xml_name_type(&local_name) == InvalidXMLName {
        debug!("Not a valid element name");
        return Err(Error::InvalidCharacter);
    }
    // HTML documents are case-insensitive for element names.
    if self.is_html_document {
        local_name.make_ascii_lowercase();
    }
    let name = QualName::new(ns!(html), LocalName::from(local_name));
    Ok(Element::create(name, None, self, ElementCreator::ScriptCreated))
}

// https://dom.spec.whatwg.org/#dom-document-createelementns
/// `createElementNS`: validate-and-extract splits the qualified name into
/// (namespace, prefix, local name) per the DOM spec, raising on bad input.
fn CreateElementNS(&self,
                   namespace: Option<DOMString>,
                   qualified_name: DOMString)
                   -> Fallible<Root<Element>> {
    let (namespace, prefix, local_name) = try!(validate_and_extract(namespace,
                                                                    &qualified_name));
    let name = QualName::new(namespace, local_name);
    Ok(Element::create(name, prefix, self, ElementCreator::ScriptCreated))
}
// https://dom.spec.whatwg.org/#dom-document-createattribute
/// `createAttribute`: validates the name, lowercases it for HTML
/// documents, and creates a namespace-less attribute with an empty value.
fn CreateAttribute(&self, mut local_name: DOMString) -> Fallible<Root<Attr>> {
    if xml_name_type(&local_name) == InvalidXMLName {
        // Fixed copy-paste from CreateElement: this path rejects an
        // invalid *attribute* name.
        debug!("Not a valid attribute name");
        return Err(Error::InvalidCharacter);
    }
    // HTML documents are case-insensitive for attribute names.
    if self.is_html_document {
        local_name.make_ascii_lowercase();
    }
    let name = LocalName::from(local_name);
    let value = AttrValue::String("".to_owned());
    Ok(Attr::new(&self.window, name.clone(), value, name, ns!(), None, None))
}
// https://dom.spec.whatwg.org/#dom-document-createattributens
/// `createAttributeNS`: namespaced attribute creation with an empty value.
fn CreateAttributeNS(&self,
                     namespace: Option<DOMString>,
                     qualified_name: DOMString)
                     -> Fallible<Root<Attr>> {
    let (namespace, prefix, local_name) = try!(validate_and_extract(namespace,
                                                                    &qualified_name));
    let value = AttrValue::String("".to_owned());
    let qualified_name = LocalName::from(qualified_name);
    Ok(Attr::new(&self.window,
                 local_name,
                 value,
                 qualified_name,
                 namespace,
                 prefix,
                 None))
}

// https://dom.spec.whatwg.org/#dom-document-createdocumentfragment
/// `createDocumentFragment`.
fn CreateDocumentFragment(&self) -> Root<DocumentFragment> {
    DocumentFragment::new(self)
}

// https://dom.spec.whatwg.org/#dom-document-createtextnode
/// `createTextNode`.
fn CreateTextNode(&self, data: DOMString) -> Root<Text> {
    Text::new(data, self)
}

// https://dom.spec.whatwg.org/#dom-document-createcomment
/// `createComment`.
fn CreateComment(&self, data: DOMString) -> Root<Comment> {
    Comment::new(data, self)
}
// https://dom.spec.whatwg.org/#dom-document-createprocessinginstruction
/// `createProcessingInstruction`: rejects invalid targets and data that
/// would prematurely terminate the PI ("?>").
fn CreateProcessingInstruction(&self,
                               target: DOMString,
                               data: DOMString)
                               -> Fallible<Root<ProcessingInstruction>> {
    // Step 1.
    if xml_name_type(&target) == InvalidXMLName {
        return Err(Error::InvalidCharacter);
    }
    // Step 2: "?>" would close the PI early when serialized.
    if data.contains("?>") {
        return Err(Error::InvalidCharacter);
    }
    // Step 3.
    Ok(ProcessingInstruction::new(target, data, self))
}
// https://dom.spec.whatwg.org/#dom-document-importnode
/// `importNode`: clone a node from another document into this one;
/// documents themselves cannot be imported.
fn ImportNode(&self, node: &Node, deep: bool) -> Fallible<Root<Node>> {
    // Step 1.
    if node.is::<Document>() {
        return Err(Error::NotSupported);
    }
    // Step 2.
    let clone_children = if deep {
        CloneChildrenFlag::CloneChildren
    } else {
        CloneChildrenFlag::DoNotCloneChildren
    };
    Ok(Node::clone(node, Some(self), clone_children))
}

// https://dom.spec.whatwg.org/#dom-document-adoptnode
/// `adoptNode`: move `node` (and its subtree) into this document without
/// cloning; documents themselves cannot be adopted.
fn AdoptNode(&self, node: &Node) -> Fallible<Root<Node>> {
    // Step 1.
    if node.is::<Document>() {
        return Err(Error::NotSupported);
    }
    // Step 2.
    Node::adopt(node, self);
    // Step 3.
    Ok(Root::from_ref(node))
}
// https://dom.spec.whatwg.org/#dom-document-createevent
/// Legacy `document.createEvent`: maps the (case-insensitive) interface
/// name to an uninitialized event object, or NotSupportedError for
/// unrecognized names.
fn CreateEvent(&self, mut interface: DOMString) -> Fallible<Root<Event>> {
    interface.make_ascii_lowercase();
    match &*interface {
        "beforeunloadevent" =>
            Ok(Root::upcast(BeforeUnloadEvent::new_uninitialized(&self.window))),
        "closeevent" =>
            Ok(Root::upcast(CloseEvent::new_uninitialized(self.window.upcast()))),
        "customevent" =>
            Ok(Root::upcast(CustomEvent::new_uninitialized(self.window.upcast()))),
        "errorevent" =>
            Ok(Root::upcast(ErrorEvent::new_uninitialized(self.window.upcast()))),
        "events" | "event" | "htmlevents" | "svgevents" =>
            Ok(Event::new_uninitialized(&self.window.upcast())),
        "focusevent" =>
            Ok(Root::upcast(FocusEvent::new_uninitialized(&self.window))),
        "hashchangeevent" =>
            Ok(Root::upcast(HashChangeEvent::new_uninitialized(&self.window))),
        "keyboardevent" =>
            Ok(Root::upcast(KeyboardEvent::new_uninitialized(&self.window))),
        "messageevent" =>
            Ok(Root::upcast(MessageEvent::new_uninitialized(self.window.upcast()))),
        "mouseevent" | "mouseevents" =>
            Ok(Root::upcast(MouseEvent::new_uninitialized(&self.window))),
        "pagetransitionevent" =>
            Ok(Root::upcast(PageTransitionEvent::new_uninitialized(&self.window))),
        "popstateevent" =>
            Ok(Root::upcast(PopStateEvent::new_uninitialized(&self.window))),
        "progressevent" =>
            Ok(Root::upcast(ProgressEvent::new_uninitialized(self.window.upcast()))),
        // StorageEvent is seeded with this document's URL.
        "storageevent" => {
            let USVString(url) = self.URL();
            Ok(Root::upcast(StorageEvent::new_uninitialized(&self.window, DOMString::from(url))))
        },
        // TouchEvent starts with three empty touch lists.
        "touchevent" =>
            Ok(Root::upcast(
                TouchEvent::new_uninitialized(&self.window,
                    &TouchList::new(&self.window, &[]),
                    &TouchList::new(&self.window, &[]),
                    &TouchList::new(&self.window, &[]),
                )
            )),
        "uievent" | "uievents" =>
            Ok(Root::upcast(UIEvent::new_uninitialized(&self.window))),
        "webglcontextevent" =>
            Ok(Root::upcast(WebGLContextEvent::new_uninitialized(&self.window))),
        _ =>
            Err(Error::NotSupported),
    }
}
// https://html.spec.whatwg.org/multipage/#dom-document-lastmodified
/// `document.lastModified`: stored value, or the current time formatted
/// per the spec when none was recorded.
fn LastModified(&self) -> DOMString {
    match self.last_modified {
        Some(ref t) => DOMString::from(t.clone()),
        None => DOMString::from(time::now().strftime("%m/%d/%Y %H:%M:%S").unwrap().to_string()),
    }
}

// https://dom.spec.whatwg.org/#dom-document-createrange
/// `createRange`: a collapsed range anchored at this document.
fn CreateRange(&self) -> Root<Range> {
    Range::new_with_doc(self)
}

// https://dom.spec.whatwg.org/#dom-document-createnodeiteratorroot-whattoshow-filter
/// `createNodeIterator`.
fn CreateNodeIterator(&self,
                      root: &Node,
                      what_to_show: u32,
                      filter: Option<Rc<NodeFilter>>)
                      -> Root<NodeIterator> {
    NodeIterator::new(self, root, what_to_show, filter)
}

// https://w3c.github.io/touch-events/#idl-def-Document
/// `createTouch`: client coordinates are derived from page coordinates
/// minus the window's current scroll offsets.
fn CreateTouch(&self,
               window: &Window,
               target: &EventTarget,
               identifier: i32,
               page_x: Finite<f64>,
               page_y: Finite<f64>,
               screen_x: Finite<f64>,
               screen_y: Finite<f64>)
               -> Root<Touch> {
    let client_x = Finite::wrap(*page_x - window.PageXOffset() as f64);
    let client_y = Finite::wrap(*page_y - window.PageYOffset() as f64);
    Touch::new(window,
               identifier,
               target,
               screen_x,
               screen_y,
               client_x,
               client_y,
               page_x,
               page_y)
}

// https://w3c.github.io/touch-events/#idl-def-document-createtouchlist(touch...)
/// `createTouchList`.
fn CreateTouchList(&self, touches: &[&Touch]) -> Root<TouchList> {
    TouchList::new(&self.window, &touches)
}

// https://dom.spec.whatwg.org/#dom-document-createtreewalker
/// `createTreeWalker`.
fn CreateTreeWalker(&self,
                    root: &Node,
                    what_to_show: u32,
                    filter: Option<Rc<NodeFilter>>)
                    -> Root<TreeWalker> {
    TreeWalker::new(self, root, what_to_show, filter)
}
// https://html.spec.whatwg.org/multipage/#document.title
/// `document.title` getter: for SVG roots, use the first child <title>
/// in the SVG namespace; for other documents, the first HTMLTitleElement
/// in tree order. The text is whitespace-collapsed per the spec.
fn Title(&self) -> DOMString {
    let title = self.GetDocumentElement().and_then(|root| {
        if root.namespace() == &ns!(svg) && root.local_name() == &local_name!("svg") {
            // Step 1.
            root.upcast::<Node>()
                .child_elements()
                .find(|node| {
                    node.namespace() == &ns!(svg) && node.local_name() == &local_name!("title")
                })
                .map(Root::upcast::<Node>)
        } else {
            // Step 2.
            root.upcast::<Node>()
                .traverse_preorder()
                .find(|node| node.is::<HTMLTitleElement>())
        }
    });
    match title {
        None => DOMString::new(),
        Some(ref title) => {
            // Steps 3-4: join runs of HTML whitespace with single spaces.
            let value = title.child_text_content();
            DOMString::from(str_join(split_html_space_chars(&value), " "))
        },
    }
}
// https://html.spec.whatwg.org/multipage/#document.title
/// `document.title` setter: find or create the appropriate title element
/// (SVG <title> as first child for SVG roots, HTML <title> in <head>
/// otherwise) and replace its text content. Silently returns when there
/// is no document element, no <head> (HTML case), or the root is in
/// neither namespace.
fn SetTitle(&self, title: DOMString) {
    let root = match self.GetDocumentElement() {
        Some(root) => root,
        None => return,
    };
    let elem = if root.namespace() == &ns!(svg) && root.local_name() == &local_name!("svg") {
        let elem = root.upcast::<Node>().child_elements().find(|node| {
            node.namespace() == &ns!(svg) && node.local_name() == &local_name!("title")
        });
        match elem {
            Some(elem) => Root::upcast::<Node>(elem),
            None => {
                // No SVG <title> yet: create one and insert it first.
                let name = QualName::new(ns!(svg), local_name!("title"));
                let elem = Element::create(name, None, self, ElementCreator::ScriptCreated);
                let parent = root.upcast::<Node>();
                let child = elem.upcast::<Node>();
                parent.InsertBefore(child, parent.GetFirstChild().r())
                      .unwrap()
            }
        }
    } else if root.namespace() == &ns!(html) {
        let elem = root.upcast::<Node>()
                       .traverse_preorder()
                       .find(|node| node.is::<HTMLTitleElement>());
        match elem {
            Some(elem) => elem,
            None => {
                // No <title> anywhere: append one to <head> if present.
                match self.GetHead() {
                    Some(head) => {
                        let name = QualName::new(ns!(html), local_name!("title"));
                        let elem = Element::create(name,
                                                   None,
                                                   self,
                                                   ElementCreator::ScriptCreated);
                        head.upcast::<Node>()
                            .AppendChild(elem.upcast())
                            .unwrap()
                    },
                    None => return,
                }
            }
        }
    } else {
        return;
    };
    elem.SetTextContent(Some(title));
}
// https://html.spec.whatwg.org/multipage/#dom-document-head
/// `document.head`: the first HTMLHeadElement child of the html element.
fn GetHead(&self) -> Option<Root<HTMLHeadElement>> {
    self.get_html_element()
        .and_then(|root| root.upcast::<Node>().children().filter_map(Root::downcast).next())
}

// https://html.spec.whatwg.org/multipage/#dom-document-currentscript
/// `document.currentScript`.
fn GetCurrentScript(&self) -> Option<Root<HTMLScriptElement>> {
    self.current_script.get()
}

// https://html.spec.whatwg.org/multipage/#dom-document-body
/// `document.body`: the first <body> or <frameset> child of the html
/// element, per the spec.
fn GetBody(&self) -> Option<Root<HTMLElement>> {
    self.get_html_element().and_then(|root| {
        let node = root.upcast::<Node>();
        node.children().find(|child| {
            match child.type_id() {
                NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)) |
                NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLFrameSetElement)) => true,
                _ => false
            }
        }).map(|node| Root::downcast(node).unwrap())
    })
}
// https://html.spec.whatwg.org/multipage/#dom-document-body
/// `document.body` setter: only <body>/<frameset> are accepted; the new
/// body replaces the old one or is appended to the root element.
fn SetBody(&self, new_body: Option<&HTMLElement>) -> ErrorResult {
    // Step 1: null is a HierarchyRequestError, per spec.
    let new_body = match new_body {
        Some(new_body) => new_body,
        None => return Err(Error::HierarchyRequest),
    };
    let node = new_body.upcast::<Node>();
    match node.type_id() {
        NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLBodyElement)) |
        NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLFrameSetElement)) => {}
        _ => return Err(Error::HierarchyRequest),
    }
    // Step 2: setting the same body is a no-op.
    let old_body = self.GetBody();
    if old_body.r() == Some(new_body) {
        return Ok(());
    }
    match (self.get_html_element(), &old_body) {
        // Step 3: swap the old body for the new one.
        (Some(ref root), &Some(ref child)) => {
            let root = root.upcast::<Node>();
            root.ReplaceChild(new_body.upcast(), child.upcast()).unwrap();
        },
        // Step 4: no root element to attach to.
        (None, _) => return Err(Error::HierarchyRequest),
        // Step 5: no previous body — append.
        (Some(ref root), &None) => {
            let root = root.upcast::<Node>();
            root.AppendChild(new_body.upcast()).unwrap();
        }
    }
    Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-document-getelementsbyname
/// `getElementsByName`: a live NodeList of HTML-namespace elements whose
/// `name` attribute equals `name` exactly.
fn GetElementsByName(&self, name: DOMString) -> Root<NodeList> {
    self.create_node_list(|node| {
        let element = match node.downcast::<Element>() {
            Some(element) => element,
            None => return false,
        };
        // Only elements in the HTML namespace participate.
        if element.namespace() != &ns!(html) {
            return false;
        }
        element.get_attribute(&ns!(), &local_name!("name"))
               .map_or(false, |attr| &**attr.value() == &*name)
    })
}
// https://html.spec.whatwg.org/multipage/#dom-document-images
/// `document.images` — lazily-created, cached live collection.
fn Images(&self) -> Root<HTMLCollection> {
    self.images.or_init(|| {
        let filter = box ImagesFilter;
        HTMLCollection::create(&self.window, self.upcast(), filter)
    })
}

// https://html.spec.whatwg.org/multipage/#dom-document-embeds
/// `document.embeds` — lazily-created, cached live collection.
fn Embeds(&self) -> Root<HTMLCollection> {
    self.embeds.or_init(|| {
        let filter = box EmbedsFilter;
        HTMLCollection::create(&self.window, self.upcast(), filter)
    })
}

// https://html.spec.whatwg.org/multipage/#dom-document-plugins
/// `document.plugins` is specified to be the same collection as embeds.
fn Plugins(&self) -> Root<HTMLCollection> {
    self.Embeds()
}

// https://html.spec.whatwg.org/multipage/#dom-document-links
/// `document.links` — lazily-created, cached live collection.
fn Links(&self) -> Root<HTMLCollection> {
    self.links.or_init(|| {
        let filter = box LinksFilter;
        HTMLCollection::create(&self.window, self.upcast(), filter)
    })
}

// https://html.spec.whatwg.org/multipage/#dom-document-forms
/// `document.forms` — lazily-created, cached live collection.
fn Forms(&self) -> Root<HTMLCollection> {
    self.forms.or_init(|| {
        let filter = box FormsFilter;
        HTMLCollection::create(&self.window, self.upcast(), filter)
    })
}

// https://html.spec.whatwg.org/multipage/#dom-document-scripts
/// `document.scripts` — lazily-created, cached live collection.
fn Scripts(&self) -> Root<HTMLCollection> {
    self.scripts.or_init(|| {
        let filter = box ScriptsFilter;
        HTMLCollection::create(&self.window, self.upcast(), filter)
    })
}

// https://html.spec.whatwg.org/multipage/#dom-document-anchors
/// `document.anchors` — lazily-created, cached live collection.
fn Anchors(&self) -> Root<HTMLCollection> {
    self.anchors.or_init(|| {
        let filter = box AnchorsFilter;
        HTMLCollection::create(&self.window, self.upcast(), filter)
    })
}

// https://html.spec.whatwg.org/multipage/#dom-document-applets
/// `document.applets` — lazily-created, cached live collection.
fn Applets(&self) -> Root<HTMLCollection> {
    // FIXME: This should be return OBJECT elements containing applets.
    self.applets.or_init(|| {
        let filter = box AppletsFilter;
        HTMLCollection::create(&self.window, self.upcast(), filter)
    })
}
// https://html.spec.whatwg.org/multipage/#dom-document-location
/// `document.location`: None when the document has no browsing context.
fn GetLocation(&self) -> Option<Root<Location>> {
    self.browsing_context().map(|_| self.location.or_init(|| Location::new(&self.window)))
}

// https://dom.spec.whatwg.org/#dom-parentnode-children
/// ParentNode `children` collection.
fn Children(&self) -> Root<HTMLCollection> {
    HTMLCollection::children(&self.window, self.upcast())
}

// https://dom.spec.whatwg.org/#dom-parentnode-firstelementchild
fn GetFirstElementChild(&self) -> Option<Root<Element>> {
    self.upcast::<Node>().child_elements().next()
}

// https://dom.spec.whatwg.org/#dom-parentnode-lastelementchild
fn GetLastElementChild(&self) -> Option<Root<Element>> {
    self.upcast::<Node>().rev_children().filter_map(Root::downcast).next()
}

// https://dom.spec.whatwg.org/#dom-parentnode-childelementcount
fn ChildElementCount(&self) -> u32 {
    self.upcast::<Node>().child_elements().count() as u32
}

// https://dom.spec.whatwg.org/#dom-parentnode-prepend
fn Prepend(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
    self.upcast::<Node>().prepend(nodes)
}

// https://dom.spec.whatwg.org/#dom-parentnode-append
fn Append(&self, nodes: Vec<NodeOrString>) -> ErrorResult {
    self.upcast::<Node>().append(nodes)
}

// https://dom.spec.whatwg.org/#dom-parentnode-queryselector
/// Delegates selector parsing and matching to the root node.
fn QuerySelector(&self, selectors: DOMString) -> Fallible<Option<Root<Element>>> {
    let root = self.upcast::<Node>();
    root.query_selector(selectors)
}

// https://dom.spec.whatwg.org/#dom-parentnode-queryselectorall
fn QuerySelectorAll(&self, selectors: DOMString) -> Fallible<Root<NodeList>> {
    let root = self.upcast::<Node>();
    root.query_selector_all(selectors)
}
// https://html.spec.whatwg.org/multipage/#dom-document-readystate
/// `document.readyState`.
fn ReadyState(&self) -> DocumentReadyState {
    self.ready_state.get()
}

// https://html.spec.whatwg.org/multipage/#dom-document-defaultview
/// `document.defaultView`: the window, but only for documents that have
/// a browsing context.
fn GetDefaultView(&self) -> Option<Root<Window>> {
    if self.has_browsing_context {
        Some(Root::from_ref(&*self.window))
    } else {
        None
    }
}
// https://html.spec.whatwg.org/multipage/#dom-document-cookie
/// `document.cookie` getter: empty for cookie-averse documents,
/// SecurityError for non-tuple origins, otherwise fetched synchronously
/// from the resource thread over IPC.
fn GetCookie(&self) -> Fallible<DOMString> {
    if self.is_cookie_averse() {
        return Ok(DOMString::new());
    }
    if !self.origin.is_scheme_host_port_tuple() {
        return Err(Error::Security);
    }
    let url = self.url();
    let (tx, rx) = ipc::channel().unwrap();
    let _ = self.window
        .upcast::<GlobalScope>()
        .resource_threads()
        .send(GetCookiesForUrl(url, tx, NonHTTP));
    // Blocking round-trip to the resource thread.
    let cookies = rx.recv().unwrap();
    Ok(cookies.map_or(DOMString::new(), DOMString::from))
}

// https://html.spec.whatwg.org/multipage/#dom-document-cookie
/// `document.cookie` setter: same preconditions as the getter; the value
/// is parsed as a Set-Cookie header and forwarded to the resource thread.
/// Unparsable input is silently ignored, matching the getter's
/// best-effort behavior.
fn SetCookie(&self, cookie: DOMString) -> ErrorResult {
    if self.is_cookie_averse() {
        return Ok(());
    }
    if !self.origin.is_scheme_host_port_tuple() {
        return Err(Error::Security);
    }
    let header = Header::parse_header(&[cookie.into()]);
    if let Ok(SetCookie(cookies)) = header {
        let cookies = cookies.into_iter().map(Serde).collect();
        let _ = self.window
            .upcast::<GlobalScope>()
            .resource_threads()
            .send(SetCookiesForUrl(self.url(), cookies, NonHTTP));
    }
    Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-document-bgcolor
/// Legacy `document.bgColor`, reflected from the body's `bgcolor` attribute.
fn BgColor(&self) -> DOMString {
    self.get_body_attribute(&local_name!("bgcolor"))
}

// https://html.spec.whatwg.org/multipage/#dom-document-bgcolor
fn SetBgColor(&self, value: DOMString) {
    self.set_body_attribute(&local_name!("bgcolor"), value)
}

// https://html.spec.whatwg.org/multipage/#dom-document-fgcolor
/// Legacy `document.fgColor`, reflected from the body's `text` attribute.
fn FgColor(&self) -> DOMString {
    self.get_body_attribute(&local_name!("text"))
}

// https://html.spec.whatwg.org/multipage/#dom-document-fgcolor
fn SetFgColor(&self, value: DOMString) {
    self.set_body_attribute(&local_name!("text"), value)
}
#[allow(unsafe_code)]
// https://html.spec.whatwg.org/multipage/#dom-tree-accessors:dom-document-nameditem-filter
/// Named property getter (`document.foo`): when exactly one element
/// matches the spec's named-item filter, return it directly; when several
/// match, return a live HTMLCollection of them; None when nothing matches.
/// Returns raw JS object pointers because the two cases have different
/// reflector types.
unsafe fn NamedGetter(&self, _cx: *mut JSContext, name: DOMString) -> Option<NonZero<*mut JSObject>> {
    #[derive(JSTraceable, HeapSizeOf)]
    struct NamedElementFilter {
        name: Atom,
    }
    impl CollectionFilter for NamedElementFilter {
        fn filter(&self, elem: &Element, _root: &Node) -> bool {
            filter_by_name(&self.name, elem.upcast())
        }
    }
    // https://html.spec.whatwg.org/multipage/#dom-document-nameditem-filter
    // Which elements a name matches depends on the element type:
    // applets match on name or id, forms on name only, images on name
    // (or id, but only when a name is also present).
    fn filter_by_name(name: &Atom, node: &Node) -> bool {
        let html_elem_type = match node.type_id() {
            NodeTypeId::Element(ElementTypeId::HTMLElement(type_)) => type_,
            _ => return false,
        };
        let elem = match node.downcast::<Element>() {
            Some(elem) => elem,
            None => return false,
        };
        match html_elem_type {
            HTMLElementTypeId::HTMLAppletElement => {
                match elem.get_attribute(&ns!(), &local_name!("name")) {
                    Some(ref attr) if attr.value().as_atom() == name => true,
                    _ => {
                        match elem.get_attribute(&ns!(), &local_name!("id")) {
                            Some(ref attr) => attr.value().as_atom() == name,
                            None => false,
                        }
                    },
                }
            },
            HTMLElementTypeId::HTMLFormElement => {
                match elem.get_attribute(&ns!(), &local_name!("name")) {
                    Some(ref attr) => attr.value().as_atom() == name,
                    None => false,
                }
            },
            HTMLElementTypeId::HTMLImageElement => {
                match elem.get_attribute(&ns!(), &local_name!("name")) {
                    Some(ref attr) => {
                        if attr.value().as_atom() == name {
                            true
                        } else {
                            match elem.get_attribute(&ns!(), &local_name!("id")) {
                                Some(ref attr) => attr.value().as_atom() == name,
                                None => false,
                            }
                        }
                    },
                    None => false,
                }
            },
            // TODO: Handle <embed>, <iframe> and <object>.
            _ => false,
        }
    }
    let name = Atom::from(name);
    let root = self.upcast::<Node>();
    {
        // Step 1: peekable lets us distinguish "exactly one match" from
        // "two or more" without walking the whole tree twice.
        let mut elements = root.traverse_preorder()
                               .filter(|node| filter_by_name(&name, &node))
                               .peekable();
        if let Some(first) = elements.next() {
            if elements.peek().is_none() {
                // TODO: Step 2.
                // Step 3: single match — return the element itself.
                return Some(NonZero::new(first.reflector().get_jsobject().get()));
            }
        } else {
            return None;
        }
    }
    // Step 4: multiple matches — return a live collection.
    let filter = NamedElementFilter {
        name: name,
    };
    let collection = HTMLCollection::create(self.window(), root, box filter);
    Some(NonZero::new(collection.reflector().get_jsobject().get()))
}
// https://html.spec.whatwg.org/multipage/#dom-tree-accessors:supported-property-names
/// Names enumerable via the named getter; not yet implemented.
fn SupportedPropertyNames(&self) -> Vec<DOMString> {
    // FIXME: unimplemented (https://github.com/servo/servo/issues/7273)
    vec![]
}

// https://html.spec.whatwg.org/multipage/#dom-document-clear
/// Legacy no-op required by the spec.
fn Clear(&self) {
    // This method intentionally does nothing
}

// https://html.spec.whatwg.org/multipage/#dom-document-captureevents
/// Legacy no-op required by the spec.
fn CaptureEvents(&self) {
    // This method intentionally does nothing
}

// https://html.spec.whatwg.org/multipage/#dom-document-releaseevents
/// Legacy no-op required by the spec.
fn ReleaseEvents(&self) {
    // This method intentionally does nothing
}

// https://html.spec.whatwg.org/multipage/#globaleventhandlers
// Expands to the full set of GlobalEventHandlers getters/setters.
global_event_handlers!();

// https://html.spec.whatwg.org/multipage/#handler-onreadystatechange
event_handler!(readystatechange, GetOnreadystatechange, SetOnreadystatechange);
#[allow(unsafe_code)]
// https://drafts.csswg.org/cssom-view/#dom-document-elementfrompoint
/// `elementFromPoint`: hit-test the viewport point and return the topmost
/// element there; non-element hits (e.g. text nodes) resolve to their
/// parent element. None for out-of-viewport points or documents without
/// a browsing context.
fn ElementFromPoint(&self, x: Finite<f64>, y: Finite<f64>) -> Option<Root<Element>> {
    let x = *x as f32;
    let y = *y as f32;
    let point = &Point2D::new(x, y);
    let window = window_from_node(self);
    let viewport = window.window_size().unwrap().visible_viewport;
    if self.browsing_context().is_none() {
        return None;
    }
    if x < 0.0 || y < 0.0 || x > viewport.width || y > viewport.height {
        return None;
    }
    match self.window.hit_test_query(*point, false) {
        Some(untrusted_node_address) => {
            // Convert the address layout handed back into a rooted node.
            let js_runtime = unsafe { JS_GetRuntime(window.get_cx()) };
            let node = node::from_untrusted_node_address(js_runtime, untrusted_node_address);
            let parent_node = node.GetParentNode().unwrap();
            // Text nodes etc. are attributed to their parent element.
            let element_ref = node.downcast::<Element>().unwrap_or_else(|| {
                parent_node.downcast::<Element>().unwrap()
            });
            Some(Root::from_ref(element_ref))
        },
        None => self.GetDocumentElement()
    }
}
#[allow(unsafe_code)]
// https://drafts.csswg.org/cssom-view/#dom-document-elementsfrompoint
/// `elementsFromPoint`: all elements at the viewport point, topmost first,
/// with the document element appended last when not already present.
fn ElementsFromPoint(&self, x: Finite<f64>, y: Finite<f64>) -> Vec<Root<Element>> {
    let x = *x as f32;
    let y = *y as f32;
    let point = &Point2D::new(x, y);
    let window = window_from_node(self);
    let viewport = window.window_size().unwrap().visible_viewport;
    if self.browsing_context().is_none() {
        return vec!();
    }
    // Step 2: out-of-viewport points yield an empty list.
    if x < 0.0 || y < 0.0 || x > viewport.width || y > viewport.height {
        return vec!();
    }
    let js_runtime = unsafe { JS_GetRuntime(window.get_cx()) };
    // Step 1 and Step 3: hit-test, then keep only nodes that are elements.
    let mut elements: Vec<Root<Element>> = self.nodes_from_point(point).iter()
        .flat_map(|&untrusted_node_address| {
            let node = node::from_untrusted_node_address(js_runtime, untrusted_node_address);
            Root::downcast::<Element>(node)
        }).collect();
    // Step 4: the root element always terminates the list.
    if let Some(root_element) = self.GetDocumentElement() {
        if elements.last() != Some(&root_element) {
            elements.push(root_element);
        }
    }
    // Step 5
    elements
}
// https://html.spec.whatwg.org/multipage/#dom-document-open
/// `document.open()`: tear down the current document (listeners, children,
/// cached collections, parser state) and install a fresh script-driven
/// HTML parser. Many spec steps are still TODO; the step comments below
/// track the spec text.
fn Open(&self, type_: DOMString, replace: DOMString) -> Fallible<Root<Document>> {
    if !self.is_html_document() {
        // Step 1.
        return Err(Error::InvalidState);
    }
    // Step 2.
    // TODO: handle throw-on-dynamic-markup-insertion counter.
    if !self.is_active() {
        // Step 3.
        return Ok(Root::from_ref(self));
    }
    let entry_responsible_document = GlobalScope::entry().as_window().Document();
    if !self.origin.same_origin(&entry_responsible_document.origin) {
        // Step 4.
        return Err(Error::Security);
    }
    if self.get_current_parser().map_or(false, |parser| parser.script_nesting_level() > 0) {
        // Step 5: re-entrant open() from a parser-inserted script is a no-op.
        return Ok(Root::from_ref(self));
    }
    // Step 6.
    // TODO: ignore-opens-during-unload counter check.
    // Step 7: first argument already bound to `type_`.
    // Step 8.
    // TODO: check session history's state.
    let replace = replace.eq_ignore_ascii_case("replace");
    // Step 9.
    // TODO: salvageable flag.
    // Step 10.
    // TODO: prompt to unload.
    // Step 11.
    // TODO: unload.
    // Step 12.
    self.abort();
    // Step 13: drop every event listener in the whole tree.
    for node in self.upcast::<Node>().traverse_preorder() {
        node.upcast::<EventTarget>().remove_all_listeners();
    }
    // Step 14.
    // TODO: remove any tasks associated with the Document in any task source.
    // Step 15: remove all children.
    Node::replace_all(None, self.upcast::<Node>());
    // Steps 16-18.
    // Let's not?
    // TODO: https://github.com/whatwg/html/issues/1698
    // Step 19: reset every lazily-cached reflector and per-document state.
    self.implementation.set(None);
    self.location.set(None);
    self.images.set(None);
    self.embeds.set(None);
    self.links.set(None);
    self.forms.set(None);
    self.scripts.set(None);
    self.anchors.set(None);
    self.applets.set(None);
    *self.stylesheets.borrow_mut() = None;
    self.stylesheets_changed_since_reflow.set(true);
    self.animation_frame_ident.set(0);
    self.animation_frame_list.borrow_mut().clear();
    self.pending_restyles.borrow_mut().clear();
    self.target_element.set(None);
    *self.last_click_info.borrow_mut() = None;
    // Step 20.
    self.set_encoding(UTF_8);
    // Step 21.
    // TODO: reload override buffer.
    // Step 22.
    // TODO: salvageable flag.
    let url = entry_responsible_document.url();
    // Step 23.
    self.set_url(url.clone());
    // Step 24.
    // TODO: mute iframe load.
    // Step 27: normalize the MIME type ("replace" means text/html; strip
    // any ";charset=..." parameters and surrounding whitespace).
    let type_ = if type_.eq_ignore_ascii_case("replace") {
        "text/html"
    } else if let Some(position) = type_.find(';') {
        &type_[0..position]
    } else {
        &*type_
    };
    let type_ = type_.trim_matches(HTML_SPACE_CHARACTERS);
    // Step 25: fresh loader + script-input parser for this document.
    let resource_threads =
        self.window.upcast::<GlobalScope>().resource_threads().clone();
    *self.loader.borrow_mut() =
        DocumentLoader::new_with_threads(resource_threads, Some(url.clone()));
    ServoParser::parse_html_script_input(self, url, type_);
    // Step 26.
    self.ready_state.set(DocumentReadyState::Interactive);
    // Step 28 is handled when creating the parser in step 25.
    // Step 29.
    // TODO: truncate session history.
    // Step 30.
    // TODO: remove history traversal tasks.
    // Step 31.
    // TODO: remove earlier entries.
    if !replace {
        // Step 32.
        // TODO: add history entry.
    }
    // Step 33.
    // TODO: clear fired unload flag.
    // Step 34 is handled when creating the parser in step 25.
    // Step 35.
    Ok(Root::from_ref(self))
}
// https://html.spec.whatwg.org/multipage/#dom-document-write
// Appends markup to the input stream of the current parser, implicitly
// calling Open() first when no writable parser is active.
fn Write(&self, text: Vec<DOMString>) -> ErrorResult {
    if !self.is_html_document() {
        // Step 1.
        return Err(Error::InvalidState);
    }
    // Step 2.
    // TODO: handle throw-on-dynamic-markup-insertion counter.
    if !self.is_active() {
        // Step 3.
        return Ok(());
    }
    let parser = match self.get_current_parser() {
        Some(ref parser) if parser.can_write() => Root::from_ref(&**parser),
        _ => {
            // Either there is no parser, which means the parsing ended;
            // or script nesting level is 0, which means the method was
            // called from outside a parser-executed script.
            if self.ignore_destructive_writes_counter.get() > 0 {
                // Step 4.
                // TODO: handle ignore-opens-during-unload counter.
                return Ok(());
            }
            // Step 5: implicitly open the document, then grab the new parser.
            self.Open("text/html".into(), "".into())?;
            self.get_current_parser().unwrap()
        }
    };
    // Step 7.
    // TODO: handle reload override buffer.
    // Steps 6-8.
    parser.write(text);
    // Step 9.
    Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-document-writeln
fn Writeln(&self, mut text: Vec<DOMString>) -> ErrorResult {
text.push("\n".into());
self.Write(text)
}
// https://html.spec.whatwg.org/multipage/#dom-document-close
// Finishes a document opened with Open(); a no-op unless the current
// parser is script-created.
fn Close(&self) -> ErrorResult {
    if !self.is_html_document() {
        // Step 1.
        return Err(Error::InvalidState);
    }
    // Step 2.
    // TODO: handle throw-on-dynamic-markup-insertion counter.
    let parser = match self.get_current_parser() {
        Some(ref parser) if parser.is_script_created() => Root::from_ref(&**parser),
        _ => {
            // Step 3.
            return Ok(());
        }
    };
    // Step 4-6.
    parser.close();
    Ok(())
}
// https://html.spec.whatwg.org/multipage/#documentandelementeventhandlers
document_and_element_event_handlers!();

// https://fullscreen.spec.whatwg.org/#handler-document-onfullscreenerror
event_handler!(fullscreenerror, GetOnfullscreenerror, SetOnfullscreenerror);

// https://fullscreen.spec.whatwg.org/#handler-document-onfullscreenchange
event_handler!(fullscreenchange, GetOnfullscreenchange, SetOnfullscreenchange);

// https://fullscreen.spec.whatwg.org/#dom-document-fullscreenenabled
// Whether this document is currently allowed to use the Fullscreen API.
fn FullscreenEnabled(&self) -> bool {
    self.get_allow_fullscreen()
}

// https://fullscreen.spec.whatwg.org/#dom-document-fullscreen
// True while some element of this document is fullscreen.
fn Fullscreen(&self) -> bool {
    self.fullscreen_element.get().is_some()
}

// https://fullscreen.spec.whatwg.org/#dom-document-fullscreenelement
fn GetFullscreenElement(&self) -> Option<Root<Element>> {
    // TODO ShadowRoot
    self.fullscreen_element.get()
}

#[allow(unrooted_must_root)]
// https://fullscreen.spec.whatwg.org/#dom-document-exitfullscreen
fn ExitFullscreen(&self) -> Rc<Promise> {
    self.exit_fullscreen()
}
}
/// Record the current wall-clock time, in milliseconds, into `marker` —
/// but only if the marker has not been set yet (i.e. still holds the
/// default value, zero). Used for write-once navigation-timing marks.
fn update_with_current_time_ms(marker: &Cell<u64>) {
    if marker.get() != Default::default() {
        return;
    }
    let now = time::get_time();
    let millis = now.sec * 1000 + now.nsec as i64 / 1000000;
    marker.set(millis as u64);
}
/// https://w3c.github.io/webappsec-referrer-policy/#determine-policy-for-token
///
/// Maps a (case-insensitive) referrer-policy token to its enum value;
/// unknown tokens yield `None`. Legacy tokens ("never", "default",
/// "always") and the empty string are accepted per the spec.
pub fn determine_policy_for_token(token: &str) -> Option<ReferrerPolicy> {
    match &*token.to_lowercase() {
        "" | "never" | "no-referrer" => Some(ReferrerPolicy::NoReferrer),
        "default" | "no-referrer-when-downgrade" => Some(ReferrerPolicy::NoReferrerWhenDowngrade),
        "origin" => Some(ReferrerPolicy::Origin),
        "same-origin" => Some(ReferrerPolicy::SameOrigin),
        "strict-origin" => Some(ReferrerPolicy::StrictOrigin),
        "strict-origin-when-cross-origin" => Some(ReferrerPolicy::StrictOriginWhenCrossOrigin),
        "origin-when-cross-origin" => Some(ReferrerPolicy::OriginWhenCrossOrigin),
        "always" | "unsafe-url" => Some(ReferrerPolicy::UnsafeUrl),
        _ => None,
    }
}
/// Runnable that completes a document load on the script thread: it flips
/// the ready state to Complete and dispatches the `load` event.
pub struct DocumentProgressHandler {
    // The document whose load progress is being signalled.
    addr: Trusted<Document>
}
impl DocumentProgressHandler {
    pub fn new(addr: Trusted<Document>) -> DocumentProgressHandler {
        DocumentProgressHandler {
            addr: addr
        }
    }

    /// Mark the document as fully loaded.
    fn set_ready_state_complete(&self) {
        let document = self.addr.root();
        document.set_ready_state(DocumentReadyState::Complete);
    }

    /// Fire the `load` event at the window (targeted at the document),
    /// recording navigation-timing marks around the dispatch, then reflow
    /// and notify the constellation.
    fn dispatch_load(&self) {
        let document = self.addr.root();
        // A document without a browsing context cannot fire `load`.
        if document.browsing_context().is_none() {
            return;
        }
        let window = document.window();
        let event = Event::new(window.upcast(),
                               atom!("load"),
                               EventBubbles::DoesNotBubble,
                               EventCancelable::NotCancelable);
        let wintarget = window.upcast::<EventTarget>();
        event.set_trusted(true);
        // http://w3c.github.io/navigation-timing/#widl-PerformanceNavigationTiming-loadEventStart
        update_with_current_time_ms(&document.load_event_start);
        debug!("About to dispatch load for {:?}", document.url());
        let _ = wintarget.dispatch_event_with_target(document.upcast(), &event);
        // http://w3c.github.io/navigation-timing/#widl-PerformanceNavigationTiming-loadEventEnd
        update_with_current_time_ms(&document.load_event_end);
        window.reflow(ReflowGoal::ForDisplay,
                      ReflowQueryType::NoQuery,
                      ReflowReason::DocumentLoaded);
        document.notify_constellation_load();
    }
}
impl Runnable for DocumentProgressHandler {
    fn name(&self) -> &'static str { "DocumentProgressHandler" }

    fn handler(self: Box<DocumentProgressHandler>) {
        let document = self.addr.root();
        let window = document.window();
        // Only deliver load progress if the window has not been torn down.
        if window.is_alive() {
            self.set_ready_state_complete();
            self.dispatch_load();
            // Scroll to the URL fragment, if any, once the document has loaded.
            if let Some(fragment) = document.url().fragment() {
                document.check_and_scroll_fragment(fragment);
            }
        }
    }
}
/// Specifies the type of focus event that is sent to a pipeline
#[derive(Copy, Clone, PartialEq)]
pub enum FocusType {
    /// The first focus message - focus the element itself.
    Element,
    /// Focusing a parent element (an iframe).
    Parent,
}
/// Focus events
pub enum FocusEventType {
    /// Element gained focus. Doesn't bubble.
    Focus,
    /// Element lost focus. Doesn't bubble.
    Blur,
}
/// A queued animation-frame callback: either a devtools framerate probe or
/// a script-registered `requestAnimationFrame` callback.
#[derive(HeapSizeOf, JSTraceable)]
pub enum AnimationFrameCallback {
    /// Tick requested by the devtools server to measure the frame rate.
    DevtoolsFramerateTick { actor_name: String },
    /// A callback registered through `requestAnimationFrame`.
    FrameRequestCallback {
        #[ignore_heap_size_of = "Rc is hard"]
        callback: Rc<FrameRequestCallback>
    },
}
impl AnimationFrameCallback {
    /// Invoke this callback for `document` with the timestamp `now`.
    fn call(&self, document: &Document, now: f64) {
        match *self {
            AnimationFrameCallback::DevtoolsFramerateTick { ref actor_name } => {
                let msg = ScriptToDevtoolsControlMsg::FramerateTick(actor_name.clone(), now);
                // NOTE(review): the unwraps assume a devtools channel exists
                // whenever a framerate tick was scheduled — confirm that
                // invariant holds at every scheduling site.
                let devtools_sender = document.window().upcast::<GlobalScope>().devtools_chan().unwrap();
                devtools_sender.send(msg).unwrap();
            }
            AnimationFrameCallback::FrameRequestCallback { ref callback } => {
                // TODO(jdm): The spec says that any exceptions should be suppressed:
                // https://github.com/servo/servo/issues/6928
                let _ = callback.Call__(Finite::wrap(now), ExceptionHandling::Report);
            }
        }
    }
}
/// Queue of scripts that must execute in insertion order once each one has
/// finished loading.
#[derive(Default, HeapSizeOf, JSTraceable)]
#[must_root]
struct PendingInOrderScriptVec {
    scripts: DOMRefCell<VecDeque<PendingScript>>,
}
impl PendingInOrderScriptVec {
    /// Whether any scripts are still queued.
    fn is_empty(&self) -> bool {
        self.scripts.borrow().is_empty()
    }

    /// Append `element` to the back of the queue, with no load result yet.
    fn push(&self, element: &HTMLScriptElement) {
        self.scripts.borrow_mut().push_back(PendingScript::new(element));
    }

    /// Record the load result for `element`. Panics if `element` is not in
    /// the queue or if a result was already recorded for it.
    fn loaded(&self, element: &HTMLScriptElement, result: ScriptResult) {
        let mut scripts = self.scripts.borrow_mut();
        // The binding itself does not need to be `mut` (avoids an
        // unused_mut warning): `iter_mut().find` already yields `&mut`.
        let entry = scripts.iter_mut().find(|entry| &*entry.element == element).unwrap();
        entry.loaded(result);
    }

    /// Pop and return the front script, but only if its load result has
    /// arrived; otherwise leave the queue untouched and return None.
    fn take_next_ready_to_be_executed(&self) -> Option<(Root<HTMLScriptElement>, ScriptResult)> {
        let mut scripts = self.scripts.borrow_mut();
        let pair = scripts.front_mut().and_then(PendingScript::take_result);
        if pair.is_none() {
            return None;
        }
        scripts.pop_front();
        pair
    }

    /// Drop all queued scripts.
    fn clear(&self) {
        *self.scripts.borrow_mut() = Default::default();
    }
}
/// A script element queued for in-order execution, paired with its load
/// result once that result becomes available.
#[derive(HeapSizeOf, JSTraceable)]
#[must_root]
struct PendingScript {
    element: JS<HTMLScriptElement>,
    // None until the script has finished loading.
    load: Option<ScriptResult>,
}
impl PendingScript {
    /// Queue entry for `element` with no load result yet.
    fn new(element: &HTMLScriptElement) -> Self {
        Self { element: JS::from_ref(element), load: None }
    }

    /// Queue entry for `element` with an optional pre-known load result.
    fn new_with_load(element: &HTMLScriptElement, load: Option<ScriptResult>) -> Self {
        Self { element: JS::from_ref(element), load }
    }

    /// Record the load result; must be called at most once per entry.
    fn loaded(&mut self, result: ScriptResult) {
        assert!(self.load.is_none());
        self.load = Some(result);
    }

    /// Take the result out (if present), pairing it with a rooted element.
    fn take_result(&mut self) -> Option<(Root<HTMLScriptElement>, ScriptResult)> {
        self.load.take().map(|result| (Root::from_ref(&*self.element), result))
    }
}
|
}
/// Initiate a new round of checking for elements requesting focus. The last element to call
/// `request_focus` before `commit_focus_transaction` is called will receive focus.
|
<|file_name|>ipv6addr.go<|end_file_name|><|fim▁begin|><|fim▁hole|>type IPv6Addr struct {
HostName string `json:",omitempty"` // ホスト名
IPv6Addr string `json:",omitempty"` // IPv6アドレス
Interface *Interface `json:",omitempty"` // インターフェース
IPv6Net *IPv6Net `json:",omitempty"` // IPv6サブネット
}
// GetIPv6NetID returns the ID of the IPv6Net this address belongs to,
// or 0 when no subnet is associated.
func (a *IPv6Addr) GetIPv6NetID() int64 {
	if a.IPv6Net != nil {
		return a.IPv6Net.ID
	}
	return 0
}
// GetInternetID returns the ID of the router+switch (Internet) resource that
// owns this IPv6 address, or 0 when any link in the chain is missing.
func (a *IPv6Addr) GetInternetID() int64 {
	if a.IPv6Net != nil && a.IPv6Net.Switch != nil && a.IPv6Net.Switch.Internet != nil {
		return a.IPv6Net.Switch.Internet.ID
	}
	return 0
}
// CreateNewIPv6Addr IPv6アドレス作成
func CreateNewIPv6Addr() *IPv6Addr {
return &IPv6Addr{
IPv6Net: &IPv6Net{
Resource: &Resource{},
},
}
}<|fim▁end|>
|
package sacloud
// IPv6Addr IPアドレス(IPv6)
|
import pytest
from github3 import AuthenticationFailed, GitHubError
from github3.github import GitHub
from .helper import UnitHelper, UnitIteratorHelper
def url_for(path=''):
    """Build a full GitHub API URL for *path*, trimming surrounding slashes."""
    base = 'https://api.github.com/'
    return base + path.strip('/')
class TestGitHub(UnitHelper):
    """Unit tests for simple requests made via the main GitHub session.

    Each test triggers one API call on a mocked session and asserts the
    exact HTTP request (URL, payload, auth) that would be sent.
    """

    described_class = GitHub
    example_data = None

    def test_authorization(self):
        """Show that a user can retrieve a specific authorization by id."""
        self.instance.authorization(10)
        self.session.get.assert_called_once_with(
            url_for('authorizations/10'),
        )

    def test_authorize(self):
        """Show an authorization can be created for a user."""
        self.instance.authorize('username', 'password', ['user', 'repo'])
        self.session.temporary_basic_auth.assert_called_once_with(
            'username', 'password'
        )
        self.post_called_with(
            url_for('authorizations'),
            data={'note': '', 'note_url': '', 'client_id': '',
                  'client_secret': '', 'scopes': ['user', 'repo']}
        )

    def test_check_authorization(self):
        """Test an app's ability to check a authorization token."""
        self.instance.set_client_id('client-id', 'client-secret')
        self.instance.check_authorization('super-fake-access-token')
        self.session.get.assert_called_once_with(
            url_for('applications/client-id/tokens/super-fake-access-token'),
            params={'client_id': None, 'client_secret': None},
            auth=('client-id', 'client-secret')
        )

    def test_create_gist(self):
        """Test the request to create a gist."""
        self.instance.create_gist('description', {
            'example.py': {'content': '# example contents'}
        })
        self.post_called_with(
            url_for('gists'),
            data={
                'description': 'description',
                'files': {
                    'example.py': {
                        'content': '# example contents'
                    }
                },
                'public': True,
            }
        )

    def test_create_key(self):
        """Test the request to create a key."""
        self.instance.create_key('key_name', 'key text')
        self.post_called_with(
            url_for('user/keys'),
            data={
                'title': 'key_name',
                'key': 'key text'
            }
        )

    def test_create_key_requires_a_key(self):
        """Test that no request is made with an empty key."""
        self.instance.create_key('title', '')
        assert self.session.post.called is False

    def test_create_key_requires_a_title(self):
        """Test that no request is made with an empty title."""
        self.instance.create_key('', 'key text')
        assert self.session.post.called is False

    def test_create_repository(self):
        """Test the request to create a repository."""
        self.instance.create_repository('repo-name')
        self.post_called_with(
            url_for('user/repos'),
            data={
                'name': 'repo-name',
                'description': '',
                'homepage': '',
                'private': False,
                'has_issues': True,
                'has_wiki': True,
                'auto_init': False,
                'gitignore_template': ''
            }
        )

    def test_emojis(self):
        """Test the request to retrieve GitHub's emojis."""
        self.instance.emojis()
        self.session.get.assert_called_once_with(url_for('emojis'))

    def test_follow(self):
        """Test the request to follow a user."""
        self.instance.follow('username')
        self.session.put.assert_called_once_with(
            url_for('user/following/username')
        )

    def test_follow_requires_a_username(self):
        """Test that GitHub#follow requires a username."""
        self.instance.follow(None)
        assert self.session.put.called is False

    def test_gist(self):
        """Test the request to retrieve a specific gist."""
        self.instance.gist(10)
        self.session.get.assert_called_once_with(url_for('gists/10'))

    def test_gitignore_template(self):
        """Test the request to retrieve a gitignore template."""
        self.instance.gitignore_template('Python')
        self.session.get.assert_called_once_with(
            url_for('gitignore/templates/Python')
        )

    def test_gitignore_templates(self):
        """Test the request to retrieve gitignore templates."""
        self.instance.gitignore_templates()
        self.session.get.assert_called_once_with(
            url_for('gitignore/templates')
        )

    def test_is_following(self):
        """Test the request to check if the user is following a user."""
        self.instance.is_following('username')
        self.session.get.assert_called_once_with(
            url_for('user/following/username')
        )

    def test_is_starred(self):
        """Test the request to check if the user starred a repository."""
        self.instance.is_starred('username', 'repository')
        self.session.get.assert_called_once_with(
            url_for('user/starred/username/repository')
        )

    def test_is_starred_requires_an_owner(self):
        """Test that GitHub#is_starred requires an owner."""
        self.instance.is_starred(None, 'repo')
        assert self.session.get.called is False

    def test_is_starred_requires_a_repo(self):
        """Test that GitHub#is_starred requires an repo."""
        self.instance.is_starred('username', None)
        assert self.session.get.called is False

    def test_issue(self):
        """Test the request to retrieve a single issue."""
        self.instance.issue('owner', 'repo', 1)
        self.session.get.assert_called_once_with(
            url_for('repos/owner/repo/issues/1')
        )

    def test_issue_requires_username(self):
        """Test GitHub#issue requires a non-None username."""
        self.instance.issue(None, 'foo', 1)
        assert self.session.get.called is False

    def test_issue_requires_repository(self):
        """Test GitHub#issue requires a non-None repository."""
        self.instance.issue('foo', None, 1)
        assert self.session.get.called is False

    def test_issue_requires_positive_issue_id(self):
        """Test GitHub#issue requires positive issue id."""
        self.instance.issue('foo', 'bar', -1)
        assert self.session.get.called is False

    def test_me(self):
        """Test the ability to retrieve the authenticated user's info."""
        self.instance.me()
        self.session.get.assert_called_once_with(url_for('user'))

    def test_repository(self):
        """"Verify the GET request for a repository."""
        self.instance.repository('user', 'repo')
        self.session.get.assert_called_once_with(url_for('repos/user/repo'))

    def test_repository_with_invalid_repo(self):
        """Verify there is no call made for invalid repo combos."""
        self.instance.repository('user', None)
        assert self.session.get.called is False

    def test_repository_with_invalid_user(self):
        """Verify there is no call made for invalid username combos."""
        self.instance.repository(None, 'repo')
        assert self.session.get.called is False

    def test_repository_with_invalid_user_and_repo(self):
        """Verify there is no call made for invalid user/repo combos."""
        self.instance.repository(None, None)
        assert self.session.get.called is False

    def test_repository_with_id(self):
        """Test the ability to retrieve a repository by its id."""
        self.instance.repository_with_id(10)
        self.session.get.assert_called_once_with(url_for('repositories/10'))

    def test_repository_with_id_requires_a_positive_id(self):
        """Test the ability to retrieve a repository by its id."""
        self.instance.repository_with_id(-10)
        assert self.session.get.called is False

    def test_repository_with_id_accepts_a_string(self):
        """Test the ability to retrieve a repository by its id."""
        self.instance.repository_with_id('10')
        self.session.get.assert_called_once_with(url_for('repositories/10'))

    def test_two_factor_login(self):
        """Test the ability to pass two_factor_callback."""
        self.instance.login('username', 'password',
                            two_factor_callback=lambda *args: 'foo')

    def test_can_login_without_two_factor_callback(self):
        """Test that two_factor_callback is not required."""
        self.instance.login('username', 'password')
        self.instance.login(token='token')

    def test_update_me(self):
        """Verify the request to update the authenticated user's profile."""
        self.instance.update_me(name='New name', email='[email protected]',
                                blog='http://blog.example.com', company='Corp',
                                location='here')
        self.patch_called_with(
            url_for('user'),
            data={'name': 'New name', 'email': '[email protected]',
                  'blog': 'http://blog.example.com', 'company': 'Corp',
                  'location': 'here', 'hireable': False}
        )

    def test_user(self):
        """Test that a user can retrieve information about any user."""
        self.instance.user('username')
        self.session.get.assert_called_once_with(
            url_for('users/username'),
        )

    def test_user_with_id(self):
        """Test that any user's information can be retrieved by id."""
        self.instance.user_with_id(10)
        self.session.get.assert_called_once_with(url_for('user/10'))

    def test_user_with_id_requires_a_positive_id(self):
        """Test that user_with_id requires a positive parameter."""
        self.instance.user_with_id(-10)
        assert self.session.get.called is False

    def test_user_with_id_accepts_a_string(self):
        """Test that any user's information can be retrieved by id."""
        self.instance.user_with_id('10')
        self.session.get.assert_called_once_with(url_for('user/10'))
class TestGitHubIterators(UnitIteratorHelper):
    """Unit tests for paginated (iterator-returning) GitHub session methods.

    Each test advances the iterator once and asserts the exact HTTP request
    (URL, pagination params, headers) sent by the mocked session.
    """

    described_class = GitHub
    example_data = None

    def test_all_events(self):
        """Show that one can iterate over all public events."""
        i = self.instance.all_events()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('events'),
            params={'per_page': 100},
            headers={}
        )

    def test_all_organizations(self):
        """Show that one can iterate over all organizations."""
        i = self.instance.all_organizations()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('organizations'),
            params={'per_page': 100},
            headers={}
        )

    def test_all_organizations_per_page(self):
        """Show that one can iterate over all organizations with per_page."""
        i = self.instance.all_organizations(per_page=25)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('organizations'),
            params={'per_page': 25},
            headers={}
        )

    def test_all_organizations_since(self):
        """Show that one can limit the organizations returned."""
        since = 100000
        i = self.instance.all_organizations(since=since)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('organizations'),
            params={'per_page': 100, 'since': since},
            headers={}
        )

    def test_all_repositories(self):
        """Show that one can iterate over all repositories."""
        i = self.instance.all_repositories()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('repositories'),
            params={'per_page': 100},
            headers={}
        )

    def test_all_repositories_per_page(self):
        """Show that one can iterate over all repositories with per_page."""
        i = self.instance.all_repositories(per_page=25)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('repositories'),
            params={'per_page': 25},
            headers={}
        )

    def test_all_repositories_since(self):
        """Show that one can limit the repositories returned."""
        since = 100000
        i = self.instance.all_repositories(since=since)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('repositories'),
            params={'per_page': 100, 'since': since},
            headers={}
        )

    def test_all_users(self):
        """Show that one can iterate over all users."""
        i = self.instance.all_users()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users'),
            params={'per_page': 100},
            headers={}
        )

    def test_all_users_per_page(self):
        """Show that one can iterate over all users with per_page."""
        i = self.instance.all_users(per_page=25)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users'),
            params={'per_page': 25},
            headers={}
        )

    def test_all_users_since(self):
        """Show that one can limit the users returned."""
        since = 100000
        i = self.instance.all_users(since=since)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users'),
            params={'per_page': 100, 'since': since},
            headers={}
        )

    def test_authorizations(self):
        """
        Show that an authenticated user can iterate over their authorizations.
        """
        i = self.instance.authorizations()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('authorizations'),
            params={'per_page': 100},
            headers={}
        )

    def test_emails(self):
        """Show that an authenticated user can iterate over their emails."""
        i = self.instance.emails()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/emails'),
            params={'per_page': 100},
            headers={}
        )

    def test_followers(self):
        """
        Show that an authenticated user can iterate over their followers.
        """
        i = self.instance.followers()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/followers'),
            params={'per_page': 100},
            headers={}
        )

    def test_followers_require_auth(self):
        """Show that one needs to authenticate to use #followers."""
        self.session.has_auth.return_value = False
        with pytest.raises(GitHubError):
            self.instance.followers()

    def test_followers_of(self):
        """Show that one can authenticate over the followers of a user."""
        i = self.instance.followers_of('sigmavirus24')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/followers'),
            params={'per_page': 100},
            headers={}
        )

    def test_following(self):
        """
        Show that an authenticated user can iterate the users they are
        following.
        """
        i = self.instance.following()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/following'),
            params={'per_page': 100},
            headers={}
        )

    def test_following_require_auth(self):
        """Show that one needs to authenticate to use #following."""
        self.session.has_auth.return_value = False
        with pytest.raises(GitHubError):
            self.instance.following()

    def test_followed_by(self):
        """
        Show that one can authenticate over the users followed by another.
        """
        i = self.instance.followed_by('sigmavirus24')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/following'),
            params={'per_page': 100},
            headers={}
        )

    def test_gists(self):
        """Show that an authenticated user can iterate over their gists."""
        i = self.instance.gists()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('gists'),
            params={'per_page': 100},
            headers={}
        )

    def test_gists_by(self):
        """Show that an user's gists can be iterated over."""
        i = self.instance.gists_by('sigmavirus24')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/gists'),
            params={'per_page': 100},
            headers={}
        )

    def test_issues(self):
        """Show that an authenticated user can iterate over their issues."""
        i = self.instance.issues()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('issues'),
            params={'per_page': 100},
            headers={}
        )

    def test_issues_with_params(self):
        """Show that issues can be filtered."""
        params = {'filter': 'assigned', 'state': 'closed', 'labels': 'bug',
                  'sort': 'created', 'direction': 'asc',
                  'since': '2012-05-20T23:10:27Z'}
        p = {'per_page': 100}
        p.update(params)
        i = self.instance.issues(**params)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('issues'),
            params=p,
            headers={}
        )

    def test_keys(self):
        """
        Show that an authenticated user can iterate over their public keys.
        """
        i = self.instance.keys()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/keys'),
            params={'per_page': 100},
            headers={}
        )

    def test_notifications(self):
        """
        Show that an authenticated user can iterate over their notifications.
        """
        i = self.instance.notifications()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('notifications'),
            params={'per_page': 100},
            headers={},
        )

    def test_notifications_participating_in(self):
        """Show that the user can filter by pariticpating."""
        i = self.instance.notifications(participating=True)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('notifications'),
            params={'per_page': 100, 'participating': 'true'},
            headers={}
        )

    def test_notifications_all(self):
        """Show that the user can iterate over all of their notifications."""
        i = self.instance.notifications(all=True)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('notifications'),
            params={'per_page': 100, 'all': 'true'},
            headers={}
        )

    def test_organization_issues(self):
        """Show that one can iterate over an organization's issues."""
        # NOTE(review): the body of this test was destroyed by extraction
        # damage (a stray fim-hole marker); reconstructed to match the
        # pattern of the sibling iterator tests.
        i = self.instance.organization_issues('org')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('orgs/org/issues'),
            params={'per_page': 100},
            headers={}
        )

    def test_organization_issues_with_params(self):
        """Show that one can pass parameters to #organization_issues."""
        params = {'filter': 'assigned', 'state': 'closed', 'labels': 'bug',
                  'sort': 'created', 'direction': 'asc',
                  'since': '2012-05-20T23:10:27Z'}
        i = self.instance.organization_issues('org', **params)
        self.get_next(i)
        p = {'per_page': 100}
        p.update(params)
        self.session.get.assert_called_once_with(
            url_for('orgs/org/issues'),
            params=p,
            headers={}
        )

    def test_organizations(self):
        """
        Show that one can iterate over all of the authenticated user's orgs.
        """
        i = self.instance.organizations()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/orgs'),
            params={'per_page': 100},
            headers={}
        )

    def test_organizations_with(self):
        """Show that one can iterate over all of a user's orgs."""
        i = self.instance.organizations_with('sigmavirus24')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/orgs'),
            params={'per_page': 100},
            headers={}
        )

    def test_public_gists(self):
        """Show that all public gists can be iterated over."""
        i = self.instance.public_gists()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('gists/public'),
            params={'per_page': 100},
            headers={}
        )

    def test_repositories(self):
        """
        Show that an authenticated user can iterate over their repositories.
        """
        # Renamed from the misspelled `test_respositories`.
        i = self.instance.repositories()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/repos'),
            params={'per_page': 100},
            headers={}
        )

    def test_repositories_accepts_params(self):
        """Show that an #repositories accepts params."""
        # Renamed from the misspelled `test_respositories_accepts_params`.
        i = self.instance.repositories(type='all',
                                       direction='desc',
                                       sort='created')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/repos'),
            params={'per_page': 100, 'type': 'all', 'direction': 'desc',
                    'sort': 'created'},
            headers={}
        )

    def test_issues_on(self):
        """Show that a user can iterate over a repository's issues."""
        i = self.instance.issues_on('owner', 'repo')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('repos/owner/repo/issues'),
            params={'per_page': 100},
            headers={}
        )

    def test_issues_on_with_params(self):
        """Show that #issues_on accepts multiple parameters."""
        params = {'milestone': 1, 'state': 'all', 'assignee': 'owner',
                  'mentioned': 'someone', 'labels': 'bug,high'}
        i = self.instance.issues_on('owner', 'repo', **params)
        self.get_next(i)
        params.update(per_page=100)
        self.session.get.assert_called_once_with(
            url_for('repos/owner/repo/issues'),
            params=params,
            headers={}
        )

    def test_starred(self):
        """
        Show that one can iterate over an authenticated user's stars.
        """
        i = self.instance.starred()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/starred'),
            params={'per_page': 100},
            headers={}
        )

    def test_starred_by(self):
        """Show that one can iterate over a user's stars."""
        i = self.instance.starred_by('sigmavirus24')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/starred'),
            params={'per_page': 100},
            headers={}
        )

    def test_subscriptions(self):
        """
        Show that one can iterate over an authenticated user's subscriptions.
        """
        i = self.instance.subscriptions()
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/subscriptions'),
            params={'per_page': 100},
            headers={}
        )

    def test_subscriptions_for(self):
        """Show that one can iterate over a user's subscriptions."""
        i = self.instance.subscriptions_for('sigmavirus24')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/subscriptions'),
            params={'per_page': 100},
            headers={}
        )

    def test_user_issues(self):
        """Test that one can iterate over a user's issues."""
        i = self.instance.user_issues()
        # Get the next item from the iterator
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/issues'),
            params={'per_page': 100},
            headers={}
        )

    def test_user_issues_with_parameters(self):
        """Test that one may pass parameters to GitHub#user_issues."""
        # Set up the parameters to be sent
        params = {'filter': 'assigned', 'state': 'closed', 'labels': 'bug',
                  'sort': 'created', 'direction': 'asc',
                  'since': '2012-05-20T23:10:27Z', 'per_page': 25}
        # Make the call with the paramters
        i = self.instance.user_issues(**params)
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('user/issues'),
            params=params,
            headers={}
        )

    def test_repositories_by(self):
        """Test that one can iterate over a user's repositories."""
        i = self.instance.repositories_by('sigmavirus24')
        # Get the next item from the iterator
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/repos'),
            params={'per_page': 100},
            headers={}
        )

    def test_repositories_by_with_type(self):
        """
        Test that one can iterate over a user's repositories with a type.
        """
        i = self.instance.repositories_by('sigmavirus24', 'all')
        self.get_next(i)
        self.session.get.assert_called_once_with(
            url_for('users/sigmavirus24/repos'),
            params={'per_page': 100, 'type': 'all'},
            headers={}
        )
class TestGitHubRequiresAuthentication(UnitHelper):
    """Tests for GitHub methods that refuse to run without authentication."""

    described_class = GitHub
    example_data = None

    def after_setup(self):
        """Strip all authentication from the mocked session."""
        self.session.auth = None
        self.session.has_auth.return_value = False

    def _assert_requires_auth(self, method, *args):
        """Assert that calling ``method(*args)`` raises AuthenticationFailed."""
        with pytest.raises(AuthenticationFailed):
            method(*args)

    def test_authorization(self):
        """Retrieving a single authorization requires authentication."""
        self._assert_requires_auth(self.instance.authorization, 1)

    def test_authorizations(self):
        """Listing authorizations requires authentication."""
        self._assert_requires_auth(self.instance.authorizations)

    def test_create_issue(self):
        """Creating an issue requires authentication."""
        self._assert_requires_auth(self.instance.create_issue,
                                   'owner', 'repo', 'title')

    def test_create_key(self):
        """Creating a key requires authentication."""
        self._assert_requires_auth(self.instance.create_key, 'title', 'key')

    def test_create_repository(self):
        """Creating a repository requires authentication."""
        self._assert_requires_auth(self.instance.create_repository, 'repo')

    def test_emails(self):
        """Listing emails requires authentication."""
        self._assert_requires_auth(self.instance.emails)

    def test_follow(self):
        """Following a user requires authentication."""
        self._assert_requires_auth(self.instance.follow, 'foo')

    def test_gists(self):
        """Listing gists requires authentication."""
        self._assert_requires_auth(self.instance.gists)

    def test_is_following(self):
        """Checking a follow relationship requires authentication."""
        self._assert_requires_auth(self.instance.is_following, 'foo')

    def test_is_starred(self):
        """Checking a star requires authentication."""
        self._assert_requires_auth(self.instance.is_starred, 'foo', 'bar')

    def test_issues(self):
        """Listing issues requires authentication."""
        self._assert_requires_auth(self.instance.issues)

    def test_keys(self):
        """Listing keys requires authentication."""
        self._assert_requires_auth(self.instance.keys)

    def test_me(self):
        """Retrieving the authenticated user requires authentication."""
        self._assert_requires_auth(self.instance.me)

    def test_notifications(self):
        """Listing notifications requires authentication."""
        self._assert_requires_auth(self.instance.notifications)

    def test_organization_issues(self):
        """Listing an organization's issues requires authentication."""
        self._assert_requires_auth(self.instance.organization_issues, 'org')

    def test_organizations(self):
        """Listing the user's organizations requires authentication."""
        self._assert_requires_auth(self.instance.organizations)

    def test_repositories(self):
        """Listing the user's repositories requires authentication."""
        self._assert_requires_auth(self.instance.repositories)

    def test_starred(self):
        """Listing starred repositories requires authentication."""
        self._assert_requires_auth(self.instance.starred)

    def test_user_issues(self):
        """Listing the user's issues requires authentication."""
        self._assert_requires_auth(self.instance.user_issues)
class TestGitHubAuthorizations(UnitHelper):
    """Tests around revoking OAuth application tokens."""

    described_class = GitHub
    example_data = None

    def create_session_mock(self, *args):
        """Extend the base mock session with stubbed client credentials."""
        session = super(TestGitHubAuthorizations,
                        self).create_session_mock(*args)
        session.retrieve_client_credentials.return_value = ('id', 'secret')
        return session

    def test_revoke_authorization(self):
        """Revoking one token uses DELETE plus temporary basic auth."""
        self.instance.revoke_authorization('access_token')

        expected_url = ('https://api.github.com/applications/id/'
                        'tokens/access_token')
        self.session.delete.assert_called_once_with(
            expected_url,
            params={'client_id': None, 'client_secret': None},
        )
        self.session.temporary_basic_auth.assert_called_once_with(
            'id', 'secret'
        )

    def test_revoke_authorizations(self):
        """Revoking all tokens uses DELETE plus temporary basic auth."""
        self.instance.revoke_authorizations()

        self.session.delete.assert_called_once_with(
            'https://api.github.com/applications/id/tokens',
            params={'client_id': None, 'client_secret': None},
        )
        self.session.temporary_basic_auth.assert_called_once_with(
            'id', 'secret'
        )
|
"""Show that one can iterate over an organization's issues."""
i = self.instance.organization_issues('org')
self.get_next(i)
|
<|file_name|>global_settings.py<|end_file_name|><|fim▁begin|>DATASET_DIR = '/tmp'
# Output directory for the trained model ("brain").
# NOTE(review): /tmp looks like a development default — confirm before
# relying on anything written here to persist.
BRAIN_DIR = '/tmp'
# Music-genre class labels for the classifier. This matches the classic
# 10-genre GTZAN layout (assumption — verify against the data loader).
GENRES = [
'blues', 'classical', 'country', 'disco', 'hiphop',
'jazz', 'metal', 'pop', 'reggae', 'rock'
]
# Number of beats used when extracting beat features — TODO confirm semantics.
NUM_BEATS = 10
# 0 presumably means "keep all frames" — NOTE(review): confirm.
KEEP_FRAMES = 0
# Train/test split ratio: 7 parts training to 3 parts test.
TRAIN_TEST_RATIO = [7, 3]
# Model selector; 'nn' presumably selects the NN hyper-parameters defined
# elsewhere in this module — confirm against the training entry point.
MODE = 'nn'
# Whether to apply PCA dimensionality reduction before training.
PCA = False
# Feature families extracted per track.
FEATURES = ['mfcc', 'dwt', 'beat']
# Additional MFCC-derived features: deltas, delta-deltas, frame energy.
MFCC_EXTRA = ['delta', 'ddelta', 'energy']
DWT = ['mean', 'std', 'max', 'min']<|fim▁hole|>
# Length (in values) of each feature family's flattened vector.
FEATURES_LENGTH = {
'mfcc' : 160,
'dwt' : 112,
'beat' : 11
}
# Analysis frame length and hop — presumably in seconds — TODO confirm units.
FRAME_LENGTH = 0.025
HOP_LENGTH = 0.005
# Number of MFCC coefficients per frame.
N_MFCC = 13
W_FRAME_SCALE = 10
# Hyper-parameters for the fully-connected network (used when MODE == 'nn').
NN = {
'NUM_HIDDEN_LAYERS' : 2,
'HIDDEN_INPUTS' : [1024, 1024],
'RANDOM' : True,
'BATCH_SIZE' : 100,
'TRAINING_CYCLES' : 1000,
'LEARNING_RATE' : 0.01,
'DROPOUT_PROB' : 0.6
}
# Hyper-parameters for the convolutional network variant.
CNN = {
'NUM_HIDDEN_LAYERS' : 2,
'NUM_DENSE_LAYERS' : 1,
'HIDDEN_FEATURES' : [32, 64],
'DENSE_INPUTS' : [128],
# INPUT_SHAPE 16*17 = 272 does not obviously match FEATURES_LENGTH totals
# (160+112+11 = 283) — NOTE(review): confirm how features map onto the grid.
'INPUT_SHAPE' : [16, 17],
'PATCH_SIZE' : [5, 5],
'RANDOM' : False,
'STRIDES' : [1, 1, 1, 1],
'BATCH_SIZE' : 100,
'TRAINING_CYCLES' : 1000,
'LEARNING_RATE' : 0.01,
'DROPOUT_PROB' : 0.6
}
| |
<|file_name|>conversation_read_webhook.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import messagebird
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--accessKey', help='access key for MessageBird API', type=str, required=True)
parser.add_argument('--webhookId', help='webhook that you want to read', type=str, required=True)
args = vars(parser.parse_args())
try:
client = messagebird.Client(args['accessKey'])
webhook = client.conversation_read_webhook(args['webhookId'])
# Print the object information.
print('The following information was returned as a Webhook object:')<|fim▁hole|>
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)<|fim▁end|>
|
print(webhook)
except messagebird.client.ErrorException as e:
print('An error occured while requesting a Webhook object:')
|
<|file_name|>close.rs<|end_file_name|><|fim▁begin|>use futures_core::future::Future;
use futures_core::task::{Context, Poll};
use futures_io::AsyncWrite;
use std::io;<|fim▁hole|>#[must_use = "futures do nothing unless you `.await` or poll them"]
pub struct Close<'a, W: ?Sized> {
writer: &'a mut W,
}
impl<W: ?Sized + Unpin> Unpin for Close<'_, W> {}
impl<'a, W: AsyncWrite + ?Sized + Unpin> Close<'a, W> {
pub(super) fn new(writer: &'a mut W) -> Self {
Close { writer }
}
}
impl<W: AsyncWrite + ?Sized + Unpin> Future for Close<'_, W> {
type Output = io::Result<()>;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
Pin::new(&mut *self.writer).poll_close(cx)
}
}<|fim▁end|>
|
use std::pin::Pin;
/// Future for the [`close`](super::AsyncWriteExt::close) method.
#[derive(Debug)]
|
<|file_name|>syntax.py<|end_file_name|><|fim▁begin|>import re
import keyword
import logging
import builtins
from keypad.api import BufferController, autoconnect
from keypad.core.syntaxlib import SyntaxHighlighter, lazy
_python_kwlist = frozenset(keyword.kwlist) - frozenset('from import None False True'.split())
_python_builtins = frozenset(x for x in dir(builtins) if not isinstance(getattr(builtins, x), type))
_python_types = frozenset(x for x in dir(builtins) if isinstance(getattr(builtins, x), type))
@lazy
def pylexer():
from keypad.core.syntaxlib import keyword, regex, region
Keyword = keyword(_python_kwlist, dict(lexcat='keyword'))
Import = keyword('from import'.split(), dict(lexcat='keyword.modulesystem'))
Const = keyword(_python_builtins, dict(lexcat='identifier.constant'))
Type = keyword(_python_types, dict(lexcat='identifier.type'))
<|fim▁hole|> FUNCTION = dict(lexcat='identifier.function')
TODO = dict(lexcat='todo')
SIGIL = dict(lexcat='punctuation.sigil')
NUMBER = dict(lexcat='literal.numeric')
Todo = regex(r'\btodo:|\bfixme:|\bhack:', TODO, flags=re.IGNORECASE)
Comment = region(guard=regex('#'),
exit=regex('$'),
contains=[Todo],
attrs=COMMENT)
HEX = r'[a-fA-F0-9]'
Esc1 = regex(r'''\\[abfnrtv'"\\]''', ESCAPE)
Esc2 = regex(r'''\\\[0-7]{1,3}''', ESCAPE)
Esc3 = regex(r'''\\x[a-fA-F0-9]{2}''', ESCAPE)
Esc4 = regex(r'\\u' + HEX + r'{4}|\\U' + HEX + '{8}', ESCAPE)
Esc5 = regex(r'\\N\{[a-zA-Z]+(?:\s[a-zA-Z]+)*}', ESCAPE)
Esc6 = regex(r'\\$', ESCAPE)
DQDoctest = region(
guard=regex(r'^\s*>>>\s'),
exit=regex(r'$|(?=""")'),
contains=(),
attrs=ESCAPE
)
SQDoctest = region(
guard=regex(r'^\s*>>>\s'),
exit=regex(r"$|(?=''')"),
contains=(),
attrs=ESCAPE
)
Escs = [Esc1, Esc2, Esc3, Esc4, Esc5, Esc6]
DQString = region(
guard=regex(r'"(?!"")'),
exit=regex(r'"'),
contains=Escs,
attrs=STRING
)
SQString = region(
guard=regex(r"'(?!'')"),
exit=regex(r"'"),
contains=Escs,
attrs=STRING
)
TDQString = region(
guard=regex(r'"""'),
exit=regex(r'"""'),
contains=Escs + [DQDoctest],
attrs=STRING
)
TSQString = region(
guard=regex(r"'''"),
exit=regex(r"'''"),
contains=Escs + [SQDoctest],
attrs=STRING
)
def make_raw_string(quote):
return region(
guard=regex(r"r" + quote),
exit=regex(r"\\\\" + quote + "|" + r"(?<!\\)" + quote),
contains=[regex(r"(?<!\\)\\" + quote, ESCAPE)],
attrs=STRING
)
RSQString = make_raw_string("'")
RDQString = make_raw_string('"')
RTSQString = make_raw_string("'''")
RTDQString = make_raw_string('"""')
FloatLiteral = regex(r'\b\d*\.\d+', NUMBER)
IntLiteral = regex(r'\b\d+L?', NUMBER)
HexLiteral = regex(r'\b0x' + HEX + r'+L?', NUMBER)
OctLiteral = regex(r'\b0o[0-7]+L?', NUMBER)
BinLiteral = regex(r'\b0b[01]+L?', NUMBER)
FuncDef = regex(r'(?:(?<=\bdef)|(?<=\bclass)|(?<=@))\s+\w+', FUNCTION)
Deco = regex(r'(?<=@)\s*[\w.]+', FUNCTION)
CommAt = regex(re.escape('@'), SIGIL)
PythonLexers = [
Keyword,
Const,
Import,
DQString,
SQString,
TDQString,
TSQString,
RSQString,
RDQString,
IntLiteral,
HexLiteral,
OctLiteral,
BinLiteral,
FloatLiteral,
Comment,
FuncDef,
CommAt,
RTSQString,
RTDQString,
Deco,
Type
]
DQDoctest.contains = tuple(PythonLexers)
SQDoctest.contains = tuple(PythonLexers)
Python = region(
guard=None,
exit=None,
contains=PythonLexers
)
return Python
@autoconnect(BufferController.buffer_needs_highlight,
lambda tags: tags.get('syntax') == 'python')
def python_syntax_highlighting(controller):
highlighter = SyntaxHighlighter('keypad.plugins.pycomplete.syntax', pylexer(), dict(lexcat=None))
highlighter.highlight_buffer(controller.buffer)
def main():
from keypad.plugins.semantics.syntaxlib import Tokenizer
from keypad.core import AttributedString
from keypad.buffers import Buffer
buf = Buffer()
buf.insert((0,0), "'\\b")
highlighter = SyntaxHighlighter('h', pylexer(), dict(lexcat=None))
highlighter.highlight_buffer(buf)
print(buf.lines[0])
if __name__ == '__main__':
main()<|fim▁end|>
|
ESCAPE = dict(lexcat='literal.string.escape')
STRING = dict(lexcat='literal.string')
COMMENT = dict(lexcat='comment')
|
<|file_name|>bitstream_converter.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "media/filters/bitstream_converter.h"
#include "media/filters/ffmpeg_common.h"
namespace media {
// Constructs a converter that will run the named FFmpeg bitstream filter
// over packets belonging to |stream_context|. The filter itself is created
// lazily in Initialize(); until then |stream_filter_| stays NULL.
FFmpegBitstreamConverter::FFmpegBitstreamConverter(
const std::string& filter_name,
AVCodecContext* stream_context)
: filter_name_(filter_name),
stream_filter_(NULL),
stream_context_(stream_context) {
// A valid codec context is a hard requirement; fail fast if missing.
CHECK(stream_context_);
}
// Releases the bitstream filter if Initialize() ever created one.
FFmpegBitstreamConverter::~FFmpegBitstreamConverter() {
if (stream_filter_) {
av_bitstream_filter_close(stream_filter_);
stream_filter_ = NULL;
}
}
bool FFmpegBitstreamConverter::Initialize() {<|fim▁hole|>}
// Runs the configured bitstream filter over |packet| in place. Returns false
// if the converter was never successfully initialized or if the filter
// reports an error. On success, |packet| may end up pointing at a buffer
// newly allocated by the filter (see ownership comment below).
bool FFmpegBitstreamConverter::ConvertPacket(AVPacket* packet) {
CHECK(packet);
if (!stream_filter_) {
LOG(ERROR) << "Converter improperly initialized.";
return false;
}
// Output buffer/size are filled in by the filter; they may alias the
// input buffer when the filter performs no reallocation.
uint8_t* converted_data = NULL;
int converted_size = 0;
if (av_bitstream_filter_filter(stream_filter_, stream_context_, NULL,
&converted_data, &converted_size,
packet->data, packet->size,
packet->flags & PKT_FLAG_KEY) < 0) {
return false;
}
// av_bitstream_filter_filter() does not always allocate a new packet.
// If a new packet was allocated, then we need to modify the
// |packet| to point to the new data, releasing its current payload
// if it has the authoritative reference.
//
// TODO(ajwong): We're relying on the implementation behavior of
// av_free_packet() and the meaning of the |destruct| field in
// AVPacket. Try to find a cleaner way to do this.
if (converted_data != packet->data) {
av_free_packet(packet);
packet->data = converted_data;
packet->size = converted_size;
packet->destruct = av_destruct_packet;
}
return true;
}
|
stream_filter_ = av_bitstream_filter_init(filter_name_.c_str());
return stream_filter_ != NULL;
|
<|file_name|>task.service.ts<|end_file_name|><|fim▁begin|>import {Task} from './task.model'
import {Injectable} from 'angular2/core'
import {Http, Response, Headers, RequestOptions} from 'angular2/http';
import {Observable} from "rxjs/Observable";
@Injectable()
export class TaskService {
constructor(
private http: Http
) { }
private _tasksUrl = 'api/tasks';
private defaultRequestOptions: RequestOptions = new RequestOptions({
headers: new Headers({ 'Content-Type': 'application/json' }),
});
getTasks(): Observable<Task[]> {
return this.http.get(this._tasksUrl)
.map(this.extractTasks)
.catch(this.handleError);
}
addTask(title: string): Observable<Task> {
let body = JSON.stringify({ title });
return this.http.post(this._tasksUrl, body, this.defaultRequestOptions)
.map(this.extractTask)
.catch(this.handleError);
}<|fim▁hole|> updateTask(task: Task): Observable<Task> {
let body = JSON.stringify({ task });
return this.http.patch(`api/tasks/${task.id}`, body, this.defaultRequestOptions)
.map(this.extractTask)
.catch(this.handleError)
}
deleteTask(task: Task): Observable<Response> {
return this.http.delete(`api/tasks/${task.id}`, this.defaultRequestOptions)
.map(res => res)
.catch(this.handleError)
}
private extractTasks(res: Response) {
if (res.status < 200 || res.status >= 300) {
throw new Error('Bad response status: ' + res.status);
}
let body = res.json();
return body.tasks || {};
}
private extractTask(res: Response) {
if (res.status < 200 || res.status >= 300) {
throw new Error('Bad response status: ' + res.status);
}
let body = res.json();
return body || {};
}
private handleError(error: any) {
let errMsg = error.message || 'Server error';
console.error(errMsg);
return Observable.throw(errMsg);
}
}<|fim▁end|>
| |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from flask_login import LoginManager, UserMixin, login_user, logout_user, current_user, login_required
from werkzeug.security import generate_password_hash, check_password_hash
import ctf
class User(UserMixin, ctf.db.Model):
__tablename__ = 'users'
id = ctf.db.Column(ctf.db.Integer, primary_key=True)
username = ctf.db.Column(ctf.db.String(80), unique=True)
email = ctf.db.Column(ctf.db.String(80))
password_hash = ctf.db.Column(ctf.db.String(120))
school = ctf.db.Column(ctf.db.String(120))
score = ctf.db.Column(ctf.db.String(20))
solved = ctf.db.Column(ctf.db.String(400))
lastSubmit = ctf.db.Column(ctf.db.DateTime)
confirmed = ctf.db.Column(ctf.db.Boolean, nullable=False, default=False)
#timestamp=datetime.datetime.utcnow()
#def __init__(self, **kwargs):
# super(User, self).__init__(**kwargs)
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):<|fim▁hole|> return '<User %r>' % self.username
class Challenges(ctf.db.Model):
__tablename__ = 'challenges'
id = ctf.db.Column(ctf.db.Integer, primary_key=True)
name = ctf.db.Column(ctf.db.String(80), unique=True)
category = ctf.db.Column(ctf.db.String(80))
info = ctf.db.Column(ctf.db.String(800))
score = ctf.db.Column(ctf.db.String(20))
flag = ctf.db.Column(ctf.db.String(40))
def __repr__(self):
return '<Challenges %r>' % self.name<|fim▁end|>
|
return check_password_hash(self.password_hash, password)
def __repr__(self):
|
<|file_name|>ClassicEncryptedMediaDataSource.java<|end_file_name|><|fim▁begin|>package org.thoughtcrime.securesms.video;
import android.media.MediaDataSource;
import androidx.annotation.NonNull;
import androidx.annotation.RequiresApi;
import org.thoughtcrime.securesms.crypto.AttachmentSecret;
import org.thoughtcrime.securesms.crypto.ClassicDecryptingPartInputStream;
import org.thoughtcrime.securesms.util.Util;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
@RequiresApi(23)
final class ClassicEncryptedMediaDataSource extends MediaDataSource {
private final AttachmentSecret attachmentSecret;
private final File mediaFile;
private final long length;
ClassicEncryptedMediaDataSource(@NonNull AttachmentSecret attachmentSecret, @NonNull File mediaFile, long length) {
this.attachmentSecret = attachmentSecret;
this.mediaFile = mediaFile;
this.length = length;
}
@Override
// Reads up to |length| plaintext bytes at |position| into |bytes|.
// A fresh decrypting stream is opened on every call and the first
// |position| plaintext bytes are read and discarded — presumably because
// the decrypting stream does not support seeking (confirm against
// ClassicDecryptingPartInputStream).
public int readAt(long position, byte[] bytes, int offset, int length) throws IOException {
try (InputStream inputStream = ClassicDecryptingPartInputStream.createFor(attachmentSecret, mediaFile)) {
byte[] buffer = new byte[4096];
long headerRemaining = position;
while (headerRemaining > 0) {
// Skip forward in 4 KiB chunks; -1 means |position| lies past end of stream.
int read = inputStream.read(buffer, 0, Util.toIntExact(Math.min((long)buffer.length, headerRemaining)));
if (read == -1) return -1;
headerRemaining -= read;
}
// Single read after skipping: may return fewer than |length| bytes.
// NOTE(review): confirm callers tolerate short reads here.
return inputStream.read(bytes, offset, length);
}
}
@Override<|fim▁hole|>
@Override
public void close() {}
}<|fim▁end|>
|
public long getSize() {
return length;
}
|
<|file_name|>nfc_lib.py<|end_file_name|><|fim▁begin|>import random
def append_letter_or_number():
    """Return one random lowercase hexadecimal character ('0'-'9' or 'a'-'f').

    Returns:
        str: a single hex character, chosen uniformly at random.
    """
    # random.choice over the full hex alphabet replaces the old two-branch
    # scheme, which used random.randrange(0, 9) for the digit branch and so
    # could never produce the digit '9' (randrange excludes its upper bound).
    # It also removes the unreachable error branch that returned the int -1.
    return random.choice('0123456789abcdef')
# generates a random 16-byte NFC ID tag when a NFC is unavailable
def create_nfc_tag(tag_size=7):
    """Generate a random NFC ID tag for use when no NFC reader is available.

    The tag is ``tag_size`` two-character lowercase hex byte pairs joined by
    colons, e.g. ``'1a:0f:3c:99:b2:00:e4'`` for the default of 7 pairs.
    (The old comment claimed a 16-byte tag, but the code has always produced
    7 pairs; the docstring now matches the behaviour, and the pair count is
    exposed as a backward-compatible parameter.)

    Args:
        tag_size: number of hex byte pairs in the tag (default 7).

    Returns:
        str: the colon-separated tag.
    """
    hex_chars = '0123456789abcdef'
    # Build each byte pair directly from the full hex alphabet (the old
    # per-character helper could never emit '9') and join once instead of
    # growing a string with += inside nested counter loops.
    byte_pairs = (
        random.choice(hex_chars) + random.choice(hex_chars)
        for _ in range(tag_size)
    )
    return ':'.join(byte_pairs)
| |
<|file_name|>MockAppModule.java<|end_file_name|><|fim▁begin|>package com.fomdeveloper.planket.injection;
import android.app.Application;
import android.content.Context;
import android.net.ConnectivityManager;
import com.fomdeveloper.planket.BuildConfig;
import com.fomdeveloper.planket.NetworkManager;
import com.fomdeveloper.planket.bus.RxEventBus;
import com.fomdeveloper.planket.data.PlanketDatabase;
import com.fomdeveloper.planket.data.PaginatedDataManager;
import com.fomdeveloper.planket.data.api.FlickrOauthService;
import com.fomdeveloper.planket.data.api.FlickrService;
import com.fomdeveloper.planket.data.api.oauth.OAuthManager;
import com.fomdeveloper.planket.data.api.oauth.OAuthManagerImpl;
import com.fomdeveloper.planket.data.api.oauth.OAuthToken;
import com.fomdeveloper.planket.data.prefs.PlanketBoxPreferences;
import com.fomdeveloper.planket.data.prefs.UserHelper;
import com.fomdeveloper.planket.data.repository.FlickrRepository;
import com.fomdeveloper.planket.ui.presentation.base.oauth.OauthPresenter;
import com.fomdeveloper.planket.ui.presentation.ego.EgoPresenter;
import com.fomdeveloper.planket.ui.presentation.main.MainPresenter;
import com.fomdeveloper.planket.ui.presentation.photodetail.PhotoDetailPresenter;
import com.fomdeveloper.planket.ui.presentation.profile.ProfilePresenter;
import com.fomdeveloper.planket.ui.presentation.searchphotos.SearchPresenter;
import com.google.gson.Gson;
import com.squareup.picasso.Picasso;
import org.mockito.Mockito;
import java.io.IOException;
import javax.inject.Named;
import javax.inject.Singleton;
import dagger.Module;
import dagger.Provides;
import okhttp3.HttpUrl;
import okhttp3.Interceptor;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.logging.HttpLoggingInterceptor;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava.RxJavaCallAdapterFactory;
import retrofit2.converter.gson.GsonConverterFactory;
import rx.Scheduler;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
import se.akerfeldt.okhttp.signpost.OkHttpOAuthConsumer;
import se.akerfeldt.okhttp.signpost.SigningInterceptor;
/**
* Created by Fernando on 24/12/2016.
*/
@Module
public class MockAppModule {
/************* MOCKS *************/
@Provides @Singleton
public UserHelper provideUserHelper(){
return Mockito.mock(PlanketBoxPreferences.class);
}
@Provides @Singleton
public FlickrRepository provideFlickrRepository(){
return Mockito.mock(FlickrRepository.class);
}
@Provides @Singleton
public NetworkManager provideNetworkManager(){
return Mockito.mock(NetworkManager.class);
}
/**************************/
private Application application;
public MockAppModule(Application application) {
this.application = application;
}
@Provides @Singleton
public Context provideContext(){
return this.application;
}
@Provides @Singleton
public Gson provideGson(){
return new Gson();
}<|fim▁hole|> @Provides @Singleton
public ConnectivityManager provideConnectivityManager(){
return (ConnectivityManager) application.getSystemService(Context.CONNECTIVITY_SERVICE);
}
@Provides @Singleton
public PlanketBoxPreferences providePlanketPreferences(Context context, Gson gson){
return new PlanketBoxPreferences(context,gson);
}
@Provides @Singleton
public PlanketDatabase providePlanketDatabase(Context context){
return new PlanketDatabase(context);
}
@Provides @Singleton @Named("main_thread")
public Scheduler provideMainScheduler(){
return AndroidSchedulers.mainThread();
}
@Provides @Singleton @Named("io_thread")
public Scheduler provideIOScheduler(){
return Schedulers.io();
}
@Provides @Singleton
public RxEventBus provideRxBus(){
return new RxEventBus();
}
@Provides @Singleton
public Picasso providePicasso(Context context){
return Picasso.with(context);
}
@Provides @Named("non_oauth") @Singleton
public OkHttpClient provideOkHttpClient(OkHttpOAuthConsumer okHttpOAuthConsumer, PlanketBoxPreferences planketBoxPreferences){
HttpLoggingInterceptor loggingInterceptor = new HttpLoggingInterceptor();
loggingInterceptor.setLevel( BuildConfig.DEBUG? HttpLoggingInterceptor.Level.BODY : HttpLoggingInterceptor.Level.NONE);
Interceptor paramInterceptor = new Interceptor() {
@Override
public Response intercept(Chain chain) throws IOException {
Request request = chain.request();
HttpUrl url = request.url().newBuilder()
.addQueryParameter(FlickrService.PARAM_API_KEY, BuildConfig.FLICKR_API_KEY)
.addQueryParameter(FlickrService.PARAM_FORMAT,"json")
.addQueryParameter(FlickrService.PARAM_JSONCALLBACK,"1")
.build();
request = request.newBuilder().url(url).build();
return chain.proceed(request);
}
};
OkHttpClient.Builder okHttpClientBuilder = new OkHttpClient.Builder()
.addInterceptor(paramInterceptor)
.addInterceptor(loggingInterceptor)
.addInterceptor(new SigningInterceptor(okHttpOAuthConsumer));
if (planketBoxPreferences.getAccessToken()!=null){
OAuthToken oAuthToken = planketBoxPreferences.getAccessToken();
okHttpOAuthConsumer.setTokenWithSecret(oAuthToken.getToken(),oAuthToken.getTokenSecret());
}
return okHttpClientBuilder.build();
}
@Provides @Singleton
public OkHttpOAuthConsumer provideOkHttpOAuthConsumer(){
return new OkHttpOAuthConsumer(BuildConfig.FLICKR_API_KEY, BuildConfig.FLICKR_CONSUMER_SECRET);
}
@Provides @Named("oauth") @Singleton
public OkHttpClient provideOauthOkHttpClient(OkHttpOAuthConsumer okHttpOAuthConsumer){
HttpLoggingInterceptor loggingInterceptor = new HttpLoggingInterceptor();
loggingInterceptor.setLevel( BuildConfig.DEBUG? HttpLoggingInterceptor.Level.BODY : HttpLoggingInterceptor.Level.NONE);
return new OkHttpClient.Builder()
.addInterceptor(loggingInterceptor)
.addInterceptor(new SigningInterceptor(okHttpOAuthConsumer))
.build();
}
@Provides @Named("non_oauth") @Singleton
public Retrofit provideRetrofit(@Named("non_oauth") OkHttpClient okHttpClient){
return new Retrofit.Builder()
.baseUrl( FlickrService.ENDPOINT )
.addConverterFactory(GsonConverterFactory.create())
.addCallAdapterFactory(RxJavaCallAdapterFactory.create())
.client(okHttpClient)
.build();
}
@Provides @Named("oauth") @Singleton
public Retrofit provideOauthRetrofit(@Named("oauth") OkHttpClient okHttpClient){
return new Retrofit.Builder()
.baseUrl( FlickrOauthService.ENDPOINT )
.addCallAdapterFactory(RxJavaCallAdapterFactory.create())
.client(okHttpClient)
.build();
}
@Provides @Singleton
public FlickrService provideFlickrService(@Named("non_oauth") Retrofit retrofit){
return retrofit.create(FlickrService.class);
}
@Provides @Singleton
public FlickrOauthService provideFlickrOauthService(@Named("oauth") Retrofit retrofit){
return retrofit.create(FlickrOauthService.class);
}
@Provides @Singleton
public OAuthManager provideOAuthManager(FlickrOauthService flickrOauthService,OkHttpOAuthConsumer okHttpOAuthConsumer, PlanketBoxPreferences planketBoxPreferences, Context context){
return new OAuthManagerImpl(flickrOauthService,okHttpOAuthConsumer, planketBoxPreferences, context);
}
@Provides
public PaginatedDataManager providePaginatedManager(){
return new PaginatedDataManager();
}
@Provides
public MainPresenter provideMainPresenter(FlickrRepository flickrRepository, PlanketBoxPreferences planketBoxPreferences, RxEventBus rxEventBus, @Named("main_thread") Scheduler mainScheduler, @Named("io_thread") Scheduler ioScheduler){
return new MainPresenter(flickrRepository, planketBoxPreferences, rxEventBus, mainScheduler, ioScheduler);
}
@Provides
public OauthPresenter provideFlickrLoginPresenter(OAuthManager oAuthManager, @Named("main_thread") Scheduler mainScheduler, @Named("io_thread") Scheduler ioScheduler){
return new OauthPresenter(oAuthManager, mainScheduler, ioScheduler);
}
@Provides
public SearchPresenter provideSearchPresenter(FlickrRepository flickrRepository, PaginatedDataManager paginatedDataManager, @Named("main_thread") Scheduler mainScheduler, @Named("io_thread") Scheduler ioScheduler){
return new SearchPresenter(flickrRepository, paginatedDataManager, mainScheduler, ioScheduler);
}
@Provides
public PhotoDetailPresenter providePhotoDetailPresenter(FlickrRepository flickrRepository, @Named("main_thread") Scheduler mainScheduler, @Named("io_thread") Scheduler ioScheduler){
return new PhotoDetailPresenter(flickrRepository, mainScheduler, ioScheduler);
}
@Provides
public EgoPresenter provideEgoPresenter(FlickrRepository flickrRepository, PaginatedDataManager paginatedDataManager, @Named("main_thread") Scheduler mainScheduler, @Named("io_thread") Scheduler ioScheduler){
return new EgoPresenter(flickrRepository, paginatedDataManager, mainScheduler, ioScheduler);
}
@Provides
public ProfilePresenter provideProfilePresenter(FlickrRepository flickrRepository, @Named("main_thread") Scheduler mainScheduler, @Named("io_thread") Scheduler ioScheduler){
return new ProfilePresenter(flickrRepository, mainScheduler, ioScheduler);
}
}<|fim▁end|>
| |
<|file_name|>project.ts<|end_file_name|><|fim▁begin|>import * as fs from 'fs';
import { prerelease } from 'semver';
import { packages } from '../../../../lib/packages';
import { getGlobalVariable } from './env';
import { prependToFile, readFile, replaceInFile, writeFile } from './fs';
import { gitCommit } from './git';
import { execAndWaitForOutputToMatch, git, ng, npm, silentNpm } from './process';
const tsConfigPath = 'tsconfig.json';
export function updateJsonFile(filePath: string, fn: (json: any) => any | void) {
  // Read -> parse -> apply the caller's mutator -> pretty-print back to disk.
  // If the mutator returns nothing, it is assumed to have mutated in place.
  return readFile(filePath).then(content => {
    const parsed = JSON.parse(content);
    const updated = fn(parsed) || parsed;
    return writeFile(filePath, JSON.stringify(updated, null, 2));
  });
}
// Convenience wrapper: apply |fn| to the workspace root tsconfig.json.
export function updateTsConfig(fn: (json: any) => any | void) {
return updateJsonFile(tsConfigPath, fn);
}
// Start `ng serve` with the given extra args and resolve once the dev server
// logs a successful compilation.
export function ngServe(...args: string[]) {
return execAndWaitForOutputToMatch('ng',
['serve', ...args],
/: Compiled successfully./);
}
// Scaffold a new Angular project under the temp root (skipping npm install),
// relax the TS version check, then run the shared e2e preparation steps.
// Side effect: leaves process.cwd() inside the new project directory.
export async function createProject(name: string, ...args: string[]) {
const extraArgs = [];
process.chdir(getGlobalVariable('tmp-root'));
await ng('new', name, '--skip-install', ...extraArgs, ...args);
process.chdir(name);
if (fs.existsSync('tsconfig.json')) {
// Disable the TS version check to make TS updates easier.
// Only VE does it, but on Ivy the i18n extraction uses VE.
await updateJsonFile('tsconfig.json', config => {
config.angularCompilerOptions.disableTypeScriptVersionCheck = true;
});
}
await prepareProjectForE2e(name);
}
export async function prepareProjectForE2e(name) {
const argv: string[] = getGlobalVariable('argv');
await git(
'config',
'user.email',
'[email protected]',
);
await git(
'config',
'user.name',
'Angular CLI E2e',
);
await git(
'config',
'commit.gpgSign',
'false',
);
await useCIChrome(
'e2e',
);
await useCIChrome(
'',
);
// legacy projects
await useCIChrome(
'src',
);
if (argv['ng-snapshots'] || argv['ng-tag']) {
await useSha();
}
await writeFile('.npmrc', 'registry=http://localhost:4873');
console.log(
`Project ${name} created... Installing npm.`,
);
await silentNpm(
'install',
);
await useCIDefaults(
name,
);
// Force sourcemaps to be from the root of the filesystem.
await updateJsonFile(
'tsconfig.json',
json => {
json[
'compilerOptions'<|fim▁hole|> ] =
'/';
},
);
await gitCommit(
'prepare-project-for-e2e',
);
}
// Rewrite package.json so every locally built package (from `packages`) is
// installed from its tarball instead of the registry. A package is updated
// in whichever section (dependencies/devDependencies) already lists it.
export function useBuiltPackages() {
return Promise.resolve()
.then(() => updateJsonFile('package.json', json => {
if (!json['dependencies']) {
json['dependencies'] = {};
}
if (!json['devDependencies']) {
json['devDependencies'] = {};
}
for (const packageName of Object.keys(packages)) {
if (json['dependencies'].hasOwnProperty(packageName)
) {
json['dependencies'][packageName] = packages[packageName].tar;
} else if (json['devDependencies'].hasOwnProperty(packageName)) {
json['devDependencies'][packageName] = packages[packageName].tar;
}
}
}));
}
// When the e2e run was started with --ng-snapshots or --ng-tag, rewrite all
// @angular/* entries in package.json (except @angular/cli) to point at
// snapshot builds: either a `github:angular/<pkg>-builds<tag>` spec when a
// tag was given, or the pinned versions from tests/legacy-cli/e2e/ng-snapshot.
// No-op (resolved promise) otherwise.
export function useSha() {
const argv = getGlobalVariable('argv');
if (argv['ng-snapshots'] || argv['ng-tag']) {
// We need more than the sha here, version is also needed. Examples of latest tags:
// 7.0.0-beta.4+dd2a650
// 6.1.6+4a8d56a
const label = argv['ng-tag'] ? argv['ng-tag'] : '';
const ngSnapshotVersions = require('../ng-snapshot/package.json');
return updateJsonFile('package.json', json => {
// Install over the project with snapshot builds.
function replaceDependencies(key: string) {
const missingSnapshots = [];
Object.keys(json[key] || {})
.filter(name => name.match(/^@angular\//))
.forEach(name => {
const pkgName = name.split(/\//)[1];
if (pkgName == 'cli') {
return;
}
if (label) {
json[key][`@angular/${pkgName}`]
= `github:angular/${pkgName}-builds${label}`;
} else {
const replacement = ngSnapshotVersions.dependencies[`@angular/${pkgName}`];
if (!replacement) {
missingSnapshots.push(`missing @angular/${pkgName}`);
}
json[key][`@angular/${pkgName}`] = replacement;
}
});
// Collect all missing packages before failing so one error lists them all.
if (missingSnapshots.length > 0) {
throw new Error('e2e test with --ng-snapshots requires all angular packages be ' +
'listed in tests/legacy-cli/e2e/ng-snapshot/package.json.\nErrors:\n' + missingSnapshots.join('\n '))
}
}
try {
replaceDependencies('dependencies');
replaceDependencies('devDependencies');
} catch (e) {
return Promise.reject(e);
}
});
} else {
return Promise.resolve();
}
}
// Pin every @angular/* dependency (except @angular/cli) in package.json to
// |version|, and align typescript/rxjs/zone.js with that major release.
export function useNgVersion(version: string) {
return updateJsonFile('package.json', json => {
// Install over the project with specific versions.
Object.keys(json['dependencies'] || {})
.filter(name => name.match(/^@angular\//))
.forEach(name => {
const pkgName = name.split(/\//)[1];
if (pkgName == 'cli') {
return;
}
json['dependencies'][`@angular/${pkgName}`] = version;
});
Object.keys(json['devDependencies'] || {})
.filter(name => name.match(/^@angular\//))
.forEach(name => {
const pkgName = name.split(/\//)[1];
if (pkgName == 'cli') {
return;
}
json['devDependencies'][`@angular/${pkgName}`] = version;
});
// Set the correct peer dependencies for @angular/core and @angular/compiler-cli.
// This list should be kept up to date with each major release.
// NOTE(review): versions not matching ^5/^6/^7 fall through with no pinning.
if (version.startsWith('^5')) {
json['devDependencies']['typescript'] = '>=2.4.2 <2.5';
json['dependencies']['rxjs'] = '^5.5.0';
json['dependencies']['zone.js'] = '~0.8.4';
} else if (version.startsWith('^6')) {
json['devDependencies']['typescript'] = '>=2.7.2 <2.8';
json['dependencies']['rxjs'] = '^6.0.0';
json['dependencies']['zone.js'] = '~0.8.26';
} else if (version.startsWith('^7')) {
json['devDependencies']['typescript'] = '>=3.1.1 <3.2';
json['dependencies']['rxjs'] = '^6.0.0';
json['dependencies']['zone.js'] = '~0.8.26';
}
});
}
/**
 * Configure a generated workspace for CI: silence progress output, run karma
 * with the headless-CI Chrome launcher, disable webdriver auto-update in e2e,
 * and pin ChromeDriver via a `webdriver-update` npm script that is run once.
 */
export function useCIDefaults(projectName = 'test-project') {
  return updateJsonFile('angular.json', workspaceJson => {
    // Disable progress reporting on CI to reduce spam.
    const project = workspaceJson.projects[projectName];
    // Workspaces use `targets` (newer schema) or `architect` (older schema).
    const appTargets = project.targets || project.architect;
    appTargets.build.options.progress = false;
    appTargets.test.options.progress = false;
    // Use the CI chrome setup in karma.
    appTargets.test.options.browsers = 'ChromeHeadlessCI';
    // Disable auto-updating webdriver in e2e.
    if (appTargets.e2e) {
      appTargets.e2e.options.webdriverUpdate = false;
    }

    // legacy project structure: e2e lived in a separate "<name>-e2e" project.
    const e2eProject = workspaceJson.projects[projectName + '-e2e'];
    if (e2eProject) {
      const e2eTargets = e2eProject.targets || e2eProject.architect;
      e2eTargets.e2e.options.webdriverUpdate = false;
    }
  })
  .then(() => updateJsonFile('package.json', json => {
    // Use matching versions of Chromium and ChromeDriver.
    // https://github.com/GoogleChrome/puppeteer/releases
    // http://chromedriver.chromium.org/downloads
    json['scripts']['webdriver-update'] = 'webdriver-manager update' +
      ` --standalone false --gecko false --versions.chrome 79.0.3945.16`; // Supports Chrome 79
  }))
  .then(() => npm('run', 'webdriver-update'));
}
/**
 * Point a project's protractor/karma configs at the Chromium binary bundled
 * with puppeteer, so CI runs a pinned headless browser instead of whatever
 * Chrome is installed on the host.
 */
export function useCIChrome(projectDir: string) {
  const dir = projectDir ? projectDir + '/' : '';
  const protractorConf = `${dir}protractor.conf.js`;
  const karmaConf = `${dir}karma.conf.js`;

  return Promise.resolve()
    .then(() => updateJsonFile('package.json', json => {
      // Use matching versions of Chromium (via puppeteer) and ChromeDriver.
      // https://github.com/GoogleChrome/puppeteer/releases
      // http://chromedriver.chromium.org/downloads
      json['devDependencies']['puppeteer'] = '2.0.0'; // Chromium 79.0.3942.0 (r706915)
      json['devDependencies']['karma-chrome-launcher'] = '~2.2.0'; // Minimum for ChromeHeadless.
    }))
    // Use Pupeteer in protractor if a config is found on the project.
    .then(() => {
      if (fs.existsSync(protractorConf)) {
        return replaceInFile(protractorConf,
          `browserName: 'chrome'`,
          `browserName: 'chrome',
          chromeOptions: {
            args: ['--headless'],
            binary: require('puppeteer').executablePath()
          }
        `);
      }
    })
    // Use Pupeteer in karma if a config is found on the project.
    .then(() => {
      if (fs.existsSync(karmaConf)) {
        return prependToFile(karmaConf,
          `process.env.CHROME_BIN = require('puppeteer').executablePath();`)
          .then(() => replaceInFile(karmaConf,
            `browsers: ['Chrome']`,
            `browsers: ['Chrome'],
            customLaunchers: {
              ChromeHeadlessCI: {
                base: 'ChromeHeadless',
              }
            }
          `));
      }
    });
}
export async function isPrereleaseCli() {
const angularCliPkgJson = JSON.parse(await readFile('node_modules/@angular/cli/package.json'));
const pre = prerelease(angularCliPkgJson.version);
return pre && pre.length > 0;
}<|fim▁end|>
|
][
'sourceRoot'
|
<|file_name|>stack_overflow.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rt::util::report_overflow;
use core::prelude::*;
use ptr;
use mem;
use libc;
use libc::types::os::arch::extra::{LPVOID, DWORD, LONG, BOOL};
use sys_common::stack;
/// Per-thread stack-overflow handler handle. On Windows the actual resource
/// is the thread stack guarantee installed by `make_handler`; the struct
/// itself carries no live data.
pub struct Handler {
    _data: *mut libc::c_void
}

impl Handler {
    /// Install the stack guarantee for the current thread (see `make_handler`).
    pub unsafe fn new() -> Handler {
        make_handler()
    }
}

impl Drop for Handler {
    // Nothing to release: dropping the handle does not undo the guarantee.
    fn drop(&mut self) {}
}

// This is initialized in init() and only read from after
static mut PAGE_SIZE: usize = 0;

#[no_stack_check]
extern "system" fn vectored_handler(ExceptionInfo: *mut EXCEPTION_POINTERS) -> LONG {
    unsafe {
        let rec = &(*(*ExceptionInfo).ExceptionRecord);
        let code = rec.ExceptionCode;

        // Only stack-overflow exceptions are handled here; let every other
        // exception continue down the vectored-handler chain.
        if code != EXCEPTION_STACK_OVERFLOW {
            return EXCEPTION_CONTINUE_SEARCH;
        }

        // We're calling into functions with stack checks,
        // however stack checks by limit should be disabled on Windows
        stack::record_sp_limit(0);

        report_overflow();

        EXCEPTION_CONTINUE_SEARCH
    }
}

/// Process-wide setup: cache the system page size and register the vectored
/// exception handler that reports stack overflows. Panics if registration fails.
pub unsafe fn init() {
    let mut info = mem::zeroed();
    libc::GetSystemInfo(&mut info);
    PAGE_SIZE = info.dwPageSize as usize;

    if AddVectoredExceptionHandler(0, vectored_handler) == ptr::null_mut() {
        panic!("failed to install exception handler");
    }

    // Install the guarantee for the main thread too; deliberately leaked
    // since it must last for the whole process lifetime.
    mem::forget(make_handler());
}

/// No process-wide teardown is required on this platform.
pub unsafe fn cleanup() {
}

/// Reserve stack space (0x5000 bytes) so the overflow handler can run on the
/// current thread; panics if the OS refuses the guarantee.
pub unsafe fn make_handler() -> Handler {
    if SetThreadStackGuarantee(&mut 0x5000) == 0 {
        panic!("failed to reserve stack space for exception handling");
    }

    Handler { _data: 0 as *mut libc::c_void }
}
pub struct EXCEPTION_RECORD {
pub ExceptionCode: DWORD,
pub ExceptionFlags: DWORD,
pub ExceptionRecord: *mut EXCEPTION_RECORD,
pub ExceptionAddress: LPVOID,
pub NumberParameters: DWORD,
pub ExceptionInformation: [LPVOID; EXCEPTION_MAXIMUM_PARAMETERS]
}<|fim▁hole|> pub ExceptionRecord: *mut EXCEPTION_RECORD,
pub ContextRecord: LPVOID
}
// Signature the Win32 API requires of a vectored exception handler.
pub type PVECTORED_EXCEPTION_HANDLER = extern "system"
        fn(ExceptionInfo: *mut EXCEPTION_POINTERS) -> LONG;

pub type ULONG = libc::c_ulong;

// Ask the OS to keep searching for further handlers after ours returns.
const EXCEPTION_CONTINUE_SEARCH: LONG = 0;
const EXCEPTION_MAXIMUM_PARAMETERS: usize = 15;
// NTSTATUS code raised on stack exhaustion (STATUS_STACK_OVERFLOW).
const EXCEPTION_STACK_OVERFLOW: DWORD = 0xc00000fd;

extern "system" {
    fn AddVectoredExceptionHandler(FirstHandler: ULONG,
                                   VectoredHandler: PVECTORED_EXCEPTION_HANDLER)
                                   -> LPVOID;
    fn SetThreadStackGuarantee(StackSizeInBytes: *mut ULONG) -> BOOL;
}
|
pub struct EXCEPTION_POINTERS {
|
<|file_name|>ipcClient.go<|end_file_name|><|fim▁begin|>/*******************************************************************
* Copyright(c) 2000-2015 rjcb99
* All rights reserved.
*
* 文件名称: ipcClient.go
* 简要描述: 一个简单的通过ipcServer通讯的DEMO
*
* 创建日期: 2015-11-27
* 作者: ChenBo
* 说明:
*
* 修改日期: 2015-11-28
* 作者: ChenBo
* 说明:
******************************************************************/
package main
import (
"flag"
"fmt"
"net"
"time"
"utils"
)
var SysLog *utils.MyLog
var Mq *utils.MsgBox
func main() {
id := flag.Int("id", 1, "client_id")
ip := flag.String("ip", GetLocalIp(), "ip addr")
port := flag.Int("port", 8384, "port")
flag.Parse()
if !Init_SysLog() {<|fim▁hole|> return
}
//初始化全局Mq
if Init_Mq(*id, *ip, *port) == false {
SysLog.PutLineAsLog(fmt.Sprintf("error Init_Mq(%d,%s,%d) : in NN ", *id, *ip, *port))
return
}
for i := 1; i < 3600000; i++ {
Mq.SendMsg(*id, "北国风光,千里冰封,万里雪飘。望长城内外,惟余莽莽;大河上下,顿失滔滔。山舞银蛇,原驰蜡象,欲与天公试比高。须晴日,看红装素裹,分外妖娆。江山如此多娇,引无数英雄竞折腰。惜秦皇汉武,略输文采;唐宗宋祖,稍逊风骚。一代天骄,成吉思汗,只识弯弓射大雕。俱往矣,数风流人物,还看今朝。")
time.Sleep(time.Millisecond * 1)
}
SysLog.PutLineAsLog("ipcClient Exit!")
}
// Init_SysLog lazily constructs the process-wide logger.
// It reports whether the logger is ready for use.
func Init_SysLog() bool {
	if SysLog == nil {
		SysLog = utils.MakeNewMyLog("ipcClientLog", "ipcClient.log", 10000000, 5)
	}
	return SysLog != nil
}
// GetLocalIp returns the first non-loopback IPv4 address of this host,
// falling back to 127.0.0.1 when none is found or enumeration fails.
func GetLocalIp() string {
	addrs, err := net.InterfaceAddrs()
	if err == nil {
		for _, a := range addrs {
			if ipnet, ok := a.(*net.IPNet); ok && !ipnet.IP.IsLoopback() {
				if ipnet.IP.To4() != nil {
					return ipnet.IP.String() // return the first match
				}
			}
		}
	}
	return "127.0.0.1"
}
//初始化mq
func Init_Mq(id int, ip string, port int) bool {
Mq = utils.MakeNewMsgBox(id, ip, port)
return Mq.MakeConn()
}<|fim▁end|>
|
println("Init_SysLog() False!")
|
<|file_name|>bypass-security.component.ts<|end_file_name|><|fim▁begin|>// #docplaster
// #docregion
import { Component } from '@angular/core';
import { DomSanitizer, SafeResourceUrl, SafeUrl } from '@angular/platform-browser';<|fim▁hole|> templateUrl: './bypass-security.component.html',
})
export class BypassSecurityComponent {
dangerousUrl: string;
trustedUrl: SafeUrl;
dangerousVideoUrl!: string;
videoUrl!: SafeResourceUrl;
// #docregion trust-url
constructor(private sanitizer: DomSanitizer) {
// javascript: URLs are dangerous if attacker controlled.
// Angular sanitizes them in data binding, but you can
// explicitly tell Angular to trust this value:
this.dangerousUrl = 'javascript:alert("Hi there")';
this.trustedUrl = sanitizer.bypassSecurityTrustUrl(this.dangerousUrl);
// #enddocregion trust-url
this.updateVideoUrl('PUBnlbjZFAI');
}
// #docregion trust-video-url
updateVideoUrl(id: string) {
// Appending an ID to a YouTube URL is safe.
// Always make sure to construct SafeValue objects as
// close as possible to the input data so
// that it's easier to check if the value is safe.
this.dangerousVideoUrl = 'https://www.youtube.com/embed/' + id;
this.videoUrl =
this.sanitizer.bypassSecurityTrustResourceUrl(this.dangerousVideoUrl);
}
// #enddocregion trust-video-url
}<|fim▁end|>
|
@Component({
selector: 'app-bypass-security',
|
<|file_name|>restyle_damage.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The restyle damage is a hint that tells layout which kind of operations may
//! be needed in presence of incremental style changes.
#![deny(missing_docs)]
use computed_values::display;
use heapsize::HeapSizeOf;
use matching::{StyleChange, StyleDifference};
use properties::ComputedValues;
use std::fmt;
bitflags! {
#[doc = "Individual layout actions that may be necessary after restyling."]
pub flags ServoRestyleDamage: u8 {
#[doc = "Repaint the node itself."]
#[doc = "Currently unused; need to decide how this propagates."]
const REPAINT = 0x01,
#[doc = "The stacking-context-relative position of this node or its descendants has \
changed."]
#[doc = "Propagates both up and down the flow tree."]
const REPOSITION = 0x02,
#[doc = "Recompute the overflow regions (bounding box of object and all descendants)."]
#[doc = "Propagates down the flow tree because the computation is bottom-up."]
const STORE_OVERFLOW = 0x04,
#[doc = "Recompute intrinsic inline_sizes (minimum and preferred)."]
#[doc = "Propagates down the flow tree because the computation is"]
#[doc = "bottom-up."]
const BUBBLE_ISIZES = 0x08,
#[doc = "Recompute actual inline-sizes and block-sizes, only taking out-of-flow children \
into account. \
Propagates up the flow tree because the computation is top-down."]
const REFLOW_OUT_OF_FLOW = 0x10,
#[doc = "Recompute actual inline_sizes and block_sizes."]
#[doc = "Propagates up the flow tree because the computation is"]
#[doc = "top-down."]
const REFLOW = 0x20,
#[doc = "Re-resolve generated content. \
Propagates up the flow tree because the computation is inorder."]
const RESOLVE_GENERATED_CONTENT = 0x40,
#[doc = "The entire flow needs to be reconstructed."]
const RECONSTRUCT_FLOW = 0x80
}
}
impl HeapSizeOf for ServoRestyleDamage {
    // A bitflags value is a plain integer and owns no heap allocations.
    fn heap_size_of_children(&self) -> usize { 0 }
}
impl ServoRestyleDamage {
/// Compute the `StyleDifference` (including the appropriate restyle damage)
/// for a given style change between `old` and `new`.
pub fn compute_style_difference(
old: &ComputedValues,
new: &ComputedValues,
) -> StyleDifference {
let damage = compute_damage(old, new);
let change = if damage.is_empty() {
StyleChange::Unchanged
} else {
// FIXME(emilio): Differentiate between reset and inherited
// properties here, and set `reset_only` appropriately so the
// optimization to skip the cascade in those cases applies.
StyleChange::Changed { reset_only: false }
};
StyleDifference::new(damage, change)
}
    /// Returns a bitmask that represents a flow that needs to be rebuilt and
    /// reflowed.
    ///
    /// FIXME(bholley): Do we ever actually need this? Shouldn't
    /// RECONSTRUCT_FLOW imply everything else?
    pub fn rebuild_and_reflow() -> ServoRestyleDamage {
        // Union of every damage bit except RESOLVE_GENERATED_CONTENT.
        REPAINT | REPOSITION | STORE_OVERFLOW | BUBBLE_ISIZES | REFLOW_OUT_OF_FLOW | REFLOW |
            RECONSTRUCT_FLOW
    }

    /// Returns a bitmask indicating that the frame needs to be reconstructed.
    pub fn reconstruct() -> ServoRestyleDamage {
        RECONSTRUCT_FLOW
    }
/// Supposing a flow has the given `position` property and this damage,
/// returns the damage that we should add to the *parent* of this flow.
pub fn damage_for_parent(self, child_is_absolutely_positioned: bool) -> ServoRestyleDamage {
if child_is_absolutely_positioned {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
} else {
self & (REPAINT | REPOSITION | STORE_OVERFLOW | REFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
}
}
/// Supposing the *parent* of a flow with the given `position` property has
/// this damage, returns the damage that we should add to this flow.
pub fn damage_for_child(self,
parent_is_absolutely_positioned: bool,
child_is_absolutely_positioned: bool)
-> ServoRestyleDamage {
match (parent_is_absolutely_positioned, child_is_absolutely_positioned) {
(false, true) => {
// Absolute children are out-of-flow and therefore insulated from changes.
//
// FIXME(pcwalton): Au contraire, if the containing block dimensions change!<|fim▁hole|> // its kids.
if self.contains(REFLOW_OUT_OF_FLOW) {
self | REFLOW
} else {
self
}
}
_ => {
// TODO(pcwalton): Take floatedness into account.
self & (REPAINT | REPOSITION | REFLOW)
}
}
}
}
impl Default for ServoRestyleDamage {
    /// A default-constructed damage value carries no damage bits.
    fn default() -> Self {
        Self::empty()
    }
}
impl fmt::Display for ServoRestyleDamage {
    /// Render the contained damage bits as a " | "-separated list, or
    /// "NoDamage" when no bit is set.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let labels = [
            (REPAINT, "Repaint"),
            (REPOSITION, "Reposition"),
            (STORE_OVERFLOW, "StoreOverflow"),
            (BUBBLE_ISIZES, "BubbleISizes"),
            (REFLOW_OUT_OF_FLOW, "ReflowOutOfFlow"),
            (REFLOW, "Reflow"),
            (RESOLVE_GENERATED_CONTENT, "ResolveGeneratedContent"),
            (RECONSTRUCT_FLOW, "ReconstructFlow"),
        ];

        let mut wrote_any = false;
        for &(bit, label) in &labels {
            if !self.contains(bit) {
                continue;
            }
            if wrote_any {
                write!(f, " | ")?;
            }
            write!(f, "{}", label)?;
            wrote_any = true;
        }

        if !wrote_any {
            write!(f, "NoDamage")?;
        }

        Ok(())
    }
}
// NB: We need the braces inside the RHS due to Rust #8012. This particular
// version of this macro might be safe anyway, but we want to avoid silent
// breakage on modifications.
//
// Inserts every `$effect` flag into `$damage` when any of the listed style
// struct fields differ between `$old` and `$new`; evaluates to whether an
// insertion happened, so calls can be chained with `||`.
macro_rules! add_if_not_equal(
    ($old:ident, $new:ident, $damage:ident,
     [ $($effect:ident),* ], [ $($style_struct_getter:ident.$name:ident),* ]) => ({
        if $( ($old.$style_struct_getter().$name != $new.$style_struct_getter().$name) )||* {
            $damage.insert($($effect)|*);
            true
        } else {
            false
        }
    })
);
fn compute_damage(old: &ComputedValues, new: &ComputedValues) -> ServoRestyleDamage {
let mut damage = ServoRestyleDamage::empty();
// This should check every CSS property, as enumerated in the fields of
// http://doc.servo.org/style/properties/struct.ComputedValues.html
// FIXME: Test somehow that every property is included.
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES, REFLOW_OUT_OF_FLOW,
REFLOW, RECONSTRUCT_FLOW], [
get_box.clear, get_box.float, get_box.display, get_box.position, get_counters.content,
get_counters.counter_reset, get_counters.counter_increment,
get_inheritedbox._servo_under_display_none,
get_list.quotes, get_list.list_style_type,
// If these text or font properties change, we need to reconstruct the flow so that
// text shaping is re-run.
get_inheritedtext.letter_spacing, get_inheritedtext.text_rendering,
get_inheritedtext.text_transform, get_inheritedtext.word_spacing,
get_inheritedtext.overflow_wrap, get_inheritedtext.text_justify,
get_inheritedtext.white_space, get_inheritedtext.word_break, get_text.text_overflow,
get_font.font_family, get_font.font_style, get_font.font_variant_caps, get_font.font_weight,
get_font.font_size, get_font.font_stretch,
get_inheritedbox.direction, get_inheritedbox.writing_mode,
get_text.text_decoration_line, get_text.unicode_bidi,
get_inheritedtable.empty_cells, get_inheritedtable.caption_side,
get_column.column_width, get_column.column_count
]) || (new.get_box().display == display::T::inline &&
add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW, RECONSTRUCT_FLOW], [
// For inline boxes only, border/padding styles are used in flow construction (to decide
// whether to create fragments for empty flows).
get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left
])) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW, REFLOW],
[get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_margin.margin_top, get_margin.margin_right,
get_margin.margin_bottom, get_margin.margin_left,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left,
get_position.width, get_position.height,
get_inheritedtext.line_height,
get_inheritedtext.text_align, get_inheritedtext.text_indent,
get_table.table_layout,
get_inheritedtable.border_collapse,
get_inheritedtable.border_spacing,
get_column.column_gap,
get_position.flex_direction,
get_position.flex_wrap,
get_position.justify_content,
get_position.align_items,
get_position.align_content,
get_position.order,
get_position.flex_basis,
get_position.flex_grow,
get_position.flex_shrink,
get_position.align_self
]) || add_if_not_equal!(old, new, damage,
[REPAINT, REPOSITION, STORE_OVERFLOW, REFLOW_OUT_OF_FLOW], [
get_position.top, get_position.left,
get_position.right, get_position.bottom,
get_effects.opacity,
get_box.transform, get_box.transform_style, get_box.transform_origin,
get_box.perspective, get_box.perspective_origin
]) || add_if_not_equal!(old, new, damage,
[REPAINT], [
get_color.color, get_background.background_color,
get_background.background_image, get_background.background_position_x,
get_background.background_position_y, get_background.background_repeat,
get_background.background_attachment, get_background.background_clip,
get_background.background_origin, get_background.background_size,
get_border.border_top_color, get_border.border_right_color,
get_border.border_bottom_color, get_border.border_left_color,
get_border.border_top_style, get_border.border_right_style,
get_border.border_bottom_style, get_border.border_left_style,
get_border.border_top_left_radius, get_border.border_top_right_radius,
get_border.border_bottom_left_radius, get_border.border_bottom_right_radius,
get_position.z_index, get_box._servo_overflow_clip_box,
get_inheritedtext._servo_text_decorations_in_effect,
get_pointing.cursor, get_pointing.pointer_events,
get_effects.box_shadow, get_effects.clip, get_inheritedtext.text_shadow, get_effects.filter,
get_effects.mix_blend_mode, get_inheritedbox.image_rendering,
// Note: May require REFLOW et al. if `visibility: collapse` is implemented.
get_inheritedbox.visibility
]);
// Paint worklets may depend on custom properties,
// so if they have changed we should repaint.
if old.get_custom_properties() != new.get_custom_properties() {
damage.insert(REPAINT);
}
// If the layer requirements of this flow have changed due to the value
// of the transform, then reflow is required to rebuild the layers.
if old.transform_requires_layer() != new.transform_requires_layer() {
damage.insert(ServoRestyleDamage::rebuild_and_reflow());
}
damage
}<|fim▁end|>
|
self & (REPAINT | REPOSITION)
}
(true, false) => {
// Changing the position of an absolutely-positioned block requires us to reflow
|
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>"""
Serializer for user API
"""
from rest_framework import serializers
from rest_framework.reverse import reverse
from django.template import defaultfilters
from courseware.access import has_access
from student.models import CourseEnrollment, User
from certificates.models import certificate_status_for_student, CertificateStatuses
from xmodule.course_module import DEFAULT_START_DATE
class CourseOverviewField(serializers.RelatedField):
    """Custom field to wrap a CourseDescriptor object. Read-only."""

    def to_representation(self, course_overview):
        # Serialize a course overview into the dict shape the mobile API
        # returns for each enrollment.
        course_id = unicode(course_overview.id)
        request = self.context.get('request', None)
        if request:
            # Build absolute URLs for the per-course sub-resources.
            video_outline_url = reverse(
                'video-summary-list',
                kwargs={'course_id': course_id},
                request=request
            )
            course_updates_url = reverse(
                'course-updates-list',
                kwargs={'course_id': course_id},
                request=request
            )
            course_handouts_url = reverse(
                'course-handouts-list',
                kwargs={'course_id': course_id},
                request=request
            )
            # Discussions are optional per course.
            discussion_url = reverse(
                'discussion_course',
                kwargs={'course_id': course_id},
                request=request
            ) if course_overview.is_discussion_tab_enabled() else None
        else:
            # Without a request we cannot build absolute URLs.
            video_outline_url = None
            course_updates_url = None
            course_handouts_url = None
            discussion_url = None

        # Pick the most specific start-date representation available: an
        # advertised free-form string, an explicit timestamp, or nothing.
        if course_overview.advertised_start is not None:
            start_type = "string"
            start_display = course_overview.advertised_start
        elif course_overview.start != DEFAULT_START_DATE:
            start_type = "timestamp"
            start_display = defaultfilters.date(course_overview.start, "DATE_FORMAT")
        else:
            start_type = "empty"
            start_display = None

        return {
            "id": course_id,
            "name": course_overview.display_name,
            "number": course_overview.display_number_with_default,
            "org": course_overview.display_org_with_default,
            "start": course_overview.start,
            "start_display": start_display,
            "start_type": start_type,
            "end": course_overview.end,
            "course_image": course_overview.course_image_url,
            "social_urls": {
                "facebook": course_overview.facebook_url,
            },
            "latest_updates": {
                "video": None
            },
            "video_outline": video_outline_url,
            "course_updates": course_updates_url,
            "course_handouts": course_handouts_url,
            "discussion_url": discussion_url,
            "subscription_id": course_overview.clean_id(padding_char='_'),
            # courseware_access can only be computed when we know the user.
            "courseware_access": has_access(request.user, 'load_mobile', course_overview).to_json() if request else None
        }
class CourseEnrollmentSerializer(serializers.ModelSerializer):
"""
Serializes CourseEnrollment models
"""
course = CourseOverviewField(source="course_overview", read_only=True)
certificate = serializers.SerializerMethodField()
def get_certificate(self, model):
"""Returns the information about the user's certificate in the course."""
certificate_info = certificate_status_for_student(model.user, model.course_id)
if certificate_info['status'] == CertificateStatuses.downloadable:
return {
"url": certificate_info['download_url'],
}
else:
return {}
class Meta(object):<|fim▁hole|> model = CourseEnrollment
fields = ('created', 'mode', 'is_active', 'course', 'certificate')
lookup_field = 'username'
class UserSerializer(serializers.HyperlinkedModelSerializer):
"""
Serializes User models
"""
name = serializers.ReadOnlyField(source='profile.name')
course_enrollments = serializers.HyperlinkedIdentityField(
view_name='courseenrollment-detail',
lookup_field='username'
)
class Meta(object):
model = User
fields = ('id', 'username', 'email', 'name', 'course_enrollments')
lookup_field = 'username'<|fim▁end|>
| |
<|file_name|>test_pathod.py<|end_file_name|><|fim▁begin|>import sys
import cStringIO
import OpenSSL
from libpathod import pathod, version
from netlib import tcp, http
from netlib.exceptions import HttpException, TlsException
import tutils
class TestPathod(object):

    def test_logging(self):
        """Exercise the in-memory log: add, fetch by id, clear, bounded size."""
        s = cStringIO.StringIO()
        p = pathod.Pathod(("127.0.0.1", 0), logfp=s)
        assert len(p.get_log()) == 0
        # Renamed from `id`, which shadowed the builtin of the same name.
        log_id = p.add_log(dict(s="foo"))
        assert p.log_by_id(log_id)
        assert len(p.get_log()) == 1
        p.clear_log()
        assert len(p.get_log()) == 0

        # The log buffer must never grow past LOGBUF entries.
        for _ in range(p.LOGBUF + 1):
            p.add_log(dict(s="foo"))
        assert len(p.get_log()) <= p.LOGBUF
class TestNoWeb(tutils.DaemonTests):
    # Run the daemon with the web interface disabled.
    noweb = True

    def test_noweb(self):
        # Crafted responses still work; the web root returns pathod's
        # internal 800 error code instead of a page.
        assert self.get("200:da").status_code == 200
        assert self.getpath("/").status_code == 800


class TestTimeout(tutils.DaemonTests):
    # Aggressively short server-side timeout so a paused request trips it.
    timeout = 0.01

    def test_noweb(self):
        # FIXME: Add float values to spec language, reduce test timeout to
        # increase test performance
        # This is a bodge - we have some platform difference that causes
        # different exceptions to be raised here.
        tutils.raises(Exception, self.pathoc, ["get:/:p1,1"])
        assert self.d.last_log()["type"] == "timeout"
class TestNoApi(tutils.DaemonTests):
    # Run the daemon with the JSON API disabled.
    noapi = True

    def test_noapi(self):
        # API endpoints disappear, but the web root still serves a page
        # that no longer mentions the log viewer.
        assert self.getpath("/log").status_code == 404
        r = self.getpath("/")
        assert r.status_code == 200
        # Idiom fix: was the non-PEP8 `assert not "Log" in r.content`.
        assert "Log" not in r.content
class TestNotAfterConnect(tutils.DaemonTests):
    # Plain daemon that does not upgrade to SSL after a CONNECT.
    ssl = False
    ssloptions = dict(
        not_after_connect=True
    )

    def test_connect(self):
        # Tunnel through CONNECT and fetch a crafted 202 response.
        r, _ = self.pathoc(
            [r"get:'http://foo.com/p/202':da"],
            connect_to=("localhost", self.d.port)
        )
        assert r[0].status_code == 202


class TestCustomCert(tutils.DaemonTests):
    # SSL daemon serving a custom certificate from the test data directory.
    ssl = True
    ssloptions = dict(
        certs=[("*", tutils.test_data.path("data/testkey.pem"))],
    )

    def test_connect(self):
        r, _ = self.pathoc([r"get:/p/202"])
        r = r[0]
        assert r.status_code == 202
        # The served chain must carry the custom cert's subject.
        assert r.sslinfo
        assert "test.com" in str(r.sslinfo.certchain[0].get_subject())


class TestSSLCN(tutils.DaemonTests):
    # SSL daemon with an explicitly configured certificate common name.
    ssl = True
    ssloptions = dict(
        cn="foo.com"
    )

    def test_connect(self):
        r, _ = self.pathoc([r"get:/p/202"])
        r = r[0]
        assert r.status_code == 202
        # The leaf certificate's CN must match the configured value.
        assert r.sslinfo
        assert r.sslinfo.certchain[0].get_subject().CN == "foo.com"


class TestNohang(tutils.DaemonTests):
    # Run the daemon with pauses in response specs disabled.
    nohang = True

    def test_nohang(self):
        # A spec containing a pause is rejected with the internal 800 code.
        r = self.get("200:p0,0")
        assert r.status_code == 800
        l = self.d.last_log()
        assert "Pauses have been disabled" in l["response"]["msg"]
class TestHexdump(tutils.DaemonTests):
    # Run the daemon with hexdump logging enabled.
    hexdump = True

    def test_hexdump(self):
        # Fix: the response was previously bound to an unused local and the
        # test asserted nothing. A crafted 200 with binary data must still
        # succeed while being hexdumped.
        assert self.get(r"200:b'\xf0'").status_code == 200
class TestNocraft(tutils.DaemonTests):
    # Run the daemon with response crafting disabled.
    nocraft = True

    def test_nocraft(self):
        # Any crafted spec is refused with the internal 800 code.
        r = self.get(r"200:b'\xf0'")
        assert r.status_code == 800
        assert "Crafting disabled" in r.content
class CommonTests(tutils.DaemonTests):
def test_binarydata(self):<|fim▁hole|> # FIXME: Other binary data elements
def test_sizelimit(self):
r = self.get("200:b@1g")
assert r.status_code == 800
l = self.d.last_log()
assert "too large" in l["response"]["msg"]
def test_preline(self):
r, _ = self.pathoc([r"get:'/p/200':i0,'\r\n'"])
assert r[0].status_code == 200
def test_info(self):
assert tuple(self.d.info()["version"]) == version.IVERSION
def test_logs(self):
assert self.d.clear_log()
assert not self.d.last_log()
rsp = self.get("202:da")
assert len(self.d.log()) == 1
assert self.d.clear_log()
assert len(self.d.log()) == 0
def test_disconnect(self):
rsp = self.get("202:b@100k:d200")
assert len(rsp.content) < 200
def test_parserr(self):
rsp = self.get("400:msg,b:")
assert rsp.status_code == 800
def test_static(self):
rsp = self.get("200:b<file")
assert rsp.status_code == 200
assert rsp.content.strip() == "testfile"
def test_anchor(self):
rsp = self.getpath("anchor/foo")
assert rsp.status_code == 202
def test_invalid_first_line(self):
c = tcp.TCPClient(("localhost", self.d.port))
c.connect()
if self.ssl:
c.convert_to_ssl()
c.wfile.write("foo\n\n\n")
c.wfile.flush()
l = self.d.last_log()
assert l["type"] == "error"
assert "foo" in l["msg"]
def test_invalid_content_length(self):
tutils.raises(
HttpException,
self.pathoc,
["get:/:h'content-length'='foo'"]
)
l = self.d.last_log()
assert l["type"] == "error"
assert "Unparseable Content Length" in l["msg"]
def test_invalid_headers(self):
tutils.raises(HttpException, self.pathoc, ["get:/:h'\t'='foo'"])
l = self.d.last_log()
assert l["type"] == "error"
assert "Invalid headers" in l["msg"]
def test_access_denied(self):
rsp = self.get("=nonexistent")
assert rsp.status_code == 800
def test_source_access_denied(self):
rsp = self.get("200:b</foo")
assert rsp.status_code == 800
assert "File access denied" in rsp.content
def test_proxy(self):
r, _ = self.pathoc([r"get:'http://foo.com/p/202':da"])
assert r[0].status_code == 202
def test_websocket(self):
r, _ = self.pathoc(["ws:/p/"], ws_read_limit=0)
assert r[0].status_code == 101
r, _ = self.pathoc(["ws:/p/ws"], ws_read_limit=0)
assert r[0].status_code == 101
def test_websocket_frame(self):
r, _ = self.pathoc(
["ws:/p/", "wf:f'wf:b\"test\"':pa,1"],
ws_read_limit=1
)
assert r[1].payload == "test"
def test_websocket_frame_reflect_error(self):
r, _ = self.pathoc(
["ws:/p/", "wf:-mask:knone:f'wf:b@10':i13,'a'"],
ws_read_limit=1,
timeout=1
)
# FIXME: Race Condition?
assert "Parse error" in self.d.text_log()
def test_websocket_frame_disconnect_error(self):
self.pathoc(["ws:/p/", "wf:b@10:d3"], ws_read_limit=0)
assert self.d.last_log()
class TestDaemon(CommonTests):
    # Run the shared test suite against a plaintext daemon.
    ssl = False

    def test_connect(self):
        # CONNECT followed by an SSL upgrade succeeds against the plain port.
        r, _ = self.pathoc(
            [r"get:'http://foo.com/p/202':da"],
            connect_to=("localhost", self.d.port),
            ssl=True
        )
        assert r[0].status_code == 202

    def test_connect_err(self):
        # The same request without the SSL upgrade must fail.
        tutils.raises(
            HttpException,
            self.pathoc,
            [r"get:'http://foo.com/p/202':da"],
            connect_to=("localhost", self.d.port)
        )


class TestDaemonSSL(CommonTests):
    # Run the shared test suite against an SSL daemon.
    ssl = True

    def test_ssl_conn_failure(self):
        # Feed garbage bytes before the handshake; the daemon must log an
        # SSL error rather than hang or crash.
        c = tcp.TCPClient(("localhost", self.d.port))
        c.rbufsize = 0
        c.wbufsize = 0
        c.connect()
        c.wfile.write("\0\0\0\0")
        tutils.raises(TlsException, c.convert_to_ssl)
        l = self.d.last_log()
        assert l["type"] == "error"
        assert "SSL" in l["msg"]

    def test_ssl_cipher(self):
        # A successful request records the negotiated cipher in the log.
        r, _ = self.pathoc([r"get:/p/202"])
        assert r[0].status_code == 202
        assert self.d.last_log()["cipher"][1] > 0
class TestHTTP2(tutils.DaemonTests):
ssl = True
noweb = True
noapi = True
nohang = True
if OpenSSL._util.lib.Cryptography_HAS_ALPN:
def test_http2(self):
r, _ = self.pathoc(["GET:/"], ssl=True, use_http2=True)
assert r[0].status_code == 800<|fim▁end|>
|
r = self.get(r"200:b'\xf0'")
l = self.d.last_log()
|
<|file_name|>_serialize.py<|end_file_name|><|fim▁begin|>import logging
from json import JSONEncoder, dumps
from flask import current_app, make_response, request
__all__ = (
"serialize",
"jsonify",
)
log = logging.getLogger(__name__)
def serialize(rv):
    """Normalize a view's return value into a Flask-compatible response.

    ``None`` or an empty string becomes an empty "204 No Content" response;
    Response instances, callables and non-empty strings pass through
    untouched; anything else is JSON-encoded, with status 201 for POSTs.
    """
    log.debug("Serializing output")
    if rv is None or (isinstance(rv, str) and not len(rv)):
        log.info("No content")
        rv = make_response("", 204)
    elif (
        isinstance(rv, current_app.response_class)
        or callable(rv)
        or isinstance(rv, str)
    ):
        ...
    else:
        log.info("Serializing")
        rv = jsonify(rv)
        if request.method == "POST":
            # Bug fix: the 201 response was built but its result discarded,
            # so POSTs silently returned 200. Bind it to rv.
            rv = make_response(rv, 201)
    return rv
def jsonify(*args, **kwargs):
if args and kwargs:
raise TypeError("jsonify() behavior undefined when passed both args and kwargs")
elif len(args) == 1: # single args are passed directly to dumps()
data = args[0]
else:
data = args or kwargs
pretty_print = bool(
request.args.get(
"pretty-print", current_app.config["JSONIFY_PRETTYPRINT_REGULAR"]
)
)
indent = None
separators = (",", ":")
cls = current_app.json_encoder or JSONEncoder<|fim▁hole|>
if hasattr(request, "operation") and request.operation.produces:
mime_type = request.operation.produces[0]
elif "JSONIFY_MIMETYPE" in current_app.config:
mime_type = current_app.config["JSONIFY_MIMETYPE"]
else:
mime_type = "application/json; charset=utf-8"
json_str = dumps(data, indent=indent, separators=separators, cls=cls) + "\n"
json_str.encode("utf-8")
return current_app.response_class(json_str, mimetype=mime_type)<|fim▁end|>
|
if pretty_print is True and request.is_xhr is False:
indent = 2
separators = (", ", ": ")
|
<|file_name|>util.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from django.conf import settings
from mongodbforms.documentoptions import DocumentMetaWrapper, LazyDocumentMetaWrapper
from mongodbforms.fieldgenerator import MongoDefaultFormFieldGenerator
# Use Django's import_by_path when available; otherwise vendor the
# implementation from Django's development branch (see comment below).
try:
    from django.utils.module_loading import import_by_path
except ImportError:
    # this is only in Django's devel version for now
    # and the following code comes from there. Yet it's too nice to
    # pass on this. So we do define it here for now.
    import sys
    from django.core.exceptions import ImproperlyConfigured
    from django.utils.importlib import import_module
    from django.utils import six

    def import_by_path(dotted_path, error_prefix=''):
        """
        Import a dotted module path and return the attribute/class designated
        by the last name in the path. Raise ImproperlyConfigured if something
        goes wrong.
        """
        # Split "package.module.Attr" into the module path and final name.
        try:
            module_path, class_name = dotted_path.rsplit('.', 1)
        except ValueError:
            raise ImproperlyConfigured("%s%s doesn't look like a module path" %
                                       (error_prefix, dotted_path))
        try:
            module = import_module(module_path)
        except ImportError as e:
            # Re-raise as ImproperlyConfigured while preserving the original
            # traceback (six.reraise works on both Python 2 and 3).
            msg = '%sError importing module %s: "%s"' % (
                error_prefix, module_path, e)
            six.reraise(ImproperlyConfigured, ImproperlyConfigured(msg),
                        sys.exc_info()[2])
        try:
            attr = getattr(module, class_name)
        except AttributeError:
            raise ImproperlyConfigured(
                '%sModule "%s" does not define a "%s" attribute/class' %
                (error_prefix, module_path, class_name))
        return attr
def load_field_generator():
    """Return the form-field generator class to use.

    The Django setting ``MONGODBFORMS_FIELDGENERATOR`` (a dotted import
    path) overrides the bundled default generator when it is defined.
    """
    if not hasattr(settings, 'MONGODBFORMS_FIELDGENERATOR'):
        return MongoDefaultFormFieldGenerator
    return import_by_path(settings.MONGODBFORMS_FIELDGENERATOR)
def init_document_options(document):
    """Ensure ``document._meta`` is wrapped in a meta wrapper.

    The document class is mutated in place the first time it is seen and
    returned; repeated calls are cheap no-ops.
    """
    wrapper_types = (DocumentMetaWrapper, LazyDocumentMetaWrapper)
    if not isinstance(document._meta, wrapper_types):
        document._meta = DocumentMetaWrapper(document)
    return document
def get_document_options(document):
    """Return a fresh ``DocumentMetaWrapper`` for ``document``.

    Unlike ``init_document_options`` this does not touch
    ``document._meta``; it simply builds and returns a new wrapper.
    """
    return DocumentMetaWrapper(document)
def format_mongo_validation_errors(validation_exception):
    """Return a list of ``"message: [field, ...]"`` strings for a
    mongoengine validation error.

    NOTE(review): despite the old one-line docstring, this returns a
    *list* of strings (one per distinct error message), not a string.
    """
    def generate_key(value, prefix=''):
        # Recursively flatten nested error values (lists and dicts)
        # into a single space-joined, dot-prefixed message string.
        if isinstance(value, list):
            value = ' '.join([generate_key(k) for k in value])
        if isinstance(value, dict):
            value = ' '.join([
                generate_key(v, k) for k, v in value.iteritems()
            ])

        results = "%s.%s" % (prefix, value) if prefix else value
        return results

    # Group field names by their flattened error message.
    # NOTE(review): dict.iteritems() is Python 2 only — this module
    # appears to target Python 2 throughout.
    error_dict = defaultdict(list)
    for k, v in validation_exception.to_dict().iteritems():
        error_dict[generate_key(v)].append(k)
    return ["%s: %s" % (k, v) for k, v in error_dict.iteritems()]
# Taken from six (https://pypi.python.org/pypi/six)
# by "Benjamin Peterson <[email protected]>"
#
# Copyright (c) 2010-2013 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
def with_metaclass(meta, *bases):
    """Create a temporary base class whose metaclass is ``meta``.

    Deriving from the returned class causes the final class to be
    constructed by ``meta`` on both Python 2 and Python 3 (idiom taken
    from the six library).
    """
    temp_name = "NewBase"
    temp_dict = {}
    return meta(temp_name, bases, temp_dict)
|
from collections import defaultdict
|
<|file_name|>question-mark-type-infer.rs<|end_file_name|><|fim▁begin|>#![feature(question_mark, question_mark_carrier)]
// Test that type inference fails where there are multiple possible return types
// for the `?` operator.
fn f(x: &i32) -> Result<i32, ()> {<|fim▁hole|>fn g() -> Result<Vec<i32>, ()> {
let l = [1, 2, 3, 4];
l.iter().map(f).collect()? //~ ERROR type annotations needed
}
fn main() {
g();
}<|fim▁end|>
|
Ok(*x)
}
|
<|file_name|>test_functional.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Functional tests using WebTest.
See: http://webtest.readthedocs.org/
"""
from flask import url_for
from recruit_app.user.models import User
from .factories import UserFactory
class TestLoggingIn:
    """Functional tests for the login flow.

    NOTE(review): ``user`` and ``testapp`` are pytest fixtures —
    presumably a saved user (password 'myprecious') and a WebTest
    TestApp wrapping the Flask app; confirm against conftest.
    """

    def test_can_log_in_returns_200(self, user, testapp):
        """Login successful."""
        # Goes to homepage
        res = testapp.get('/')
        # Fills out login form in navbar
        form = res.forms['loginForm']
        form['email'] = user.email
        form['password'] = 'myprecious'
        # Submits
        # res = form.submit().follow()
        res = form.submit()
        assert res.status_code == 200

    def test_sees_alert_on_log_out(self, user, testapp):
        """Show alert on logout."""
        res = testapp.get('/')
        # Fills out login form in navbar
        form = res.forms['loginForm']
        form['email'] = user.email
        form['password'] = 'myprecious'
        # Submits
        res = form.submit()
        # Log out, following the redirect back to a page with the login form.
        res = testapp.get(url_for('security.logout')).follow()
        # sees alert
        assert 'loginForm' in res

    def test_sees_error_message_if_password_is_incorrect(self, user, testapp):
        """Show error if password is incorrect."""
        # Goes to homepage
        res = testapp.get('/')
        # Fills out login form, password incorrect
        form = res.forms['loginForm']
        form['email'] = user.email
        form['password'] = 'wrong'
        # Submits
        res = form.submit()
        # sees error
        assert 'Invalid password' in res

    def test_sees_error_message_if_email_doesnt_exist(self, user, testapp):
        """Show error if email doesn't exist."""
        # Goes to homepage
        res = testapp.get('/')
        # Fills out login form with an unknown email
        form = res.forms['loginForm']
        form['email'] = '[email protected]'
        form['password'] = 'myprecious'
        # Submits
        res = form.submit()
        # sees error
        assert 'Specified user does not exist' in res
"""Register a user."""
def test_can_register(self, user, testapp):
"""Register a new user."""
old_count = len(User.query.all())
# Goes to homepage
res = testapp.get('/')
# Clicks Create Account button
res = res.click('Create account')
# Fills out the form
form = res.forms['registerForm']
form['email'] = '[email protected]'
form['password'] = 'secret'
form['password_confirm'] = 'secret'
# Submits
# res = form.submit().follow()
res = form.submit().follow()
assert res.status_code == 200
# A new user was created
assert len(User.query.all()) == old_count + 1
def test_sees_error_message_if_passwords_dont_match(self, user, testapp):
"""Show error if passwords don't match."""
# Goes to registration page
res = testapp.get(url_for('security.register'))
# Fills out form, but passwords don't match
form = res.forms['registerForm']
form['email'] = '[email protected]'
form['password'] = 'secret'
form['password_confirm'] = 'secrets'
# Submits
res = form.submit()
# sees error message
assert 'Passwords do not match' in res
def test_sees_error_message_if_user_already_registered(self, user, testapp):
"""Show error if user already registered."""
user = UserFactory(active=True) # A registered user
user.save()
# Goes to registration page
res = testapp.get(url_for('security.register'))
# Fills out form, but email is already registered
form = res.forms['registerForm']
form['email'] = user.email
form['password'] = 'secret'
form['password_confirm'] = 'secret'
# Submits
res = form.submit()
# sees error
assert 'is already associated with an account' in res<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>const electron = require('electron');
const ipcRenderer = electron.ipcRenderer;
// Notify the main process once the preview window's DOM is ready.
window.onload = () => {
  ipcRenderer.send('game-preview-loaded');
};
ipcRenderer.on('game-preview-start', function(event, data) {
var app = new Application({
// resize: true,
fullscreen: true,
antyAliasing: true,
preload: function(){
console.log(data.assets);
//load images
for (var i = 0; i < data.assets.length; i++) {
var meta = data.assets[i];
if(meta.type == 'script') {
require(meta.path);<|fim▁hole|> } else {
this.loader.load(meta.type, meta.path, meta.name, meta.uuid);
}
}
//load scene (json)
// this.loader.load('json', scene file path, 'scene.json');
},
//instantiate scene objects
loaded: function(){
//instantiate all object
for(var i = 0; i < data.sceneFile.length; i++) {
if(data.sceneFile[i].tag == 'mainCamera') {
this.mainCamera = this.scene.instantiate(data.sceneFile[i]);
} else {
this.scene.instantiate(data.sceneFile[i]);
}
}
},
//actual start function
start: function(){
//show must go on
},
preupdate: function(){
},
postupdate: function(){
},
postrender: function(){
// var layer = this.mainCamera.camera.layer;
// layer.ctx.save();
// layer.textAlign('left');
// layer.font('30px Arial');
// layer.fillStyle('white');
//
// var fps = (Time.deltaTime).toFixed(3);
//
// layer.fillText(fps || 0, 0,30);
// layer.ctx.restore();
}
});
})<|fim▁end|>
|
// connect script with uuid
|
<|file_name|>users.test.js<|end_file_name|><|fim▁begin|>'use strict'; // eslint-disable-line semi<|fim▁hole|>const request = require('supertest');
const {expect} = require('chai');
const db = require('APP/db');
const app = require('./start');
describe('/api/users', () => {
before('Await database sync', () => db.didSync);
afterEach('Clear the tables', () => db.truncate({ cascade: true }));
describe('GET /:id', () => {
describe('when not logged in', () => {
it('fails with a 401 (Unauthorized)', () =>
request(app)
.get(`/api/users/1`)
.expect(401)
);
});
});
describe('POST', () => {
describe('when not logged in', () => {
it('creates a user', () =>
request(app)
.post('/api/users')
.send({
email: '[email protected]',
password: '12345'
})
.expect(201)
);
it('redirects to the user it just made', () =>
request(app)
.post('/api/users')
.send({
email: '[email protected]',
password: '23456',
})
.redirects(1)
.then(res => expect(res.body).to.contain({
email: '[email protected]'
}))
);
});
});
});<|fim▁end|>
| |
<|file_name|>isPlainObject.js<|end_file_name|><|fim▁begin|>( function () { 'use strict';
function isPlainObject(stuff) {
if (
typeof stuff !== 'object' || stuff === null // Не объект
|| stuff === stuff.window // window
|| stuff.nodeType // DOM node
|| !stuff.constructor
|| !stuff.constructor.prototype.hasOwnProperty('isPrototypeOf')
// Проверка, что прототип объекта равняется "Object.prototype"
) {
return false;
} else {
return true;
}
}
Object.defineProperty( Object, 'isPlainObject', { value: isPlainObject } );<|fim▁hole|>
} () );<|fim▁end|>
| |
<|file_name|>exprtools.py<|end_file_name|><|fim▁begin|>"""Tools for manipulating of large commutative expressions. """
from __future__ import print_function, division
from sympy.core.add import Add
from sympy.core.compatibility import iterable, is_sequence, SYMPY_INTS
from sympy.core.mul import Mul, _keep_coeff
from sympy.core.power import Pow
from sympy.core.basic import Basic, preorder_traversal
from sympy.core.expr import Expr
from sympy.core.sympify import sympify
from sympy.core.numbers import Rational, Integer, Number, I
from sympy.core.singleton import S
from sympy.core.symbol import Dummy
from sympy.core.coreerrors import NonCommutativeExpression
from sympy.core.containers import Tuple, Dict
from sympy.utilities import default_sort_key
from sympy.utilities.iterables import (common_prefix, common_suffix,
variations, ordered)
from collections import defaultdict
def _isnumber(i):
    """Return True if ``i`` is a plain int/float or a SymPy Number."""
    if isinstance(i, (SYMPY_INTS, float)):
        return True
    return i.is_Number
def decompose_power(expr):
    """
    Decompose power into symbolic base and integer exponent.

    This is strictly only valid if the exponent from which
    the integer is extracted is itself an integer or the
    base is positive. These conditions are assumed and not
    checked here.

    Examples
    ========

    >>> from sympy.core.exprtools import decompose_power
    >>> from sympy.abc import x, y

    >>> decompose_power(x)
    (x, 1)
    >>> decompose_power(x**2)
    (x, 2)
    >>> decompose_power(x**(2*y))
    (x**y, 2)
    >>> decompose_power(x**(2*y/3))
    (x**(y/3), 2)

    """
    base, exp = expr.as_base_exp()
    if exp.is_Number:
        if exp.is_Rational:
            # x**(p/q): fold the root into the base, (x**(1/q))**p,
            # and return the integer numerator p as the exponent.
            if not exp.is_Integer:
                base = Pow(base, Rational(1, exp.q))
            exp = exp.p
        else:
            # Float/other numeric exponent: nothing integral to extract.
            base, exp = expr, 1
    else:
        # Symbolic exponent: split off its rational coefficient.
        exp, tail = exp.as_coeff_Mul(rational=True)
        if exp is S.NegativeOne:
            base, exp = Pow(base, tail), -1
        elif exp is not S.One:
            # Keep the denominator q with the symbolic tail; extract p.
            tail = _keep_coeff(Rational(1, exp.q), tail)
            base, exp = Pow(base, tail), exp.p
        else:
            # Coefficient is exactly 1: nothing to extract.
            base, exp = expr, 1
    return base, exp
class Factors(object):
"""Efficient representation of ``f_1*f_2*...*f_n``."""
__slots__ = ['factors', 'gens']
def __init__(self, factors=None): # Factors
"""Initialize Factors from dict or expr.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x
>>> from sympy import I
>>> e = 2*x**3
>>> Factors(e)
Factors({2: 1, x: 3})
>>> Factors(e.as_powers_dict())
Factors({2: 1, x: 3})
>>> f = _
>>> f.factors # underlying dictionary
{2: 1, x: 3}
>>> f.gens # base of each factor
frozenset([2, x])
>>> Factors(0)
Factors({0: 1})
>>> Factors(I)
Factors({I: 1})
Notes
=====
Although a dictionary can be passed, only minimal checking is
performed: powers of -1 and I are made canonical.
"""
if isinstance(factors, (SYMPY_INTS, float)):
factors = S(factors)
if isinstance(factors, Factors):
factors = factors.factors.copy()
elif factors is None or factors is S.One:
factors = {}
elif factors is S.Zero or factors == 0:
factors = {S.Zero: S.One}
elif isinstance(factors, Number):
n = factors
factors = {}
if n < 0:
factors[S.NegativeOne] = S.One
n = -n
if n is not S.One:
if n.is_Float or n.is_Integer or n is S.Infinity:
factors[n] = S.One
elif n.is_Rational:
# since we're processing Numbers, the denominator is
# stored with a negative exponent; all other factors
# are left .
if n.p != 1:
factors[Integer(n.p)] = S.One
factors[Integer(n.q)] = S.NegativeOne
else:
raise ValueError('Expected Float|Rational|Integer, not %s' % n)
elif isinstance(factors, Basic) and not factors.args:
factors = {factors: S.One}
elif isinstance(factors, Expr):
c, nc = factors.args_cnc()
i = c.count(I)
for _ in range(i):
c.remove(I)
factors = dict(Mul._from_args(c).as_powers_dict())
if i:
factors[I] = S.One*i
if nc:
factors[Mul(*nc, evaluate=False)] = S.One
else:
factors = factors.copy() # /!\ should be dict-like
# tidy up -/+1 and I exponents if Rational
handle = []
for k in factors:
if k is I or k in (-1, 1):
handle.append(k)
if handle:
i1 = S.One
for k in handle:
if not _isnumber(factors[k]):
continue
i1 *= k**factors.pop(k)
if i1 is not S.One:
for a in i1.args if i1.is_Mul else [i1]: # at worst, -1.0*I*(-1)**e
if a is S.NegativeOne:
factors[a] = S.One
elif a is I:
factors[I] = S.One
elif a.is_Pow:
if S.NegativeOne not in factors:
factors[S.NegativeOne] = S.Zero
factors[S.NegativeOne] += a.exp
elif a == 1:
factors[a] = S.One
elif a == -1:
factors[-a] = S.One
factors[S.NegativeOne] = S.One
else:
raise ValueError('unexpected factor in i1: %s' % a)
self.factors = factors
try:
self.gens = frozenset(factors.keys())
except AttributeError:
raise TypeError('expecting Expr or dictionary')
def __hash__(self): # Factors
keys = tuple(ordered(self.factors.keys()))
values = [self.factors[k] for k in keys]
return hash((keys, values))
def __repr__(self): # Factors
return "Factors({%s})" % ', '.join(
['%s: %s' % (k, v) for k, v in ordered(self.factors.items())])
@property
def is_zero(self): # Factors
"""
>>> from sympy.core.exprtools import Factors
>>> Factors(0).is_zero
True
"""
f = self.factors
return len(f) == 1 and S.Zero in f
@property
def is_one(self): # Factors
"""
>>> from sympy.core.exprtools import Factors
>>> Factors(1).is_one
True
"""
return not self.factors
def as_expr(self): # Factors
"""Return the underlying expression.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y
>>> Factors((x*y**2).as_powers_dict()).as_expr()
x*y**2
"""
args = []
for factor, exp in self.factors.items():
if exp != 1:
b, e = factor.as_base_exp()
if isinstance(exp, int):
e = _keep_coeff(Integer(exp), e)
elif isinstance(exp, Rational):
e = _keep_coeff(exp, e)
else:
e *= exp
args.append(b**e)
else:
args.append(factor)
return Mul(*args)
def mul(self, other): # Factors
"""Return Factors of ``self * other``.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.mul(b)
Factors({x: 2, y: 3, z: -1})
>>> a*b
Factors({x: 2, y: 3, z: -1})
"""
if not isinstance(other, Factors):
other = Factors(other)
if any(f.is_zero for f in (self, other)):
return Factors(S.Zero)
factors = dict(self.factors)
for factor, exp in other.factors.items():
if factor in factors:
exp = factors[factor] + exp
if not exp:
del factors[factor]
continue
factors[factor] = exp
return Factors(factors)
def normal(self, other):
"""Return ``self`` and ``other`` with ``gcd`` removed from each.
The only differences between this and method ``div`` is that this
is 1) optimized for the case when there are few factors in common and
2) this does not raise an error if ``other`` is zero.
See Also
========
div
"""
if not isinstance(other, Factors):
other = Factors(other)
if other.is_zero:
return (Factors(), Factors(S.Zero))
if self.is_zero:
return (Factors(S.Zero), Factors())
self_factors = dict(self.factors)
other_factors = dict(other.factors)
for factor, self_exp in self.factors.items():
try:
other_exp = other.factors[factor]
except KeyError:
continue
exp = self_exp - other_exp
if not exp:
del self_factors[factor]
del other_factors[factor]
elif _isnumber(exp):
if exp > 0:
self_factors[factor] = exp
del other_factors[factor]
else:
del self_factors[factor]
other_factors[factor] = -exp
else:
r = self_exp.extract_additively(other_exp)
if r is not None:
if r:
self_factors[factor] = r
del other_factors[factor]
else: # should be handled already
del self_factors[factor]
del other_factors[factor]
else:
sc, sa = self_exp.as_coeff_Add()
if sc:
oc, oa = other_exp.as_coeff_Add()
diff = sc - oc
if diff > 0:
self_factors[factor] -= oc
other_exp = oa
elif diff < 0:
self_factors[factor] -= sc
other_factors[factor] -= sc
other_exp = oa - diff
else:
self_factors[factor] = sa
other_exp = oa
if other_exp:
other_factors[factor] = other_exp
else:
del other_factors[factor]
return Factors(self_factors), Factors(other_factors)
def div(self, other): # Factors
"""Return ``self`` and ``other`` with ``gcd`` removed from each.
This is optimized for the case when there are many factors in common.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> from sympy import S
>>> a = Factors((x*y**2).as_powers_dict())
>>> a.div(a)
(Factors({}), Factors({}))
>>> a.div(x*z)
(Factors({y: 2}), Factors({z: 1}))
The ``/`` operator only gives ``quo``:
>>> a/x
Factors({y: 2})
Factors treats its factors as though they are all in the numerator, so
if you violate this assumption the results will be correct but will
not strictly correspond to the numerator and denominator of the ratio:
>>> a.div(x/z)
(Factors({y: 2}), Factors({z: -1}))
Factors is also naive about bases: it does not attempt any denesting
of Rational-base terms, for example the following does not become
2**(2*x)/2.
>>> Factors(2**(2*x + 2)).div(S(8))
(Factors({2: 2*x + 2}), Factors({8: 1}))
factor_terms can clean up such Rational-bases powers:
>>> from sympy.core.exprtools import factor_terms
>>> n, d = Factors(2**(2*x + 2)).div(S(8))
>>> n.as_expr()/d.as_expr()
2**(2*x + 2)/8
>>> factor_terms(_)
2**(2*x)/2
"""
quo, rem = dict(self.factors), {}
if not isinstance(other, Factors):
other = Factors(other)
if other.is_zero:
raise ZeroDivisionError
if self.is_zero:
return (Factors(S.Zero), Factors())
for factor, exp in other.factors.items():
if factor in quo:
d = quo[factor] - exp
if _isnumber(d):
if d <= 0:
del quo[factor]
if d >= 0:
if d:
quo[factor] = d
continue
exp = -d
else:
r = quo[factor].extract_additively(exp)
if r is not None:
if r:
quo[factor] = r
else: # should be handled already
del quo[factor]
else:
other_exp = exp
sc, sa = quo[factor].as_coeff_Add()
if sc:
oc, oa = other_exp.as_coeff_Add()
diff = sc - oc
if diff > 0:
quo[factor] -= oc
other_exp = oa
elif diff < 0:
quo[factor] -= sc
other_exp = oa - diff
else:
quo[factor] = sa
other_exp = oa
if other_exp:
rem[factor] = other_exp
else:
assert factor not in rem
continue
rem[factor] = exp
return Factors(quo), Factors(rem)
def quo(self, other): # Factors
"""Return numerator Factor of ``self / other``.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.quo(b) # same as a/b
Factors({y: 1})
"""
return self.div(other)[0]
def rem(self, other): # Factors
"""Return denominator Factors of ``self / other``.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.rem(b)
Factors({z: -1})
>>> a.rem(a)
Factors({})
"""
return self.div(other)[1]
def pow(self, other): # Factors
"""Return self raised to a non-negative integer power.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y
>>> a = Factors((x*y**2).as_powers_dict())
>>> a**2
Factors({x: 2, y: 4})
"""
if isinstance(other, Factors):
other = other.as_expr()
if other.is_Integer:
other = int(other)
if isinstance(other, SYMPY_INTS) and other >= 0:
factors = {}
if other:
for factor, exp in self.factors.items():
factors[factor] = exp*other
return Factors(factors)
else:
raise ValueError("expected non-negative integer, got %s" % other)
def gcd(self, other): # Factors
"""Return Factors of ``gcd(self, other)``. The keys are
the intersection of factors with the minimum exponent for
each factor.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.gcd(b)
Factors({x: 1, y: 1})
"""
if not isinstance(other, Factors):
other = Factors(other)
if other.is_zero:
return Factors(self.factors)
factors = {}
for factor, exp in self.factors.items():
if factor in other.factors:
exp = min(exp, other.factors[factor])
factors[factor] = exp
return Factors(factors)
def lcm(self, other): # Factors
"""Return Factors of ``lcm(self, other)`` which are
the union of factors with the maximum exponent for
each factor.
Examples
========
>>> from sympy.core.exprtools import Factors
>>> from sympy.abc import x, y, z
>>> a = Factors((x*y**2).as_powers_dict())
>>> b = Factors((x*y/z).as_powers_dict())
>>> a.lcm(b)
Factors({x: 1, y: 2, z: -1})
"""
if not isinstance(other, Factors):
other = Factors(other)
if any(f.is_zero for f in (self, other)):
return Factors(S.Zero)
factors = dict(self.factors)
for factor, exp in other.factors.items():
if factor in factors:
exp = max(exp, factors[factor])
factors[factor] = exp
return Factors(factors)
def __mul__(self, other): # Factors
return self.mul(other)
def __divmod__(self, other): # Factors
return self.div(other)
def __div__(self, other): # Factors
return self.quo(other)
__truediv__ = __div__
def __mod__(self, other): # Factors
return self.rem(other)
def __pow__(self, other): # Factors
return self.pow(other)
def __eq__(self, other): # Factors
if not isinstance(other, Factors):
other = Factors(other)
return self.factors == other.factors
def __ne__(self, other): # Factors
return not self.__eq__(other)
class Term(object):
    """Efficient representation of ``coeff*(numer/denom)``.

    ``numer`` and ``denom`` are :class:`Factors` instances holding the
    factors with positive and negative exponents respectively, so each
    term is stored in an already-split fractional form.
    """

    __slots__ = ['coeff', 'numer', 'denom']

    def __init__(self, term, numer=None, denom=None):  # Term
        # Single-argument form: decompose ``term`` into coefficient,
        # numerator factors and denominator factors. Otherwise ``term``
        # is taken to be the coefficient itself.
        if numer is None and denom is None:
            if not term.is_commutative:
                raise NonCommutativeExpression(
                    'commutative expression expected')

            coeff, factors = term.as_coeff_mul()
            numer, denom = defaultdict(int), defaultdict(int)

            for factor in factors:
                base, exp = decompose_power(factor)

                if base.is_Add:
                    # Pull rational content out of an Add base so it is
                    # accumulated into the coefficient.
                    cont, base = base.primitive()
                    coeff *= cont**exp

                # Positive exponents go to the numerator, negative ones
                # (negated) to the denominator.
                if exp > 0:
                    numer[base] += exp
                else:
                    denom[base] += -exp

            numer = Factors(numer)
            denom = Factors(denom)
        else:
            coeff = term

            if numer is None:
                numer = Factors()

            if denom is None:
                denom = Factors()

        self.coeff = coeff
        self.numer = numer
        self.denom = denom

    def __hash__(self):  # Term
        return hash((self.coeff, self.numer, self.denom))

    def __repr__(self):  # Term
        return "Term(%s, %s, %s)" % (self.coeff, self.numer, self.denom)

    def as_expr(self):  # Term
        # Rebuild the ordinary SymPy expression coeff*numer/denom.
        return self.coeff*(self.numer.as_expr()/self.denom.as_expr())

    def mul(self, other):  # Term
        coeff = self.coeff*other.coeff
        numer = self.numer.mul(other.numer)
        denom = self.denom.mul(other.denom)

        # Cancel factors common to numerator and denominator.
        numer, denom = numer.normal(denom)

        return Term(coeff, numer, denom)

    def inv(self):  # Term
        # Reciprocal: swap numerator and denominator.
        return Term(1/self.coeff, self.denom, self.numer)

    def quo(self, other):  # Term
        return self.mul(other.inv())

    def pow(self, other):  # Term
        # Negative powers invert the term first.
        if other < 0:
            return self.inv().pow(-other)
        else:
            return Term(self.coeff ** other,
                        self.numer.pow(other),
                        self.denom.pow(other))

    def gcd(self, other):  # Term
        return Term(self.coeff.gcd(other.coeff),
                    self.numer.gcd(other.numer),
                    self.denom.gcd(other.denom))

    def lcm(self, other):  # Term
        return Term(self.coeff.lcm(other.coeff),
                    self.numer.lcm(other.numer),
                    self.denom.lcm(other.denom))

    def __mul__(self, other):  # Term
        if isinstance(other, Term):
            return self.mul(other)
        else:
            return NotImplemented

    def __div__(self, other):  # Term
        if isinstance(other, Term):
            return self.quo(other)
        else:
            return NotImplemented

    __truediv__ = __div__

    def __pow__(self, other):  # Term
        if isinstance(other, SYMPY_INTS):
            return self.pow(other)
        else:
            return NotImplemented

    def __eq__(self, other):  # Term
        # NOTE(review): assumes ``other`` is a Term; a non-Term operand
        # raises AttributeError rather than returning NotImplemented.
        return (self.coeff == other.coeff and
                self.numer == other.numer and
                self.denom == other.denom)

    def __ne__(self, other):  # Term
        return not self.__eq__(other)
def _gcd_terms(terms, isprimitive=False, fraction=True):
    """Helper function for :func:`gcd_terms`.

    Returns a tuple ``(cont, numer, denom)`` of expressions such that
    the input is equivalent to ``cont*numer/denom``.

    If ``isprimitive`` is True then the call to primitive
    for an Add will be skipped. This is useful when the
    content has already been extracted.

    If ``fraction`` is True then the expression will appear over a common
    denominator, the lcm of all term denominators.
    """
    if isinstance(terms, Basic) and not isinstance(terms, Tuple):
        terms = Add.make_args(terms)

    # Drop zero terms and wrap the rest as Term objects.
    terms = list(map(Term, [t for t in terms if t]))

    # there is some simplification that may happen if we leave this
    # here rather than duplicate it before the mapping of Term onto
    # the terms
    if len(terms) == 0:
        return S.Zero, S.Zero, S.One

    if len(terms) == 1:
        cont = terms[0].coeff
        numer = terms[0].numer.as_expr()
        denom = terms[0].denom.as_expr()

    else:
        # Content = gcd of all terms; divide it out of each term.
        cont = terms[0]
        for term in terms[1:]:
            cont = cont.gcd(term)

        for i, term in enumerate(terms):
            terms[i] = term.quo(cont)

        if fraction:
            # Put everything over the lcm of the term denominators.
            denom = terms[0].denom

            for term in terms[1:]:
                denom = denom.lcm(term.denom)

            numers = []
            for term in terms:
                numer = term.numer.mul(denom.quo(term.denom))
                numers.append(term.coeff*numer.as_expr())
        else:
            numers = [t.as_expr() for t in terms]
            denom = Term(S(1)).numer

        cont = cont.as_expr()
        numer = Add(*numers)
        denom = denom.as_expr()

    # Unless told otherwise, pull any remaining rational content out of
    # the numerator and fold it into ``cont``.
    if not isprimitive and numer.is_Add:
        _cont, numer = numer.primitive()
        cont *= _cont

    return cont, numer, denom
def gcd_terms(terms, isprimitive=False, clear=True, fraction=True):
"""Compute the GCD of ``terms`` and put them together.
``terms`` can be an expression or a non-Basic sequence of expressions
which will be handled as though they are terms from a sum.
If ``isprimitive`` is True the _gcd_terms will not run the primitive
method on the terms.
``clear`` controls the removal of integers from the denominator of an Add
expression. When True (default), all numerical denominator will be cleared;
when False the denominators will be cleared only if all terms had numerical
denominators other than 1.
``fraction``, when True (default), will put the expression over a common
denominator.
Examples
========
>>> from sympy.core import gcd_terms
>>> from sympy.abc import x, y
>>> gcd_terms((x + 1)**2*y + (x + 1)*y**2)
y*(x + 1)*(x + y + 1)
>>> gcd_terms(x/2 + 1)
(x + 2)/2
>>> gcd_terms(x/2 + 1, clear=False)
x/2 + 1
>>> gcd_terms(x/2 + y/2, clear=False)
(x + y)/2
>>> gcd_terms(x/2 + 1/x)
(x**2 + 2)/(2*x)
>>> gcd_terms(x/2 + 1/x, fraction=False)
(x + 2/x)/2
>>> gcd_terms(x/2 + 1/x, fraction=False, clear=False)
x/2 + 1/x
>>> gcd_terms(x/2/y + 1/x/y)
(x**2 + 2)/(2*x*y)
>>> gcd_terms(x/2/y + 1/x/y, fraction=False, clear=False)
(x + 2/x)/(2*y)
The ``clear`` flag was ignored in this case because the returned
expression was a rational expression, not a simple sum.
See Also
========
factor_terms, sympy.polys.polytools.terms_gcd
"""
def mask(terms):
"""replace nc portions of each term with a unique Dummy symbols
and return the replacements to restore them"""
args = [(a, []) if a.is_commutative else a.args_cnc() for a in terms]
reps = []
for i, (c, nc) in enumerate(args):
if nc:
nc = Mul._from_args(nc)
d = Dummy()
reps.append((d, nc))
c.append(d)
args[i] = Mul._from_args(c)
else:
args[i] = c
return args, dict(reps)
isadd = isinstance(terms, Add)
addlike = isadd or not isinstance(terms, Basic) and \
is_sequence(terms, include=set) and \
not isinstance(terms, Dict)
if addlike:
if isadd: # i.e. an Add
terms = list(terms.args)
else:
terms = sympify(terms)
terms, reps = mask(terms)
cont, numer, denom = _gcd_terms(terms, isprimitive, fraction)
numer = numer.xreplace(reps)
coeff, factors = cont.as_coeff_Mul()
return _keep_coeff(coeff, factors*numer/denom, clear=clear)
if not isinstance(terms, Basic):
return terms
if terms.is_Atom:
return terms
if terms.is_Mul:
c, args = terms.as_coeff_mul()
return _keep_coeff(c, Mul(*[gcd_terms(i, isprimitive, clear, fraction)
for i in args]), clear=clear)
def handle(a):
# don't treat internal args like terms of an Add
if not isinstance(a, Expr):
if isinstance(a, Basic):
return a.func(*[handle(i) for i in a.args])
return type(a)([handle(i) for i in a])
return gcd_terms(a, isprimitive, clear, fraction)
if isinstance(terms, Dict):
return Dict(*[(k, handle(v)) for k, v in terms.args])
return terms.func(*[handle(i) for i in terms.args])
def factor_terms(expr, radical=False, clear=False, fraction=False, sign=True):
"""Remove common factors from terms in all arguments without
changing the underlying structure of the expr. No expansion or
simplification (and no processing of non-commutatives) is performed.
If radical=True then a radical common to all terms will be factored
out of any Add sub-expressions of the expr.
If clear=False (default) then coefficients will not be separated
from a single Add if they can be distributed to leave one or more
terms with integer coefficients.
If fraction=True (default is False) then a common denominator will be
constructed for the expression.
If sign=True (default) then even if the only factor in common is a -1,
it will be factored out of the expression.
Examples
========
>>> from sympy import factor_terms, Symbol
>>> from sympy.abc import x, y
>>> factor_terms(x + x*(2 + 4*y)**3)
x*(8*(2*y + 1)**3 + 1)
>>> A = Symbol('A', commutative=False)
>>> factor_terms(x*A + x*A + x*y*A)
x*(y*A + 2*A)
When ``clear`` is False, a rational will only be factored out of an
Add expression if all terms of the Add have coefficients that are
fractions:
>>> factor_terms(x/2 + 1, clear=False)
x/2 + 1
>>> factor_terms(x/2 + 1, clear=True)
(x + 2)/2
This only applies when there is a single Add that the coefficient
multiplies:
>>> factor_terms(x*y/2 + y, clear=True)
y*(x + 2)/2
>>> factor_terms(x*y/2 + y, clear=False) == _
True
If a -1 is all that can be factored out, to *not* factor it out, the
flag ``sign`` must be False:
>>> factor_terms(-x - y)<|fim▁hole|> -(x + y)
>>> factor_terms(-x - y, sign=False)
-x - y
>>> factor_terms(-2*x - 2*y, sign=False)
-2*(x + y)
See Also
========
gcd_terms, sympy.polys.polytools.terms_gcd
"""
from sympy.simplify.simplify import bottom_up
def do(expr):
is_iterable = iterable(expr)
if not isinstance(expr, Basic) or expr.is_Atom:
if is_iterable:
return type(expr)([do(i) for i in expr])
return expr
if expr.is_Pow or expr.is_Function or \
is_iterable or not hasattr(expr, 'args_cnc'):
args = expr.args
newargs = tuple([do(i) for i in args])
if newargs == args:
return expr
return expr.func(*newargs)
cont, p = expr.as_content_primitive(radical=radical)
if p.is_Add:
list_args = [do(a) for a in Add.make_args(p)]
# get a common negative (if there) which gcd_terms does not remove
if all(a.as_coeff_Mul()[0] < 0 for a in list_args):
cont = -cont
list_args = [-a for a in list_args]
# watch out for exp(-(x+2)) which gcd_terms will change to exp(-x-2)
special = {}
for i, a in enumerate(list_args):
b, e = a.as_base_exp()
if e.is_Mul and e != Mul(*e.args):
list_args[i] = Dummy()
special[list_args[i]] = a
# rebuild p not worrying about the order which gcd_terms will fix
p = Add._from_args(list_args)
p = gcd_terms(p,
isprimitive=True,
clear=clear,
fraction=fraction).xreplace(special)
elif p.args:
p = p.func(
*[do(a) for a in p.args])
rv = _keep_coeff(cont, p, clear=clear, sign=sign)
return rv
expr = sympify(expr)
return do(expr)
def _mask_nc(eq, name=None):
    """
    Return ``eq`` with non-commutative objects replaced with Dummy
    symbols. A dictionary that can be used to restore the original
    values is returned: if it is None, the expression is noncommutative
    and cannot be made commutative. The third value returned is a list
    of any non-commutative symbols that appear in the returned equation.

    ``name``, if given, is the name that will be used with numbered Dummy
    variables that will replace the non-commutative objects and is mainly
    used for doctesting purposes.

    Notes
    =====
    All non-commutative objects other than Symbols are replaced with
    a non-commutative Symbol. Identical objects will be identified
    by identical symbols.

    If there is only 1 non-commutative object in an expression it will
    be replaced with a commutative symbol. Otherwise, the non-commutative
    entities are retained and the calling routine should handle
    replacements in this case since some care must be taken to keep
    track of the ordering of symbols when they occur within Muls.

    Examples
    ========
    >>> from sympy.physics.secondquant import Commutator, NO, F, Fd
    >>> from sympy import symbols, Mul
    >>> from sympy.core.exprtools import _mask_nc
    >>> from sympy.abc import x, y
    >>> A, B, C = symbols('A,B,C', commutative=False)

    One nc-symbol:

    >>> _mask_nc(A**2 - x**2, 'd')
    (_d0**2 - x**2, {_d0: A}, [])

    Multiple nc-symbols:

    >>> _mask_nc(A**2 - B**2, 'd')
    (A**2 - B**2, None, [A, B])

    An nc-object with nc-symbols but no others outside of it:

    >>> _mask_nc(1 + x*Commutator(A, B), 'd')
    (_d0*x + 1, {_d0: Commutator(A, B)}, [])
    >>> _mask_nc(NO(Fd(x)*F(y)), 'd')
    (_d0, {_d0: NO(CreateFermion(x)*AnnihilateFermion(y))}, [])

    Multiple nc-objects:

    >>> eq = x*Commutator(A, B) + x*Commutator(A, C)*Commutator(A, B)
    >>> _mask_nc(eq, 'd')
    (x*_d0 + x*_d1*_d0, {_d0: Commutator(A, B), _d1: Commutator(A, C)}, [_d0, _d1])

    Multiple nc-objects and nc-symbols:

    >>> eq = A*Commutator(A, B) + B*Commutator(A, C)
    >>> _mask_nc(eq, 'd')
    (A*_d0 + B*_d1, {_d0: Commutator(A, B), _d1: Commutator(A, C)}, [_d0, _d1, A, B])

    If there is an object that:

    - doesn't contain nc-symbols
    - but has arguments which derive from Basic, not Expr
    - and doesn't define an _eval_is_commutative routine

    then it will give False (or None?) for the is_commutative test. Such
    objects are also removed by this routine:

    >>> from sympy import Basic
    >>> eq = (1 + Mul(Basic(), Basic(), evaluate=False))
    >>> eq.is_commutative
    False
    >>> _mask_nc(eq, 'd')
    (_d0**2 + 1, {_d0: Basic()}, [])
    """
    name = name or 'mask'

    # Make Dummy() append sequential numbers to the name
    def numbered_names():
        i = 0
        while True:
            yield name + str(i)
            i += 1

    names = numbered_names()

    def Dummy(*args, **kwargs):
        # Deliberately shadows sympy's Dummy: every call draws the next
        # sequential name so doctest output above is reproducible.
        from sympy import Dummy
        return Dummy(next(names), *args, **kwargs)

    expr = eq
    if expr.is_commutative:
        # Nothing needs masking.
        return eq, {}, []

    # identify nc-objects; symbols and other
    rep = []
    nc_obj = set()
    nc_syms = set()
    pot = preorder_traversal(expr, keys=default_sort_key)
    for i, a in enumerate(pot):
        if any(a == r[0] for r in rep):
            # Already scheduled for replacement -- do not descend into it.
            pot.skip()
        elif not a.is_commutative:
            if a.is_Symbol:
                nc_syms.add(a)
            elif not (a.is_Add or a.is_Mul or a.is_Pow):
                # A non-arithmetic nc-object: replace immediately if it is
                # built only from commutative symbols, otherwise remember it.
                if all(s.is_commutative for s in a.free_symbols):
                    rep.append((a, Dummy()))
                else:
                    nc_obj.add(a)
                pot.skip()

    # If there is only one nc symbol or object, it can be factored regularly
    # but polys is going to complain, so replace it with a Dummy.
    if len(nc_obj) == 1 and not nc_syms:
        rep.append((nc_obj.pop(), Dummy()))
    elif len(nc_syms) == 1 and not nc_obj:
        rep.append((nc_syms.pop(), Dummy()))

    # Any remaining nc-objects will be replaced with an nc-Dummy and
    # identified as an nc-Symbol to watch out for
    nc_obj = sorted(nc_obj, key=default_sort_key)
    for n in nc_obj:
        nc = Dummy(commutative=False)
        rep.append((n, nc))
        nc_syms.add(nc)
    expr = expr.subs(rep)

    nc_syms = list(nc_syms)
    nc_syms.sort(key=default_sort_key)
    # The inverse map is None when nothing at all was replaced.
    return expr, dict([(v, k) for k, v in rep]) or None, nc_syms
def factor_nc(expr):
    """Return the factored form of ``expr`` while handling non-commutative
    expressions.

    **examples**
    >>> from sympy.core.exprtools import factor_nc
    >>> from sympy import Symbol
    >>> from sympy.abc import x
    >>> A = Symbol('A', commutative=False)
    >>> B = Symbol('B', commutative=False)
    >>> factor_nc((x**2 + 2*A*x + A**2).expand())
    (x + A)**2
    >>> factor_nc(((x + A)*(x + B)).expand())
    (x + A)*(x + B)
    """
    from sympy.simplify.simplify import powsimp
    from sympy.polys import gcd, factor

    def _pemexpand(expr):
        "Expand with the minimal set of hints necessary to check the result."
        return expr.expand(deep=True, mul=True, power_exp=True,
            power_base=False, basic=False, multinomial=True, log=False)

    expr = sympify(expr)
    if not isinstance(expr, Expr) or not expr.args:
        # Atoms and non-Expr objects cannot be factored further.
        return expr
    if not expr.is_Add:
        # Factor each argument independently and rebuild.
        return expr.func(*[factor_nc(a) for a in expr.args])

    expr, rep, nc_symbols = _mask_nc(expr)
    if rep:
        # Everything non-commutative was maskable: factor the commutative
        # proxy and restore the originals.
        return factor(expr).subs(rep)
    else:
        # args[i] is (commutative factors, noncommutative factors) per term.
        args = [a.args_cnc() for a in Add.make_args(expr)]
        c = g = l = r = S.One
        hit = False
        # find any commutative gcd term
        for i, a in enumerate(args):
            if i == 0:
                c = Mul._from_args(a[0])
            elif a[0]:
                c = gcd(c, Mul._from_args(a[0]))
            else:
                c = S.One
        if c is not S.One:
            hit = True
            c, g = c.as_coeff_Mul()

            # Divide the symbolic part (g) and numeric coefficient (c)
            # out of every term's commutative factors.
            if g is not S.One:
                for i, (cc, _) in enumerate(args):
                    cc = list(Mul.make_args(Mul._from_args(list(cc))/g))
                    args[i][0] = cc

            for i, (cc, _) in enumerate(args):
                cc[0] = cc[0]/c
                args[i][0] = cc

        # find any noncommutative common prefix
        for i, a in enumerate(args):
            if i == 0:
                n = a[1][:]
            else:
                n = common_prefix(n, a[1])
            if not n:
                # is there a power that can be extracted?
                if not args[0][1]:
                    break
                b, e = args[0][1][0].as_base_exp()
                ok = False
                if e.is_Integer:
                    for t in args:
                        if not t[1]:
                            break
                        bt, et = t[1][0].as_base_exp()
                        if et.is_Integer and bt == b:
                            # Every term starts with b**k; pull out b**min(k).
                            e = min(e, et)
                        else:
                            break
                    else:
                        ok = hit = True
                        l = b**e
                        il = b**-e
                        for i, a in enumerate(args):
                            args[i][1][0] = il*args[i][1][0]
                        break
                if not ok:
                    break
        else:
            # for-else: every term shared the full prefix n.
            hit = True
            lenn = len(n)
            l = Mul(*n)
            for i, a in enumerate(args):
                args[i][1] = args[i][1][lenn:]

        # find any noncommutative common suffix
        for i, a in enumerate(args):
            if i == 0:
                n = a[1][:]
            else:
                n = common_suffix(n, a[1])
            if not n:
                # is there a power that can be extracted?
                if not args[0][1]:
                    break
                b, e = args[0][1][-1].as_base_exp()
                ok = False
                if e.is_Integer:
                    for t in args:
                        if not t[1]:
                            break
                        bt, et = t[1][-1].as_base_exp()
                        if et.is_Integer and bt == b:
                            e = min(e, et)
                        else:
                            break
                    else:
                        ok = hit = True
                        r = b**e
                        il = b**-e
                        for i, a in enumerate(args):
                            args[i][1][-1] = args[i][1][-1]*il
                        break
                if not ok:
                    break
        else:
            # for-else: every term shared the full suffix n.
            hit = True
            lenn = len(n)
            r = Mul(*n)
            for i, a in enumerate(args):
                args[i][1] = a[1][:len(a[1]) - lenn]

        if hit:
            # Rebuild the middle from what is left after extraction.
            mid = Add(*[Mul(*cc)*Mul(*nc) for cc, nc in args])
        else:
            mid = expr

        # sort the symbols so the Dummys would appear in the same
        # order as the original symbols, otherwise you may introduce
        # a factor of -1, e.g. A**2 - B**2) -- {A:y, B:x} --> y**2 - x**2
        # and the former factors into two terms, (A - B)*(A + B) while the
        # latter factors into 3 terms, (-1)*(x - y)*(x + y)
        rep1 = [(n, Dummy()) for n in sorted(nc_symbols, key=default_sort_key)]
        unrep1 = [(v, k) for k, v in rep1]
        unrep1.reverse()
        new_mid, r2, _ = _mask_nc(mid.subs(rep1))
        new_mid = powsimp(factor(new_mid))
        new_mid = new_mid.subs(r2).subs(unrep1)

        if new_mid.is_Pow:
            return _keep_coeff(c, g*l*new_mid*r)

        if new_mid.is_Mul:
            # XXX TODO there should be a way to inspect what order the terms
            # must be in and just select the plausible ordering without
            # checking permutations
            cfac = []
            ncfac = []
            for f in new_mid.args:
                if f.is_commutative:
                    cfac.append(f)
                else:
                    b, e = f.as_base_exp()
                    if e.is_Integer:
                        # Unroll integer powers so permutations can reorder
                        # individual factors.
                        ncfac.extend([b]*e)
                    else:
                        ncfac.append(f)
            pre_mid = g*Mul(*cfac)*l
            target = _pemexpand(expr/c)
            for s in variations(ncfac, len(ncfac)):
                ok = pre_mid*Mul(*s)*r
                if _pemexpand(ok) == target:
                    # Found an ordering that expands back to the input.
                    return _keep_coeff(c, ok)

        # mid was an Add that didn't factor successfully
        return _keep_coeff(c, g*l*mid*r)
| |
<|file_name|>package.rs<|end_file_name|><|fim▁begin|>use std::cmp;
use std::fmt::{Show,Formatter};
use std::fmt;
use std::slice;
use semver::Version;
use core::{
Dependency,
Manifest,
PackageId,
Registry,
Target,
Summary
};
use core::dependency::SerializedDependency;
use util::{CargoResult, graph, Config};
use serialize::{Encoder,Encodable};
use core::source::{SourceId, SourceSet, Source};
// TODO: Is manifest_path a relic?
/// A Cargo package: a parsed manifest together with its on-disk
/// location and the source it was loaded from.
#[deriving(Clone)]
pub struct Package {
    // The package's manifest
    manifest: Manifest,
    // The root of the package (the path to its manifest file)
    manifest_path: Path,
    // Where this package came from
    source_id: SourceId,
}
/// Flattened, string-only view of a `Package` used when encoding
/// (e.g. to JSON) for external consumption.
#[deriving(Encodable)]
struct SerializedPackage {
    name: String,
    version: String,
    dependencies: Vec<SerializedDependency>,
    authors: Vec<String>,
    targets: Vec<Target>,
    manifest_path: String,
}
// Encode a Package by first flattening it into a SerializedPackage and
// then delegating to the derived encoder for that struct.
impl<E, S: Encoder<E>> Encodable<S, E> for Package {
    fn encode(&self, s: &mut S) -> Result<(), E> {
        let manifest = self.get_manifest();
        let summary = manifest.get_summary();
        let package_id = summary.get_package_id();

        SerializedPackage {
            name: package_id.get_name().to_string(),
            version: package_id.get_version().to_string(),
            dependencies: summary.get_dependencies().iter().map(|d| {
                SerializedDependency::from_dependency(d)
            }).collect(),
            authors: Vec::from_slice(manifest.get_authors()),
            targets: Vec::from_slice(manifest.get_targets()),
            manifest_path: self.manifest_path.display().to_string()
        }.encode(s)
    }
}
impl Package {
pub fn new(manifest: Manifest,
manifest_path: &Path,
source_id: &SourceId) -> Package {
Package {
manifest: manifest,
manifest_path: manifest_path.clone(),
source_id: source_id.clone(),
}
}
pub fn get_manifest(&self) -> &Manifest {
&self.manifest
}
pub fn get_summary(&self) -> &Summary {
self.manifest.get_summary()
}
pub fn get_package_id(&self) -> &PackageId {
self.manifest.get_package_id()
}<|fim▁hole|> }
pub fn get_version(&self) -> &Version {
self.get_package_id().get_version()
}
pub fn get_dependencies(&self) -> &[Dependency] {
self.get_manifest().get_dependencies()
}
pub fn get_targets(&self) -> &[Target] {
self.get_manifest().get_targets()
}
pub fn get_manifest_path(&self) -> &Path {
&self.manifest_path
}
pub fn get_root(&self) -> Path {
self.manifest_path.dir_path()
}
pub fn get_target_dir(&self) -> &Path {
self.manifest.get_target_dir()
}
pub fn get_absolute_target_dir(&self) -> Path {
self.get_root().join(self.get_target_dir())
}
pub fn get_source_ids(&self) -> Vec<SourceId> {
let mut ret = vec!(self.source_id.clone());
ret.push_all(self.manifest.get_source_ids());
ret
}
pub fn get_fingerprint(&self, config: &mut Config) -> CargoResult<String> {
let mut sources = self.get_source_ids();
// Sort the sources just to make sure we have a consistent fingerprint.
sources.sort_by(|a, b| {
cmp::lexical_ordering(a.kind.cmp(&b.kind),
a.location.to_string().cmp(&b.location.to_string()))
});
let sources = sources.iter().map(|source_id| {
source_id.load(config)
}).collect::<Vec<_>>();
SourceSet::new(sources).fingerprint(self)
}
}
// Display a package as its package id (name and version).
impl Show for Package {
    fn fmt(&self, f: &mut Formatter) -> fmt::Result {
        write!(f, "{}", self.get_summary().get_package_id())
    }
}

// Two packages are equal when their package ids are equal; manifest
// contents and paths are deliberately not compared.
impl PartialEq for Package {
    fn eq(&self, other: &Package) -> bool {
        self.get_package_id() == other.get_package_id()
    }
}
/// An ordered collection of packages, typically the result of resolving
/// a dependency graph.
#[deriving(PartialEq,Clone,Show)]
pub struct PackageSet {
    packages: Vec<Package>,
}
impl PackageSet {
    pub fn new(packages: &[Package]) -> PackageSet {
        //assert!(packages.len() > 0,
        //        "PackageSet must be created with at least one package")
        PackageSet { packages: Vec::from_slice(packages) }
    }

    pub fn len(&self) -> uint {
        self.packages.len()
    }

    /// Remove and return the last package; fails on an empty set.
    pub fn pop(&mut self) -> Package {
        self.packages.pop().expect("PackageSet.pop: empty set")
    }

    /// Get a package by name out of the set
    // NOTE(review): the expect message says "empty set" but this also
    // fires when the name is simply not present.
    pub fn get(&self, name: &str) -> &Package {
        self.packages.iter().find(|pkg| name == pkg.get_name())
            .expect("PackageSet.get: empty set")
    }

    pub fn get_all(&self, names: &[&str]) -> Vec<&Package> {
        names.iter().map(|name| self.get(*name) ).collect()
    }

    pub fn get_packages(&self) -> &[Package] {
        self.packages.as_slice()
    }

    // For now, assume that the package set contains only one package with a
    // given name
    // Returns None when graph.sort() fails (presumably on a dependency
    // cycle -- TODO confirm against the graph module).
    pub fn sort(&self) -> Option<PackageSet> {
        let mut graph = graph::Graph::new();

        for pkg in self.packages.iter() {
            let deps: Vec<&str> = pkg.get_dependencies().iter()
                .map(|dep| dep.get_name())
                .collect();

            graph.add(pkg.get_name(), deps.as_slice());
        }

        let pkgs = some!(graph.sort()).iter().map(|name| {
            self.get(*name).clone()
        }).collect();

        Some(PackageSet {
            packages: pkgs
        })
    }

    pub fn iter(&self) -> slice::Items<Package> {
        self.packages.iter()
    }
}
impl Registry for PackageSet {
fn query(&mut self, name: &Dependency) -> CargoResult<Vec<Summary>> {
Ok(self.packages.iter()
.filter(|pkg| name.get_name() == pkg.get_name())
.map(|pkg| pkg.get_summary().clone())
.collect())
}
}<|fim▁end|>
|
pub fn get_name(&self) -> &str {
self.get_package_id().get_name()
|
<|file_name|>physEnt.js<|end_file_name|><|fim▁begin|>//basic physics entity
//This entity extends the joncom base entity and is responsible for
//collision categories
//property inheritance
//default settings/hooks
//It also defines the ObjectWorld and RenderWorld objects "soma" and "animus" (or whatever distinct names you can think of)
//The soma cannot call animus functions and vice versa. Both must be called from top level functions like update, init, draw, or top level physics callbacks like beginContact, preSolve, etc:.
//This is to maintain separation between ObjectWorld and RenderWorld
ig.module('game.entities.physEnt')
.requires('plugins.joncom.box2d.entity', 'game.const_defs', 'plugins.tween', 'plugins.tileUtil')
.defines(function() {
EntityPhysEnt = ig.Entity.extend({
    //default settings, overwritten by _loadSettings
    gravityFactor: 1,                   // multiplier on world gravity
    categoryBits: ig.Filter.NOCOLLIDE,  // Box2D collision category (what this entity IS)
    maskBits: ig.Filter.ALL,            // Box2D collision mask (what this entity COLLIDES WITH)
    isTransient: false,                 // presumably flags short-lived entities -- TODO confirm usage
    currentDim: 'normal',               // name of the fixture currently acting as the solid body
    currentFix: null,                   // the fixture itself; assigned in init()
// Construct the physics body. The filter bits must be injected into
// `settings` BEFORE calling the parent, because the base entity builds
// the b2Body fixture from those settings.
init: function( x, y, settings ) {
    //inject filter data into settings before creating box2d body
    settings.categoryBits = this.categoryBits;
    settings.maskBits = this.maskBits;
    this.parent( x, y, settings );
    //this._loadSettings(settings);
    //presume non-rotating body
    //will almost certainly be entity-specific later
    if (!ig.global.wm) {  // skip physics setup inside Weltmeister (the level editor)
        this.body.SetFixedRotation(this.isFixedRotation);
        this.currentFix = this.body.GetFixtureList();
    }
    this.setupAnimation();
},
//checks that allow zero value... is there a shorter way to handle this?
//allows entities to get context-sensitive properties, though most settings will still be pre-defined
_loadSettings: function(settings) {
if (typeof(settings.categoryBits) !== 'null' && typeof(settings.categoryBits) !== 'undefined') {
console.log("Category is happening");
this.categoryBits = settings.categoryBits;
}
if (typeof(settings.maskBits) !== 'null' && typeof(settings.maskBits) !== 'undefined' ) {
console.log("Mask is happening");
this.maskBits = settings.maskBits;
}
if (typeof(settings.gravityFactor) !== 'null' && typeof(settings.gravityFactor) !== 'undefined') {
console.log("Gravity is happening");
this.gravityFactor = settings.gravityFactor;
}
if (typeof(settings.isFixedRotation) !== 'null' && typeof(settings.isFixedRotation) !== 'undefined') {
console.log("Rotation is happening");
this.isFixedRotation = settings.isFixedRotation;
}
if (settings.isTransient !== 'null' && settings.isTransient !== undefined) {
console.log("Transient is happening");
this.isTransient = settings.isTransient;
}
},
// Box2D contact callback; defers entirely to the base implementation.
beginContact: function(other, contact) {
    this.parent(other,contact);
},
// Hook called at the end of init(); subclasses override it to set up
// their animations. Intentionally empty here.
setupAnimation: function() { },
//creates a sensor fixture for altering an entity's shape or size
// The new fixture is created inert: zero density, sensor flag on, and a
// NOCOLLIDE filter. setDim() later swaps it in as the active solid shape,
// using the filter bits stashed in userData.
makeDim: function(name, size, filterSettings) {
    var shapeDef = new Box2D.Collision.Shapes.b2PolygonShape();
    shapeDef.SetAsBox(size.x / 2 * Box2D.SCALE, size.y / 2 * Box2D.SCALE);
    var fixtureDef = new Box2D.Dynamics.b2FixtureDef();
    fixtureDef.shape = shapeDef;
    fixtureDef.density = 0; //massless sensor
    fixtureDef.friction = this.uniFriction;
    fixtureDef.restitution = this.bounciness;
    // Remember the filter bits this dim should use once activated.
    fixtureDef.userData = {name: name, categoryBits: null, maskBits: null, type: 'dim'};
    if (filterSettings) {
        fixtureDef.userData.categoryBits = filterSettings.categoryBits;
        fixtureDef.userData.maskBits = filterSettings.maskBits;
    }
    else {
        // No explicit filter given: inherit from the entity's first fixture.
        fixtureDef.userData.categoryBits = this.body.GetFixtureList().GetFilterData().categoryBits;
        fixtureDef.userData.maskBits = this.body.GetFixtureList().GetFilterData().maskBits;
    }
    // While on standby the fixture must collide with nothing.
    fixtureDef.filter.categoryBits = ig.Filter.NOCOLLIDE;
    fixtureDef.filter.maskBits = ig.Filter.NOCOLLIDE;
    fixtureDef.isSensor = true;
    this.body.CreateFixture(fixtureDef);
},
//set a sensor fixture as the solid fixture that represents the entity. Automatically turns the current solid fixture into a sensor (standby).
// Walks the body's fixture list once, locating both the requested dim
// (`next`) and the currently active one (`curr`), then swaps their
// sensor/density/filter state so `next` becomes the solid shape.
setDim: function(name) {
    var fix = this.body.GetFixtureList();
    var curr = null;
    var next = null;
    do {
        if (fix.GetUserData().name == name) {
            next = fix;
        }
        if (fix.GetUserData().name == this.currentDim) {
            curr = fix;
        }
        if (next && curr) {
            break;
        }
    } while (fix = fix.GetNext());  // assignment on purpose: advance until list end
    if (next && curr) {
        next.SetDensity(curr.GetDensity()); //should actually set to a density that sets the same mass
        curr.SetSensor(true);
        next.SetSensor(false);
        curr.SetDensity(0);
        this.currentDim = name;
        this.currentFix = next;
        // Park the old fixture: collide with nothing.
        var filt = curr.GetFilterData();
        filt.categoryBits = ig.Filter.NOCOLLIDE;
        filt.maskBits = ig.Filter.NOCOLLIDE;
        curr.SetFilterData(filt);
        // Activate the new fixture with the bits stashed by makeDim().
        filt = next.GetFilterData();
        filt.categoryBits = next.GetUserData().categoryBits;
        filt.maskBits = next.GetUserData().maskBits;
        next.SetFilterData(filt);
    }
    else {
        //PANIC
        // Requested dim or current dim not found on this body.
        console.log("PANIC");
    }
},
makeSense: function(name, senseObj) {
var shapeDef = new Box2D.Collision.Shapes.b2PolygonShape();
shapeDef.SetAsOrientedBox(senseObj.size.x*Box2D.SCALE/2, senseObj.size.y*Box2D.SCALE/2, new Box2D.Common.Math.b2Vec2(senseObj.pos.x*Box2D.SCALE, senseObj.pos.y*Box2D.SCALE), 0);
var fixtureDef = new Box2D.Dynamics.b2FixtureDef();
fixtureDef.shape = shapeDef;
fixtureDef.density = 0; //massless sensor
fixtureDef.friction = 0;
fixtureDef.restitution = 0;
fixtureDef.userData = {name: name, categoryBits: senseObj.categoryBits, maskBits: senseObj.maskBits, type: 'sense'};
fixtureDef.filter.categoryBits = senseObj.categoryBits;<|fim▁hole|> },
getFirstNonSensor: function() {
for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
if (!fix.IsSensor()) {
return fix;
}
}
return null;
},
//dump all fixture info to console
_dumpFixtureData: function() {
console.log("***FIXTURE DUMP***");
for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
console.log(fix);
}
console.log("***END FIX DUMP***");
},
//draw all non-sensor, massive fixtures associated with this entity
_debugDraw: function() {
for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
if (!fix.IsSensor() && fix.GetDensity()) {
this._debugDrawFixture(fix, 0);
}
}
},
//draw all fixtures associated with this entity, regardless of status
_debugDrawAll: function() {
for (var fix = this.body.GetFixtureList(); fix; fix = fix.GetNext()) {
this._debugDrawFixture(fix, 0);
}
},
//draw the given fixture, using the second parameter to generate a random outline color
//guess we ignore colorRand for now...
//currently only works for 4 vertex box shapes
//WILL NOT ERROR CHECK. The function will only work if the fixture's shape is an axially aligned box
_debugDrawFixture: function(fix, colorRand) {
    // Lazily assign a random RGB color to the fixture the first time it is
    // drawn, persisting it via userData so the outline stays stable.
    if (!fix.GetUserData().color) {
        var r = Math.floor(Math.random() * 255);
        var g = Math.floor(Math.random() * 255);
        var b = Math.floor(Math.random() * 255);
        fix.SetUserData({name: fix.GetUserData().name,
                         color: { r: r, g: g, b:b}
        });  // NOTE(review): this drops every other userData field (categoryBits, maskBits, type)
    }
    var color = fix.GetUserData().color;
    ig.system.context.strokeStyle = 'rgba(' + color.r.toString() + ',' + color.g.toString() + ',' + color.b.toString() + "," + '1)';
    //figure out where we need to draw this box...
    var bodyPos = this.body.GetPosition(); //center and scaled
    var fixShape = fix.GetShape().GetVertices();
    var width, height = null;
    //lazy method to find width and height
    // O(n^2) pairwise scan: any two vertices sharing an x give the height,
    // any two sharing a y give the width (valid only for an axis-aligned box).
    for (var i = 0; i < fixShape.length; i++) {
        for (var j = 0; j < fixShape.length; j++) {
            if (i == j) continue;
            if (fixShape[i].x == fixShape[j].x) {
                if (height == null) {
                    height = Math.abs(fixShape[i].y - fixShape[j].y)/ Box2D.SCALE;
                }
            }
            if (fixShape[i].y == fixShape[j].y) {
                if (width == null) {
                    width = Math.abs(fixShape[i].x - fixShape[j].x)/ Box2D.SCALE;
                }
            }
        }
    }
    // Convert body center (Box2D meters) to the box's top-left in pixels.
    var worldPos = {
        x: (bodyPos.x/Box2D.SCALE) - width/2,
        y: (bodyPos.y/Box2D.SCALE) - height/2,
    };
    //console.log("Drawing rect @ ", worldPos);
    //console.log("Body position @ ", this.pos);
    ig.system.context.strokeRect(
        ig.system.getDrawPos(worldPos.x - ig.game.screen.x),
        ig.system.getDrawPos(worldPos.y - ig.game.screen.y),
        ig.system.getDrawPos(width),
        ig.system.getDrawPos(height)
    );
},
draw: function() {
this.parent();
if (this._debugD) {
this._debugDraw();
}
},
//spawn an entity @ local body coordinates rather than world coordinates
//x and y are already scaled (in pixels). Technically not local coords then
// NOTE(review): body.GetPosition() is in Box2D units while x/y are said to
// be pixels -- the addition mixes units unless callers pre-scale. Verify
// against call sites before relying on this.
localSpawnEntity: function(entityType, x, y, settings) {
    var worldX = this.body.GetPosition().x + x;
    var worldY = this.body.GetPosition().y + y;
    ig.game.spawnEntity(entityType, worldX, worldY, settings);
},
//passthrough
//some serious issues with getting rid of bodies...
// Box2D bodies cannot be destroyed mid-step, so the body is queued on
// ig.game.entityKillList for deferred destruction by the game loop.
kill: function() {
    this.parent();
    if (this.body && this._killed) {
        ig.game.entityKillList.push(this.body);
    }
},
// No extra per-frame behavior; defers to the base entity update.
update: function() {
    this.parent();
},
//left = 1
//right = 2
//just checks if the current setup would result in cover
//unit is responsible for making sure all other conditions are met
checkCover: function() {
var result = 0;
if (this._checkCoverRight()) {
result += 2;
}
if (this._checkCoverLeft()) {
result +=1;
}
return result;
},
// Cover test to the right: from the entity's bottom-right corner, the
// tile one step right must be solid (tile id 1), within 8px, with the two
// tiles above it empty and the tile below it solid.
_checkCoverRight: function() {
    var leading = {x: this.pos.x + this.size.x, y: this.pos.y + this.size.y};
    var checkCoord = tileUtil.pxToTile(leading.x, leading.y);
    checkCoord.tX += 1;
    var pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1 ) { //only regular solid blocks for now
        return false;
    }
    // Must be close enough to the cover block to count.
    // NOTE(review): this threshold is 8px but _checkCoverLeft uses 24px --
    // confirm whether the asymmetry is intentional.
    if (pixelCoord.pX - (this.pos.x + this.size.x) > 8) {
        return false;
    }
    // The space directly above the cover block must be clear...
    checkCoord.tY -= 1;
    pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) { //only totally blank spaces for now
        return false;
    }
    // ...and so must the space above the entity's own column.
    checkCoord.tX -= 1;
    pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) {
        return false;
    }
    //underneath
    // The ground under the entity must be solid.
    checkCoord.tY += 2;
    pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1) {
        return false;
    }
    return true;
},
//almost carbon copy!
// Mirror of _checkCoverRight for the left side: the tile one step left of
// the entity's bottom-left corner must be solid, within 24px, with the two
// tiles above clear and solid ground underneath.
_checkCoverLeft: function() {
    var leading = {x: this.pos.x, y: this.pos.y + this.size.y};
    var checkCoord = tileUtil.pxToTile(leading.x, leading.y);
    checkCoord.tX -= 1;
    var pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1 ) { //only regular solid blocks for now
        return false;
    }
    // NOTE(review): 24px here vs 8px in _checkCoverRight -- confirm whether
    // the asymmetry is intentional.
    if (this.pos.x - pixelCoord.pX > 24) {
        return false;
    }
    // The space directly above the cover block must be clear...
    checkCoord.tY -= 1;
    pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) { //only totally blank spaces for now
        return false;
    }
    // ...and so must the space above the entity's own column.
    checkCoord.tX += 1;
    pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 0) {
        return false;
    }
    //underneath
    // The ground under the entity must be solid.
    checkCoord.tY += 2;
    pixelCoord = tileUtil.tileToPx(checkCoord.tX, checkCoord.tY);
    if (ig.game.collisionMap.getTile(pixelCoord.pX, pixelCoord.pY) != 1) {
        return false;
    }
    return true;
},
});
});<|fim▁end|>
|
fixtureDef.filter.maskBits = senseObj.maskBits;
fixtureDef.isSensor = true;
senseObj.fixture = this.body.CreateFixture(fixtureDef);
console.log(senseObj.fixture.GetFilterData());
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.