prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>physics_context.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>impl PhysicsContext {
pub fn new() -> PhysicsContext { PhysicsContext }
}
unsafe impl Send for PhysicsContext {}
unsafe impl Sync for PhysicsContext {}<|fim▁end|> | pub struct PhysicsContext;
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import base64
from datetime import datetime, timedelta
import functools
import json
import os
import time
import yaml
import jinja2
import jmespath
from dateutil import parser
from dateutil.tz import gettz, tzutc
try:
from botocore.exceptions import ClientError
except ImportError: # pragma: no cover
pass # Azure provider
class Providers:
AWS = 0
Azure = 1
def get_jinja_env(template_folders):
env = jinja2.Environment(trim_blocks=True, autoescape=False) # nosec nosemgrep
env.filters['yaml_safe'] = functools.partial(yaml.safe_dump, default_flow_style=False)
env.filters['date_time_format'] = date_time_format<|fim▁hole|> env.filters['from_json'] = json.loads
env.filters['get_date_age'] = get_date_age
env.globals['format_resource'] = resource_format
env.globals['format_struct'] = format_struct
env.globals['resource_tag'] = get_resource_tag_value
env.globals['get_resource_tag_value'] = get_resource_tag_value
env.globals['search'] = jmespath.search
env.loader = jinja2.FileSystemLoader(template_folders)
return env
def get_rendered_jinja(
target, sqs_message, resources, logger,
specified_template, default_template, template_folders):
env = get_jinja_env(template_folders)
mail_template = sqs_message['action'].get(specified_template, default_template)
if not os.path.isabs(mail_template):
mail_template = '%s.j2' % mail_template
try:
template = env.get_template(mail_template)
except Exception as error_msg:
logger.error("Invalid template reference %s\n%s" % (mail_template, error_msg))
return
# recast seconds since epoch as a utc iso datestring; template
# authors can use the date_time_format helper func to convert to local
# tz. if no execution start time was passed, use current time.
execution_start = datetime.utcfromtimestamp(
sqs_message.get(
'execution_start',
time.mktime(
datetime.utcnow().timetuple())
)).isoformat()
rendered_jinja = template.render(
recipient=target,
resources=resources,
account=sqs_message.get('account', ''),
account_id=sqs_message.get('account_id', ''),
partition=sqs_message.get('partition', ''),
event=sqs_message.get('event', None),
action=sqs_message['action'],
policy=sqs_message['policy'],
execution_start=execution_start,
region=sqs_message.get('region', ''))
return rendered_jinja
# eg, target_tag_keys could be resource-owners ['Owners', 'SupportTeam']
# and this function would go through the resource and look for any tag keys
# that match Owners or SupportTeam, and return those values as targets
def get_resource_tag_targets(resource, target_tag_keys):
if 'Tags' not in resource:
return []
if isinstance(resource['Tags'], dict):
tags = resource['Tags']
else:
tags = {tag['Key']: tag['Value'] for tag in resource['Tags']}
targets = []
for target_tag_key in target_tag_keys:
if target_tag_key in tags:
targets.append(tags[target_tag_key])
return targets
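# Illustrative behaviour of get_resource_tag_targets (example values are ours,
# not from the original module):
#   resource = {'Tags': [{'Key': 'Owners', 'Value': 'alice@example.com'},
#                        {'Key': 'Env', 'Value': 'prod'}]}
#   get_resource_tag_targets(resource, ['Owners', 'SupportTeam'])
#   # -> ['alice@example.com']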
def get_message_subject(sqs_message):
default_subject = 'Custodian notification - %s' % (sqs_message['policy']['name'])
subject = sqs_message['action'].get('subject', default_subject)
jinja_template = jinja2.Template(subject)
subject = jinja_template.render(
account=sqs_message.get('account', ''),
account_id=sqs_message.get('account_id', ''),
partition=sqs_message.get('partition', ''),
event=sqs_message.get('event', None),
action=sqs_message['action'],
policy=sqs_message['policy'],
region=sqs_message.get('region', '')
)
return subject
def setup_defaults(config):
config.setdefault('region', 'us-east-1')
config.setdefault('ses_region', config.get('region'))
config.setdefault('memory', 1024)
config.setdefault('runtime', 'python3.7')
config.setdefault('timeout', 300)
config.setdefault('subnets', None)
config.setdefault('security_groups', None)
config.setdefault('contact_tags', [])
config.setdefault('ldap_uri', None)
config.setdefault('ldap_bind_dn', None)
config.setdefault('ldap_bind_user', None)
config.setdefault('ldap_bind_password', None)
config.setdefault('endpoint_url', None)
config.setdefault('datadog_api_key', None)
config.setdefault('slack_token', None)
config.setdefault('slack_webhook', None)
def date_time_format(utc_str, tz_str='US/Eastern', format='%Y %b %d %H:%M %Z'):
return parser.parse(utc_str).astimezone(gettz(tz_str)).strftime(format)
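# Example of how the date_time_format filter above might be used inside a
# notification template (assumed usage, not taken from a real template):
#   {{ execution_start | date_time_format(tz_str='US/Pacific') }}
# which parses the ISO timestamp and renders it in the given timezone.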
def get_date_time_delta(delta):
return str(datetime.now().replace(tzinfo=gettz('UTC')) + timedelta(delta))
def get_date_age(date):
return (datetime.now(tz=tzutc()) - parser.parse(date)).days
def format_struct(evt):
return json.dumps(evt, indent=2, ensure_ascii=False)
def get_resource_tag_value(resource, k):
for t in resource.get('Tags', []):
if t['Key'] == k:
return t['Value']
return ''
def strip_prefix(value, prefix):
if value.startswith(prefix):
return value[len(prefix):]
return value
def resource_format(resource, resource_type):
if resource_type.startswith('aws.'):
resource_type = strip_prefix(resource_type, 'aws.')
if resource_type == 'ec2':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s %s %s %s" % (
resource['InstanceId'],
resource.get('VpcId', 'NO VPC!'),
resource['InstanceType'],
resource.get('LaunchTime'),
tag_map.get('Name', ''),
resource.get('PrivateIpAddress'))
elif resource_type == 'ami':
return "%s %s %s" % (
resource.get('Name'), resource['ImageId'], resource['CreationDate'])
elif resource_type == 'sagemaker-notebook':
return "%s" % (resource['NotebookInstanceName'])
elif resource_type == 's3':
return "%s" % (resource['Name'])
elif resource_type == 'ebs':
return "%s %s %s %s" % (
resource['VolumeId'],
resource['Size'],
resource['State'],
resource['CreateTime'])
elif resource_type == 'rds':
return "%s %s %s %s" % (
resource['DBInstanceIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
resource['DBInstanceClass'],
resource['AllocatedStorage'])
elif resource_type == 'rds-cluster':
return "%s %s %s" % (
resource['DBClusterIdentifier'],
"%s-%s" % (
resource['Engine'], resource['EngineVersion']),
resource['AllocatedStorage'])
elif resource_type == 'asg':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
return "%s %s %s" % (
resource['AutoScalingGroupName'],
tag_map.get('Name', ''),
"instances: %d" % (len(resource.get('Instances', []))))
elif resource_type == 'elb':
tag_map = {t['Key']: t['Value'] for t in resource.get('Tags', ())}
if 'ProhibitedPolicies' in resource:
return "%s %s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']),
"prohibited_policies: %s" % ','.join(
resource['ProhibitedPolicies']))
return "%s %s %s" % (
resource['LoadBalancerName'],
"instances: %d" % len(resource['Instances']),
"zones: %d" % len(resource['AvailabilityZones']))
elif resource_type == 'redshift':
return "%s %s %s" % (
resource['ClusterIdentifier'],
'nodes:%d' % len(resource['ClusterNodes']),
'encrypted:%s' % resource['Encrypted'])
elif resource_type == 'emr':
return "%s status:%s" % (
resource['Id'],
resource['Status']['State'])
elif resource_type == 'cfn':
return "%s" % (
resource['StackName'])
elif resource_type == 'launch-config':
return "%s" % (
resource['LaunchConfigurationName'])
elif resource_type == 'security-group':
name = resource.get('GroupName', '')
for t in resource.get('Tags', ()):
if t['Key'] == 'Name':
name = t['Value']
return "%s %s %s inrules: %d outrules: %d" % (
name,
resource['GroupId'],
resource.get('VpcId', 'na'),
len(resource.get('IpPermissions', ())),
len(resource.get('IpPermissionsEgress', ())))
elif resource_type == 'log-group':
if 'lastWrite' in resource:
return "name: %s last_write: %s" % (
resource['logGroupName'],
resource['lastWrite'])
return "name: %s" % (resource['logGroupName'])
elif resource_type == 'cache-cluster':
return "name: %s created: %s status: %s" % (
resource['CacheClusterId'],
resource['CacheClusterCreateTime'],
resource['CacheClusterStatus'])
elif resource_type == 'cache-snapshot':
cid = resource.get('CacheClusterId')
if cid is None:
cid = ', '.join([
ns['CacheClusterId'] for ns in resource['NodeSnapshots']])
return "name: %s cluster: %s source: %s" % (
resource['SnapshotName'],
cid,
resource['SnapshotSource'])
elif resource_type == 'redshift-snapshot':
return "name: %s db: %s" % (
resource['SnapshotIdentifier'],
resource['DBName'])
elif resource_type == 'ebs-snapshot':
return "name: %s date: %s" % (
resource['SnapshotId'],
resource['StartTime'])
elif resource_type == 'subnet':
return "%s %s %s %s %s %s" % (
resource['SubnetId'],
resource['VpcId'],
resource['AvailabilityZone'],
resource['State'],
resource['CidrBlock'],
resource['AvailableIpAddressCount'])
elif resource_type == 'account':
return " %s %s" % (
resource['account_id'],
resource['account_name'])
elif resource_type == 'cloudtrail':
return "%s" % (
resource['Name'])
elif resource_type == 'vpc':
return "%s " % (
resource['VpcId'])
elif resource_type == 'iam-group':
return " %s %s %s" % (
resource['GroupName'],
resource['Arn'],
resource['CreateDate'])
elif resource_type == 'rds-snapshot':
return " %s %s %s" % (
resource['DBSnapshotIdentifier'],
resource['DBInstanceIdentifier'],
resource['SnapshotCreateTime'])
elif resource_type == 'iam-user':
return " %s " % (
resource['UserName'])
elif resource_type == 'iam-role':
return " %s %s " % (
resource['RoleName'],
resource['CreateDate'])
elif resource_type == 'iam-policy':
return " %s " % (
resource['PolicyName'])
elif resource_type == 'iam-profile':
return " %s " % (
resource['InstanceProfileId'])
elif resource_type == 'dynamodb-table':
return "name: %s created: %s status: %s" % (
resource['TableName'],
resource['CreationDateTime'],
resource['TableStatus'])
elif resource_type == "sqs":
return "QueueURL: %s QueueArn: %s " % (
resource['QueueUrl'],
resource['QueueArn'])
elif resource_type == "efs":
return "name: %s id: %s state: %s" % (
resource['Name'],
resource['FileSystemId'],
resource['LifeCycleState']
)
elif resource_type == "network-addr":
return "ip: %s id: %s scope: %s" % (
resource['PublicIp'],
resource['AllocationId'],
resource['Domain']
)
elif resource_type == "route-table":
return "id: %s vpc: %s" % (
resource['RouteTableId'],
resource['VpcId']
)
elif resource_type == "app-elb":
return "arn: %s zones: %s scheme: %s" % (
resource['LoadBalancerArn'],
len(resource['AvailabilityZones']),
resource['Scheme'])
elif resource_type == "nat-gateway":
return "id: %s state: %s vpc: %s" % (
resource['NatGatewayId'],
resource['State'],
resource['VpcId'])
elif resource_type == "internet-gateway":
return "id: %s attachments: %s" % (
resource['InternetGatewayId'],
len(resource['Attachments']))
elif resource_type == 'lambda':
return "Name: %s RunTime: %s \n" % (
resource['FunctionName'],
resource['Runtime'])
else:
return "%s" % format_struct(resource)
def get_provider(mailer_config):
if mailer_config.get('queue_url', '').startswith('asq://'):
return Providers.Azure
return Providers.AWS
def kms_decrypt(config, logger, session, encrypted_field):
if config.get(encrypted_field):
try:
kms = session.client('kms')
return kms.decrypt(
CiphertextBlob=base64.b64decode(config[encrypted_field]))[
'Plaintext'].decode('utf8')
except (TypeError, base64.binascii.Error) as e:
logger.warning(
"Error: %s Unable to base64 decode %s, will assume plaintext." %
(e, encrypted_field))
except ClientError as e:
if e.response['Error']['Code'] != 'InvalidCiphertextException':
raise
logger.warning(
"Error: %s Unable to decrypt %s with kms, will assume plaintext." %
(e, encrypted_field))
return config[encrypted_field]
else:
logger.debug("No encrypted value to decrypt.")
return None
def decrypt(config, logger, session, encrypted_field):
if config.get(encrypted_field):
provider = get_provider(config)
if provider == Providers.Azure:
from c7n_mailer.azure_mailer.utils import azure_decrypt
return azure_decrypt(config, logger, session, encrypted_field)
elif provider == Providers.AWS:
return kms_decrypt(config, logger, session, encrypted_field)
else:
raise Exception("Unknown provider")
else:
logger.debug("No encrypted value to decrypt.")
return None
# https://docs.aws.amazon.com/awscloudtrail/latest/userguide/cloudtrail-event-reference-user-identity.html
def get_aws_username_from_event(logger, event):
if event is None:
return None
identity = event.get('detail', {}).get('userIdentity', {})
if not identity:
logger.warning("Could not get recipient from event \n %s" % (
format_struct(event)))
return None
if identity['type'] == 'AssumedRole':
logger.debug(
'In some cases there is no ldap uid associated with AssumedRole: %s',
identity['arn'])
logger.debug(
'We will try to assume that identity is in the AssumedRoleSessionName')
user = identity['arn'].rsplit('/', 1)[-1]
if user is None or user.startswith('i-') or user.startswith('awslambda'):
return None
if ':' in user:
user = user.split(':', 1)[-1]
return user
if identity['type'] == 'IAMUser' or identity['type'] == 'WebIdentityUser':
return identity['userName']
if identity['type'] == 'Root':
return None
# this conditional is left here as a last resort, it should
# be better documented with an example UserIdentity json
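# hypothetical shape of a userIdentity that reaches this fallback
# (illustrative only, not captured from real CloudTrail data):
#   {"type": "FederatedUser",
#    "principalId": "123456789012:jdoe",
#    "arn": "arn:aws:sts::123456789012:federated-user/jdoe"}
# splitting principalId on ':' yields 'jdoe' as the best-guess username.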
if ':' in identity['principalId']:
user_id = identity['principalId'].split(':', 1)[-1]
else:
user_id = identity['principalId']
return user_id<|fim▁end|> | env.filters['get_date_time_delta'] = get_date_time_delta |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.contrib.auth import logout
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from django.views.generic.base import View, TemplateView
from socialregistration.clients.oauth import OAuthError
from socialregistration.mixins import SocialRegistration
GENERATE_USERNAME = getattr(settings, 'SOCIALREGISTRATION_GENERATE_USERNAME', False)
USERNAME_FUNCTION = getattr(settings, 'SOCIALREGISTRATION_GENERATE_USERNAME_FUNCTION',
'socialregistration.utils.generate_username')
FORM_CLASS = getattr(settings, 'SOCIALREGISTRATION_SETUP_FORM',
'socialregistration.forms.UserForm')
INITIAL_DATA_FUNCTION = getattr(settings, 'SOCIALREGISTRATION_INITIAL_DATA_FUNCTION',
None)
class Setup(SocialRegistration, View):
"""
Setup view to create new Django users from third party APIs.
"""
template_name = 'socialregistration/setup.html'
def get_form(self):
"""
Return the form to be used. The return form is controlled
with ``SOCIALREGISTRATION_SETUP_FORM``.
"""
return self.import_attribute(FORM_CLASS)
def get_username_function(self):
"""
Return a function that can generate a username. The function
is controlled with ``SOCIALREGISTRATION_GENERATE_USERNAME_FUNCTION``.
"""
return self.import_attribute(USERNAME_FUNCTION)
def get_initial_data(self, request, user, profile, client):
"""
Return initial data for the setup form. The function can be
controlled with ``SOCIALREGISTRATION_INITIAL_DATA_FUNCTION``.
:param request: The current request object
:param user: The unsaved user object
:param profile: The unsaved profile object
:param client: The API client
"""
if INITIAL_DATA_FUNCTION:
func = self.import_attribute(INITIAL_DATA_FUNCTION)
return func(request, user, profile, client)
return {}
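# A minimal sketch of an initial-data function that
# SOCIALREGISTRATION_INITIAL_DATA_FUNCTION could point to (assumed example,
# not part of this codebase):
#
#   def initial_data(request, user, profile, client):
#       # pre-populate the setup form, e.g. with a suggested username
#       return {'username': getattr(user, 'username', '')}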
def generate_username_and_redirect(self, request, user, profile, client):
"""
Generate a username and then redirect the user to the correct place.
This method is called when ``SOCIALREGISTRATION_GENERATE_USERNAME``
is set.
:param request: The current request object
:param user: The unsaved user object
:param profile: The unsaved profile object
:param client: The API client
"""
func = self.get_username_function()
user.username = func(user, profile, client)
user.set_unusable_password()
user.save()
profile.user = user
profile.save()
user = profile.authenticate()
self.send_connect_signal(request, user, profile, client)
self.login(request, user)
self.send_login_signal(request, user, profile, client)
self.delete_session_data(request)
return HttpResponseRedirect(self.get_next(request))
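# Sketch of a custom username function that
# SOCIALREGISTRATION_GENERATE_USERNAME_FUNCTION could reference (assumed
# example; the default implementation lives in socialregistration.utils):
#
#   import uuid
#
#   def generate_username(user, profile, client):
#       return uuid.uuid4().hex[:30]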
def get(self, request):
"""
When signing a new user up - either display a setup form, or
generate the username automatically.
"""
try:
user, profile, client = self.get_session_data(request)
except KeyError:
return self.render_to_response(dict(
error=_("Social profile is missing from your session.")))
if GENERATE_USERNAME:
return self.generate_username_and_redirect(request, user, profile, client)
form = self.get_form()(initial=self.get_initial_data(request, user, profile, client))
return self.render_to_response(dict(form=form))
def post(self, request):
"""
Save the user and profile, login and send the right signals.
"""
try:
user, profile, client = self.get_session_data(request)
except KeyError:
return self.render_to_response(dict(
error=_("A social profile is missing from your session.")))
form = self.get_form()(request.POST, request.FILES,
initial=self.get_initial_data(request, user, profile, client))
if not form.is_valid():
return self.render_to_response(dict(form=form))
user, profile = form.save(request, user, profile, client)
user = profile.authenticate()
self.send_connect_signal(request, user, profile, client)
self.login(request, user)
self.send_login_signal(request, user, profile, client)
self.delete_session_data(request)
return HttpResponseRedirect(self.get_next(request))
class Logout(View):
"""
Log the user out of Django. This **does not** log the user out
of third party sites.
"""
def get(self, request):
logout(request)
url = getattr(settings, 'LOGOUT_REDIRECT_URL', '/')
return HttpResponseRedirect(url)
class OAuthRedirect(SocialRegistration, View):
"""
Base class for both OAuth and OAuth2 redirects.
:param client: The API client class that should be used.
:param template_name: The error template.
"""
# The OAuth{1,2} client to be used
client = None
# The template to render in case of errors
template_name = None
def post(self, request):
"""
Create a client, store it in the user's session and redirect the user<|fim▁hole|> to the API provider to authorize our app and permissions.
"""
request.session['next'] = self.get_next(request)
client = self.get_client()()
request.session[self.get_client().get_session_key()] = client
try:
return HttpResponseRedirect(client.get_redirect_url())
except OAuthError, error:
return self.render_to_response({'error': error})
class OAuthCallback(SocialRegistration, View):
"""
Base class for OAuth and OAuth2 callback views.
:param client: The API client class that should be used.
:param template_name: The error template.
"""
# The OAuth{1,2} client to be used
client = None
# The template to render in case of errors
template_name = None
def get_redirect(self):
"""
Return a URL that will set up the correct models if the
OAuth flow succeeded. Subclasses **must** override this
method.
"""
raise NotImplementedError
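# A minimal sketch of a concrete callback subclass (names are illustrative
# and not part of this module):
#
#   class MyProviderCallback(OAuthCallback):
#       client = MyProviderClient          # hypothetical OAuth client class
#       template_name = 'socialregistration/myprovider/error.html'
#
#       def get_redirect(self):
#           # URL of the view that completes login / connect / registration
#           return reverse('myprovider:callback')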
def get(self, request):
"""
Called after the user is redirected back to our application.
Tries to:
- Complete the OAuth / OAuth2 flow
- Redirect the user to another view that deals with login, connecting
or user creation.
"""
try:
client = request.session[self.get_client().get_session_key()]
client.complete(dict(request.GET.items()))
request.session[self.get_client().get_session_key()] = client
return HttpResponseRedirect(self.get_redirect())
except KeyError:
return self.render_to_response({'error': "Session expired."})
except OAuthError, error:
return self.render_to_response({'error': error})
class SetupCallback(SocialRegistration, TemplateView):
"""
Base class for OAuth and OAuth2 login / connects / registration.
"""
template_name = 'socialregistration/setup.error.html'
def get(self, request):
"""
Called after authorization was granted and the OAuth flow
successfully completed.
Tries to:
- Connect the remote account if the user is logged in already
- Log the user in if a local profile of the remote account
exists already
- Create a user and profile object if none of the above succeed
and redirect the user further to either capture some data via
form or generate a username automatically
"""
try:
client = request.session[self.get_client().get_session_key()]
except KeyError:
return self.render_to_response({'error': "Session expired."})
# Get the lookup dictionary to find the user's profile
lookup_kwargs = self.get_lookup_kwargs(request, client)
# Logged in user (re-)connecting an account
if request.user.is_authenticated():
try:
profile = self.get_profile(**lookup_kwargs)
# Make sure that there is only *one* account per profile.
if not profile.user == request.user:
self.delete_session_data(request)
return self.render_to_response({
'error': _('This profile is already connected to another user account.')
})
except self.get_model().DoesNotExist:
profile, created = self.get_or_create_profile(request.user,
save=True, **lookup_kwargs)
self.send_connect_signal(request, request.user, profile, client)
return self.redirect(request)
# Logged out user - let's see if we've got the identity saved already.
# If so - just log the user in. If not, create profile and redirect
# to the setup view
user = self.authenticate(**lookup_kwargs)
# No user existing - create a new one and redirect to the final setup view
if user is None:
user = self.create_user()
profile = self.create_profile(user, **lookup_kwargs)
self.store_user(request, user)
self.store_profile(request, profile)
self.store_client(request, client)
return HttpResponseRedirect(reverse('socialregistration:setup'))
# Inactive user - displaying / redirect to the appropriate place.
if not user.is_active:
return self.inactive_response()
# Active user with existing profile: login, send signal and redirect
self.login(request, user)
profile = self.get_profile(user=user, **lookup_kwargs)
self.send_login_signal(request, user, profile, client)
return self.redirect(request)<|fim▁end|> | |
<|file_name|>filesystem.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../typings/node/node.d.ts"/>
import Data = require('./data');
import Helpers = require('./utils/helpers');
import Map = require('./utils/map');
import Str = require('./utils/string');
import fs = require('fs');
export interface FileSystem {
readdirSync: (path: string) => string[];
readdir: (path: string,
callback: (error: string, files: string[]) => void) => string[];
readFileSync: Function;
statSync: (path: string) => Data.Stats;
unlinkSync: (path: string) => void;
writeFileSync: (path: string, data: string) => void;
}
export function readFilenamesSync(path: string): string[]
{
return fs.readdirSync(path);
}
export function loadDirectoryNamesSync(path: string): string[]
{
return fs.readdirSync(path).filter((filename) =>
isDirectorySync(path, filename));
}
export function isDirectorySync(path: string, filename: string): boolean
{
return fs.statSync(path + '/' + filename).isDirectory();
}
export function loadJSONSync<T>(path: string): T
{
const encoding = 'utf8';
const data = fs.readFileSync(path, encoding);
return JSON.parse(data);
}
export function loadJSONDirSync<T>(path: string): T[]
{
const filenames = readFilenamesSync(path);
const filepaths = filenames.map(name => Data.join(path, name));
return filepaths.map(path => loadJSONSync<T>(path));
}
export function loadJSONDirAsMap<T>(path: string): Map.Map<T>
{
const filenames = readFilenamesSync(path);
const stringNames = filenames.map(Str.removeFileExtension);
const filepaths = filenames.map(name => Data.join(path, name));
return Helpers.mapFromArray(
filenames,
Str.removeFileExtension,
name => loadJSONSync<T>(Data.join(path, name)));
}
export function saveJSONSync(path: string, data: Object)
{
const dataString = JSON.stringify(data, null, 8);
fs.writeFileSync(path, dataString);<|fim▁hole|>
export function deleteFile(path: string)
{
fs.unlinkSync(path);
}<|fim▁end|> | } |
<|file_name|>atttb.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#!coding=utf-8
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Table,Column,Integer,String,Numeric,MetaData,DateTime,Date
from ._base import BaseInit,Base
'''
Raw employee card-swipe (attendance punch) data table
'''
class ATTTB(BaseInit,Base):
__tablename__='ATTTB'
TB001=Column(String(20),nullable=False,primary_key=True,doc='employee ID')
TB002=Column(DateTime,nullable=False,primary_key=True,doc='punch (card swipe) time')
TB003=Column(Date,nullable=False,doc='attendance date')
TB004=Column(String(30),default='',doc='employee name')
TB005=Column(String(10),default='',doc='employee department code')
TB006=Column(String(30),default='',doc='employee department name')<|fim▁hole|> TB011=Column(String(1),default='',doc='clock-in/clock-out indicator')
TB012=Column(String(1),default='',doc='attendance status')<|fim▁end|> | TB007=Column(String(10),default='',doc='employee job title code')
TB008=Column(String(30),default='',doc='employee job title name')
TB009=Column(String(10),default='',doc='time clock (card reader) code')
TB010=Column(String(30),default='',doc='time clock (card reader) name') |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
#![allow(non_camel_case_types)]
use std::cell::RefCell;
use std::path::Path;
use ::revlogindex::RevlogIndex;
use cpython::*;
use cpython_ext::PyNone;
use cpython_ext::ResultPyErrExt;
use pydag::Spans;
// XXX: The revlogindex is a temporary solution before migrating to
// segmented changelog. It is here to experiment breaking changes with
// revlog, incluing:
//
// - Redefine "head()" to only return remotenames and tracked draft heads.
// - Get rid of "filtered revs" and "repo view" layer entirely.
// - Switch phases to be defined by heads (remotenames), instead of roots.
pub fn init_module(py: Python, package: &str) -> PyResult<PyModule> {
let name = [package, "revlogindex"].join(".");
let m = PyModule::new(py, &name)?;
m.add_class::<revlogindex>(py)?;
Ok(m)
}
py_class!(class revlogindex |py| {
data index: RefCell<RevlogIndex>;
def __new__(_cls, changelogipath: String, nodemappath: String) -> PyResult<Self> {
let changelogipath = Path::new(&changelogipath);
let nodemappath = Path::new(&nodemappath);
let index = RevlogIndex::new(&changelogipath, &nodemappath).map_pyerr(py)?;
Self::create_instance(py, RefCell::new(index))
}
/// Obtain the index data.
def indexdata(&self) -> PyResult<pybytes::Bytes> {
let bytes = self.index(py).borrow().changelogi_data.clone();
pybytes::Bytes::from_bytes(py, bytes)
}
/// Calculate `heads(ancestors(revs))`.<|fim▁hole|>
/// Given public and draft head revision numbers, calculate the "phase sets".
/// Return (publicset, draftset).
def phasesets(&self, publicheads: Vec<u32>, draftheads: Vec<u32>) -> PyResult<(Spans, Spans)> {
let revlog = self.index(py).borrow();
let (public_set, draft_set) = revlog.phasesets(publicheads, draftheads).map_pyerr(py)?;
Ok((Spans(public_set), Spans(draft_set)))
}
/// Get parent revisions.
def parentrevs(&self, rev: u32) -> PyResult<Vec<u32>> {
let revlog = self.index(py).borrow();
Ok(revlog.parent_revs(rev).map_pyerr(py)?.as_revs().to_vec())
}
/// Insert a new revision that hasn't been written to disk.
/// Used by revlog._addrevision.
def insert(&self, node: PyBytes, parents: Vec<u32>, data: Option<PyBytes> = None) -> PyResult<PyNone> {
let node = node.data(py).to_vec().into();
let mut revlog = self.index(py).borrow_mut();
let data = data.map(|p| p.data(py).to_vec()).unwrap_or_default();
revlog.insert(node, parents, data.into());
Ok(PyNone)
}
def __len__(&self) -> PyResult<usize> {
let revlog = self.index(py).borrow();
Ok(revlog.len())
}
});<|fim▁end|> | def headsancestors(&self, revs: Vec<u32>) -> PyResult<Vec<u32>> {
let revlog = self.index(py).borrow();
Ok(revlog.headsancestors(revs).map_pyerr(py)?)
} |
<|file_name|>repo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import time
import gzip
import tempfile
import ConfigParser
from cStringIO import StringIO
from datetime import datetime
from ellen.repo import Jagare
from ellen.utils import JagareError
from vilya.libs.permdir import get_tmpdir
from vilya.models.user import User
from vilya.models.ngit.commit import Commit
from vilya.models.ngit.diff import Diff
from vilya.models.ngit.blob import Blob
from vilya.models.ngit.submodule import Submodule
from vilya.models.ngit.tree import Tree
from vilya.models.ngit.blame import Blame
LATEST_UPDATE_REF_THRESHOLD = 60 * 60 * 24
MAX_DIFF_PATCHES = 2000
REFS_HEADS_PREFIX_LENGTH = len('refs/heads/')
class RepoMergeError(Exception):
pass
class RepoPushError(Exception):
pass
class Repo(object):
provided_features = []
def __init__(self, path):
self.type = "repo"
self.path = path
self.repo = Jagare(self.path)
def provide(self, name):
'''Check whether a given feature (i.e. interface) is provided.'''
return name in self.provided_features
@property
def empty(self):
return self.is_empty
@property
def is_empty(self):
return self.repo.empty
@property
def default_branch(self):
branch = ''
head = self.repo.head
if head:
branch = head.name[REFS_HEADS_PREFIX_LENGTH:]
return branch
def update_default_branch(self, name):
branches = self.repo.branches
if name not in branches:
return None
self.repo.update_head(name)
def clone(self, path, bare=None, branch=None,
mirror=None, env=None, shared=None):
self.repo.clone(path,
bare=bare, branch=branch,
mirror=mirror, env=env)
# shared=shared) why?
def archive(self, name, ref='master', ext='tar.gz'):
content = self.repo.archive(name, ref=ref)
if ext == 'tar':
return content
outbuffer = StringIO()
zipfile = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=outbuffer)
zipfile.write(content)
zipfile.close()
out = outbuffer.getvalue()
return out
def get_submodule(self, ref, path):
path = path.strip()
gitmodules = self.repo.show("%s:%s" % (ref, '.gitmodules'))
if not gitmodules:
return None
submodules_lines = gitmodules["data"].split('\n')
modules_str = '\n'.join([line.strip() for line in submodules_lines])
config = ConfigParser.RawConfigParser()
config.readfp(StringIO(modules_str))<|fim▁hole|> for section in config.sections():
if config.has_option(section, 'path') and config.get(
section, 'path') == path:
url = config.get(section, 'url')
return Submodule(url, path)
return None
def get_file(self, ref, path):
blob = self.repo.show("%s:%s" % (ref, path))
if not blob:
return None
if blob['type'] != 'blob':
return None
# TODO: validate blob
return Blob(self, blob)
def get_file_by_lines(self, ref, path):
blob = self.get_file(ref, path)
# TODO: blob.size < xxx
if not blob or blob.binary:
return None
if not blob.data:
return []
src = blob.data
return src.splitlines()
def get_file_n_lines(self, ref, path):
lines = self.get_file_by_lines(ref, path)
if lines:
return len(lines)
return 0
def get_commits(self, to_ref, from_ref=None, path=None, skip=0,
max_count=0, author=None, query=None, first_parent=None,
since=0, no_merges=None):
commits = self.repo.rev_list(to_ref=to_ref, from_ref=from_ref,
path=path, skip=skip,
max_count=max_count, author=author,
query=query, first_parent=first_parent,
since=since, no_merges=no_merges)
return [Commit(self, commit) for commit in commits]
def get_raw_diff(self, ref, from_ref=None, paths=None, **kw):
''' get Jagare formated diff dict '''
try:
diff = self.repo.diff(ref, from_ref=from_ref, paths=paths, **kw)
except KeyError:
return None
return diff
def get_diff(self, ref=None, from_ref=None,
linecomments=[], raw_diff=None, paths=None, **kw):
''' get ngit wrapped diff object '''
_raw_diff = None
if raw_diff:
_raw_diff = raw_diff
elif ref:
_raw_diff = self.get_raw_diff(ref, from_ref=from_ref,
paths=paths, **kw)
if _raw_diff:
return Diff(self, _raw_diff, linecomments)
else:
return None
def get_diff_length(self, ref, from_ref=None, **kw):
_raw_diff = self.get_raw_diff(ref, from_ref=from_ref, **kw)
return len(_raw_diff['patches']) if _raw_diff else 0
def get_last_commit(self, ref, path=None, no_merges=False):
if not path:
return self.get_commit(ref)
commit = self.repo.rev_list(ref, path=path, max_count=1,
no_merges=no_merges)
if not commit:
return None
commit = commit[0]
commit = Commit(self, commit)
return commit
def get_previours_commit(self, ref, path):
"""previours commit that touch the specified path"""
commits = self.repo.rev_list(ref, path=path, max_count=2,
no_merges=True)
for commit in commits:
if commit['sha'] != self.repo.sha(ref):
return Commit(self, commit)
return None
def get_commit(self, ref):
sha = self.repo.resolve_commit(ref)
if not sha:
return None
commit = self.repo.show(sha)
if not commit:
return None
# TODO: validate commit
return Commit(self, commit)
def delete_branch(self, name):
self.repo.delete_branch(name)
def get_path_by_ref(self, ref):
''' get blob or tree '''
path = self.repo.show(ref)
if not path:
return None
if path['type'] == 'tree':
path = Tree(self, path['entries'])
elif path['type'] == 'blob':
path = Blob(self, path)
else:
path = None
return path
def get_path(self, ref, path):
_item = self.repo.show("%s:%s" % (ref, path))
if not _item:
return None
if _item['type'] == 'tree':
item = Tree(self, _item['entries'])
elif _item['type'] == 'blob':
item = Blob(self, _item)
else:
item = None
return item
def get_last_update_timestamp(self):
commit = self.get_last_commit('HEAD')
if not commit:
return 0
return int(commit.author_timestamp)
class ProjectRepo(Repo):
provided_features = ['project', 'fulltext', 'moreline',
'side_by_side', 'patch_actions']
def __init__(self, project, pull=None):
self.type = "project"
self.pull = pull
self.project = project
self.project_name = project.name
self.name = project.name
self.path = project.repo_path
self.repo = Jagare(self.path)
# TODO: url
@property
def api_url(self):
return ''
@property
def context_url(self):
return 'moreline'
@property
def fulltext_url(self):
return 'fulltext'
@property
def branches(self):
return self.repo.branches
@property
def tags(self):
return self.repo.tags
def get_tree(self, ref, path=None, recursive=False, with_commit=False,
recursive_with_tree_node=False):
tree = self.repo.ls_tree(
ref, path=path, recursive=recursive,
with_commit=with_commit)
# recursive_with_tree_node=recursive_with_tree_node)
if not tree:
return None
return Tree(self, tree)
def get_file_by_ref(self, ref):
blob = self.repo.show(ref)
if not blob:
return None
return blob['data']
def get_contexts(self, ref, path, line_start, line_end):
def fix_line_index(index, max_i, min_i=0):
i = index - 1
i = max(i, min_i)
i = min(i, max_i)
return i
lines = self.get_file_by_lines(ref, path)
if not lines:
return None
n = len(lines)
start = fix_line_index(line_start, n)
end = fix_line_index(line_end, n)
return lines[start:end]
def blame_file(self, *w, **kw):
blame = self.repo.blame(*w, **kw)
if not blame:
return None
return Blame(self, blame)
def get_renamed_files(self, ref, path=None):
return self.repo.detect_renamed(ref)
def commit_file(self, *w, **kw):
return self.repo.commit_file(*w, **kw)
def get_temp_branch(self):
commit = self.get_commit('HEAD')
return 'patch_tmp' + time.strftime('%Y%m%d%H%M%S-') + commit.sha[:10]
def get_patch_file(self, ref, from_ref=None):
return self.repo.format_patch(ref, from_ref)
def get_diff_file(self, ref, from_ref=None):
_raw_diff = self.get_raw_diff(ref, from_ref)
if not _raw_diff:
return ''
patch = _raw_diff['diff'].patch
if not patch:
return ''
return patch
@classmethod
def init(cls, path, work_path=None, bare=True):
return Jagare.init(path, work_path=work_path, bare=bare)
@classmethod
def mirror(cls, url, path, env=None):
Jagare.mirror(url, path, env=env)
def add_remote(self, name, url):
return self.repo.add_remote(name, url)
def add_remote_hub(self, name, url):
self.add_remote('hub/%s' % name, url)
def update_ref(self, ref, value):
result = None
try:
result = self.repo.update_ref(ref, value)
except JagareError:
# FIXME: logging
# FIXME: more meaningful error (JagareError)
pass
return result
def sha(self, rev='HEAD'):
return self.repo.sha(rev)
def merge_base(self, to_sha, from_sha):
return self.repo.merge_base(to_sha, from_sha)
@property
def remotes(self):
return self.repo.remotes
def fetch_all(self):
self.repo.fetch_all()
def fetch(self, name):
self.repo.fetch(name)
def fetch_(self, *w, **kw):
return self.repo.fetch_(*w, **kw)
def get_latest_update_branches(self):
refs = self.repo.listall_references()
refs = filter(lambda r: r.startswith('refs/heads'), refs)
current_time = time.time()
latest_branches = []
for ref in refs:
commit_time = self.repo.lookup_reference(ref).get_object().commit_time # noqa
delta = current_time - commit_time
if delta < LATEST_UPDATE_REF_THRESHOLD:
latest_branches.append((commit_time, ref.split('/')[-1]))
return sorted(latest_branches, key=lambda r: r[0], reverse=True)
def get_all_src_objects(self):
refs = self.repo.listall_references()
refs = filter(lambda r: r.startswith('refs/heads'), refs)
commits_dict = {}
for ref in refs:
commits = self.repo.rev_list(ref)
commits = {c['sha']: c for c in commits}
commits_dict.update(commits)
commits = sorted(commits_dict.values(),
key=lambda x: x['committer']['time'],
reverse=True)
pruned_set = set()
objects_dict = {}
treenode_list = [(commit['sha'], commit['tree'], '')
for commit in commits]
while treenode_list:
commit_id, tree_id, path = treenode_list.pop()
if tree_id in pruned_set:
continue
pruned_set.add(tree_id)
objects = self.repo.ls_tree(tree_id, size=True)
for obj in objects:
obj_id = obj['id']
obj_path = '%s/%s' % (path, obj['name'])
if obj['type'] == 'tree':
treenode_list.append((commit_id, obj_id, obj_path))
elif obj['type'] == 'blob':
if obj_id not in objects_dict:
commit = commits_dict[commit_id]
objects_dict[obj_id] = dict(
path=obj_path[1:],
commit=commit_id,
size=obj['size'],
commit_time=datetime.fromtimestamp(
commit['committer']['time']),
committer=commit['committer']['name']
)
return objects_dict
class GistRepo(Repo):
provided_features = []
# TODO: move to utils
PREFIX = 'gistfile'
def __init__(self, gist):
self.type = "gist"
self.gist = gist
self.name = gist.name
self.path = gist.repo_path
self.repo = Jagare(gist.repo_path)
@classmethod
def init(cls, gist):
Jagare.init(gist.repo_path, bare=True)
def clone(self, gist):
super(GistRepo, self).clone(gist.repo_path, bare=True)
def get_files(self):
files = []
if self.empty:
return files
tree = self.repo.ls_tree('HEAD')
for f in tree:
files.append([f['sha'], f['name']])
return files
# TODO: move to utils
def check_filename(self, fn):
for c in (' ', '<', '>', '|', ';', ':', '&', '`', "'"):
fn = fn.replace(c, '\%s' % c)
fn = fn.replace('/', '')
return fn
def commit_all_files(self, names, contents, oids, author):
data = []
for i, (name, content, oid) in enumerate(zip(names, contents, oids),
start=1):
if not name and not content:
continue
if not name:
name = self.PREFIX + str(i)
name = self.check_filename(name)
data.append([name, content, 'insert'])
files = self.get_files()
for sha, name in files:
if name in names:
continue
data.append([name, '', 'remove'])
self.repo.commit_file(branch='master',
parent='master',
author_name=author.name,
author_email=author.email,
message=' ',
reflog=' ',
data=data)
def is_commit(self, ref):
commit = self.repo.show(ref)
if commit:
return True
class PullRepo(ProjectRepo):
provided_features = ProjectRepo.provided_features + ['show_inline_toggle']
def __init__(self, pull):
# TODO: When to_proj or from_proj not exist?
# TODO: catch exception if from_proj was deleted
super(PullRepo, self).__init__(pull.to_proj, pull)
self.type = "pull"
self.from_repo = None
try:
if pull.from_proj:
self.from_repo = ProjectRepo(pull.from_proj, pull)
except JagareError:
self.from_repo = None
self._temp_dir = None
# no use
#self.merge_repo = None
#self.test_repo = None
# TODO: unify URL handling
@property
def api_url(self):
project_name = self.project.name
ticket_id = self.pull.ticket_id
# FIXME: pull/new has no ticket
if not ticket_id:
return '/api/%s/diff/' % project_name
url = "/api/%s/pulls/%s/" % (project_name, ticket_id)
return url
@property
def context_url(self):
project_name = self.project.name
ticket_id = self.pull.ticket_id
if not ticket_id:
return '/api/%s/diff/moreline' % project_name
url = "/api/%s/pulls/%s/moreline" % (project_name, ticket_id)
return url
@property
def fulltext_url(self):
project_name = self.project.name
ticket_id = self.pull.ticket_id
# FIXME: pull/new has no ticket
if not ticket_id:
return '/api/%s/diff/fulltext' % project_name
url = "/api/%s/pulls/%s/fulltext" % (project_name, ticket_id)
return url
@property
def temp_dir(self):
if self._temp_dir:
return self._temp_dir
# TODO: move to Jagare
pulltmp = os.path.join(get_tmpdir(), "pulltmp")
if not os.path.exists(pulltmp):
os.makedirs(pulltmp)
worktree = tempfile.mkdtemp(dir=pulltmp)
self._temp_dir = worktree
return worktree
def init(self):
import os
path = os.path.join(self.temp_dir, '.git')
work_path = self.temp_dir
return Jagare.init(path, work_path=work_path, bare=False)
@property
def from_local(self):
return self.pull.to_proj == self.pull.from_proj
@property
def from_sha(self):
sha = None
ticket_id = self.pull.ticket_id
if ticket_id:
from vilya.models.consts import PULL_REF_H
# FIXME: catch more exceptions
try:
sha = self.sha(PULL_REF_H % ticket_id)
except:
# old PRs that were closed but never merged may fail here
pass
if not sha and self.from_repo:
sha = self.from_repo.sha(self.pull.from_ref)
return sha
@property
def to_sha(self):
sha = None
ticket_id = self.pull.ticket_id
if ticket_id:
from vilya.models.consts import PULL_REF_M
# FIXME: catch more exceptions
try:
sha = self.sha(PULL_REF_M % ticket_id)
except:
# old PRs that were closed but never merged may fail here
pass
if not sha:
sha = self.sha(self.pull.to_ref)
return sha
def merge(self, merger, message_header, message_body):
import shutil
from vilya.models.git import make_git_env
# TODO: Use User only
if merger and isinstance(merger, basestring):
merger = User(merger)
if not isinstance(merger, User):
raise Exception("User is needed to merge pull")
env = make_git_env(merger)
worktree = self.temp_dir
merge_commit_sha = None
try:
if self.pull.is_up_to_date():
return ''
from_sha = self.from_sha
to_sha = self.to_sha
repo = self.pull.pull_clone(worktree)
ref = self.pull.pull_fetch(repo)
result = repo.merge(ref, message_header, message_body, no_ff=True,
_env=env)
errcode = result['returncode']
if errcode != 0:
raise RepoMergeError()
result = repo.push('origin', self.pull.to_ref,
_env=dict(CODE_REMOTE_USER=merger.name))
errcode = result['returncode']
if errcode != 0:
raise RepoPushError
merge_commit_sha = self.sha(self.pull.to_ref)
except RepoMergeError:
# FIXME: error msg
pass
except RepoPushError:
# FIXME: error msg
pass
else:
if merge_commit_sha and self.pull.ticket:
self.pull._save_merged(merger.name,
from_sha,
to_sha,
merge_commit_sha)
finally:
shutil.rmtree(worktree)
return merge_commit_sha
def can_merge(self):
import os
import shutil
worktree = self.temp_dir
try:
self.clone(worktree, branch=self.pull.to_ref,
bare=False, shared=True)
repo = ProjectRepo.init(
os.path.join(worktree, '.git'), worktree, bare=False)
ref = self.pull.pull_fetch(repo)
result = repo.merge_commits(self.pull.to_ref, ref)
except KeyError: # dummy result
result = {}
finally:
shutil.rmtree(worktree)
if result.get('has_conflicts', None) is False:
return True
else:
return False
def can_fastforward(self):
if not self.get_commits(self.to_sha, self.from_sha):
return True
def backport_project_name(name):
return name.replace('~', '_')<|fim▁end|> | |
<|file_name|>beehive.ts<|end_file_name|><|fim▁begin|>import {config} from '../config.ts';
import {Util, Location} from './app.ts';
import {Map} from './map.ts';
import {IHiveOptions, Hive} from './hive.ts';
import * as ko from 'knockout';
import * as _ from 'lodash';<|fim▁hole|> steps: number;
leaps: number;
}
export class Beehive {
private options: IBeehiveOptions;
private lastHiveCenter: google.maps.LatLng;
private hives: KnockoutObservableArray<Hive>;
private mapObject: google.maps.Circle;
private isActive: boolean;
private isEditingHives: boolean;
private coveringRadius: number;
public activeHives: KnockoutComputed<Hive[]>;
constructor (options: IBeehiveOptions) {
this.options = options;
this.hives = ko.observableArray([]);
this.isEditingHives = false;
this.coveringRadius = Util.getBeehiveRadius(this.options.leaps, this.options.steps);
this.activeHives = ko.computed(() => this.getActiveHives(), this, { deferEvaluation: true });
this.mapObject = new google.maps.Circle({
radius: this.coveringRadius,
fillColor: '#0000FF',
fillOpacity: 0.0,
strokeWeight: 1,
clickable: true,
center: this.options.center.getLatLng(),
editable: true,
draggable: true,
zIndex: 3
});
this.options.map.addMapObject(this.mapObject);
this.generateHives();
this.toggleActive();
this.options.map.addListener(this.mapObject, 'click', () => this.toggleActive());
this.options.map.addListener(this.mapObject, 'radius_changed', () => {
this.coveringRadius = this.mapObject.getRadius();
let newLeaps = Util.getLeapsToCoverRadius(this.coveringRadius, this.options.steps);
if (this.options.leaps !== newLeaps) {
this.options.leaps = newLeaps;
this.generateHives();
}
});
this.options.map.addListener(this.mapObject, 'center_changed', () => {
let center = this.mapObject.getCenter();
this.options.center = new Location(center.lat(), center.lng());
this.generateHives();
});
}
public getHives(): Hive[] {
console.log(`getting hives for ${this.options.center.toString()}`);
return this.hives();
}
public reset(dispose: boolean = false): void {
// cleanup old hives
for (let i = 0; i < this.hives().length; i++) {
this.hives()[i].reset();
}
this.hives([]);
if (dispose) {
this.mapObject = this.options.map.removeMapObject(this.mapObject) as google.maps.Circle;
}
}
private generateHives(): Hive[] {
this.reset();
let locations: Hive[] = [];
let distanceBetweenHiveCenters = Util.distanceBetweenHiveCenters(this.options.steps);
let getNextPoint = (p, heading, distance = distanceBetweenHiveCenters, adjust = true) => {
let nextPoint = google.maps.geometry.spherical.computeOffset(p, distance, heading);
if (adjust) {
nextPoint = google.maps.geometry.spherical.computeOffset(nextPoint, Util.locationAdjustment, heading + 90);
}
locations.push(new Hive(<IHiveOptions>{ center: new Location(nextPoint.lat(), nextPoint.lng()), steps: this.options.steps, map: this.options.map }));
return nextPoint;
};
let point: google.maps.LatLng = this.options.center.getLatLng();
point = getNextPoint(point, 0, 0, false);
this.lastHiveCenter = point;
for (let leap = 2; leap <= this.options.leaps; leap++) {
point = getNextPoint(this.lastHiveCenter, 0, distanceBetweenHiveCenters);
this.lastHiveCenter = point;
for (let se = 1; se < leap; se++) {
point = getNextPoint(point, 120);
}
for (let s = 1; s < leap; s++) {
point = getNextPoint(point, 180);
}
for (let sw = 1; sw < leap; sw++) {
point = getNextPoint(point, 240);
}
for (let nw = 1; nw < leap; nw++) {
point = getNextPoint(point, 300);
}
for (let n = 1; n < leap; n++) {
point = getNextPoint(point, 0);
}
for (let ne = 2; ne < leap; ne++) {
point = getNextPoint(point, 60);
}
}
this.hives(locations);
return this.hives();
}
public resize(steps: number): void {
this.options.steps = steps;
this.options.leaps = Util.getLeapsToCoverRadius(this.coveringRadius, this.options.steps);
this.generateHives();
}
public disableActive(): void {
this.isActive = false;
if (this.isEditingHives) {
this.editHives();
}
this.mapObject.set('fillOpacity', 0);
}
public toggleActive(fromMap: boolean = false): void {
this.isActive = !this.isActive;
this.mapObject.set('fillOpacity', this.isActive ? 0.3 : 0);
this.options.map.setActiveBeehive(this.isActive ? this : null);
}
public editHives(): void {
this.isEditingHives = !this.isEditingHives;
for (let i = 0; i < this.hives().length; i++) {
this.isEditingHives ? this.hives()[i].addListener() : this.hives()[i].removeListener();
}
this.mapObject.set('zIndex', this.isEditingHives ? 1 : 3);
this.mapObject.set('fillOpacity', this.isEditingHives ? 0 : 0.3);
}
private getActiveHives(): Hive[] {
return _.filter(this.hives(), (h) => h.isActive());
}
}<|fim▁end|> |
export interface IBeehiveOptions {
map: Map;
center: Location; |
<|file_name|>htmltableelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::utils::{DOMString, null_string, ErrorResult};
use dom::htmlelement::HTMLElement;
pub struct HTMLTableElement {
parent: HTMLElement,
}
impl HTMLTableElement {
pub fn DeleteCaption(&self) {
}
pub fn DeleteTHead(&self) {
}
pub fn DeleteTFoot(&self) {
}
pub fn DeleteRow(&mut self, _index: i32, _rv: &mut ErrorResult) {
}
pub fn Sortable(&self) -> bool {
false
}
pub fn SetSortable(&self, _sortable: bool) {
}
pub fn StopSorting(&self) {
}
pub fn Align(&self) -> DOMString {
null_string
}
pub fn SetAlign(&self, _align: &DOMString, _rv: &mut ErrorResult) {
}
pub fn Border(&self) -> DOMString {
null_string
}
pub fn SetBorder(&self, _border: &DOMString, _rv: &mut ErrorResult) {
}
pub fn Frame(&self) -> DOMString {
null_string
}
pub fn SetFrame(&self, _frame: &DOMString, _rv: &mut ErrorResult) {
}
pub fn Rules(&self) -> DOMString {
null_string
}
pub fn SetRules(&self, _rules: &DOMString, _rv: &mut ErrorResult) {
}
pub fn Summary(&self) -> DOMString {
null_string
}
pub fn SetSummary(&self, _summary: &DOMString, _rv: &mut ErrorResult) {<|fim▁hole|> }
pub fn SetWidth(&self, _width: &DOMString, _rv: &mut ErrorResult) {
}
pub fn BgColor(&self) -> DOMString {
null_string
}
pub fn SetBgColor(&self, _bg_color: &DOMString, _rv: &mut ErrorResult) {
}
pub fn CellPadding(&self) -> DOMString {
null_string
}
pub fn SetCellPadding(&self, _cell_padding: &DOMString, _rv: &mut ErrorResult) {
}
pub fn CellSpacing(&self) -> DOMString {
null_string
}
pub fn SetCellSpacing(&self, _cell_spacing: &DOMString, _rv: &mut ErrorResult) {
}
}<|fim▁end|> | }
pub fn Width(&self) -> DOMString {
null_string |
<|file_name|>message.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Handling of the <message> element.
'''
from __future__ import print_function
import re
import six
from grit.node import base
from grit import clique
from grit import exception
from grit import lazy_re
from grit import tclib
from grit import util
# Matches exactly three dots ending a line or followed by whitespace.
_ELLIPSIS_PATTERN = lazy_re.compile(r'(?<!\.)\.\.\.(?=$|\s)')
_ELLIPSIS_SYMBOL = u'\u2026' # Ellipsis
# Finds whitespace at the start and end of a string which can be multiline.
_WHITESPACE = lazy_re.compile(r'(?P<start>\s*)(?P<body>.+?)(?P<end>\s*)\Z',
re.DOTALL | re.MULTILINE)
# <ph> placeholder elements should contain the special character formatters
# used to format <ph> element content.
# Android format.
_ANDROID_FORMAT = (r'%[1-9]+\$'
r'([-#+ 0,(]*)([0-9]+)?(\.[0-9]+)?'
r'([bBhHsScCdoxXeEfgGaAtT%n])')
# Chrome l10n format.
_CHROME_FORMAT = r'\$+\d'
# Windows EWT numeric and GRIT %s %d formats.
_OTHER_FORMAT = r'%[0-9sd]'
# Finds formatters that must be in a placeholder (<ph>) element.
_FORMATTERS = lazy_re.compile(
'(%s)|(%s)|(%s)' % (_ANDROID_FORMAT, _CHROME_FORMAT, _OTHER_FORMAT))
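# Illustrative strings matched by the combined pattern above (examples are
# ours, not from the original file): '%1$s' (Android positional format),
# '$1' (Chrome l10n format), '%s' and '%d' (Windows EWT / GRIT formats).
# Plain text such as '50%' is not matched, since the '%' must be followed
# by a recognized format character.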
_BAD_PLACEHOLDER_MSG = ('ERROR: Placeholder formatter found outside of <ph> '
'tag in message "%s" in %s.')
_INVALID_PH_CHAR_MSG = ('ERROR: Invalid format characters found in message '
'"%s" <ph> tag in %s.')
# Finds HTML tag tokens.
_HTMLTOKEN = lazy_re.compile(r'<[/]?[a-z][a-z0-9]*[^>]*>', re.I)
# Finds HTML entities.
_HTMLENTITY = lazy_re.compile(r'&[^\s]*;')
class MessageNode(base.ContentNode):
'''A <message> element.'''
# For splitting a list of things that can be separated by commas or
# whitespace
_SPLIT_RE = lazy_re.compile(r'\s*,\s*|\s+')
def __init__(self):
super(MessageNode, self).__init__()
# Valid after EndParsing, this is the MessageClique that contains the
# source message and any translations of it that have been loaded.
self.clique = None
# We don't send leading and trailing whitespace into the translation
# console, but rather tack it onto the source message and any
# translations when formatting them into RC files or what have you.
self.ws_at_start = '' # Any whitespace characters at the start of the text
self.ws_at_end = '' # --"-- at the end of the text
# A list of "shortcut groups" this message is in. We check to make sure
# that shortcut keys (e.g. &J) within each shortcut group are unique.
self.shortcut_groups_ = []
# Formatter-specific data used to control the output of individual strings.
# formatter_data is a space separated list of C preprocessor-style
# definitions. Names without values are given the empty string value.
# Example: "foo=5 bar baz=100"
self.formatter_data = {}
# Whether or not to convert ... -> U+2026 within Translate().
self._replace_ellipsis = False
def _IsValidChild(self, child):
return isinstance(child, (PhNode))
def _IsValidAttribute(self, name, value):
if name not in ['name', 'offset', 'translateable', 'desc', 'meaning',
'internal_comment', 'shortcut_groups', 'custom_type',
'validation_expr', 'use_name_for_id', 'sub_variable',
'formatter_data']:
return False
if (name in ('translateable', 'sub_variable') and
value not in ['true', 'false']):
return False
return True
def SetReplaceEllipsis(self, value):
r'''Sets whether to replace ... with \u2026.
'''
self._replace_ellipsis = value
def MandatoryAttributes(self):
return ['name|offset']
def DefaultAttributes(self):
return {
'custom_type' : '',
'desc' : '',
'formatter_data' : '',
'internal_comment' : '',
'meaning' : '',
'shortcut_groups' : '',
'sub_variable' : 'false',
'translateable' : 'true',
'use_name_for_id' : 'false',
'validation_expr' : '',
}
def HandleAttribute(self, attrib, value):
base.ContentNode.HandleAttribute(self, attrib, value)
if attrib != 'formatter_data':
return
# Parse value, a space-separated list of defines, into a dict.
# Example: "foo=5 bar" -> {'foo':'5', 'bar':''}
for item in value.split():
name, _, val = item.partition('=')
self.formatter_data[name] = val
def GetTextualIds(self):<|fim▁hole|> '''
if 'offset' not in self.attrs:
return super(MessageNode, self).GetTextualIds()
# we search for the first grouping node in the parents' list
# to take care of the case where the first parent is an <if> node
grouping_parent = self.parent
import grit.node.empty
while grouping_parent and not isinstance(grouping_parent,
grit.node.empty.GroupingNode):
grouping_parent = grouping_parent.parent
assert 'first_id' in grouping_parent.attrs
return [grouping_parent.attrs['first_id'] + '_' + self.attrs['offset']]
def IsTranslateable(self):
return self.attrs['translateable'] == 'true'
def EndParsing(self):
super(MessageNode, self).EndParsing()
# Make the text (including placeholder references) and list of placeholders,
# verify placeholder formats, then strip and store leading and trailing
# whitespace and create the tclib.Message() and a clique to contain it.
text = ''
placeholders = []
for item in self.mixed_content:
if isinstance(item, six.string_types):
# Not a <ph> element: fail if any <ph> formatters are detected.
if _FORMATTERS.search(item):
print(_BAD_PLACEHOLDER_MSG % (item, self.source))
raise exception.PlaceholderNotInsidePhNode
text += item
else:
# Extract the <ph> element components.
presentation = item.attrs['name'].upper()
text += presentation
ex = ' ' # <ex> example element cdata if present.
if len(item.children):
ex = item.children[0].GetCdata()
original = item.GetCdata()
# Sanity check the <ph> element content.
cdata = original
# Replace all HTML tag tokens in cdata.
match = _HTMLTOKEN.search(cdata)
while match:
cdata = cdata.replace(match.group(0), '_')
match = _HTMLTOKEN.search(cdata)
# Replace all HTML entities in cdata.
match = _HTMLENTITY.search(cdata)
while match:
cdata = cdata.replace(match.group(0), '_')
match = _HTMLENTITY.search(cdata)
# Remove first matching formatter from cdata.
match = _FORMATTERS.search(cdata)
if match:
cdata = cdata.replace(match.group(0), '')
# Fail if <ph> special chars remain in cdata.
if re.search(r'[%\$]', cdata):
message_id = self.attrs['name'] + ' ' + original
print(_INVALID_PH_CHAR_MSG % (message_id, self.source))
raise exception.InvalidCharactersInsidePhNode
# Otherwise, accept this <ph> placeholder.
placeholders.append(tclib.Placeholder(presentation, original, ex))
m = _WHITESPACE.match(text)
if m:
self.ws_at_start = m.group('start')
self.ws_at_end = m.group('end')
text = m.group('body')
self.shortcut_groups_ = self._SPLIT_RE.split(self.attrs['shortcut_groups'])
self.shortcut_groups_ = [i for i in self.shortcut_groups_ if i != '']
description_or_id = self.attrs['desc']
if description_or_id == '' and 'name' in self.attrs:
description_or_id = 'ID: %s' % self.attrs['name']
assigned_id = None
if self.attrs['use_name_for_id'] == 'true':
assigned_id = self.attrs['name']
message = tclib.Message(text=text, placeholders=placeholders,
description=description_or_id,
meaning=self.attrs['meaning'],
assigned_id=assigned_id)
self.InstallMessage(message)
def InstallMessage(self, message):
'''Sets this node's clique from a tclib.Message instance.
Args:
message: A tclib.Message.
'''
self.clique = self.UberClique().MakeClique(message, self.IsTranslateable())
for group in self.shortcut_groups_:
self.clique.AddToShortcutGroup(group)
if self.attrs['custom_type'] != '':
self.clique.SetCustomType(util.NewClassInstance(self.attrs['custom_type'],
clique.CustomType))
elif self.attrs['validation_expr'] != '':
self.clique.SetCustomType(
clique.OneOffCustomType(self.attrs['validation_expr']))
def SubstituteMessages(self, substituter):
'''Applies substitution to this message.
Args:
substituter: a grit.util.Substituter object.
'''
message = substituter.SubstituteMessage(self.clique.GetMessage())
if message is not self.clique.GetMessage():
self.InstallMessage(message)
def GetCliques(self):
return [self.clique] if self.clique else []
def Translate(self, lang):
'''Returns a translated version of this message.
'''
assert self.clique
msg = self.clique.MessageForLanguage(lang,
self.PseudoIsAllowed(),
self.ShouldFallbackToEnglish()
).GetRealContent()
if self._replace_ellipsis:
msg = _ELLIPSIS_PATTERN.sub(_ELLIPSIS_SYMBOL, msg)
# Always remove all byte order marks (\uFEFF) https://crbug.com/1033305
msg = msg.replace(u'\uFEFF','')
return msg.replace('[GRITLANGCODE]', lang)
def NameOrOffset(self):
key = 'name' if 'name' in self.attrs else 'offset'
return self.attrs[key]
def ExpandVariables(self):
'''We always expand variables on Messages.'''
return True
def GetDataPackValue(self, lang, encoding):
'''Returns a str representation for a data_pack entry.'''
message = self.ws_at_start + self.Translate(lang) + self.ws_at_end
return util.Encode(message, encoding)
def IsResourceMapSource(self):
return True
@staticmethod
def Construct(parent, message, name, desc='', meaning='', translateable=True):
'''Constructs a new message node that is a child of 'parent', with the
name, desc, meaning and translateable attributes set using the same-named
parameters and the text of the message and any placeholders taken from
'message', which must be a tclib.Message() object.'''
# Convert type to appropriate string
translateable = 'true' if translateable else 'false'
node = MessageNode()
node.StartParsing('message', parent)
node.HandleAttribute('name', name)
node.HandleAttribute('desc', desc)
node.HandleAttribute('meaning', meaning)
node.HandleAttribute('translateable', translateable)
items = message.GetContent()
for ix, item in enumerate(items):
if isinstance(item, six.string_types):
# Ensure whitespace at front and back of message is correctly handled.
if ix == 0:
item = "'''" + item
if ix == len(items) - 1:
item = item + "'''"
node.AppendContent(item)
else:
phnode = PhNode()
phnode.StartParsing('ph', node)
phnode.HandleAttribute('name', item.GetPresentation())
phnode.AppendContent(item.GetOriginal())
if len(item.GetExample()) and item.GetExample() != ' ':
exnode = ExNode()
exnode.StartParsing('ex', phnode)
exnode.AppendContent(item.GetExample())
exnode.EndParsing()
phnode.AddChild(exnode)
phnode.EndParsing()
node.AddChild(phnode)
node.EndParsing()
return node
class PhNode(base.ContentNode):
'''A <ph> element.'''
def _IsValidChild(self, child):
return isinstance(child, ExNode)
def MandatoryAttributes(self):
return ['name']
def EndParsing(self):
super(PhNode, self).EndParsing()
# We only allow a single example for each placeholder
if len(self.children) > 1:
raise exception.TooManyExamples()
def GetTextualIds(self):
# The 'name' attribute is not an ID.
return []
class ExNode(base.ContentNode):
'''An <ex> element.'''
pass<|fim▁end|> | '''
Returns the concatenation of the parent's node first_id and
this node's offset if it has one, otherwise just call the
superclass' implementation |
<|file_name|>ActivityListViewMultiHolder.java<|end_file_name|><|fim▁begin|>package cn.jzvd.demo;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import cn.jzvd.Jzvd;
import cn.jzvd.JzvdStd;
/**
* Created by Nathen
* On 2016/05/23 21:34
*/
public class ActivityListViewMultiHolder extends AppCompatActivity {
ListView listView;
VideoListAdapter mAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_listview_normal);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
getSupportActionBar().setDisplayShowTitleEnabled(true);
getSupportActionBar().setDisplayUseLogoEnabled(false);
getSupportActionBar().setTitle("MultiHolderListView");
listView = findViewById(R.id.listview);
mAdapter = new VideoListAdapter(this);
listView.setAdapter(mAdapter);
listView.setOnScrollListener(new AbsListView.OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView view, int scrollState) {
}
@Override
public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
if (Jzvd.CURRENT_JZVD == null) return;
int lastVisibleItem = firstVisibleItem + visibleItemCount;
int currentPlayPosition = Jzvd.CURRENT_JZVD.positionInList;
// Log.e(TAG, "onScrollReleaseAllVideos: " +
// currentPlayPosition + " " + firstVisibleItem + " " + currentPlayPosition + " " + lastVisibleItem);
if (currentPlayPosition >= 0) {
if ((currentPlayPosition < firstVisibleItem || currentPlayPosition > (lastVisibleItem - 1))) {
if (Jzvd.CURRENT_JZVD.screen != Jzvd.SCREEN_FULLSCREEN) {
Jzvd.releaseAllVideos();// Why does the last video trigger this when rotated to landscape, but other positions don't?
}
}
}
}
});
}
@Override
public void onBackPressed() {
if (Jzvd.backPress()) {
return;
}
super.onBackPressed();
}
@Override
protected void onPause() {
super.onPause();
Jzvd.releaseAllVideos();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
finish();
break;
}
return super.onOptionsItemSelected(item);
}
public class VideoListAdapter extends BaseAdapter {
int[] viewtype = {0, 0, 0, 1, 0, 0, 0, 1, 0, 0};//1 = jzvdStd, 0 = textView
Context context;
LayoutInflater mInflater;
public VideoListAdapter(Context context) {
this.context = context;
mInflater = LayoutInflater.from(context);
}
@Override
public int getCount() {
return viewtype.length;
}
@Override
public Object getItem(int position) {
return null;
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
if (getItemViewType(position) == 1) {
VideoHolder viewHolder;
if (convertView != null && convertView.getTag() != null && convertView.getTag() instanceof VideoHolder) {
viewHolder = (VideoHolder) convertView.getTag();
} else {
viewHolder = new VideoHolder();
convertView = mInflater.inflate(R.layout.item_videoview, null);
viewHolder.jzvdStd = convertView.findViewById(R.id.videoplayer);
convertView.setTag(viewHolder);
}
viewHolder.jzvdStd.setUp(
VideoConstant.videoUrls[0][position],
VideoConstant.videoTitles[0][position], Jzvd.SCREEN_NORMAL);
viewHolder.jzvdStd.positionInList = position;
Glide.with(ActivityListViewMultiHolder.this)
.load(VideoConstant.videoThumbs[0][position])
.into(viewHolder.jzvdStd.thumbImageView);
} else {
TextViewHolder textViewHolder;
if (convertView != null && convertView.getTag() != null && convertView.getTag() instanceof TextViewHolder) {
textViewHolder = (TextViewHolder) convertView.getTag();
} else {
textViewHolder = new TextViewHolder();
LayoutInflater mInflater = LayoutInflater.from(context);
convertView = mInflater.inflate(R.layout.item_textview, null);
textViewHolder.textView = convertView.findViewById(R.id.textview);
convertView.setTag(textViewHolder);<|fim▁hole|> }
return convertView;
}
@Override
public int getItemViewType(int position) {
return viewtype[position];
}
@Override
public int getViewTypeCount() {
return 2;
}
class VideoHolder {
JzvdStd jzvdStd;
}
class TextViewHolder {
TextView textView;
}
}
}<|fim▁end|> | } |
<|file_name|>server.py<|end_file_name|><|fim▁begin|>import tornado.web
import tornado.ioloop
import os
from handlers import *
urls = [
(r'/', IndexHandler),
(r'/api/(?P<action>[a-zA-Z0-9-_]+)', ApiServiceHandler),
(r'/about', AboutHandler),
]
settings = {
"static_path" : os.path.join(os.path.dirname(__file__), "static"),
"template_path" : os.path.join(os.path.dirname(__file__), "templates"),
"debug" : True,
"gzip" : True,
"cookie_secret" : "asdf"
}
def main(addr):
application = tornado.web.Application(urls, **settings)<|fim▁hole|>if __name__ == "__main__":
main("127.0.0.1")<|fim▁end|> | application.listen(8080, addr)
tornado.ioloop.IOLoop.instance().start()
|
<|file_name|>IRenderingOption.ts<|end_file_name|><|fim▁begin|>import { DownloadOption } from "./DownloadOption";
<|fim▁hole|>}<|fim▁end|> |
export interface IRenderingOption {
type: DownloadOption;
button: JQuery;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>'''
Wrap some important functions in sqlite3 so we can instrument them.
'''
<|fim▁hole|>from xrayvision.monkeypatch import mark_patched, is_patched
_old_connect = sqlite3.connect
def patch(module):
module<|fim▁end|> | |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
import optparse
import re
import socket
from swift.common import exceptions
from swift.common.utils import expand_ipv6, is_valid_ip, is_valid_ipv4, \
is_valid_ipv6
def tiers_for_dev(dev):
"""
Returns a tuple of tiers for a given device in ascending order by
length.
:returns: tuple of tiers
"""
t1 = dev['region']
t2 = dev['zone']
t3 = dev['ip']
t4 = dev['id']
return ((t1,),
(t1, t2),
(t1, t2, t3),
(t1, t2, t3, t4))
def build_tier_tree(devices):
"""
Construct the tier tree from the zone layout.
The tier tree is a dictionary that maps tiers to their child tiers.
A synthetic root node of () is generated so that there's one tree,
not a forest.
Example:<|fim▁hole|> | | +---- device id 1
| | |
| | +---- device id 2
| |
| +---- 192.168.101.2 -+---- device id 3
| |
| +---- device id 4
| |
| +---- device id 5
|
+---- zone 2 -+---- 192.168.102.1 -+---- device id 6
| |
| +---- device id 7
| |
| +---- device id 8
|
+---- 192.168.102.2 -+---- device id 9
|
+---- device id 10
region 2 -+---- zone 1 -+---- 192.168.201.1 -+---- device id 12
| |
| +---- device id 13
| |
| +---- device id 14
|
+---- 192.168.201.2 -+---- device id 15
|
+---- device id 16
|
+---- device id 17
The tier tree would look like:
{
(): [(1,), (2,)],
(1,): [(1, 1), (1, 2)],
(2,): [(2, 1)],
(1, 1): [(1, 1, 192.168.101.1),
(1, 1, 192.168.101.2)],
(1, 2): [(1, 2, 192.168.102.1),
(1, 2, 192.168.102.2)],
(2, 1): [(2, 1, 192.168.201.1),
(2, 1, 192.168.201.2)],
(1, 1, 192.168.101.1): [(1, 1, 192.168.101.1, 0),
(1, 1, 192.168.101.1, 1),
(1, 1, 192.168.101.1, 2)],
(1, 1, 192.168.101.2): [(1, 1, 192.168.101.2, 3),
(1, 1, 192.168.101.2, 4),
(1, 1, 192.168.101.2, 5)],
(1, 2, 192.168.102.1): [(1, 2, 192.168.102.1, 6),
(1, 2, 192.168.102.1, 7),
(1, 2, 192.168.102.1, 8)],
(1, 2, 192.168.102.2): [(1, 2, 192.168.102.2, 9),
(1, 2, 192.168.102.2, 10)],
(2, 1, 192.168.201.1): [(2, 1, 192.168.201.1, 12),
(2, 1, 192.168.201.1, 13),
(2, 1, 192.168.201.1, 14)],
(2, 1, 192.168.201.2): [(2, 1, 192.168.201.2, 15),
(2, 1, 192.168.201.2, 16),
(2, 1, 192.168.201.2, 17)],
}
:devices: device dicts from which to generate the tree
:returns: tier tree
"""
tier2children = defaultdict(set)
for dev in devices:
for tier in tiers_for_dev(dev):
if len(tier) > 1:
tier2children[tier[0:-1]].add(tier)
else:
tier2children[()].add(tier)
return tier2children
def validate_and_normalize_ip(ip):
"""
Return normalized ip if the ip is a valid ip.
Otherwise raise ValueError Exception. The hostname is
normalized to all lower case. IPv6-addresses are converted to
lowercase and fully expanded.
"""
# first convert to lower case
new_ip = ip.lower()
if is_valid_ipv4(new_ip):
return new_ip
elif is_valid_ipv6(new_ip):
return expand_ipv6(new_ip)
else:
raise ValueError('Invalid ip %s' % ip)
def validate_and_normalize_address(address):
"""
Return normalized address if the address is a valid ip or hostname.
Otherwise raise ValueError Exception. The hostname is
normalized to all lower case. IPv6-addresses are converted to
lowercase and fully expanded.
RFC1123 2.1 Host Names and Numbers
DISCUSSION
This last requirement is not intended to specify the complete
syntactic form for entering a dotted-decimal host number;
that is considered to be a user-interface issue. For
example, a dotted-decimal number must be enclosed within
"[ ]" brackets for SMTP mail (see Section 5.2.17). This
notation could be made universal within a host system,
simplifying the syntactic checking for a dotted-decimal
number.
If a dotted-decimal number can be entered without such
identifying delimiters, then a full syntactic check must be
made, because a segment of a host domain name is now allowed
to begin with a digit and could legally be entirely numeric
(see Section 6.1.2.4). However, a valid host name can never
have the dotted-decimal form #.#.#.#, since at least the
highest-level component label will be alphabetic.
"""
new_address = address.lstrip('[').rstrip(']')
if address.startswith('[') and address.endswith(']'):
return validate_and_normalize_ip(new_address)
new_address = new_address.lower()
if is_valid_ipv4(new_address):
return new_address
elif is_valid_ipv6(new_address):
return expand_ipv6(new_address)
elif is_valid_hostname(new_address):
return new_address
else:
raise ValueError('Invalid address %s' % address)
def is_valid_hostname(hostname):
"""
Return True if the provided hostname is a valid hostname
"""
if len(hostname) < 1 or len(hostname) > 255:
return False
if hostname.endswith('.'):
# strip exactly one dot from the right, if present
hostname = hostname[:-1]
allowed = re.compile("(?!-)[A-Z\d-]{1,63}(?<!-)$", re.IGNORECASE)
return all(allowed.match(x) for x in hostname.split("."))
def is_local_device(my_ips, my_port, dev_ip, dev_port):
"""
Return True if the provided dev_ip and dev_port are among the IP
addresses specified in my_ips and my_port respectively.
To support accurate locality determination in the server-per-port
deployment, when my_port is None, only IP addresses are used for
determining locality (dev_port is ignored).
If dev_ip is a hostname then it is first translated to an IP
address before checking it against my_ips.
"""
candidate_ips = []
if not is_valid_ip(dev_ip) and is_valid_hostname(dev_ip):
try:
# get the ip for this host; use getaddrinfo so that
# it works for both ipv4 and ipv6 addresses
addrinfo = socket.getaddrinfo(dev_ip, dev_port)
for addr in addrinfo:
family = addr[0]
dev_ip = addr[4][0] # get the ip-address
if family == socket.AF_INET6:
dev_ip = expand_ipv6(dev_ip)
candidate_ips.append(dev_ip)
except socket.gaierror:
return False
else:
if is_valid_ipv6(dev_ip):
dev_ip = expand_ipv6(dev_ip)
candidate_ips = [dev_ip]
for dev_ip in candidate_ips:
if dev_ip in my_ips and (my_port is None or dev_port == my_port):
return True
return False
def parse_search_value(search_value):
"""The <search-value> can be of the form::
d<device_id>r<region>z<zone>-<ip>:<port>R<r_ip>:<r_port>/
<device_name>_<meta>
Where <r_ip> and <r_port> are replication ip and port.
Any part is optional, but you must include at least one part.
Examples::
d74 Matches the device id 74
r4 Matches devices in region 4
z1 Matches devices in zone 1
z1-1.2.3.4 Matches devices in zone 1 with the ip 1.2.3.4
1.2.3.4 Matches devices in any zone with the ip 1.2.3.4
z1:5678 Matches devices in zone 1 using port 5678
:5678 Matches devices that use port 5678
R5.6.7.8 Matches devices that use replication ip 5.6.7.8
R:5678 Matches devices that use replication port 5678
1.2.3.4R5.6.7.8 Matches devices that use ip 1.2.3.4 and replication ip
5.6.7.8
/sdb1 Matches devices with the device name sdb1
_shiny Matches devices with shiny in the meta data
_"snet: 5.6.7.8" Matches devices with snet: 5.6.7.8 in the meta data
[::1] Matches devices in any zone with the ip ::1
z1-[::1]:5678 Matches devices in zone 1 with ip ::1 and port 5678
Most specific example::
d74r4z1-1.2.3.4:5678/sdb1_"snet: 5.6.7.8"
Nerd explanation:
All items require their single character prefix except the ip, in which
case the - is optional unless the device id or zone is also included.
"""
orig_search_value = search_value
match = {}
if search_value.startswith('d'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['id'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('r'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['region'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('z'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['zone'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('-'):
search_value = search_value[1:]
if search_value and search_value[0].isdigit():
i = 1
while i < len(search_value) and search_value[i] in '0123456789.':
i += 1
match['ip'] = search_value[:i]
search_value = search_value[i:]
elif search_value and search_value.startswith('['):
i = 1
while i < len(search_value) and search_value[i] != ']':
i += 1
i += 1
match['ip'] = search_value[:i].lstrip('[').rstrip(']')
search_value = search_value[i:]
if 'ip' in match:
# ipv6 addresses are converted to all lowercase
# and use the fully expanded representation
match['ip'] = validate_and_normalize_ip(match['ip'])
if search_value.startswith(':'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['port'] = int(search_value[1:i])
search_value = search_value[i:]
# replication parameters
if search_value.startswith('R'):
search_value = search_value[1:]
if search_value and search_value[0].isdigit():
i = 1
while (i < len(search_value) and
search_value[i] in '0123456789.'):
i += 1
match['replication_ip'] = search_value[:i]
search_value = search_value[i:]
elif search_value and search_value.startswith('['):
i = 1
while i < len(search_value) and search_value[i] != ']':
i += 1
i += 1
match['replication_ip'] = search_value[:i].lstrip('[').rstrip(']')
search_value = search_value[i:]
if 'replication_ip' in match:
# ipv6 addresses are converted to all lowercase
# and use the fully expanded representation
match['replication_ip'] = \
validate_and_normalize_ip(match['replication_ip'])
if search_value.startswith(':'):
i = 1
while i < len(search_value) and search_value[i].isdigit():
i += 1
match['replication_port'] = int(search_value[1:i])
search_value = search_value[i:]
if search_value.startswith('/'):
i = 1
while i < len(search_value) and search_value[i] != '_':
i += 1
match['device'] = search_value[1:i]
search_value = search_value[i:]
if search_value.startswith('_'):
match['meta'] = search_value[1:]
search_value = ''
if search_value:
raise ValueError('Invalid <search-value>: %s' %
repr(orig_search_value))
return match
def parse_search_values_from_opts(opts):
"""
Convert optparse style options into a dictionary for searching.
:param opts: optparse style options
:returns: a dictionary with search values to filter devices,
supported parameters are id, region, zone, ip, port,
replication_ip, replication_port, device, weight, meta
"""
search_values = {}
for key in ('id', 'region', 'zone', 'ip', 'port', 'replication_ip',
'replication_port', 'device', 'weight', 'meta'):
value = getattr(opts, key, None)
if value:
if key == 'ip' or key == 'replication_ip':
value = validate_and_normalize_address(value)
search_values[key] = value
return search_values
def parse_change_values_from_opts(opts):
"""
Convert optparse style options into a dictionary for changing.
:param opts: optparse style options
:returns: a dictionary with change values to filter devices,
supported parameters are ip, port, replication_ip,
replication_port
"""
change_values = {}
for key in ('change_ip', 'change_port', 'change_replication_ip',
'change_replication_port', 'change_device', 'change_meta'):
value = getattr(opts, key, None)
if value:
if key == 'change_ip' or key == 'change_replication_ip':
value = validate_and_normalize_address(value)
change_values[key.replace('change_', '')] = value
return change_values
def parse_add_value(add_value):
"""
Convert an add value, like 'r1z2-10.1.2.3:7878/sdf', to a dictionary.
If the string does not start with 'r<N>', then the value of 'region' in
the returned dictionary will be None. Callers should check for this and
set a reasonable default. This is done so callers can emit errors or
warnings if desired.
Similarly, 'replication_ip' and 'replication_port' will be None if not
specified.
:returns: dictionary with keys 'region', 'zone', 'ip', 'port', 'device',
'replication_ip', 'replication_port', 'meta'
:raises ValueError: if add_value is malformed
"""
region = None
rest = add_value
if add_value.startswith('r'):
i = 1
while i < len(add_value) and add_value[i].isdigit():
i += 1
region = int(add_value[1:i])
rest = add_value[i:]
if not rest.startswith('z'):
raise ValueError('Invalid add value: %s' % add_value)
i = 1
while i < len(rest) and rest[i].isdigit():
i += 1
zone = int(rest[1:i])
rest = rest[i:]
if not rest.startswith('-'):
raise ValueError('Invalid add value: %s' % add_value)
ip, port, rest = parse_address(rest[1:])
replication_ip = replication_port = None
if rest.startswith('R'):
replication_ip, replication_port, rest = \
parse_address(rest[1:])
if not rest.startswith('/'):
raise ValueError(
'Invalid add value: %s' % add_value)
i = 1
while i < len(rest) and rest[i] != '_':
i += 1
device_name = rest[1:i]
if not validate_device_name(device_name):
raise ValueError('Invalid device name')
rest = rest[i:]
meta = ''
if rest.startswith('_'):
meta = rest[1:]
return {'region': region, 'zone': zone, 'ip': ip, 'port': port,
'device': device_name, 'replication_ip': replication_ip,
'replication_port': replication_port, 'meta': meta}
def parse_address(rest):
if rest.startswith('['):
# remove first [] for ip
rest = rest.replace('[', '', 1).replace(']', '', 1)
pos = 0
while (pos < len(rest) and
not (rest[pos] == 'R' or rest[pos] == '/')):
pos += 1
address = rest[:pos]
rest = rest[pos:]
port_start = address.rfind(':')
if port_start == -1:
raise ValueError('Invalid port in add value')
ip = address[:port_start]
try:
port = int(address[(port_start + 1):])
except (TypeError, ValueError):
raise ValueError(
'Invalid port %s in add value' % address[port_start:])
# if this is an ipv6 address then we want to convert it
# to all lowercase and use its fully expanded representation
# to make searches easier
ip = validate_and_normalize_ip(ip)
return (ip, port, rest)
def validate_args(argvish):
"""
Build an OptionParser and determine whether the arguments use the new
command-line format or not.
"""
opts, args = parse_args(argvish)
# id can be 0 (swift starts generating id from 0),
# also zone, region and weight can be set to zero.
new_cmd_format = opts.id is not None or opts.region is not None or \
opts.zone is not None or opts.ip or opts.port or \
opts.replication_ip or opts.replication_port or \
opts.device or opts.weight is not None or opts.meta
return (new_cmd_format, opts, args)
def parse_args(argvish):
"""
Build OptionParser and evaluate command line arguments.
"""
parser = optparse.OptionParser()
parser.add_option('-u', '--id', type="int",
help="Device ID")
parser.add_option('-r', '--region', type="int",
help="Region")
parser.add_option('-z', '--zone', type="int",
help="Zone")
parser.add_option('-i', '--ip', type="string",
help="IP address")
parser.add_option('-p', '--port', type="int",
help="Port number")
parser.add_option('-j', '--replication-ip', type="string",
help="Replication IP address")
parser.add_option('-q', '--replication-port', type="int",
help="Replication port number")
parser.add_option('-d', '--device', type="string",
help="Device name (e.g. md0, sdb1)")
parser.add_option('-w', '--weight', type="float",
help="Device weight")
parser.add_option('-m', '--meta', type="string", default="",
help="Extra device info (just a string)")
parser.add_option('-I', '--change-ip', type="string",
help="IP address for change")
parser.add_option('-P', '--change-port', type="int",
help="Port number for change")
parser.add_option('-J', '--change-replication-ip', type="string",
help="Replication IP address for change")
parser.add_option('-Q', '--change-replication-port', type="int",
help="Replication port number for change")
parser.add_option('-D', '--change-device', type="string",
help="Device name (e.g. md0, sdb1) for change")
parser.add_option('-M', '--change-meta', type="string", default="",
help="Extra device info (just a string) for change")
parser.add_option('-y', '--yes', default=False, action="store_true",
help="Assume a yes response to all questions")
return parser.parse_args(argvish)
def parse_builder_ring_filename_args(argvish):
first_arg = argvish[1]
if first_arg.endswith('.ring.gz'):
ring_file = first_arg
builder_file = first_arg[:-len('.ring.gz')] + '.builder'
else:
builder_file = first_arg
if not builder_file.endswith('.builder'):
ring_file = first_arg
else:
ring_file = builder_file[:-len('.builder')]
ring_file += '.ring.gz'
return builder_file, ring_file
def build_dev_from_opts(opts):
"""
Convert optparse style options into a device dictionary.
"""
for attribute, shortopt, longopt in (['region', '-r', '--region'],
['zone', '-z', '--zone'],
['ip', '-i', '--ip'],
['port', '-p', '--port'],
['device', '-d', '--device'],
['weight', '-w', '--weight']):
if getattr(opts, attribute, None) is None:
raise ValueError('Required argument %s/%s not specified.' %
(shortopt, longopt))
ip = validate_and_normalize_address(opts.ip)
replication_ip = validate_and_normalize_address(
(opts.replication_ip or opts.ip))
replication_port = opts.replication_port or opts.port
if not validate_device_name(opts.device):
raise ValueError('Invalid device name')
return {'region': opts.region, 'zone': opts.zone, 'ip': ip,
'port': opts.port, 'device': opts.device, 'meta': opts.meta,
'replication_ip': replication_ip,
'replication_port': replication_port, 'weight': opts.weight}
def dispersion_report(builder, search_filter=None,
verbose=False, recalculate=False):
if recalculate or not builder._dispersion_graph:
builder._build_dispersion_graph()
max_allowed_replicas = builder._build_max_replicas_by_tier()
worst_tier = None
max_dispersion = 0.0
sorted_graph = []
for tier, replica_counts in sorted(builder._dispersion_graph.items()):
tier_name = get_tier_name(tier, builder)
if search_filter and not re.match(search_filter, tier_name):
continue
max_replicas = int(max_allowed_replicas[tier])
at_risk_parts = sum(replica_counts[i] * (i - max_replicas)
for i in range(max_replicas + 1,
len(replica_counts)))
placed_parts = sum(replica_counts[i] * i for i in range(
1, len(replica_counts)))
tier_dispersion = 100.0 * at_risk_parts / placed_parts
if tier_dispersion > max_dispersion:
max_dispersion = tier_dispersion
worst_tier = tier_name
if not verbose:
continue
tier_report = {
'max_replicas': max_replicas,
'placed_parts': placed_parts,
'dispersion': tier_dispersion,
'replicas': replica_counts,
}
sorted_graph.append((tier_name, tier_report))
return {
'max_dispersion': max_dispersion,
'worst_tier': worst_tier,
'graph': sorted_graph,
}
def validate_replicas_by_tier(replicas, replicas_by_tier):
"""
Validate the sum of the replicas at each tier.
The sum of the replicas at each tier should be less than or very close to
the upper limit indicated by replicas
:param replicas: float, the upper limit of replicas
:param replicas_by_tier: defaultdict, the replicas by tier
"""
tiers = ['cluster', 'regions', 'zones', 'servers', 'devices']
for i, tier_name in enumerate(tiers):
replicas_at_tier = sum(replicas_by_tier[t] for t in
replicas_by_tier if len(t) == i)
if abs(replicas - replicas_at_tier) > 1e-10:
raise exceptions.RingValidationError(
'%s != %s at tier %s' % (
replicas_at_tier, replicas, tier_name))
def format_device(region=None, zone=None, ip=None, device=None, **kwargs):
"""
Convert device dict or tier attributes to a representative string.
:returns: a string, the normalized format of a device tier
"""
return "r%sz%s-%s/%s" % (region, zone, ip, device)
def get_tier_name(tier, builder):
if len(tier) == 1:
return "r%s" % (tier[0], )
if len(tier) == 2:
return "r%sz%s" % (tier[0], tier[1])
if len(tier) == 3:
return "r%sz%s-%s" % (tier[0], tier[1], tier[2])
if len(tier) == 4:
device = builder.devs[tier[3]] or {}
return format_device(tier[0], tier[1], tier[2], device.get(
'device', 'IDd%s' % tier[3]))
def validate_device_name(device_name):
return not (
device_name.startswith(' ') or
device_name.endswith(' ') or
len(device_name) == 0)
def pretty_dev(device):
return format_device(**device)<|fim▁end|> |
region 1 -+---- zone 1 -+---- 192.168.101.1 -+---- device id 0
| | | |
<|file_name|>tuple_impl.rs<|end_file_name|><|fim▁begin|>//! Some iterator that produces tuples
use std::iter::Fuse;
// `HomogeneousTuple` is a public facade for `TupleCollect`, allowing
// tuple-related methods to be used by clients in generic contexts, while
// hiding the implementation details of `TupleCollect`.
// See https://github.com/rust-itertools/itertools/issues/387
/// Implemented for homogeneous tuples of size up to 4.
pub trait HomogeneousTuple
: TupleCollect
{}
impl<T: TupleCollect> HomogeneousTuple for T {}
/// An iterator over an incomplete tuple.
///
/// See [`.tuples()`](../trait.Itertools.html#method.tuples) and
/// [`Tuples::into_buffer()`](struct.Tuples.html#method.into_buffer).
#[derive(Clone, Debug)]
pub struct TupleBuffer<T>
where T: HomogeneousTuple
{
cur: usize,
buf: T::Buffer,
}
impl<T> TupleBuffer<T>
where T: HomogeneousTuple
{
fn new(buf: T::Buffer) -> Self {
TupleBuffer {
cur: 0,
buf,
}
}
}
impl<T> Iterator for TupleBuffer<T>
where T: HomogeneousTuple
{
type Item = T::Item;
fn next(&mut self) -> Option<Self::Item> {
let s = self.buf.as_mut();
if let Some(ref mut item) = s.get_mut(self.cur) {
self.cur += 1;
item.take()
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
let buffer = &self.buf.as_ref()[self.cur..];
let len = if buffer.len() == 0 {
0
} else {
buffer.iter()
.position(|x| x.is_none())
.unwrap_or(buffer.len())
};
(len, Some(len))
}
}
impl<T> ExactSizeIterator for TupleBuffer<T>
where T: HomogeneousTuple
{
}
/// An iterator that groups the items in tuples of a specific size.
///
/// See [`.tuples()`](../trait.Itertools.html#method.tuples) for more information.
#[derive(Clone)]
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
pub struct Tuples<I, T>
where I: Iterator<Item = T::Item>,
T: HomogeneousTuple
{
iter: Fuse<I>,
buf: T::Buffer,
}
/// Create a new tuples iterator.
pub fn tuples<I, T>(iter: I) -> Tuples<I, T>
where I: Iterator<Item = T::Item>,
T: HomogeneousTuple
{
Tuples {
iter: iter.fuse(),
buf: Default::default(),
}
}
impl<I, T> Iterator for Tuples<I, T>
where I: Iterator<Item = T::Item>,
T: HomogeneousTuple
{
type Item = T;
fn next(&mut self) -> Option<T> {
T::collect_from_iter(&mut self.iter, &mut self.buf)
}
}
impl<I, T> Tuples<I, T>
where I: Iterator<Item = T::Item>,
T: HomogeneousTuple
{
/// Return a buffer with the produced items that was not enough to be grouped in a tuple.
///
/// ```
/// use itertools::Itertools;
///
/// let mut iter = (0..5).tuples();
/// assert_eq!(Some((0, 1, 2)), iter.next());
/// assert_eq!(None, iter.next());
/// itertools::assert_equal(vec![3, 4], iter.into_buffer());
/// ```
pub fn into_buffer(self) -> TupleBuffer<T> {
TupleBuffer::new(self.buf)
}
}
/// An iterator over all contiguous windows that produces tuples of a specific size.
///
/// See [`.tuple_windows()`](../trait.Itertools.html#method.tuple_windows) for more
/// information.
#[must_use = "iterator adaptors are lazy and do nothing unless consumed"]
#[derive(Clone, Debug)]
pub struct TupleWindows<I, T>
where I: Iterator<Item = T::Item>,
T: HomogeneousTuple
{
iter: I,
last: Option<T>,
}
/// Create a new tuple windows iterator.
pub fn tuple_windows<I, T>(mut iter: I) -> TupleWindows<I, T>
where I: Iterator<Item = T::Item>,
T: HomogeneousTuple,
T::Item: Clone
{
use std::iter::once;
let mut last = None;
if T::num_items() != 1 {
// put in a duplicate item in front of the tuple; this simplifies
// .next() function.
if let Some(item) = iter.next() {
let iter = once(item.clone()).chain(once(item)).chain(&mut iter);
last = T::collect_from_iter_no_buf(iter);
}
}
TupleWindows {
last,
iter,
}
}
impl<I, T> Iterator for TupleWindows<I, T>
where I: Iterator<Item = T::Item>,
T: HomogeneousTuple + Clone,
T::Item: Clone
{
type Item = T;
fn next(&mut self) -> Option<T> {
if T::num_items() == 1 {
return T::collect_from_iter_no_buf(&mut self.iter)
}
if let Some(ref mut last) = self.last {
if let Some(new) = self.iter.next() {
last.left_shift_push(new);
return Some(last.clone());
}
}
None
}
}
pub trait TupleCollect: Sized {
type Item;
type Buffer: Default + AsRef<[Option<Self::Item>]> + AsMut<[Option<Self::Item>]>;
fn collect_from_iter<I>(iter: I, buf: &mut Self::Buffer) -> Option<Self>
where I: IntoIterator<Item = Self::Item>;
fn collect_from_iter_no_buf<I>(iter: I) -> Option<Self>
where I: IntoIterator<Item = Self::Item>;
fn num_items() -> usize;
fn left_shift_push(&mut self, item: Self::Item);
}
macro_rules! impl_tuple_collect {
() => ();
($N:expr; $A:ident ; $($X:ident),* ; $($Y:ident),* ; $($Y_rev:ident),*) => (
impl<$A> TupleCollect for ($($X),*,) {
type Item = $A;
type Buffer = [Option<$A>; $N - 1];
#[allow(unused_assignments, unused_mut)]
fn collect_from_iter<I>(iter: I, buf: &mut Self::Buffer) -> Option<Self>
where I: IntoIterator<Item = $A>
{
let mut iter = iter.into_iter();
$(
let mut $Y = None;
)*
loop {
$(
$Y = iter.next();
if $Y.is_none() {
break
}
)*
return Some(($($Y.unwrap()),*,))
}
let mut i = 0;
let mut s = buf.as_mut();
$(
if i < s.len() {
s[i] = $Y;
i += 1;
}
)*
return None;
}
#[allow(unused_assignments)]
fn collect_from_iter_no_buf<I>(iter: I) -> Option<Self>
where I: IntoIterator<Item = $A>
{
let mut iter = iter.into_iter();
loop {
$(
let $Y = if let Some($Y) = iter.next() {
$Y<|fim▁hole|> } else {
break;
};
)*
return Some(($($Y),*,))
}
return None;
}
fn num_items() -> usize {
$N
}
fn left_shift_push(&mut self, item: $A) {
use std::mem::replace;
let &mut ($(ref mut $Y),*,) = self;
let tmp = item;
$(
let tmp = replace($Y_rev, tmp);
)*
drop(tmp);
}
}
)
}
impl_tuple_collect!(1; A; A; a; a);
impl_tuple_collect!(2; A; A, A; a, b; b, a);
impl_tuple_collect!(3; A; A, A, A; a, b, c; c, b, a);
impl_tuple_collect!(4; A; A, A, A, A; a, b, c, d; d, c, b, a);<|fim▁end|> | |
<|file_name|>bytenet_test.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ByteNet tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensor2tensor.data_generators import problem_hparams
from tensor2tensor.models import bytenet
import tensorflow as tf
class ByteNetTest(tf.test.TestCase):
def testByteNet(self):
vocab_size = 9
x = np.random.random_integers(1, high=vocab_size - 1, size=(3, 5, 1, 1))
y = np.random.random_integers(1, high=vocab_size - 1, size=(3, 6, 1, 1))
hparams = bytenet.bytenet_base()
p_hparams = problem_hparams.test_problem_hparams(vocab_size, vocab_size)
with self.test_session() as session:
features = {
"inputs": tf.constant(x, dtype=tf.int32),
"targets": tf.constant(y, dtype=tf.int32),
}
model = bytenet.ByteNet(
hparams, tf.estimator.ModeKeys.TRAIN, p_hparams)
logits, _ = model(features)
session.run(tf.global_variables_initializer())
res = session.run(logits)
self.assertEqual(res.shape, (3, 50, 1, 1, vocab_size))
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | |
<|file_name|>0013_auto_20161216_1359.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-16 12:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('courses', '0012_userlesson'),
]
operations = [
migrations.AddField(
model_name='exercise',<|fim▁hole|> migrations.AddField(
model_name='exercise',
name='prev_exercise',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='prev', to='courses.Exercise'),
),
]<|fim▁end|> | name='next_exercise',
field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='next', to='courses.Exercise'),
), |
<|file_name|>_auth.py<|end_file_name|><|fim▁begin|><|fim▁hole|># You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GRPCAuthMetadataPlugins for standard authentication."""
import inspect
from concurrent import futures
import grpc
def _sign_request(callback, token, error):
metadata = (('authorization', 'Bearer {}'.format(token)),)
callback(metadata, error)
def _create_get_token_callback(callback):
def get_token_callback(future):
try:
access_token = future.result().access_token
except Exception as exception: # pylint: disable=broad-except
_sign_request(callback, None, exception)
else:
_sign_request(callback, access_token, None)
return get_token_callback
class GoogleCallCredentials(grpc.AuthMetadataPlugin):
"""Metadata wrapper for GoogleCredentials from the oauth2client library."""
def __init__(self, credentials):
self._credentials = credentials
self._pool = futures.ThreadPoolExecutor(max_workers=1)
# Hack to determine if these are JWT creds and we need to pass
# additional_claims when getting a token
self._is_jwt = 'additional_claims' in inspect.getargspec(
credentials.get_access_token).args
def __call__(self, context, callback):
# MetadataPlugins cannot block (see grpc.beta.interfaces.py)
if self._is_jwt:
future = self._pool.submit(
self._credentials.get_access_token,
additional_claims={'aud': context.service_url})
else:
future = self._pool.submit(self._credentials.get_access_token)
future.add_done_callback(_create_get_token_callback(callback))
def __del__(self):
self._pool.shutdown(wait=False)
class AccessTokenCallCredentials(grpc.AuthMetadataPlugin):
"""Metadata wrapper for raw access token credentials."""
def __init__(self, access_token):
self._access_token = access_token
def __call__(self, context, callback):
_sign_request(callback, self._access_token, None)<|fim▁end|> | # Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. |
<|file_name|>test_deprecated.py<|end_file_name|><|fim▁begin|># coding: utf-8
import string
import pandas as pd
import pandas.util.testing as tm
import pandas.util._test_decorators as td
import pytest
from numpy.random import randn
import pandas.tools.plotting as plotting
from pandas.tests.plotting.common import TestPlotBase
"""
Test cases for plot functions imported from deprecated
pandas.tools.plotting
"""
@td.skip_if_no_mpl
class TestDeprecatedNameSpace(TestPlotBase):
@pytest.mark.slow
@td.skip_if_no_scipy
def test_scatter_plot_legacy(self):
df = pd.DataFrame(randn(100, 2))
with tm.assert_produces_warning(FutureWarning):
plotting.scatter_matrix(df)
with tm.assert_produces_warning(FutureWarning):
pd.scatter_matrix(df)
@pytest.mark.slow
def test_boxplot_deprecated(self):
df = pd.DataFrame(randn(6, 4),
index=list(string.ascii_letters[:6]),
columns=['one', 'two', 'three', 'four'])
df['indic'] = ['foo', 'bar'] * 3
with tm.assert_produces_warning(FutureWarning):
plotting.boxplot(df, column=['one', 'two'],
by='indic')
@pytest.mark.slow
def test_radviz_deprecated(self, iris):
with tm.assert_produces_warning(FutureWarning):
plotting.radviz(frame=iris, class_column='Name')
@pytest.mark.slow<|fim▁hole|><|fim▁end|> | def test_plot_params(self):
with tm.assert_produces_warning(FutureWarning):
pd.plot_params['xaxis.compat'] = True |
<|file_name|>pre_install_app.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import networkx as nx
import copy
from ryu.base import app_manager
from ryu.controller import ofp_event
from ryu.controller.handler import CONFIG_DISPATCHER, MAIN_DISPATCHER, DEAD_DISPATCHER
from ryu.controller.handler import set_ev_cls
from ryu.ofproto import ofproto_v1_3
from ryu.ofproto.ofproto_v1_3 import OFP_DEFAULT_PRIORITY
from ryu.topology.api import get_all_switch, get_all_link, get_all_host
from ryu.lib.packet import packet
from ryu.lib.packet import ethernet, arp, icmp
from ryu.lib.packet import ether_types
from ryu.lib import hub
'''
### For Chapter 2 ###
Fig. 2-8
Pre-install flow entries for the end-to-end hosts ('h1' and 'h2')
----test----
Linear topology
ICMP
'''
class ProactiveApp(app_manager.RyuApp):
OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
def __init__(self, *args, **kwargs):
super(ProactiveApp, self).__init__(*args, **kwargs)
self.mac_to_port = {}
self.discover_thread = hub.spawn(self.pre_install)
# {dpid:{port:mac,port:mac,...},dpid:{port:mac,port:mac,...},...} only switches'mac
self.dpids_port_to_mac = dict()
# [dpid,dpid,...]
self.dpids = list()
# {(dpid,port):host_mac,(dpid,port):host_mac,...} only hosts'mac
self.dpids_port_to_host = dict()
#[host_mac,host_mac,host_mac,...]
self.hosts = list()
#{(src_dpid,dst_dpid):(src_port,dst_port),():(),...}
self.links_dpid_to_port = dict()
# [(src_dpid,dst_dpid),(src_dpid,dst_dpid),...]
self.links = list()
self.adjacency_matrix = dict()
self.pre_adjacency_matrix = dict()
# {
# (dpid,dpid):{xxx:[dpid,dpid,dpid],xxx:[dpid,dpid,dpid,dpid],...},
# (dpid,dpid):{xxx:[dpid,dpid,dpid],xxx:[dpid,dpid,dpid,dpid],...},
# ...}
self.path_table = dict()
self.dpid_to_dp = dict()
self.SLEEP_PERIOD = 2 #seconds
@set_ev_cls(ofp_event.EventOFPStateChange,[MAIN_DISPATCHER, DEAD_DISPATCHER])
def state_change_handler(self, ev):
datapath = ev.datapath
if ev.state == MAIN_DISPATCHER:
if not datapath.id in self.dpid_to_dp:
self.logger.info('register datapath: %04x', datapath.id)
self.dpid_to_dp[datapath.id] = datapath
elif ev.state == DEAD_DISPATCHER:
if datapath.id in self.dpid_to_dp:
self.logger.info('un register datapath: %04x', datapath.id)
del self.dpid_to_dp[datapath.id]
@set_ev_cls(ofp_event.EventOFPSwitchFeatures, CONFIG_DISPATCHER)
def switch_features_handler(self, ev):
datapath = ev.msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
# install table-miss flow entry
match = parser.OFPMatch()
actions = [parser.OFPActionOutput(ofproto.OFPP_CONTROLLER,
ofproto.OFPCML_NO_BUFFER)]
self.add_flow(datapath, 0, match, actions)
def add_flow(self, datapath, priority, match, actions, buffer_id=None):
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
inst = [parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS,
actions)]
if buffer_id:
mod = parser.OFPFlowMod(datapath=datapath, buffer_id=buffer_id,
priority=priority, match=match,
instructions=inst)
else:
mod = parser.OFPFlowMod(datapath=datapath, priority=priority,
match=match, instructions=inst)
datapath.send_msg(mod)
def pre_install(self):
while True:
hub.sleep(self.SLEEP_PERIOD)
self.pre_adjacency_matrix = copy.deepcopy(self.adjacency_matrix)
self._update_topology()
self._update_hosts()
if self.pre_adjacency_matrix != self.adjacency_matrix:
self.logger.info('***********discover_topology thread: TOPO UPDATE***********')
self.path_table = self._get_path_table(self.adjacency_matrix)
self.pre_install_flow()
def _update_topology(self):
switch_list = get_all_switch(self)
if len(switch_list) != 0:
self.dpids_port_to_mac = self._get_dpids_port_to_mac(switch_list)
self.dpids = self._get_dpids(switch_list) #[dpid,dpid,dpid,...]
link_dict = get_all_link(self)
if len(link_dict) != 0:
self.links_dpid_to_port = self._get_links_dpid_to_port(link_dict)
self.links = self._get_links(self.links_dpid_to_port) #[(src.dpid,dst.dpid),(src.dpid,dst.dpid),...]
if self.dpids and self.links:
self.adjacency_matrix = self._get_adjacency_matrix(self.dpids, self.links)
def _get_dpids_port_to_mac(self,switch_list):
table = dict()
for switch in switch_list:
dpid = switch.dp.id
table.setdefault(dpid,{})
ports = switch.ports
for port in ports:
table[dpid][port.port_no] = port.hw_addr
return table
def _get_dpids(self,switch_list):
dpid_list = list()
for switch in switch_list:
dpid_list.append(switch.dp.id)
return dpid_list
def _get_links(self,link_ports_table):
return link_ports_table.keys()
def _get_links_dpid_to_port(self,link_dict):
table = dict()
for link in link_dict.keys():
src = link.src #ryu.topology.switches.Port
dst = link.dst
table[(src.dpid,dst.dpid)] = (src.port_no, dst.port_no)
return table
def _get_adjacency_matrix(self,dpids,links):
graph = dict()
for src in dpids:
graph[src] = dict()
for dst in dpids:
graph[src][dst] = float('inf')
if src == dst:
graph[src][dst] = 0
elif (src, dst) in links:
graph[src][dst] = 1
return graph
def _get_path_table(self, matrix):
if matrix:
dpids = matrix.keys()
g = nx.Graph()
g.add_nodes_from(dpids)
for i in dpids:
for j in dpids:
if matrix[i][j] == 1:
g.add_edge(i,j,weight=1)
return self.__graph_to_path(g)
def __graph_to_path(self,g): # {(i,j):[[],[],...],(i,j):[[],[],[],..],...}
all_shortest_paths = dict()<|fim▁hole|> if i == j:
continue
all_shortest_paths[(i,j)] = list()
try:
nx.shortest_path(g,i,j)
except nx.exception.NetworkXNoPath:
continue
for each in nx.all_shortest_paths(g,i,j):
all_shortest_paths[(i,j)].append(each)
return all_shortest_paths
def _update_hosts(self):
host_list = get_all_host(self)
if host_list:
self.dpids_port_to_host = self._get_dpids_port_to_host(host_list)
self.hosts = self._get_hosts(host_list)
def _get_dpids_port_to_host(self,host_list):
table = dict()
for host in host_list:
host_mac = host.mac
host_port = host.port # Port
dpid = host_port.dpid
table[(dpid,host_port.port_no)] = host_mac
return table
def _get_hosts(self,host_list):
hosts = list()
for host in host_list:
hosts.append(host.mac)
return hosts
def pre_install_flow(self):
print("execute pre-install flow")
if len(self.hosts) == 2:
print("host num:",2)
host1 = self.hosts[0]
host2 = self.hosts[1]
self._pre_install_flow(host1,host2)
self._pre_install_flow(host2,host1)
def _pre_install_flow(self,host1,host2):
host1_dpid = None
host2_dpid = None
host1_port = None
host2_port = None
for dpid_port in self.dpids_port_to_host.keys():
if self.dpids_port_to_host[dpid_port] == host1:
host1_dpid = dpid_port[0]
host1_port = dpid_port[1]
elif self.dpids_port_to_host[dpid_port] == host2:
host2_dpid = dpid_port[0]
host2_port = dpid_port[1]
if host1_dpid == host2_dpid:
datapath = self.dpid_to_dp[host1_dpid]
parser = datapath.ofproto_parser
priority = OFP_DEFAULT_PRIORITY
match = parser.OFPMatch(in_port=host1_port,eth_dst=host2) # , eth_dst=host2
actions = [parser.OFPActionOutput(host2_port)]
self.add_flow(datapath, priority, match, actions)
else:
traffic = self.path_table[(host1_dpid,host2_dpid)][0]
length = len(traffic)
for i in range(length):
datapath = self.dpid_to_dp[traffic[i]]
parser = datapath.ofproto_parser
priority = OFP_DEFAULT_PRIORITY
if i == 0:
match = parser.OFPMatch(in_port=host1_port,eth_dst=host2) # , eth_dst=host2
out_port = self.links_dpid_to_port[(traffic[i],traffic[i+1])][0]
actions = [parser.OFPActionOutput(out_port)]
self.add_flow(datapath, priority, match, actions)
elif i == length -1:
in_port = self.links_dpid_to_port[(traffic[i-1],traffic[i])][1]
match = parser.OFPMatch(in_port=in_port,eth_dst=host2) # , eth_dst=host2
actions = [parser.OFPActionOutput(host2_port)]
self.add_flow(datapath, priority, match, actions)
else:
in_port = self.links_dpid_to_port[(traffic[i-1],traffic[i])][1]
out_port = self.links_dpid_to_port[(traffic[i],traffic[i+1])][0]
match = parser.OFPMatch(in_port=in_port,eth_dst=host2) # , eth_dst=host2
actions = [parser.OFPActionOutput(out_port)]
self.add_flow(datapath, priority, match, actions)
@set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER)
def _packet_in_handler(self, ev):
# If you hit this you might want to increase
# the "miss_send_length" of your switch
if ev.msg.msg_len < ev.msg.total_len:
self.logger.debug("packet truncated: only %s of %s bytes",
ev.msg.msg_len, ev.msg.total_len)
msg = ev.msg
datapath = msg.datapath
ofproto = datapath.ofproto
parser = datapath.ofproto_parser
in_port = msg.match['in_port']
pkt = packet.Packet(msg.data)
eth = pkt.get_protocols(ethernet.ethernet)[0]
if eth.ethertype == ether_types.ETH_TYPE_LLDP:
# ignore lldp packet
return
dst = eth.dst
src = eth.src
ar = pkt.get_protocol(arp.arp)
ic = pkt.get_protocol(icmp.icmp)
if isinstance(ar, arp.arp):
print("-----arp packet------")
print("dpid:",datapath.id)
# print("dpid:",datapath.id)
# print(pkt)
# for each in self.mac_to_port:
# print "dpid:",each
# for a in self.mac_to_port[each]:
# print "mac:",a,"->","port:",self.mac_to_port[each][a]
if isinstance(ic, icmp.icmp):
print("-----icmp packet------")
print("dpid:",datapath.id)
# print(pkt)
# for each in self.mac_to_port:
# print "dpid:",each
# for a in self.mac_to_port[each]:
# print "mac:",a,"->","port:",self.mac_to_port[each][a]
dpid = datapath.id
self.mac_to_port.setdefault(dpid, {})
# self.logger.info("packet in %s %s %s %s", dpid, src, dst, in_port)
# learn a mac address to avoid FLOOD next time.
self.mac_to_port[dpid][src] = in_port
if dst in self.mac_to_port[dpid]:
out_port = self.mac_to_port[dpid][dst]
else:
out_port = ofproto.OFPP_FLOOD
actions = [parser.OFPActionOutput(out_port)]
data = None
if msg.buffer_id == ofproto.OFP_NO_BUFFER:
data = msg.data
out = parser.OFPPacketOut(datapath=datapath, buffer_id=msg.buffer_id,
in_port=in_port, actions=actions, data=data)
datapath.send_msg(out)<|fim▁end|> | for i in g.nodes():
for j in g.nodes(): |
<|file_name|>exporter.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions to export object detection inference graph."""
import logging
import os
import tensorflow as tf
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.client import session
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import importer
from tensorflow.python.platform import gfile
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.training import saver as saver_lib
from object_detection.builders import model_builder
from object_detection.core import standard_fields as fields
from object_detection.data_decoders import tf_example_decoder
slim = tf.contrib.slim
# TODO: Replace with freeze_graph.freeze_graph_with_def_protos when
# newer version of Tensorflow becomes more common.
def freeze_graph_with_def_protos(
input_graph_def,
input_saver_def,
input_checkpoint,
output_node_names,
restore_op_name,
filename_tensor_name,
clear_devices,
initializer_nodes,
variable_names_blacklist=''):
"""Converts all variables in a graph and checkpoint into constants."""
del restore_op_name, filename_tensor_name # Unused by updated loading code.
# 'input_checkpoint' may be a prefix if we're using Saver V2 format
if not saver_lib.checkpoint_exists(input_checkpoint):
raise ValueError(
'Input checkpoint "' + input_checkpoint + '" does not exist!')
if not output_node_names:
raise ValueError(
'You must supply the name of a node to --output_node_names.')
# Remove all the explicit device specifications for this node. This helps to
# make the graph more portable.
if clear_devices:<|fim▁hole|>
with session.Session() as sess:
if input_saver_def:
saver = saver_lib.Saver(saver_def=input_saver_def)
saver.restore(sess, input_checkpoint)
else:
var_list = {}
reader = pywrap_tensorflow.NewCheckpointReader(input_checkpoint)
var_to_shape_map = reader.get_variable_to_shape_map()
for key in var_to_shape_map:
try:
tensor = sess.graph.get_tensor_by_name(key + ':0')
except KeyError:
# This tensor doesn't exist in the graph (for example it's
# 'global_step' or a similar housekeeping element) so skip it.
continue
var_list[key] = tensor
saver = saver_lib.Saver(var_list=var_list)
saver.restore(sess, input_checkpoint)
if initializer_nodes:
sess.run(initializer_nodes)
variable_names_blacklist = (variable_names_blacklist.split(',') if
variable_names_blacklist else None)
output_graph_def = graph_util.convert_variables_to_constants(
sess,
input_graph_def,
output_node_names.split(','),
variable_names_blacklist=variable_names_blacklist)
return output_graph_def
def get_frozen_graph_def(inference_graph_def, use_moving_averages,
input_checkpoint, output_node_names):
"""Freezes all variables in a graph definition."""
saver = None
if use_moving_averages:
variable_averages = tf.train.ExponentialMovingAverage(0.0)
variables_to_restore = variable_averages.variables_to_restore()
saver = tf.train.Saver(variables_to_restore)
else:
saver = tf.train.Saver()
frozen_graph_def = freeze_graph_with_def_protos(
input_graph_def=inference_graph_def,
input_saver_def=saver.as_saver_def(),
input_checkpoint=input_checkpoint,
output_node_names=output_node_names,
restore_op_name='save/restore_all',
filename_tensor_name='save/Const:0',
clear_devices=True,
initializer_nodes='')
return frozen_graph_def
# TODO: Support batch tf example inputs.
def _tf_example_input_placeholder():
tf_example_placeholder = tf.placeholder(
tf.string, shape=[], name='tf_example')
tensor_dict = tf_example_decoder.TfExampleDecoder().decode(
tf_example_placeholder)
image = tensor_dict[fields.InputDataFields.image]
return tf.expand_dims(image, axis=0)
def _image_tensor_input_placeholder():
return tf.placeholder(dtype=tf.uint8,
shape=(1, None, None, 3),
name='image_tensor')
def _encoded_image_string_tensor_input_placeholder():
image_str = tf.placeholder(dtype=tf.string,
shape=[],
name='encoded_image_string_tensor')
image_tensor = tf.image.decode_image(image_str, channels=3)
image_tensor.set_shape((None, None, 3))
return tf.expand_dims(image_tensor, axis=0)
input_placeholder_fn_map = {
'image_tensor': _image_tensor_input_placeholder,
'encoded_image_string_tensor':
_encoded_image_string_tensor_input_placeholder,
'tf_example': _tf_example_input_placeholder,
}
def _add_output_tensor_nodes(postprocessed_tensors):
"""Adds output nodes for detection boxes and scores.
Adds the following nodes for output tensors -
* num_detections: float32 tensor of shape [batch_size].
* detection_boxes: float32 tensor of shape [batch_size, num_boxes, 4]
containing detected boxes.
* detection_scores: float32 tensor of shape [batch_size, num_boxes]
containing scores for the detected boxes.
* detection_classes: float32 tensor of shape [batch_size, num_boxes]
containing class predictions for the detected boxes.
* detection_masks: (Optional) float32 tensor of shape
[batch_size, num_boxes, mask_height, mask_width] containing masks for each
detection box.
Args:
postprocessed_tensors: a dictionary containing the following fields
'detection_boxes': [batch, max_detections, 4]
'detection_scores': [batch, max_detections]
'detection_classes': [batch, max_detections]
'detection_masks': [batch, max_detections, mask_height, mask_width]
(optional).
'num_detections': [batch]
Returns:
A tensor dict containing the added output tensor nodes.
"""
label_id_offset = 1
boxes = postprocessed_tensors.get('detection_boxes')
scores = postprocessed_tensors.get('detection_scores')
classes = postprocessed_tensors.get('detection_classes') + label_id_offset
masks = postprocessed_tensors.get('detection_masks')
num_detections = postprocessed_tensors.get('num_detections')
outputs = {}
outputs['detection_boxes'] = tf.identity(boxes, name='detection_boxes')
outputs['detection_scores'] = tf.identity(scores, name='detection_scores')
outputs['detection_classes'] = tf.identity(classes, name='detection_classes')
outputs['num_detections'] = tf.identity(num_detections, name='num_detections')
if masks is not None:
outputs['detection_masks'] = tf.identity(masks, name='detection_masks')
return outputs
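# Downstream consumers of the frozen graph typically fetch these outputs by the
# names assigned above; for example (illustrative only):
#   boxes = graph.get_tensor_by_name('detection_boxes:0')
#   scores = graph.get_tensor_by_name('detection_scores:0')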
def _write_inference_graph(inference_graph_path,
checkpoint_path=None,
use_moving_averages=False,
output_node_names=(
'num_detections,detection_scores,'
'detection_boxes,detection_classes')):
"""Writes inference graph to disk with the option to bake in weights.
If checkpoint_path is not None bakes the weights into the graph thereby
eliminating the need of checkpoint files during inference. If the model
was trained with moving averages, setting use_moving_averages to true
restores the moving averages, otherwise the original set of variables
is restored.
Args:
inference_graph_path: Path to write inference graph.
checkpoint_path: Optional path to the checkpoint file.
use_moving_averages: Whether to export the original or the moving averages
of the trainable variables from the checkpoint.
output_node_names: Output tensor names, defaults are: num_detections,
detection_scores, detection_boxes, detection_classes.
"""
inference_graph_def = tf.get_default_graph().as_graph_def()
if checkpoint_path:
output_graph_def = get_frozen_graph_def(
inference_graph_def=inference_graph_def,
use_moving_averages=use_moving_averages,
input_checkpoint=checkpoint_path,
output_node_names=output_node_names,
)
with gfile.GFile(inference_graph_path, 'wb') as f:
f.write(output_graph_def.SerializeToString())
logging.info('%d ops in the final graph.', len(output_graph_def.node))
return
tf.train.write_graph(inference_graph_def,
os.path.dirname(inference_graph_path),
os.path.basename(inference_graph_path),
as_text=False)
def _write_saved_model(inference_graph_path, inputs, outputs,
checkpoint_path=None, use_moving_averages=False):
"""Writes SavedModel to disk.
If checkpoint_path is not None bakes the weights into the graph thereby
eliminating the need of checkpoint files during inference. If the model
was trained with moving averages, setting use_moving_averages to true
restores the moving averages, otherwise the original set of variables
is restored.
Args:
inference_graph_path: Path to write inference graph.
inputs: The input image tensor to use for detection.
outputs: A tensor dictionary containing the outputs of a DetectionModel.
checkpoint_path: Optional path to the checkpoint file.
use_moving_averages: Whether to export the original or the moving averages
of the trainable variables from the checkpoint.
"""
inference_graph_def = tf.get_default_graph().as_graph_def()
checkpoint_graph_def = None
if checkpoint_path:
output_node_names = ','.join(outputs.keys())
checkpoint_graph_def = get_frozen_graph_def(
inference_graph_def=inference_graph_def,
use_moving_averages=use_moving_averages,
input_checkpoint=checkpoint_path,
output_node_names=output_node_names
)
with tf.Graph().as_default():
with session.Session() as sess:
tf.import_graph_def(checkpoint_graph_def)
builder = tf.saved_model.builder.SavedModelBuilder(inference_graph_path)
tensor_info_inputs = {
'inputs': tf.saved_model.utils.build_tensor_info(inputs)}
tensor_info_outputs = {}
for k, v in outputs.items():
tensor_info_outputs[k] = tf.saved_model.utils.build_tensor_info(v)
detection_signature = (
tf.saved_model.signature_def_utils.build_signature_def(
inputs=tensor_info_inputs,
outputs=tensor_info_outputs,
method_name=signature_constants.PREDICT_METHOD_NAME))
builder.add_meta_graph_and_variables(
sess, [tf.saved_model.tag_constants.SERVING],
signature_def_map={
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
detection_signature,
},
)
builder.save()
def _export_inference_graph(input_type,
detection_model,
use_moving_averages,
checkpoint_path,
inference_graph_path,
export_as_saved_model=False):
"""Export helper."""
if input_type not in input_placeholder_fn_map:
raise ValueError('Unknown input type: {}'.format(input_type))
inputs = tf.to_float(input_placeholder_fn_map[input_type]())
preprocessed_inputs = detection_model.preprocess(inputs)
output_tensors = detection_model.predict(preprocessed_inputs)
postprocessed_tensors = detection_model.postprocess(output_tensors)
outputs = _add_output_tensor_nodes(postprocessed_tensors)
out_node_names = list(outputs.keys())
if export_as_saved_model:
_write_saved_model(inference_graph_path, inputs, outputs, checkpoint_path,
use_moving_averages)
else:
_write_inference_graph(inference_graph_path, checkpoint_path,
use_moving_averages,
output_node_names=','.join(out_node_names))
def export_inference_graph(input_type, pipeline_config, checkpoint_path,
inference_graph_path, export_as_saved_model=False):
"""Exports inference graph for the model specified in the pipeline config.
Args:
input_type: Type of input for the graph. Can be one of [`image_tensor`,
`tf_example`].
pipeline_config: pipeline_pb2.TrainAndEvalPipelineConfig proto.
checkpoint_path: Path to the checkpoint file to freeze.
inference_graph_path: Path to write inference graph to.
export_as_saved_model: If the model should be exported as a SavedModel. If
false, it is saved as an inference graph.
"""
detection_model = model_builder.build(pipeline_config.model,
is_training=False)
_export_inference_graph(input_type, detection_model,
pipeline_config.eval_config.use_moving_averages,
checkpoint_path, inference_graph_path,
export_as_saved_model)<|fim▁end|> | for node in input_graph_def.node:
node.device = ''
_ = importer.import_graph_def(input_graph_def, name='') |
<|file_name|>user.py<|end_file_name|><|fim▁begin|># User info wrapper object
import logging
class User(object):
"""
Wrapper object around an entry in users.json. Behaves like a read-only dictionary if
asked, but adds some useful logic to decouple the front end from the JSON structure.
"""
_NAME_KEYS = ["display_name", "real_name"]
_DEFAULT_IMAGE_KEY = "image_512"
def __init__(self, raw_data):
self._raw = raw_data
def __getitem__(self, key):
return self._raw[key]
@property
def display_name(self):
"""
Find the most appropriate display name for a user: look for a "display_name", then
a "real_name", and finally fall back to the always-present "name".
"""
for k in self._NAME_KEYS:
if self._raw.get(k):
return self._raw[k]
if "profile" in self._raw and self._raw["profile"].get(k):
return self._raw["profile"][k]
return self._raw["name"]
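        # Illustrative example (not from the original module):
        #   User({"name": "jdoe", "profile": {"real_name": "Jane Doe"}}).display_name
        # misses "display_name", finds "real_name" in the profile and returns
        # "Jane Doe"; with neither set it falls back to the "name" field.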
@property
def email(self):
"""
Shortcut property for finding the e-mail address or bot URL.
"""
if "profile" in self._raw:
email = self._raw["profile"].get("email")
elif "bot_url" in self._raw:
email = self._raw["bot_url"]
else:
email = None
if not email:
logging.debug("No email found for %s", self._raw.get("name"))
return email
def image_url(self, pixel_size=None):
"""
Get the URL for the user icon in the desired pixel size, if it exists. If no
size is supplied, give the URL for the full-size image.
"""
if "profile" not in self._raw:
return
profile = self._raw["profile"]
if (pixel_size):
img_key = "image_%s" % pixel_size
if img_key in profile:
return profile[img_key]<|fim▁hole|>def deleted_user(id):
"""
Create a User object for a deleted user.
"""
deleted_user = {
"id": id,
"name": "deleted-" + id,
"deleted": True,
"is_bot": False,
"is_app_user": False,
}
return User(deleted_user)<|fim▁end|> | return profile[self._DEFAULT_IMAGE_KEY]
|
<|file_name|>test_method_caller_py3.py<|end_file_name|><|fim▁begin|>"""MethodCaller provider traversal tests."""
from dependency_injector import providers
def test_traverse():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider = method.call()
all_providers = list(provider.traverse())
assert len(all_providers) == 3
assert provider1 in all_providers<|fim▁hole|> assert provided in all_providers
assert method in all_providers
def test_traverse_args():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider2 = providers.Provider()
provider = method.call("foo", provider2)
all_providers = list(provider.traverse())
assert len(all_providers) == 4
assert provider1 in all_providers
assert provider2 in all_providers
assert provided in all_providers
assert method in all_providers
def test_traverse_kwargs():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider2 = providers.Provider()
provider = method.call(foo="foo", bar=provider2)
all_providers = list(provider.traverse())
assert len(all_providers) == 4
assert provider1 in all_providers
assert provider2 in all_providers
assert provided in all_providers
assert method in all_providers
def test_traverse_overridden():
provider1 = providers.Provider()
provided = provider1.provided
method = provided.method
provider2 = providers.Provider()
provider = method.call()
provider.override(provider2)
all_providers = list(provider.traverse())
assert len(all_providers) == 4
assert provider1 in all_providers
assert provider2 in all_providers
assert provided in all_providers
assert method in all_providers<|fim▁end|> | |
<|file_name|>ek-instances.directive.js<|end_file_name|><|fim▁begin|>/*
Copyright 2016 ElasticBox All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and<|fim▁hole|>limitations under the License.
*/
import './ek-instances.less';
import Directive from 'directive';
import Controller from './ek-instances.controller';
import constants from '../constants';
import template from './ek-instances.html';
class InstancesDirective extends Directive {
constructor() {
super({ Controller, template });
}
compile(tElement) {
tElement
.addClass('ek-instances layout-column');
return ($scope) => _.extend($scope, constants);
}
}
export default InstancesDirective;<|fim▁end|> | |
<|file_name|>requiem.py<|end_file_name|><|fim▁begin|>from subprocess import check_call, call, Popen, PIPE
import os
import textwrap
import glob
os.putenv("DEBIAN_FRONTEND", "noninteractive")
#######
## Plumbing
#######
def get_output(cmd, **kwargs):
check = kwargs.pop("check", True)
kwargs["stdout"] = PIPE
p = Popen(cmd, **kwargs)
stdout, stderr = p.communicate()
if check and p.returncode:
raise ValueError("%r return code %s" % (cmd, p.returncode))
return stdout
def sh(cmd):
check_call(cmd, shell=True)
def shh(cmd):
get_output(cmd, shell=True)
#######
## Packages
#######
def add_apt_key(url):
sh("wget -O - %s | apt-key add -" % url)
def add_apt_repo(name, spec):
with file("/etc/apt/sources.list.d/%s.list" % name, "wb") as outf:
outf.write("deb %s\n" % spec)
sh("apt-get update")
def install(*packages):
sh("apt-get install -y --no-install-recommends %s" % " ".join(packages))
def get_packages():
return set(
l.split()[0]
for l in get_output("dpkg --get-selections", shell=True).splitlines()
if l
)
def has_package(*check_packages):
all_packages = get_packages()
return (set(check_packages) <= all_packages)
def setup_apt_cacher_ng(apt_cacher_ng_url):
proxy_config_file = "/etc/apt/apt.conf.d/90proxy"
proxy_url = apt_cacher_ng_url.rstrip("/")
if proxy_url in read(proxy_config_file):
print "Apt proxy already configured"
return
try:
import urllib
data = urllib.urlopen(apt_cacher_ng_url).read()
except:
print "Could not acquire apt proxy settings"
return
if "APT Reconfiguration required" in data: # Looks like a valid apt-cacher-ng page
write(proxy_config_file, """Acquire::http { Proxy "%s"; };""" % proxy_url)
print "Apt proxy activated"
else:
print "Not a proper apt proxy"
#######
## File damagement
#######
def has_file(path):<|fim▁hole|> for spec in specs:
for filename in glob.glob(spec):
if os.path.isfile(filename):
print "nuking: %s" % filename
os.unlink(filename)
def write(filename, content):
with file(filename, "wb") as out_f:
out_f.write(textwrap.dedent(content.strip("\n\r")))
def read(filename):
if os.path.isfile(filename):
with file(filename, "rb") as in_f:
return in_f.read()
return ""
#######
## Services
#######
def restart(service):
sh("service %s restart" % service)
#######
## Macros
#######
def configure_etckeeper():
if not has_package("etckeeper"):
install("etckeeper", "git-core")
write("/etc/etckeeper/etckeeper.conf", """
VCS="git"
GIT_COMMIT_OPTIONS=""
HIGHLEVEL_PACKAGE_MANAGER=apt
LOWLEVEL_PACKAGE_MANAGER=dpkg
""")
sh("etckeeper init")
sh("etckeeper commit initial")
print "etckeeper provisioned"<|fim▁end|> | return os.path.exists(path)
def nuke(*specs): |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup
version = 'y.dev0'
long_description = '\n\n'.join([
open('README.rst').read(),
open('TODO.rst').read(),
open('CREDITS.rst').read(),
open('CHANGES.rst').read(),
])
install_requires = [
'pkginfo',
'setuptools',
'nens',
],
tests_require = [
]
<|fim▁hole|> long_description=long_description,
# Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[],
keywords=[],
author='Pieter Swinkels',
author_email='[email protected]',
url='',
license='GPL',
packages=['timeseries'],
include_package_data=True,
zip_safe=False,
install_requires=install_requires,
tests_require=tests_require,
extras_require = {'test': tests_require},
entry_points={
'console_scripts': [
'ziprelease = adapter.ziprelease:main',
]},
)<|fim▁end|> | setup(name='timeseries',
version=version,
description="Package to implement time series and generic operations on time series.", |
<|file_name|>weekend.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from grazyna.utils import register
@register(cmd='weekend')
def weekend(bot):
"""
Answer to timeless question - are we at .weekend, yet?
"""
current_date = datetime.now()<|fim▁hole|> answer = "Oczywiście %s - jest weekend. Omawiamy tylko lajtowe tematy, ok?" % nick
else:
str_day = datetime.strftime(current_date, "%A")
answer = "%s - dopiero %s, musisz jeszcze poczekać..." % (nick, str_day)
bot.reply(answer)<|fim▁end|> | day = current_date.weekday()
nick = bot.user.nick
if day in (5, 6): |
<|file_name|>punchestableview.cpp<|end_file_name|><|fim▁begin|>#include "punchestableview.h"
#include <qf/core/log.h>
#include <QDrag>
#include <QDragEnterEvent>
#include <QMimeData>
#include <QPainter>
#include <QPixmap>
PunchesTableView::PunchesTableView(QWidget *parent)
: Super(parent)
{
setDropIndicatorShown(false);
}
bool PunchesTableView::edit(const QModelIndex &index, QAbstractItemView::EditTrigger trigger, QEvent *event)
{
Q_UNUSED(event)
if(trigger == QAbstractItemView::EditTrigger::DoubleClicked
|| trigger == QAbstractItemView::EditTrigger::EditKeyPressed) {
qf::core::utils::TableRow row = tableRow(index.row());
int class_id = row.value("classes.id").toInt();
int code = row.value("punches.code").toInt();
qfDebug() << "codeClassActivated:" << class_id << code;
emit codeClassActivated(class_id, code);<|fim▁hole|>/*
void PunchesTableView::mousePressEvent(QMouseEvent *event)
{
qfInfo() << Q_FUNC_INFO;
QModelIndex ix = indexAt(event->pos());
if (!ix.isValid())
return;
qf::core::utils::TableRow row = tableRow(ix.row());
QString class_name = row.value(QStringLiteral("classes.name")).toString();
int code = row.value(QStringLiteral("punches.code")).toInt();
QByteArray item_data;
QDataStream data_stream(&item_data, QIODevice::WriteOnly);
data_stream << ix.row() << ix.column();
QMimeData *mime_data = new QMimeData;
mime_data->setData("application/x-quickevent", item_data);
QDrag *drag = new QDrag(this);
drag->setMimeData(mime_data);
//drag->setPixmap(pixmap);
//drag->setHotSpot(event->pos() - child->pos());
QPixmap px{QSize{10, 10}};
QPainter painter;
QFont f = font();
QFontMetrics fm(f, &px);
QString s = QString("%1 - %2").arg(class_name).arg(code);
QRect bounding_rect = fm.boundingRect(s);
static constexpr int inset = 5;
bounding_rect.adjust(-inset, -inset, inset, inset);
px = QPixmap{bounding_rect.size()};
painter.begin(&px);
painter.setFont(f);
//painter.setPen(Qt::black);
//painter.setBrush(Qt::black);
painter.fillRect(px.rect(), QColor("khaki"));
painter.drawRect(QRect(QPoint(), bounding_rect.size() - QSize(1, 1)));
painter.drawText(QPoint{inset, inset + fm.ascent()}, s);
painter.end();
drag->setPixmap(px);
if (drag->exec(Qt::CopyAction | Qt::MoveAction, Qt::CopyAction) == Qt::MoveAction) {
//child->close();
} else {
//child->show();
//child->setPixmap(pixmap);
}
}
void PunchesTableView::dragEnterEvent(QDragEnterEvent *event)
{
if (event->mimeData()->hasFormat("application/x-quickevent")) {
if (event->source() == this) {
event->setDropAction(Qt::MoveAction);
event->accept();
} else {
event->acceptProposedAction();
}
} else {
event->ignore();
}
}
void PunchesTableView::dragMoveEvent(QDragMoveEvent *event)
{
if (event->mimeData()->hasFormat("application/x-quickevent")) {
if (event->source() == this) {
event->setDropAction(Qt::MoveAction);
event->accept();
} else {
event->acceptProposedAction();
}
} else {
event->ignore();
}
}
*/<|fim▁end|> | }
return false;
} |
<|file_name|>estr-uniq.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>// except according to those terms.
#[allow(dead_assignment)];
pub fn main() {
let x : ~str = ~"hello";
let _y : ~str = ~"there";
let mut z = ~"thing";
z = x;
assert_eq!(z[0], ('h' as u8));
assert_eq!(z[4], ('o' as u8));
}<|fim▁end|> | // option. This file may not be copied, modified, or distributed |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import sys
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
requires = [
'blessings >= 1.6, < 2.0',
'sqlalchemy >= 1.3, < 2.0',
'PyYAML >= 5.1, < 6.0',
'python-dateutil >= 2.8, <3.0',
'click >= 6.7, <7.0',
'czech-holidays',
'python-slugify',
]
tests_require = ['pytest']
if sys.version_info < (3, 4):<|fim▁hole|>
setup_args = dict(
name='pyvodb',
version='1.0',
packages=find_packages(),
url='https://github.com/pyvec/pyvodb',
description="""Database of Pyvo meetups""",
author='Petr Viktorin',
author_email='[email protected]',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
],
install_requires=requires,
tests_require=tests_require,
cmdclass={'test': PyTest},
entry_points={
'console_scripts': [
'pyvo=pyvodb.cli:main',
],
},
)
if __name__ == '__main__':
setup(**setup_args)<|fim▁end|> | # pathlib is in the stdlib since Python 3.4
requires.append('pathlib >= 1.0.1, < 2.0') |
<|file_name|>VAB_massdownload.py<|end_file_name|><|fim▁begin|>import urllib2
import urllib
import os, sys
from bs4 import *
argv = sys.argv[1:]
begin = int(argv[0])
count = int(argv[1])
for i in range(begin, begin+count):
try:
url = 'http://danbooru.donmai.us/posts/' + str(i)
request = urllib2.Request(url)
response = urllib2.urlopen(request)
html = response.read()
soup = BeautifulSoup(html)
relURL = soup.select('#image')[0]['src'].split('/data/')[1]
if 'sample' in relURL:
# Image was too big and thus was resized.
relURL = relURL.split('sample-')[1]
<|fim▁hole|> newPath = 'http://danbooru.donmai.us/data/' + relURL
newFile = 'C:\\programming\\vacbooru-master\\dbu\\' + relURL
if not os.path.exists(newFile):
r = urllib.urlopen(newPath).read()
if len(r) > 400:
f = open(newFile,'wb')
f.write(r)
f.close()
print str(i) + " downloaded"
else:
print str(i) + " is a 0 size image"
else:
print str(i) + " already exists"
except Exception as e:
print str(i) + " download failed: " + str(e)
if 'list index out of range' in str(e):
print "\t This is likley a image that needs dbu gold"<|fim▁end|> | |
<|file_name|>restapi.go<|end_file_name|><|fim▁begin|>// Discordgo - Discord bindings for Go
// Available at https://github.com/bwmarrin/discordgo
// Copyright 2015-2016 Bruce Marriner <[email protected]>. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file contains functions for interacting with the Discord REST/JSON API
// at the lowest level.
package discordgo
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"image"
_ "image/jpeg" // For JPEG decoding
_ "image/png" // For PNG decoding
"io"
"io/ioutil"
"log"
"mime/multipart"
"net/http"
"net/url"
"strconv"
"time"
)
// ErrJSONUnmarshal is returned for JSON Unmarshall errors.
var ErrJSONUnmarshal = errors.New("json unmarshal")
// Request makes a (GET/POST/...) Requests to Discord REST API with JSON data.
// All the other Discord REST Calls in this file use this function.
func (s *Session) Request(method, urlStr string, data interface{}) (response []byte, err error) {
if s.Debug {
log.Println("API REQUEST PAYLOAD :: [" + fmt.Sprintf("%+v", data) + "]")
}
var body []byte
if data != nil {
body, err = json.Marshal(data)
if err != nil {
return
}
}
return s.request(method, urlStr, "application/json", body)
}
// request makes a (GET/POST/...) Requests to Discord REST API.
func (s *Session) request(method, urlStr, contentType string, b []byte) (response []byte, err error) {
if s.Debug {
log.Printf("API REQUEST %8s :: %s\n", method, urlStr)
}
req, err := http.NewRequest(method, urlStr, bytes.NewBuffer(b))
if err != nil {
return
}
	// Not used on the initial login.
	// TODO: Verify whether this is a login request; otherwise complain about the missing token.
if s.Token != "" {
req.Header.Set("authorization", s.Token)
}
req.Header.Set("Content-Type", contentType)
// TODO: Make a configurable static variable.
req.Header.Set("User-Agent", fmt.Sprintf("DiscordBot (https://github.com/bwmarrin/discordgo, v%s)", VERSION))
if s.Debug {
for k, v := range req.Header {
log.Printf("API REQUEST HEADER :: [%s] = %+v\n", k, v)
}
}
client := &http.Client{Timeout: (20 * time.Second)}
resp, err := client.Do(req)
if err != nil {
return
}
defer func() {
err := resp.Body.Close()
if err != nil {
log.Println("error closing resp body")
}
}()
response, err = ioutil.ReadAll(resp.Body)
if err != nil {
return
}
if s.Debug {
log.Printf("API RESPONSE STATUS :: %s\n", resp.Status)
for k, v := range resp.Header {
log.Printf("API RESPONSE HEADER :: [%s] = %+v\n", k, v)
}
log.Printf("API RESPONSE BODY :: [%s]\n", response)
}
switch resp.StatusCode {
case http.StatusOK:
case http.StatusCreated:
case http.StatusNoContent:
// TODO check for 401 response, invalidate token if we get one.
case 429: // TOO MANY REQUESTS - Rate limiting
rl := RateLimit{}
err = json.Unmarshal(response, &rl)
if err != nil {
err = fmt.Errorf("Request unmarshal rate limit error : %+v", err)
return
}
time.Sleep(rl.RetryAfter)
response, err = s.request(method, urlStr, contentType, b)
default: // Error condition
err = fmt.Errorf("HTTP %s, %s", resp.Status, response)
}
return
}
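// Example (sketch): issuing a raw call through the generic helper. Most code
// should prefer the typed wrappers below.
//
//	body, err := s.Request("GET", USER("@me"), nil)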
func unmarshal(data []byte, v interface{}) error {
err := json.Unmarshal(data, v)
if err != nil {
return ErrJSONUnmarshal
}
return nil
}
// ------------------------------------------------------------------------------------------------
// Functions specific to Discord Sessions
// ------------------------------------------------------------------------------------------------
// Login asks the Discord server for an authentication token.
func (s *Session) Login(email, password string) (err error) {
data := struct {
Email string `json:"email"`
Password string `json:"password"`
}{email, password}
response, err := s.Request("POST", LOGIN, data)
if err != nil {
return
}
<|fim▁hole|>
err = unmarshal(response, &temp)
if err != nil {
return
}
s.Token = temp.Token
return
}
// Register sends a Register request to Discord, and returns the authentication token
// Note that this account is temporary and should be verified for future use.
// Another option is to save the authentication token external, but this isn't recommended.
func (s *Session) Register(username string) (token string, err error) {
data := struct {
Username string `json:"username"`
}{username}
response, err := s.Request("POST", REGISTER, data)
if err != nil {
return
}
temp := struct {
Token string `json:"token"`
}{}
err = unmarshal(response, &temp)
if err != nil {
return
}
token = temp.Token
return
}
// Logout sends a logout request to Discord.
// This does not seem to actually invalidate the token. So you can still
// make API calls even after a Logout. So, it seems almost pointless to
// even use.
func (s *Session) Logout() (err error) {
// _, err = s.Request("POST", LOGOUT, fmt.Sprintf(`{"token": "%s"}`, s.Token))
if s.Token == "" {
return
}
data := struct {
Token string `json:"token"`
}{s.Token}
_, err = s.Request("POST", LOGOUT, data)
return
}
// ------------------------------------------------------------------------------------------------
// Functions specific to Discord Users
// ------------------------------------------------------------------------------------------------
// User returns the user details of the given userID
// userID : A user ID or "@me" which is a shortcut of current user ID
func (s *Session) User(userID string) (st *User, err error) {
body, err := s.Request("GET", USER(userID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// UserAvatar returns an image.Image of a users Avatar.
// userID : A user ID or "@me" which is a shortcut of current user ID
func (s *Session) UserAvatar(userID string) (img image.Image, err error) {
u, err := s.User(userID)
if err != nil {
return
}
body, err := s.Request("GET", USER_AVATAR(userID, u.Avatar), nil)
if err != nil {
return
}
img, _, err = image.Decode(bytes.NewReader(body))
return
}
// UserUpdate updates a user's settings.
func (s *Session) UserUpdate(email, password, username, avatar, newPassword string) (st *User, err error) {
// NOTE: Avatar must be either the hash/id of existing Avatar or
// data:image/png;base64,BASE64_STRING_OF_NEW_AVATAR_PNG
// to set a new avatar.
// If left blank, avatar will be set to null/blank
data := struct {
Email string `json:"email"`
Password string `json:"password"`
Username string `json:"username"`
Avatar string `json:"avatar,omitempty"`
NewPassword string `json:"new_password,omitempty"`
}{email, password, username, avatar, newPassword}
body, err := s.Request("PATCH", USER("@me"), data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// UserSettings returns the settings for a given user
func (s *Session) UserSettings() (st *Settings, err error) {
body, err := s.Request("GET", USER_SETTINGS("@me"), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// UserChannels returns an array of Channel structures for all private
// channels.
func (s *Session) UserChannels() (st []*Channel, err error) {
body, err := s.Request("GET", USER_CHANNELS("@me"), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// UserChannelCreate creates a new User (Private) Channel with another User
// recipientID : A user ID for the user to which this channel is opened with.
func (s *Session) UserChannelCreate(recipientID string) (st *Channel, err error) {
data := struct {
RecipientID string `json:"recipient_id"`
}{recipientID}
body, err := s.Request("POST", USER_CHANNELS("@me"), data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// UserGuilds returns an array of Guild structures for all guilds.
func (s *Session) UserGuilds() (st []*Guild, err error) {
body, err := s.Request("GET", USER_GUILDS("@me"), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// UserChannelPermissions returns the permission of a user in a channel.
// userID : The ID of the user to calculate permissions for.
// channelID : The ID of the channel to calculate permission for.
func (s *Session) UserChannelPermissions(userID, channelID string) (apermissions int, err error) {
channel, err := s.Channel(channelID)
if err != nil {
return
}
guild, err := s.Guild(channel.GuildID)
if err != nil {
return
}
if userID == guild.OwnerID {
apermissions = PermissionAll
return
}
member, err := s.GuildMember(guild.ID, userID)
if err != nil {
return
}
for _, role := range guild.Roles {
for _, roleID := range member.Roles {
if role.ID == roleID {
apermissions |= role.Permissions
break
}
}
}
if apermissions&PermissionManageRoles > 0 {
apermissions |= PermissionAll
}
// Member overwrites can override role overrides, so do two passes
for _, overwrite := range channel.PermissionOverwrites {
for _, roleID := range member.Roles {
if overwrite.Type == "role" && roleID == overwrite.ID {
apermissions &= ^overwrite.Deny
apermissions |= overwrite.Allow
break
}
}
}
for _, overwrite := range channel.PermissionOverwrites {
if overwrite.Type == "member" && overwrite.ID == userID {
apermissions &= ^overwrite.Deny
apermissions |= overwrite.Allow
break
}
}
if apermissions&PermissionManageRoles > 0 {
apermissions |= PermissionAllChannel
}
return
}
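// Illustrative use (a sketch, not part of the original file): checking a single
// permission bit for a user in a channel.
//
//	perms, err := s.UserChannelPermissions(userID, channelID)
//	canManage := err == nil && perms&PermissionManageRoles != 0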
// ------------------------------------------------------------------------------------------------
// Functions specific to Discord Guilds
// ------------------------------------------------------------------------------------------------
// Guild returns a Guild structure of a specific Guild.
// guildID : The ID of a Guild
func (s *Session) Guild(guildID string) (st *Guild, err error) {
if s.StateEnabled {
// Attempt to grab the guild from State first.
st, err = s.State.Guild(guildID)
if err == nil {
return
}
}
body, err := s.Request("GET", GUILD(guildID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildCreate creates a new Guild
// name : A name for the Guild (2-100 characters)
func (s *Session) GuildCreate(name string) (st *Guild, err error) {
data := struct {
Name string `json:"name"`
}{name}
body, err := s.Request("POST", GUILDS, data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildEdit edits a new Guild
// guildID : The ID of a Guild
// g : A GuildParams struct with the values Name, Region and VerificationLevel defined.
func (s *Session) GuildEdit(guildID string, g GuildParams) (st *Guild, err error) {
// Bounds checking for VerificationLevel, interval: [0, 3]
if g.VerificationLevel != nil {
val := *g.VerificationLevel
if val < 0 || val > 3 {
err = errors.New("VerificationLevel out of bounds, should be between 0 and 3")
return
}
}
//Bounds checking for regions
if g.Region != "" {
isValid := false
regions, _ := s.VoiceRegions()
for _, r := range regions {
if g.Region == r.ID {
isValid = true
}
}
if !isValid {
var valid []string
for _, r := range regions {
valid = append(valid, r.ID)
}
err = fmt.Errorf("Region not a valid region (%q)", valid)
return
}
}
data := struct {
Name string `json:"name,omitempty"`
Region string `json:"region,omitempty"`
VerificationLevel *VerificationLevel `json:"verification_level,omitempty"`
}{g.Name, g.Region, g.VerificationLevel}
body, err := s.Request("PATCH", GUILD(guildID), data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildDelete deletes a Guild.
// guildID : The ID of a Guild
func (s *Session) GuildDelete(guildID string) (st *Guild, err error) {
body, err := s.Request("DELETE", GUILD(guildID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildLeave leaves a Guild.
// guildID : The ID of a Guild
func (s *Session) GuildLeave(guildID string) (err error) {
_, err = s.Request("DELETE", USER_GUILD("@me", guildID), nil)
return
}
// GuildBans returns an array of User structures for all bans of a
// given guild.
// guildID : The ID of a Guild.
func (s *Session) GuildBans(guildID string) (st []*User, err error) {
body, err := s.Request("GET", GUILD_BANS(guildID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildBanCreate bans the given user from the given guild.
// guildID : The ID of a Guild.
// userID : The ID of a User
// days : The number of days of previous comments to delete.
func (s *Session) GuildBanCreate(guildID, userID string, days int) (err error) {
uri := GUILD_BAN(guildID, userID)
if days > 0 {
uri = fmt.Sprintf("%s?delete-message-days=%d", uri, days)
}
_, err = s.Request("PUT", uri, nil)
return
}
// GuildBanDelete removes the given user from the guild bans
// guildID : The ID of a Guild.
// userID : The ID of a User
func (s *Session) GuildBanDelete(guildID, userID string) (err error) {
_, err = s.Request("DELETE", GUILD_BAN(guildID, userID), nil)
return
}
// GuildMembers returns a list of members for a guild.
// guildID : The ID of a Guild.
// offset : A number of members to skip
// limit : max number of members to return (max 1000)
func (s *Session) GuildMembers(guildID string, offset, limit int) (st []*Member, err error) {
uri := GUILD_MEMBERS(guildID)
v := url.Values{}
if offset > 0 {
v.Set("offset", strconv.Itoa(offset))
}
if limit > 0 {
v.Set("limit", strconv.Itoa(limit))
}
if len(v) > 0 {
uri = fmt.Sprintf("%s?%s", uri, v.Encode())
}
body, err := s.Request("GET", uri, nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildMember returns a member of a guild.
// guildID : The ID of a Guild.
// userID : The ID of a User
func (s *Session) GuildMember(guildID, userID string) (st *Member, err error) {
body, err := s.Request("GET", GUILD_MEMBER(guildID, userID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildMemberDelete removes the given user from the given guild.
// guildID : The ID of a Guild.
// userID : The ID of a User
func (s *Session) GuildMemberDelete(guildID, userID string) (err error) {
_, err = s.Request("DELETE", GUILD_MEMBER(guildID, userID), nil)
return
}
// GuildMemberEdit edits the roles of a member.
// guildID : The ID of a Guild.
// userID : The ID of a User.
// roles : A list of role ID's to set on the member.
func (s *Session) GuildMemberEdit(guildID, userID string, roles []string) (err error) {
data := struct {
Roles []string `json:"roles"`
}{roles}
_, err = s.Request("PATCH", GUILD_MEMBER(guildID, userID), data)
if err != nil {
return
}
return
}
// GuildMemberMove moves a guild member from one voice channel to another/none
// guildID : The ID of a Guild.
// userID : The ID of a User.
// channelID : The ID of a channel to move user to, or null?
// NOTE : I am not entirely set on the name of this function and it may change
// prior to the final 1.0.0 release of Discordgo
func (s *Session) GuildMemberMove(guildID, userID, channelID string) (err error) {
data := struct {
ChannelID string `json:"channel_id"`
}{channelID}
_, err = s.Request("PATCH", GUILD_MEMBER(guildID, userID), data)
if err != nil {
return
}
return
}
// GuildChannels returns an array of Channel structures for all channels of a
// given guild.
// guildID : The ID of a Guild.
func (s *Session) GuildChannels(guildID string) (st []*Channel, err error) {
body, err := s.Request("GET", GUILD_CHANNELS(guildID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildChannelCreate creates a new channel in the given guild
// guildID : The ID of a Guild.
// name : Name of the channel (2-100 chars length)
// ctype : Type of the channel (voice or text)
func (s *Session) GuildChannelCreate(guildID, name, ctype string) (st *Channel, err error) {
data := struct {
Name string `json:"name"`
Type string `json:"type"`
}{name, ctype}
body, err := s.Request("POST", GUILD_CHANNELS(guildID), data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildInvites returns an array of Invite structures for the given guild
// guildID : The ID of a Guild.
func (s *Session) GuildInvites(guildID string) (st []*Invite, err error) {
body, err := s.Request("GET", GUILD_INVITES(guildID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildRoles returns all roles for a given guild.
// guildID : The ID of a Guild.
func (s *Session) GuildRoles(guildID string) (st []*Role, err error) {
body, err := s.Request("GET", GUILD_ROLES(guildID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return // TODO return pointer
}
// GuildRoleCreate returns a new Guild Role.
// guildID: The ID of a Guild.
func (s *Session) GuildRoleCreate(guildID string) (st *Role, err error) {
body, err := s.Request("POST", GUILD_ROLES(guildID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildRoleEdit updates an existing Guild Role with new values
// guildID : The ID of a Guild.
// roleID : The ID of a Role.
// name : The name of the Role.
// color : The color of the role (decimal, not hex).
// hoist : Whether to display the role's users separately.
// perm : The permissions for the role.
func (s *Session) GuildRoleEdit(guildID, roleID, name string, color int, hoist bool, perm int) (st *Role, err error) {
data := struct {
		Name        string `json:"name"`        // The role's name (overwrites existing)
		Color       int    `json:"color"`       // The color the role should have (as a decimal, not hex)
		Hoist       bool   `json:"hoist"`       // Whether to display the role's users separately
		Permissions int    `json:"permissions"` // The overall permissions number of the role (overwrites existing)
}{name, color, hoist, perm}
body, err := s.Request("PATCH", GUILD_ROLE(guildID, roleID), data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildRoleReorder reorders guild roles
// guildID : The ID of a Guild.
// roles : A list of ordered roles.
func (s *Session) GuildRoleReorder(guildID string, roles []*Role) (st []*Role, err error) {
body, err := s.Request("PATCH", GUILD_ROLES(guildID), roles)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// GuildRoleDelete deletes an existing role.
// guildID : The ID of a Guild.
// roleID : The ID of a Role.
func (s *Session) GuildRoleDelete(guildID, roleID string) (err error) {
_, err = s.Request("DELETE", GUILD_ROLE(guildID, roleID), nil)
return
}
// GuildIcon returns an image.Image of a guild icon.
// guildID : The ID of a Guild.
func (s *Session) GuildIcon(guildID string) (img image.Image, err error) {
g, err := s.Guild(guildID)
if err != nil {
return
}
if g.Icon == "" {
err = errors.New("Guild does not have an icon set.")
return
}
body, err := s.Request("GET", GUILD_ICON(guildID, g.Icon), nil)
if err != nil {
return
}
img, _, err = image.Decode(bytes.NewReader(body))
return
}
// GuildSplash returns an image.Image of a guild splash image.
// guildID : The ID of a Guild.
func (s *Session) GuildSplash(guildID string) (img image.Image, err error) {
g, err := s.Guild(guildID)
if err != nil {
return
}
if g.Splash == "" {
err = errors.New("Guild does not have a splash set.")
return
}
body, err := s.Request("GET", GUILD_SPLASH(guildID, g.Splash), nil)
if err != nil {
return
}
img, _, err = image.Decode(bytes.NewReader(body))
return
}
// ------------------------------------------------------------------------------------------------
// Functions specific to Discord Channels
// ------------------------------------------------------------------------------------------------
// Channel returns a Channel structure of a specific Channel.
// channelID : The ID of the Channel you want returned.
func (s *Session) Channel(channelID string) (st *Channel, err error) {
body, err := s.Request("GET", CHANNEL(channelID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// ChannelEdit edits the given channel
// channelID : The ID of a Channel
// name : The new name to assign the channel.
func (s *Session) ChannelEdit(channelID, name string) (st *Channel, err error) {
data := struct {
Name string `json:"name"`
}{name}
body, err := s.Request("PATCH", CHANNEL(channelID), data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// ChannelDelete deletes the given channel
// channelID : The ID of a Channel
func (s *Session) ChannelDelete(channelID string) (st *Channel, err error) {
body, err := s.Request("DELETE", CHANNEL(channelID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// ChannelTyping broadcasts to all members that the authenticated user is typing in
// the given channel.
// channelID : The ID of a Channel
func (s *Session) ChannelTyping(channelID string) (err error) {
_, err = s.Request("POST", CHANNEL_TYPING(channelID), nil)
return
}
// ChannelMessages returns an array of Message structures for messages within
// a given channel.
// channelID : The ID of a Channel.
// limit     : The number of messages that can be returned. (max 100)
// beforeID : If provided all messages returned will be before given ID.
// afterID : If provided all messages returned will be after given ID.
func (s *Session) ChannelMessages(channelID string, limit int, beforeID, afterID string) (st []*Message, err error) {
uri := CHANNEL_MESSAGES(channelID)
v := url.Values{}
if limit > 0 {
v.Set("limit", strconv.Itoa(limit))
}
if afterID != "" {
v.Set("after", afterID)
}
if beforeID != "" {
v.Set("before", beforeID)
}
if len(v) > 0 {
uri = fmt.Sprintf("%s?%s", uri, v.Encode())
}
body, err := s.Request("GET", uri, nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
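// Example (sketch): page backwards through history 50 messages at a time by
// passing the oldest message ID seen so far as beforeID and leaving afterID empty.
//
//	msgs, err := s.ChannelMessages(channelID, 50, oldestSeenID, "")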
// ChannelMessageAck acknowledges and marks the given message as read
// channelID : The ID of a Channel
// messageID : the ID of a Message
func (s *Session) ChannelMessageAck(channelID, messageID string) (err error) {
_, err = s.Request("POST", CHANNEL_MESSAGE_ACK(channelID, messageID), nil)
return
}
// channelMessageSend sends a message to the given channel.
// channelID : The ID of a Channel.
// content : The message to send.
// tts : Whether to send the message with TTS.
func (s *Session) channelMessageSend(channelID, content string, tts bool) (st *Message, err error) {
// TODO: nonce string ?
data := struct {
Content string `json:"content"`
TTS bool `json:"tts"`
}{content, tts}
// Send the message to the given channel
response, err := s.Request("POST", CHANNEL_MESSAGES(channelID), data)
if err != nil {
return
}
err = unmarshal(response, &st)
return
}
// ChannelMessageSend sends a message to the given channel.
// channelID : The ID of a Channel.
// content : The message to send.
func (s *Session) ChannelMessageSend(channelID string, content string) (st *Message, err error) {
return s.channelMessageSend(channelID, content, false)
}
// ChannelMessageSendTTS sends a message to the given channel with Text to Speech.
// channelID : The ID of a Channel.
// content : The message to send.
func (s *Session) ChannelMessageSendTTS(channelID string, content string) (st *Message, err error) {
return s.channelMessageSend(channelID, content, true)
}
// ChannelMessageEdit edits an existing message, replacing it entirely with
// the given content.
// channelID : The ID of a Channel
// messageID : the ID of a Message
func (s *Session) ChannelMessageEdit(channelID, messageID, content string) (st *Message, err error) {
data := struct {
Content string `json:"content"`
}{content}
response, err := s.Request("PATCH", CHANNEL_MESSAGE(channelID, messageID), data)
if err != nil {
return
}
err = unmarshal(response, &st)
return
}
// ChannelMessageDelete deletes a message from the Channel.
func (s *Session) ChannelMessageDelete(channelID, messageID string) (err error) {
_, err = s.Request("DELETE", CHANNEL_MESSAGE(channelID, messageID), nil)
return
}
// ChannelFileSend sends a file to the given channel.
// channelID : The ID of a Channel.
// io.Reader : A reader for the file contents.
func (s *Session) ChannelFileSend(channelID, name string, r io.Reader) (st *Message, err error) {
body := &bytes.Buffer{}
bodywriter := multipart.NewWriter(body)
writer, err := bodywriter.CreateFormFile("file", name)
if err != nil {
return nil, err
}
_, err = io.Copy(writer, r)
if err != nil {
return
}
err = bodywriter.Close()
if err != nil {
return
}
response, err := s.request("POST", CHANNEL_MESSAGES(channelID), bodywriter.FormDataContentType(), body.Bytes())
if err != nil {
return
}
err = unmarshal(response, &st)
return
}
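// Example (sketch; "os" is not imported by this file, shown for illustration only):
//
//	f, _ := os.Open("picture.png")
//	defer f.Close()
//	msg, err := s.ChannelFileSend(channelID, "picture.png", f)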
// ChannelInvites returns an array of Invite structures for the given channel
// channelID : The ID of a Channel
func (s *Session) ChannelInvites(channelID string) (st []*Invite, err error) {
body, err := s.Request("GET", CHANNEL_INVITES(channelID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// ChannelInviteCreate creates a new invite for the given channel.
// channelID : The ID of a Channel
// i : An Invite struct with the values MaxAge, MaxUses, Temporary,
// and XkcdPass defined.
func (s *Session) ChannelInviteCreate(channelID string, i Invite) (st *Invite, err error) {
data := struct {
MaxAge int `json:"max_age"`
MaxUses int `json:"max_uses"`
Temporary bool `json:"temporary"`
XKCDPass bool `json:"xkcdpass"`
}{i.MaxAge, i.MaxUses, i.Temporary, i.XkcdPass}
body, err := s.Request("POST", CHANNEL_INVITES(channelID), data)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// ChannelPermissionSet creates a Permission Override for the given channel.
// NOTE: This func name may change. Using Set instead of Create because
// you can both create a new override and update an existing override with this function.
func (s *Session) ChannelPermissionSet(channelID, targetID, targetType string, allow, deny int) (err error) {
data := struct {
ID string `json:"id"`
Type string `json:"type"`
Allow int `json:"allow"`
Deny int `json:"deny"`
}{targetID, targetType, allow, deny}
_, err = s.Request("PUT", CHANNEL_PERMISSION(channelID, targetID), data)
return
}
// ChannelPermissionDelete deletes a specific permission override for the given channel.
// NOTE: Name of this func may change.
func (s *Session) ChannelPermissionDelete(channelID, targetID string) (err error) {
_, err = s.Request("DELETE", CHANNEL_PERMISSION(channelID, targetID), nil)
return
}
// ------------------------------------------------------------------------------------------------
// Functions specific to Discord Invites
// ------------------------------------------------------------------------------------------------
// Invite returns an Invite structure of the given invite
// inviteID : The invite code (or maybe xkcdpass?)
func (s *Session) Invite(inviteID string) (st *Invite, err error) {
body, err := s.Request("GET", INVITE(inviteID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// InviteDelete deletes an existing invite
// inviteID : the code (or maybe xkcdpass?) of an invite
func (s *Session) InviteDelete(inviteID string) (st *Invite, err error) {
body, err := s.Request("DELETE", INVITE(inviteID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// InviteAccept accepts an Invite to a Guild or Channel
// inviteID : The invite code (or maybe xkcdpass?)
func (s *Session) InviteAccept(inviteID string) (st *Invite, err error) {
body, err := s.Request("POST", INVITE(inviteID), nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// ------------------------------------------------------------------------------------------------
// Functions specific to Discord Voice
// ------------------------------------------------------------------------------------------------
// VoiceRegions returns the voice server regions
func (s *Session) VoiceRegions() (st []*VoiceRegion, err error) {
body, err := s.Request("GET", VOICE_REGIONS, nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// VoiceICE returns the voice server ICE information
func (s *Session) VoiceICE() (st *VoiceICE, err error) {
body, err := s.Request("GET", VOICE_ICE, nil)
if err != nil {
return
}
err = unmarshal(body, &st)
return
}
// ------------------------------------------------------------------------------------------------
// Functions specific to Discord Websockets
// ------------------------------------------------------------------------------------------------
// Gateway returns the websocket Gateway address
func (s *Session) Gateway() (gateway string, err error) {
response, err := s.Request("GET", GATEWAY, nil)
if err != nil {
return
}
temp := struct {
URL string `json:"url"`
}{}
err = unmarshal(response, &temp)
if err != nil {
return
}
gateway = temp.URL
return
}<|fim▁end|> | temp := struct {
Token string `json:"token"`
}{} |
<|file_name|>StandardsSubscriptionRequest.cpp<|end_file_name|><|fim▁begin|>/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/securityhub/model/StandardsSubscriptionRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace SecurityHub
{
namespace Model
{
StandardsSubscriptionRequest::StandardsSubscriptionRequest() :
m_standardsArnHasBeenSet(false),
m_standardsInputHasBeenSet(false)
{
}
StandardsSubscriptionRequest::StandardsSubscriptionRequest(JsonView jsonValue) :
m_standardsArnHasBeenSet(false),
m_standardsInputHasBeenSet(false)
{
*this = jsonValue;
}
StandardsSubscriptionRequest& StandardsSubscriptionRequest::operator =(JsonView jsonValue)
{
if(jsonValue.ValueExists("StandardsArn"))
{
m_standardsArn = jsonValue.GetString("StandardsArn");
m_standardsArnHasBeenSet = true;
}
<|fim▁hole|> Aws::Map<Aws::String, JsonView> standardsInputJsonMap = jsonValue.GetObject("StandardsInput").GetAllObjects();
for(auto& standardsInputItem : standardsInputJsonMap)
{
m_standardsInput[standardsInputItem.first] = standardsInputItem.second.AsString();
}
m_standardsInputHasBeenSet = true;
}
return *this;
}
JsonValue StandardsSubscriptionRequest::Jsonize() const
{
JsonValue payload;
if(m_standardsArnHasBeenSet)
{
payload.WithString("StandardsArn", m_standardsArn);
}
if(m_standardsInputHasBeenSet)
{
JsonValue standardsInputJsonMap;
for(auto& standardsInputItem : m_standardsInput)
{
standardsInputJsonMap.WithString(standardsInputItem.first, standardsInputItem.second);
}
payload.WithObject("StandardsInput", std::move(standardsInputJsonMap));
}
return payload;
}
} // namespace Model
} // namespace SecurityHub
} // namespace Aws<|fim▁end|> | if(jsonValue.ValueExists("StandardsInput"))
{ |
<|file_name|>customoperation_2.go<|end_file_name|><|fim▁begin|>//This file is generated by btsgen. DO NOT EDIT.
//operation sample data for OperationTypeCustom
package samples
func init() {
sampleDataCustomOperation[2] = `{
"data": "466f6f626172",<|fim▁hole|> "asset_id": "1.3.0"
},
"id": 16,
"payer": "1.2.30127",
"required_auths": [
"1.2.30127"
]
}`
}
//end of file<|fim▁end|> | "fee": {
"amount": 164678, |
<|file_name|>DfIdentityException.java<|end_file_name|><|fim▁begin|>/* $Id$ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>*/
package com.documentum.fc.client;
/** Stub interface to allow the connector to build fully.
*/
public class DfIdentityException extends DfServiceException
{
}<|fim▁end|> | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. |
<|file_name|>h17d_raw.cpp<|end_file_name|><|fim▁begin|>#include "h17disk.h"
#include <stdio.h>
static int usage(char *progName) {
fprintf(stderr,"Usage: %s old_h17disk_file new_h17disk_file\n",progName);
return 1;
}
int main(int argc, char *argv[]) {
H17Disk *image = new(H17Disk);
if (argc < 2 || argc > 3)
{
usage(argv[0]);
return 1;
}
std::string infile(argv[1]);
image->loadFile(infile.c_str());
std::string outfile;
if (argc == 2)
{
outfile.assign(argv[1], infile.rfind("."));
outfile.append(".h17raw");
}
else
{
outfile.assign(argv[2]);
}
printf("------------------------\n");
printf(" Read Complete\n");
printf("------------------------\n");
image->analyze();
image->saveAsRaw(outfile.c_str());
if (image)
{
delete image;<|fim▁hole|>}<|fim▁end|> | }
return 0; |
<|file_name|>nibblegen.py<|end_file_name|><|fim▁begin|>'''
Nibblegen: A script to convert LaTex text to html usable in Nibbleblog Forked from the latex2wp project (the licenceing for which is below).
Copyright (C) 2014 Theodore Jones
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
"""
Copyright 2009 Luca Trevisan
Additional contributors: Radu Grigore
LaTeX2WP version 0.6.2
This file is part of LaTeX2WP, a program that converts
a LaTeX document into a format that is ready to be
copied and pasted into WordPress.
You are free to redistribute and/or modify LaTeX2WP under the
terms of the GNU General Public License (GPL), version 3
or (at your option) any later version.
I hope you will find LaTeX2WP useful, but be advised that
it comes WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GPL for more details.
You should have received a copy of the GNU General Public
License along with LaTeX2WP. If you can't find it,
see <http://www.gnu.org/licenses/>.
"""
import re
from sys import argv
from latex2wpstyle import *
# prepare variables computed from the info in latex2wpstyle
count = dict()
for thm in ThmEnvs:
count[T[thm]] = 0
count["section"] = count["subsection"] = count["equation"] = 0
ref={}
endlatex = "&fg="+textcolor
if HTML : endproof = ""
inthm = ""
"""
At the beginning, the commands \$, \% and \& are temporarily
replaced by placeholders (the second entry in each 4-tuple).
At the end, The placeholders in text mode are replaced by
the third entry, and the placeholders in math mode are
replaced by the fourth entry.
"""
esc = [["\\$","_dollar_","$","\\$"],
["\\%","_percent_","%","\\%"],
["\\&","_amp_","&","\\&"],
[">","_greater_",">",">"],
["<","_lesser_","<","<"]]
M = M + [ ["\\more","<!--more-->"],
["\\newblock","\\\\"],
["\\sloppy",""],
["\\S","§"]]
Mnomath =[["\\\\","<br/>\n"],
["\\ "," "],
["\\`a","à"],
["\\'a","á"],
["\\\"a","ä"],
["\\aa ","å"],
["{\\aa}","å"],
["\\`e","è"],
["\\'e","é"],
["\\\"e","ë"],
["\\`i","ì"],
["\\'i","í"],
["\\\"i","ï"],
["\\`o","ò"],
["\\'o","ó"],
["\\\"o","ö"],
["\\`o","ò"],
["\\'o","ó"],
["\\\"o","ö"],
["\\H o","ö"],
["\\`u","ù"],
["\\'u","ú"],
["\\\"u","ü"],
["\\`u","ù"],
["\\'u","ú"],
["\\\"u","ü"],
["\\v{C}","Č"]]
cb = re.compile("\\{|}")
def extractbody(m) :
begin = re.compile("\\\\begin\s*")
m= begin.sub("\\\\begin",m)
end = re.compile("\\\\end\s*")
m = end.sub("\\\\end",m)
beginenddoc = re.compile("\\\\begin\\{document}"
"|\\\\end\\{document}")
parse = beginenddoc.split(m)
if len(parse)== 1 :
m = parse[0]
else :
m = parse[1]
"""
removes comments, replaces double returns with <p> and
other returns and multiple spaces by a single space.
"""
for e in esc :
m = m.replace(e[0],e[1])
comments = re.compile("%.*?\n")
m=comments.sub(" ",m)
multiplereturns = re.compile("\n\n+")
m= multiplereturns.sub ("<p>",m)
spaces=re.compile("(\n|[ ])+")
m=spaces.sub(" ",m)
"""
removes text between \iffalse ... \fi and
between \iftex ... \fi keeps text between
\ifblog ... \fi
"""
ifcommands = re.compile("\\\\iffalse|\\\\ifblog|\\\\iftex|\\\\fi")
L=ifcommands.split(m)
I=ifcommands.findall(m)
m= L[0]
for i in range(1,(len(L)+1)/2) :
if (I[2*i-2]=="\\ifblog") :
m=m+L[2*i-1]
m=m+L[2*i]
"""
changes $$ ... $$ into \[ ... \] and reformats
eqnarray* environments as regular array environments
"""
doubledollar = re.compile("\\$\\$")
L=doubledollar.split(m)
m=L[0]
for i in range(1,(len(L)+1)/2) :
m = m+ "\\[" + L[2*i-1] + "\\]" + L[2*i]
m=m.replace("\\begin{eqnarray*}","\\[ \\begin{array}{rcl} ")
m=m.replace("\\end{eqnarray*}","\\end{array} \\]")
return m
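# Illustrative sketch of the display-math rewrite performed above (assumed
# inputs, shown only as an example):
#   "$$ x + y $$"                            becomes  "\[ x + y \]"
#   "\begin{eqnarray*}a&=&b\end{eqnarray*}"  becomes  "\[ \begin{array}{rcl} a&=&b\end{array} \]"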
def convertsqb(m) :
r = re.compile("\\\\item\\s*\\[.*?\\]")
Litems = r.findall(m)
Lrest = r.split(m)
<|fim▁hole|> for i in range(0,len(Litems)) :
s= Litems[i]
s=s.replace("\\item","\\nitem")
s=s.replace("[","{")
s=s.replace("]","}")
m=m+s+Lrest[i+1]
r = re.compile("\\\\begin\\s*\\{\\w+}\\s*\\[.*?\\]")
Lthms = r.findall(m)
Lrest = r.split(m)
m = Lrest[0]
for i in range(0,len(Lthms)) :
s= Lthms[i]
s=s.replace("\\begin","\\nbegin")
s=s.replace("[","{")
s=s.replace("]","}")
m=m+s+Lrest[i+1]
return m
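# Example of the square-bracket rewrite above (illustrative only):
#   "\item[First] text"   ->  "\nitem{First} text"
#   "\begin{thm}[Euler]"  ->  "\nbegin{thm}{Euler}"
# so the later passes only ever have to deal with braces.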
def converttables(m) :
retable = re.compile("\\\\begin\s*\\{tabular}.*?\\\\end\s*\\{tabular}"
"|\\\\begin\s*\\{btabular}.*?\\\\end\s*\\{btabular}")
tables = retable.findall(m)
rest = retable.split(m)
m = rest[0]
for i in range(len(tables)) :
if tables[i].find("{btabular}") != -1 :
m = m + convertonetable(tables[i],True)
else :
m = m + convertonetable(tables[i],False)
m = m + rest[i+1]
return m
def convertmacros(m) :
comm = re.compile("\\\\[a-zA-Z]*")
commands = comm.findall(m)
rest = comm.split(m)
r= rest[0]
for i in range( len (commands) ) :
for s1,s2 in M :
if s1==commands[i] :
commands[i] = s2
r=r+commands[i]+rest[i+1]
return(r)
def convertonetable(m,border) :
tokens = re.compile("\\\\begin\\{tabular}\s*\\{.*?}"
"|\\\\end\\{tabular}"
"|\\\\begin\\{btabular}\s*\\{.*?}"
"|\\\\end\\{btabular}"
"|&|\\\\\\\\")
align = { "c" : "center", "l" : "left" , "r" : "right" }
T = tokens.findall(m)
C = tokens.split(m)
L = cb.split(T[0])
format = L[3]
columns = len(format)
if border :
m = "<table border=\"1\" align=center>"
else :
m="<table align = center><tr>"
p=1
i=0
while T[p-1] != "\\end{tabular}" and T[p-1] != "\\end{btabular}":
m = m + "<td align="+align[format[i]]+">" + C[p] + "</td>"
p=p+1
i=i+1
if T[p-1]=="\\\\" :
for i in range (p,columns) :
m=m+"<td></td>"
m=m+"</tr><tr>"
i=0
m = m+ "</tr></table>"
return (m)
def separatemath(m) :
mathre = re.compile("\\$.*?\\$"
"|\\\\begin\\{equation}.*?\\\\end\\{equation}"
"|\\\\\\[.*?\\\\\\]")
math = mathre.findall(m)
text = mathre.split(m)
return(math,text)
def processmath( M ) :
R = []
counteq=0
global ref
mathdelim = re.compile("\\$"
"|\\\\begin\\{equation}"
"|\\\\end\\{equation}"
"|\\\\\\[|\\\\\\]")
label = re.compile("\\\\label\\{.*?}")
for m in M :
md = mathdelim.findall(m)
mb = mathdelim.split(m)
"""
In what follows, md[0] contains the initial delimiter,
which is either \begin{equation}, or $, or \[, and
mb[1] contains the actual mathematical equation
"""
if md[0] == "$" :
if HTML :
m=m.replace("$","")
m="$$"+m+""+endlatex+"$$"
else :
m="$$ {"+mb[1]+"}"+endlatex+"$$"
else :
if md[0].find("\\begin") != -1 :
count["equation"] += 1
mb[1] = mb[1] + "\\ \\ \\ \\ \\ ("+str(count["equation"])+")"
if HTML :
m = "<p align=center>$$" + mb[1] +endlatex+"$$" + "</p>\n"
else :
m = "<p align=center>$$ " + mb[1] +endlatex+"$$</p>\n"
if m.find("\\label") != -1 :
mnolab = label.split(m)
mlab = label.findall(m)
"""
Now the mathematical equation, which has already
been formatted for WordPress, is the union of
the strings mnolab[0] and mnolab[1]. The content
of the \label{...} command is in mlab[0]
"""
lab = mlab[0]
lab=cb.split(lab)[1]
lab=lab.replace(":","")
ref[lab]=count["equation"]
m="<a name=\""+lab+"\">"+mnolab[0]+mnolab[1]+"</a>"
R= R + [m]
return R
def convertcolors(m,c) :
if m.find("begin") != -1 :
return("<span style=\"color:#"+colors[c]+";\">")
else :
return("</span>")
def convertitm(m) :
if m.find("begin") != -1 :
return ("\n\n<ul>")
else :
return ("\n</ul>\n\n")
def convertenum(m) :
if m.find("begin") != -1 :
return ("\n\n<ol>")
else :
return ("\n</ol>\n\n")
def convertbeginnamedthm(thname,thm) :
global inthm
count[T[thm]] +=1
inthm = thm
t = beginnamedthm.replace("_ThmType_",thm.capitalize())
t = t.replace("_ThmNumb_",str(count[T[thm]]))
t = t.replace("_ThmName_",thname)
return(t)
def convertbeginthm(thm) :
global inthm
count[T[thm]] +=1
inthm = thm
t = beginthm.replace("_ThmType_",thm.capitalize())
t = t.replace("_ThmNumb_",str(count[T[thm]]))
return(t)
def convertendthm(thm) :
global inthm
inthm = ""
return(endthm)
def convertlab(m) :
global inthm
global ref
m=cb.split(m)[1]
m=m.replace(":","")
if inthm != "" :
ref[m]=count[T[inthm]]
else :
ref[m]=count["section"]
return("<a name=\""+m+"\"></a>")
def convertproof(m) :
if m.find("begin") != -1 :
return(beginproof)
else :
return(endproof)
def convertsection (m) :
L=cb.split(m)
"""
L[0] contains the \\section or \\section* command, and
L[1] contains the section name
"""
if L[0].find("*") == -1 :
t=section
count["section"] += 1
count["subsection"]=0
else :
t=sectionstar
t=t.replace("_SecNumb_",str(count["section"]) )
t=t.replace("_SecName_",L[1])
return(t)
def convertsubsection (m) :
L=cb.split(m)
if L[0].find("*") == -1 :
t=subsection
else :
t=subsectionstar
count["subsection"] += 1
t=t.replace("_SecNumb_",str(count["section"]) )
t=t.replace("_SubSecNumb_",str(count["subsection"]) )
t=t.replace("_SecName_",L[1])
return(t)
def converturl (m) :
L = cb.split(m)
return ("<a href=\""+L[1]+"\">"+L[3]+"</a>")
def converturlnosnap (m) :
L = cb.split(m)
return ("<a class=\"snap_noshots\" href=\""+L[1]+"\">"+L[3]+"</a>")
def convertimage (m) :
L = cb.split (m)
return ("<p align=center><img "+L[1] + " src=\""+L[3]
+"\"></p>")
def convertstrike (m) :
L=cb.split(m)
return("<s>"+L[1]+"</s>")
def processtext ( t ) :
p = re.compile("\\\\begin\\{\\w+}"
"|\\\\nbegin\\{\\w+}\\s*\\{.*?}"
"|\\\\end\\{\\w+}"
"|\\\\item"
"|\\\\nitem\\s*\\{.*?}"
"|\\\\label\\s*\\{.*?}"
"|\\\\section\\s*\\{.*?}"
"|\\\\section\\*\\s*\\{.*?}"
"|\\\\subsection\\s*\\{.*?}"
"|\\\\subsection\\*\\s*\\{.*?}"
"|\\\\href\\s*\\{.*?}\\s*\\{.*?}"
"|\\\\hrefnosnap\\s*\\{.*?}\\s*\\{.*?}"
"|\\\\image\\s*\\{.*?}\\s*\\{.*?}\\s*\\{.*?}"
"|\\\\sout\\s*\\{.*?}")
for s1, s2 in Mnomath :
t=t.replace(s1,s2)
ttext = p.split(t)
tcontrol = p.findall(t)
w = ttext[0]
i=0
while i < len(tcontrol) :
if tcontrol[i].find("{itemize}") != -1 :
w=w+convertitm(tcontrol[i])
elif tcontrol[i].find("{enumerate}") != -1 :
w= w+convertenum(tcontrol[i])
elif tcontrol[i][0:5]=="\\item" :
w=w+"<li>"
elif tcontrol[i][0:6]=="\\nitem" :
lb = tcontrol[i][7:].replace("{","")
lb = lb.replace("}","")
w=w+"<li>"+lb
elif tcontrol[i].find("\\hrefnosnap") != -1 :
w = w+converturlnosnap(tcontrol[i])
elif tcontrol[i].find("\\href") != -1 :
w = w+converturl(tcontrol[i])
elif tcontrol[i].find("{proof}") != -1 :
w = w+convertproof(tcontrol[i])
elif tcontrol[i].find("\\subsection") != -1 :
w = w+convertsubsection(tcontrol[i])
elif tcontrol[i].find("\\section") != -1 :
w = w+convertsection(tcontrol[i])
elif tcontrol[i].find("\\label") != -1 :
w=w+convertlab(tcontrol[i])
elif tcontrol[i].find("\\image") != -1 :
w = w+convertimage(tcontrol[i])
elif tcontrol[i].find("\\sout") != -1 :
w = w+convertstrike(tcontrol[i])
elif tcontrol[i].find("\\begin") !=-1 and tcontrol[i].find("{center}")!= -1 :
w = w+"<p align=center>"
elif tcontrol[i].find("\\end")!= -1 and tcontrol[i].find("{center}") != -1 :
w = w+"</p>"
else :
for clr in colorchoice :
if tcontrol[i].find("{"+clr+"}") != -1:
w=w + convertcolors(tcontrol[i],clr)
for thm in ThmEnvs :
if tcontrol[i]=="\\end{"+thm+"}" :
w=w+convertendthm(thm)
elif tcontrol[i]=="\\begin{"+thm+"}":
w=w+convertbeginthm(thm)
elif tcontrol[i].find("\\nbegin{"+thm+"}") != -1:
L=cb.split(tcontrol[i])
thname=L[3]
w=w+convertbeginnamedthm(thname,thm)
w += ttext[i+1]
i += 1
return processfontstyle(w)
def processfontstyle(w) :
close = dict()
ww = ""
level = i = 0
while i < len(w):
special = False
for k, v in fontstyle.items():
l = len(k)
if w[i:i+l] == k:
level += 1
ww += '<' + v + '>'
close[level] = '</' + v + '>'
i += l
special = True
if not special:
if w[i] == '{':
ww += '{'
level += 1
close[level] = '}'
elif w[i] == '}' and level > 0:
ww += close[level]
level -= 1
else:
ww += w[i]
i += 1
return ww
def convertref(m) :
global ref
p=re.compile("\\\\ref\s*\\{.*?}|\\\\eqref\s*\\{.*?}")
T=p.split(m)
M=p.findall(m)
w = T[0]
for i in range(len(M)) :
t=M[i]
lab=cb.split(t)[1]
lab=lab.replace(":","")
if t.find("\\eqref") != -1 :
w=w+"<a href=\"#"+lab+"\">("+str(ref[lab])+")</a>"
else :
w=w+"<a href=\"#"+lab+"\">"+str(ref[lab])+"</a>"
w=w+T[i+1]
return w
"""
The program makes several passes through the input.
In a first clean-up, all text before \begin{document}
and after \end{document}, if present, is removed,
all double-returns are converted
to <p>, and all remaining returns are converted to
spaces.
The second step implements a few simple macros. The user can
add support for more macros if desired by editing the
convertmacros() procedure.
Then the program separates the mathematical
from the text parts. (It assumes that the document does
not start with a mathematical expression.)
It makes one pass through the text part, translating
environments such as theorem, lemma, proof, enumerate, itemize,
\em, and \bf. Along the way, it keeps counters for the current
section and subsection and for the current numbered theorem-like
environment, as well as a flag that tells whether one is
inside a theorem-like environment or not. Every time a \label{xx}
command is encountered, we give ref[xx] the value of the section
in which the command appears, or the number of the theorem-like
environment in which it appears (if applicable). Each appearance
of \label is replaced by an html "name" tag, so that later we can
replace \ref commands by clickable html links.
The next step is to make a pass through the mathematical environments.
Displayed equations are numbered and centered, and when a \label{xx}
command is encountered we give ref[xx] the number of the current
equation.
A final pass replaces \ref{xx} commands by the number in ref[xx],
and a clickable link to the referenced location.
"""
import sys
s = ""
while True:
char = sys.stdin.read(1)
if not char:
break
if char:
s = s + char
"""
extractbody() takes the text between a \begin{document}
and \end{document}, if present, (otherwise it keeps the
whole document), normalizes the spacing, and removes comments
"""
s=extractbody(s)
# formats tables
s=converttables(s)
# reformats optional parameters passed in square brackets
s=convertsqb(s)
#implement simple macros
s=convertmacros(s)
# extracts the math parts, and replaces them with placeholders
# processes math and text separately, then puts the processed
# math equations in place of the placeholders
(math,text) = separatemath(s)
s=text[0]
for i in range(len(math)) :
s=s+"__math"+str(i)+"__"+text[i+1]
s = processtext ( s )
math = processmath ( math )
# converts escape sequences such as \$ to HTML codes
# This must be done after formatting the tables or the '&' in
# the HTML codes will create problems
for e in esc :
s=s.replace(e[1],e[2])
for i in range ( len ( math ) ) :
math[i] = math[i].replace(e[1],e[3])
# puts the math equations back into the text
for i in range(len(math)) :
s=s.replace("__math"+str(i)+"__",math[i])
# translating the \ref{} commands
s=convertref(s)
if HTML :
s="<head><style>body{max-width:55em;}a:link{color:#4444aa;}a:visited{color:#4444aa;}a:hover{background-color:#aaaaFF;}</style></head><body>"+s+"</body></html>"
s = s.replace("<p>","\n<p>\n")
print s<|fim▁end|> | m = Lrest[0] |
<|file_name|>admin-date-preview.js<|end_file_name|><|fim▁begin|>/**
* Provides live preview facilities for the event date format fields, akin
* to (and using the same ajax mechanism as) the date format preview in WP's
* general settings screen.
*/
jQuery( document ).ready( function( $ ) {
// Whenever the input field for a date format changes, update the matching
// live preview area
$( ".live-date-preview" ).siblings( "input" ).change( function() {
var $format_field = $( this );
var new_format = $format_field.val();
var $preview_field = $format_field.siblings( ".live-date-preview" );
/**
* Update the preview field when we get our response back from WP.
*/
var show_update = function( preview_text ) {
preview_text = $( "<div/>" ).html( preview_text ).text(); // Escaping!
$preview_field.html( preview_text );
}
// Before making the request, show the spinner (this should naturally be "wiped"
// when the response is rendered)
$preview_field.append( "<span class='spinner'></span>" );
$preview_field.find( ".spinner" ).css( "visibility", "visible" );
var request = {
action: "date_format",
date: new_format
}
$.post( ajaxurl, request, show_update, "text" );
} );<|fim▁hole|><|fim▁end|> | } ); |
<|file_name|>user.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Akvo RSR is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
from django.contrib.auth import get_user_model
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from akvo.rsr.forms import (check_password_minimum_length, check_password_has_number,
check_password_has_upper, check_password_has_lower,
check_password_has_symbol)
from akvo.rsr.models import ProjectHierarchy
from .employment import EmploymentSerializer
from .organisation import (
OrganisationExtraSerializer, OrganisationBasicSerializer, UserManagementOrgSerializer)
from .program import ProgramSerializer
from .rsr_serializer import BaseRSRSerializer
class UserRawSerializer(BaseRSRSerializer):
"""
Raw user serializer.
"""
class Meta:
model = get_user_model()
fields = (
'id',
'first_name',
'last_name',
'email',
'is_active',
'is_staff',
'is_admin',
'is_support',
'is_superuser',
)
class UserSerializer(BaseRSRSerializer):
# Needed to show only the first organisation of the user
organisation = OrganisationExtraSerializer(source='first_organisation', required=False,)
organisations = OrganisationExtraSerializer(many=True, required=False,)
user_management_organisations = UserManagementOrgSerializer(many=True, required=False)
approved_employments = EmploymentSerializer(many=True, required=False,)
api_key = serializers.ReadOnlyField(source='get_api_key')
# Legacy fields to support Tastypie API emulation
legacy_org = serializers.SerializerMethodField()
username = serializers.SerializerMethodField()
can_manage_users = serializers.SerializerMethodField()
programs = serializers.SerializerMethodField()
class Meta:
model = get_user_model()
fields = (
'id',
'first_name',
'last_name',
'email',
'username',
'is_active',
'is_staff',
'is_admin',
'is_support',
'is_superuser',
'can_manage_users',
'organisation',
'organisations',
'approved_employments',
'api_key',
'legacy_org',
'programs',
'user_management_organisations',
'seen_announcements',
)
def __init__(self, *args, **kwargs):
""" Delete the 'absolute_url' field added in BaseRSRSerializer.__init__().
It's neither correct nor do we want this data to be visible.
Remove the fields "legacy_org" and "username" that are only present to support older
versions of Up calling the Tastypie API endpoints that we now emulate using DRF
"""
super(UserSerializer, self).__init__(*args, **kwargs)
del self.fields['absolute_url']
# Remove the fields unless we're called via Tastypie URLs
request = kwargs.get("context", {}).get("request", None)
if request and "/api/v1/" not in request.path:
del self.fields['legacy_org']
del self.fields['username']
def get_legacy_org(self, obj):
""" Up needs the last tag to be the user's org, it only needs the org ID
"""
if obj.first_organisation():
return {"object": {"id": obj.first_organisation().id}}
return None
def get_username(self, obj):
return obj.email
def get_can_manage_users(self, obj):
return obj.has_perm('rsr.user_management')
def get_programs(self, user):<|fim▁hole|> .prefetch_related('root_project__partners').all()
if not (user.is_superuser or user.is_admin):
hierarchies = hierarchies.filter(root_project__in=user.my_projects()).distinct()
return ProgramSerializer(hierarchies, many=True, context=self.context).data
class UserPasswordSerializer(serializers.Serializer):
"""Change password serializer"""
old_password = serializers.CharField(
help_text='Current Password',
)
new_password1 = serializers.CharField(
help_text='New Password',
)
new_password2 = serializers.CharField(
help_text='New Password (confirmation)',
)
class Meta:
fields = '__all__'
def validate_old_password(self, value):
"""Check for current password"""
if not self.instance.check_password(value):
raise serializers.ValidationError(_('Old password is not correct.'))
return value
def validate(self, data):
"""Check if password1 and password2 match"""
if data['new_password1'] != data['new_password2']:
raise serializers.ValidationError(_('Passwords do not match.'))
password = data['new_password1']
check_password_minimum_length(password)
check_password_has_number(password)
check_password_has_upper(password)
check_password_has_lower(password)
check_password_has_symbol(password)
return data
def update(self, instance, validated_data):
instance.set_password(validated_data.get('new_password2', instance.password))
return instance
class UserDetailsSerializer(BaseRSRSerializer):
approved_organisations = OrganisationBasicSerializer(many=True, required=False)
email = serializers.ReadOnlyField()
class Meta:
model = get_user_model()
fields = (
'id',
'email',
'first_name',
'last_name',
'approved_organisations',
)
def __init__(self, *args, **kwargs):
""" Delete the 'absolute_url' field added in BaseRSRSerializer.__init__().
It's neither correct nor do we want this data to be visible.
"""
super(UserDetailsSerializer, self).__init__(*args, **kwargs)
del self.fields['absolute_url']<|fim▁end|> | hierarchies = ProjectHierarchy.objects.select_related('root_project')\ |
<|file_name|>mpi_helper.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import time
<|fim▁hole|>from python_compat import range
comm = MPI.COMM_WORLD
def r_print(*args):
"""
print message on the root node (rank 0)
:param args:
:return:
"""
if comm.rank == 0:
print('ROOT:', end=' ')
for i in args:
print(i, end=' ')
# noinspection PyArgumentList
print()
def l_print(*args):
"""
print message on each node, synchronized
:param args:
:return:
"""
for rank in range(0, comm.size):
comm.Barrier()
if rank == comm.rank:
l_print_no_barrier(*args)
comm.Barrier()
def l_print_no_barrier(*args):
"""
print message on each node
:param args:
:return:
"""
print(comm.rank, ':', end=' ')
for i in args:
print(i, end=' ')
# noinspection PyArgumentList
print()
def get_chunks(num_items, num_steps):
"""
divide items into n=num_steps chunks
:param num_items:
:param num_steps:
:return: chunk sizes, chunk offsets
"""
chunk_sizes = np.zeros(num_steps, dtype=int)
chunk_sizes[:] = num_items // num_steps
chunk_sizes[:num_items % num_steps] += 1
chunk_offsets = np.roll(np.cumsum(chunk_sizes), 1)
chunk_offsets[0] = 0
return chunk_sizes, chunk_offsets
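# Worked example (sketch, not part of the original module):
#   get_chunks(10, 3) -> chunk_sizes [4, 3, 3], chunk_offsets [0, 4, 7]
# i.e. the remainder of num_items / num_steps is spread over the first chunks.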
def barrier_sleep(mpi_comm=comm, tag=1747362612, sleep=0.1, use_yield=False):
"""
As suggested by Lisandro Dalcin at:
https://groups.google.com/forum/?fromgroups=#!topic/mpi4py/nArVuMXyyZI
"""
size = mpi_comm.Get_size()
if size == 1:
return
rank = mpi_comm.Get_rank()
mask = 1
while mask < size:
dst = (rank + mask) % size
src = (rank - mask + size) % size
req = mpi_comm.isend(None, dst, tag)
while not mpi_comm.Iprobe(src, tag):
if use_yield:
yield False
time.sleep(sleep)
mpi_comm.recv(None, src, tag)
req.Wait()
mask <<= 1
if use_yield:
yield True<|fim▁end|> | import numpy as np
from mpi4py import MPI
|
<|file_name|>DriverMethods.py<|end_file_name|><|fim▁begin|>from PerfectMatchingData import *
from Face import *
from Vertex import *
from Graph import *
from VertexList import *
from Output import *
from KekuleanMethods import *
from Checkers import *
from RequiredEdgeMethods import *
from Tkinter import *
from AppInformation import *
from random import randint
import time
import os
import shutil
import multiprocessing as mp
import threading
Break = False
BreakLoop = False
#These methods are the main drivers of the program. Some of their helper methods are also present here.
settings = {}
#function that reads in the graph file and returns a list of Face objects representing the graph
def getInput(fileName):
faceGraph = []
inputFile = open(fileName, 'r')
row = inputFile.readline()
y = 0
while len(row) > 0:
row = row.replace('\n', '')
row = row.split(" ")
for i in range(len(row)):
x = row[i]
faceGraph.append((Face(int(x), y)))
row = inputFile.readline()
y += 1
inputFile.close()
return faceGraph
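#Hedged example of the expected input file (inferred from the parser above, not
#an official format spec): each line is one row, listing the space-separated x
#positions of the faces in that row, e.g.
#   0 1 2
#   1 2
#which produces Face objects at (0,0), (1,0), (2,0), (1,1) and (2,1).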
def getSettings():
fileName = "settings.txt"
inputFile = open(fileName, 'r')
lineNumber = 0
minW = 0
maxW = 0
minH = 0
maxH = 0
line = inputFile.readline()
while len(line) > 0:
line = line.replace('\n', '')
settings[lineNumber] = float(line)
line = inputFile.readline()
lineNumber += 1
inputFile.close()
def resetGraph(root,appInfo,submitGraph,graphNumberEntry,view):
submitGraph.destroy()
view.destroy()
graphNumberEntry.destroy()
def analyzeGraph(root,appInfo):
root.geometry("600x400")
selection = StringVar()
choiceEntry = Entry(root, textvariable = selection)
choice = selection.get()
def callback(root,appInfo,choice,selection,choiceEntry,fileName = "graph.txt"):
loading = Label(root, text="Analyzing graph data, this may take a few minutes.")
loading.pack()
fileName = fileName
faceGraph = getInput(fileName)
#check for connectedness
connected = isConnected(faceGraphToInts(faceGraph))
if connected == True:
vertexGraph = makeVertexGraph(faceGraph)
superGraph = Graph(faceGraph, vertexGraph)
structures = assignMatching(superGraph)
_findRequiredEdges(structures)
loading.destroy()
choiceEntry.pack()
typeSelection = Label(root, text="Would you like to view the graphs ranked by Fries or Clars?")
typeSelection.pack()
submit = Button(root, text ="Submit", command = lambda: userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry))
submit.pack(side = BOTTOM)
while True:
choice = selection.get()
flag = False
exit = False
if choice != 'fries' and choice != 'clars' and choice != "":
                againSelection = Label(root, text="Invalid choice, please enter 'fries' or 'clars'.")
againSelection.pack()
print "again"
flag = True
while choice != 'fries' and choice != 'clars':
submit.update_idletasks()
choiceEntry.update_idletasks()
typeSelection.update_idletasks()
againSelection.update_idletasks()
choice = selection.get()
if exit == True:
againSelection.destroy()
break
submit.update_idletasks()
choiceEntry.update_idletasks()
typeSelection.update_idletasks()
t = threading.Thread(target = lambda: callback(root,appInfo,choice,selection,choiceEntry))
t.setDaemon(True)
appInfo.setThreads(t)
t.start()
def userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry):
structureNumber = IntVar()
submit.destroy()
typeSelection.destroy()
choiceEntry.destroy()
def displayClarFries(structureNumber,structures,choice):
structures.sort()
if choice == 'clars':
Graph.comparison = 'clars'
elif choice == 'fries':
Graph.comparison = 'fries'
structures.reverse()
structures[structureNumber].displayGraph()
    view = Label(root, text="There are " + str(len(structures)) + " distinct Kekule structures available. Which would you like to view?")
view.pack()
graphNumberEntry = Entry(root, textvariable = structureNumber)
graphNumberEntry.pack()
number = structureNumber.get()
    submitGraph = Button(root, text ="Submit Structure", command = lambda: displayClarFries(structureNumber.get(),structures,choice))
submitGraph.pack(side = BOTTOM)
def deleteB(button):
button.destroy()
reset = Button(root, text ="Quit", command = lambda: resetB(root,appInfo,submitGraph,graphNumberEntry,view))
reset.pack(side = BOTTOM)
def resetB(root,appInfo,submitGraph,graphNumberEntry,view):
deleteB(reset)
resetGraph(root,appInfo,submitGraph,graphNumberEntry,view)
#A user-entered number of graphs are generated and tested for Kekulean-ness and written to their proper text files
def randomIntoFiles():
kekuleanFile = open("Kekuleans.txt", "w")
notKekuleanFile = open("NotKekulean.txt", "w")
numK = 0
numNotK = 0
trials = int(raw_input("How many graphs would you like to create? "))
print "\n" #just to provide some visual space
t1 = time.time()
for i in range(trials):
faceGraph = createRandomConnectedGraph()
vGraph = makeVertexGraph(faceGraph)
randGraph = Graph(faceGraph, vGraph)
if isKekulean(randGraph) == True:
numK += 1
kekuleanFile.write("Graph #" + str(numK) + "\n")
kekuleanFile.write(randGraph.simpleToString() + '\n')
else:
numNotK += 1
notKekuleanFile.write("Graph #" + str(numNotK) + "\n")
notKekuleanFile.write(randGraph.simpleToString() + '\n')
#print randGraph
#print "\n"
t2 = time.time()
print "\n" + str(numK) + " Kekulean graph(s) were found.\n" + str(numNotK) + " non-Kekulean graph(s) were found."
print "Time elapsed (in seconds): " + str(t2 - t1) + "\n"
kekuleanFile.close()
notKekuleanFile.close()
#creates a random Kekulean graph, does stuff with it, and saves it to a PNG
def createRandomKekulean():
#creates a face graphs
randomFaces = createRandomGraph()
randomGraph = _createRandomKekulean()
print "There are", len(randomGraph.getVertexGraph()), "vertices"
graphs = assignMatching(randomGraph)
graphs.sort()
if len(graphs) > 0:
#save graphs as PNG file
savePNG(graphs, "graphs - Fries.png")
Graph.comparison = 'clars'
graphs.sort()
savePNG(graphs, "graphs - Clars.png")
while True:
choice = raw_input("Would you like to view the graphs ranked by Fries or Clars? (or quit?) ")
while choice.lower() != 'fries' and choice.lower() != 'clars' and choice.lower() != 'quit':
choice = raw_input("Would you like to view the graphs ranked by Fries or Clars? (or quit?) ")
if choice.lower() == 'clars':
Graph.comparison = 'clars'
elif choice.lower() == 'fries':
Graph.comparison = 'fries'
else:
break
graphs.sort()
graphs.reverse()
print "There are", len(graphs), "Kekulean structures"
displayGraphs(graphs)
else:
print "error - Graph is Kekulean but has no perfect matching - see error.txt for graph"
errorFile = open("error.txt", "w")
errorFile.write(randomGraph.simpleToString() + '\n')
#Creates a random planar graph, which may not be connected
def createRandomGraph():
height = randint(settings[2], settings[3])
randGraph = []
for i in range(height):
rowLength = randint(settings[0], settings[1])
row = getRow(rowLength, i)
while len(row) == 0:
row = getRow(rowLength, i)
randGraph.extend(row)
if checkAlignment(randGraph) == False:
randGraph = createRandomGraph()
return randGraph
def checkAlignment(graph):
for face in graph:
if face.getX() == 0:
break
else:
#there is no face on the y-axis
return False
for face in graph:
if face.getY() == 0:
break
else:
#there is no face on the x-axis
return False
#there is a face on the x-axis
return True
def createRandomConnectedGraph():
g = createRandomGraph()
while isConnected(faceGraphToInts(g)) == False:
g = createRandomGraph()
return g
#generates a row for the createRandomGraph method
def getRow(rl, rowNum):
r = []
for j in range(rl):
chance = randint(0, 100)
if chance > settings[4] * 100:
r.append(Face(j, rowNum))
return r
def _createRandomKekulean():
#creates a face graphs
randomFaces = createRandomGraph()
while isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
while isKekulean(randomGraph) == False:
#print "making K"
randomFaces = createRandomGraph()
while isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
if isKekulean(randomGraph):
return randomGraph
else:
return _createRandomKekulean()
def createManyKekuleans():
graphs = [] #list of kekulean graphs
graphList = [] #list of the Kekulean graphs with their matchings, and Fries/Clars Faces
trials = int(raw_input("How many graphs would you like to create? "))
pool = mp.Pool(mp.cpu_count())
results = [pool.apply_async(_createRandomKekulean) for x in range(trials)]
graphs = [r.get() for r in results]
for g in graphs:
graphList.extend(assignMatching(g))
graphList.sort()
if len(graphList) > 0:
print "There are", len(graphList), "Kekulean structures"
displayGraphs(graphList)
def testKekuleanThms():
conflictFile = open("conflict.txt", "w")
interval = float(raw_input("How many hours would you like to run the program?"))
timeLimit = 3600 * interval
print "limit:", timeLimit
t1 = time.time()
t2 = time.time()
counter = 0
while t2 - t1 < timeLimit:
print "graph #" + str(counter)
#creates a face graphs
randomFaces = createRandomGraph()
vertexGraph = []
#Finds connected graph
while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
nelsonThm = isOldKekulean(randomGraph)
perfectMatchingThm = isKekulean(randomGraph)
<|fim▁hole|> conflictFile.write("Perfect matching: " + str(perfectMatchingThm) + " Nelson Thm: " + str(nelsonThm) + "\n")
conflictFile.write(randomGraph.simpleToString())
conflictFile.write("\n")
t2 = time.time()
counter += 1
conflictFile.close()
#takes a row and returns the number of vertical edges in that row
def getRowEdgeCount(row):
edgeCount = 0
f = 0
for i in range(len(row)):
edgeCount += 1
try:
f = row[i+1]
except:
f = None
if row[i] + 1 != f or f == None:
edgeCount += 1
return edgeCount
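#Worked example (sketch): for the row [0, 1, 3] the faces at x = 0 and x = 1
#are adjacent and the face at x = 3 stands alone, so the count above returns
#3 + 2 = 5 vertical edges (adjacent faces share one interior wall).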
def getMinRows(g):
minRows = {}
index = 0
minEdges = sys.maxint
for r in g:
edgeCount = getRowEdgeCount(r)
if edgeCount < minEdges:
minEdges = edgeCount
minRows.clear()
minRows[index] = r
elif edgeCount == minEdges:
minRows[index] = r
index += 1
return minRows
#counts up the number of peaks above each row and stores those values in a list at indexes that correspond to the rows of the graph
def getPeaksAboveRows(g):
peaksAboveRow = [0]*(len(g))
for r in range(len(g)):
#print "r: " + str(r)
row = g[r]
if r > 0:
peaksAboveRow[r] += peaksAboveRow[r-1]
for col in range(len(row)):
face = row[col]
if searchRow(face, True, g, r) == True:
peaksAboveRow[r] += 1
#print "Peak at: " + str(r) + ", " + str(col)
if searchRow(face, False, g, r) == True and r < len(g)-1:
peaksAboveRow[r+1] -= 1
#print "Valley at: " + str(r) + ", " + str(col)
peaksAboveRow[r] = abs(peaksAboveRow[r])
return peaksAboveRow
#Theorem I developed
def NelsonThm(peaks, g):
kekulean = True
minRows = getMinRows(g)
for i, row in minRows.items():
if peaks[i] > getRowEdgeCount(row):
kekulean = False
break
return kekulean
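#Hedged illustration of the test above: if some minimum-edge row has, say,
#4 vertical edges while 5 peaks are counted above it, the graph is reported
#as non-Kekulean; otherwise every minimum row can absorb the peaks above it.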
#checks if a graph is Kekulean and returns a boolean
def isOldKekulean(graph):
fg = faceGraphToInts(graph.getFaceGraph())
peaksAbove = getPeaksAboveRows(fg)
#print peaksAbove
kekulean = NelsonThm(peaksAbove, fg)
return kekulean
def getUpperBounds(graph):
#faceGraph = getInput(filename)
#vertexGraph = makeVertexGraph(faceGraph)
#graph = Graph(faceGraph, vertexGraph)
kekulean = isKekulean(graph)
if kekulean == True:
rowCount = [0] * graph.getNumberOfRows()
whiteCount = [0] * graph.getNumberOfRows()
blackCount = [0] * graph.getNumberOfRows()
print "len:", len(whiteCount)
for v in graph.getVertexGraph():
#even y numbers mean the vertex is marked white on the graph
if v.getY() % 2 == 0:
index = v.getY() / 2
if index < len(whiteCount):
whiteCount[index] += 1
#The else implies that the vertex's y is odd, and thus the verex is marked black
else:
index = (v.getY() - 1) / 2
if index < len(blackCount):
blackCount[index] += 1
print "Upper Bonds of the graph per row:"
for index in range(len(rowCount)):
count = abs(sum(whiteCount[0:index+1]) - sum(blackCount[0:index+1]))
print count
rowCount[index] = count
totalUpperBonds = sum(rowCount)
print "Upper bond of the graph:", totalUpperBonds
else:
print "The graph is not Kekulean"
def testConjectureSameFaces(root,interval):
global Break
Break = False
quit = Button(root, text ="Quit", command = BreakModule)
quit.pack(side = LEFT)
scrollbar = Scrollbar(root)
scrollbar.pack(side = RIGHT, fill = Y)
text = Text(root,yscrollcommand = scrollbar.set)
text.pack()
scrollbar.config(command = text.yview)
graphList = []
graphNumber = 0
counter = 0
timeLimit = 3600 * interval
t1 = time.time()
t2 = time.time()
while t2 - t1 < timeLimit:
if Break == True:
Break = False
quit.destroy()
break
text.insert(CURRENT, "Graph " + str(graphNumber) + "\n")
#creates a face graphs
randomFaces = createRandomGraph()
vertexGraph = []
#Finds connected graph
while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
perfectMatchingThm = isKekulean(randomGraph)
if perfectMatchingThm == True:
structures = assignMatching(randomGraph)
#must be 'fries' or 'clars'
Graph.comparison = 'clars'
structures.sort()
h = structures[-1]
h.setNumStructures(len(structures))
h.setFaces(getNumFaces(faceGraphToInts(randomFaces)))
#h.setString(structures[0].simpleToString())
#is the data right?
#print "Verts:", h.getNumVertices()
#print "Structures:", h.getNumStructures()
#print "Clar:", h.getFriesNumber()
for g in graphList:
if(h.getFaces() == g.getFaces()):
if h.getNumVertices() == g.getNumVertices() :#and h.getNumVertices() <= 26:
if h.getNumStructures() < g.getNumStructures():
#first part
if h.getClarsNumber() > g.getClarsNumber():
print 'Conjecture is false:'
drawConflictsCC(g, h)
#only adds graphs to list if it under some number of vertices
graphList.append(h)
t2 = time.time()
counter += 1
graphNumber += 1
text.update_idletasks()
quit.update_idletasks()
scrollbar.update_idletasks()
text.destroy()
scrollbar.destroy()
quit.destroy()
#second part
def testConjectureSameFacesKKFF(root, interval):
global Break
Break = False
quit = Button(root, text ="Quit", command = BreakModule)
quit.pack(side = LEFT)
scrollbar = Scrollbar(root)
scrollbar.pack(side = RIGHT, fill = Y)
text = Text(root,yscrollcommand = scrollbar.set)
text.pack()
scrollbar.config(command = text.yview)
graphList = []
graphNumber = 0
counter = 0
timeLimit = 3600 * interval
t1 = time.time()
t2 = time.time()
while t2 - t1 < timeLimit:
if Break == True:
Break = False
quit.destroy()
break
text.insert(CURRENT, "Graph " + str(graphNumber) + "\n")
#creates a face graphs
randomFaces = createRandomGraph()
vertexGraph = []
#Finds connected graph
while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
perfectMatchingThm = isKekulean(randomGraph)
if perfectMatchingThm == True:
structures = assignMatching(randomGraph)
#must be 'fries' or 'clars'
Graph.comparison = 'fries'
structures.sort()
h = structures[-1]
h.setNumStructures(len(structures))
h.setFaces(getNumFaces(faceGraphToInts(randomFaces)))
clarNumberStructure = []
friesNumberStructure = []
for g in graphList:
if(h.getFaces() == g.getFaces()):
if h.getNumVertices() == g.getNumVertices() :#and h.getNumVertices() <= 26:
if h.getNumStructures() < g.getNumStructures():
if h.getFriesNumber() > g.getFriesNumber():
drawConflictsKKFF(g, h)
#only adds graphs to list if it under some number of vertices
graphList.append(h)
t2 = time.time()
counter += 1
graphNumber += 1
text.update_idletasks()
quit.update_idletasks()
scrollbar.update_idletasks()
text.destroy()
scrollbar.destroy()
quit.destroy()
def testConjectureSameFacesFFCC(root, interval):
clarNumberStructures = []
friesNumberStructures = []
graphs = []
graphList = []
temp = 0
graphNumber = 0
counter = 0
global Break
Break = False
quit = Button(root, text ="Quit", command = BreakModule)
quit.pack(side = LEFT)
scrollbar = Scrollbar(root)
scrollbar.pack(side = RIGHT, fill = Y)
text = Text(root,yscrollcommand = scrollbar.set)
text.pack()
scrollbar.config(command = text.yview)
timeLimit = 3600 * interval
t1 = time.time()
t2 = time.time()
while t2 - t1 < timeLimit:
if Break == True:
Break = False
quit.destroy()
break
text.insert(CURRENT, "Graph " + str(graphNumber) + "\n")
#creates a face graphs
randomFaces = createRandomGraph()
vertexGraph = []
#Finds connected graph
while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
perfectMatchingThm = isKekulean(randomGraph)
if perfectMatchingThm == True:
structures = assignMatching(randomGraph)
randomGraph.setMaxClarManual(setMaxClar(randomGraph))
randomGraph.setMaxFriesManual(setMaxFries(randomGraph))
h = structures[-1]
graphs.append(randomGraph)
h.setMaxClarManual(setMaxClar(randomGraph))
h.setMaxFriesManual(setMaxFries(randomGraph))
h.setNumStructures(len(structures))
h.setFaces(getNumFaces(faceGraphToInts(randomFaces)))
graphCount = 0
graphNumber += 1
for g in graphList:
if(g.getFaces() == h.getFaces()):
if g.getNumVertices() == h.getNumVertices():
if g.getNumStructures() < h.getNumStructures():
if g.getMaxClar() > h.getMaxClar():
if g.getMaxFries() < h.getMaxFries():
print 'Conjecture is false:\n'
saveClarFaceFFCC(graphs[graphCount],randomGraph,temp)
saveFriesFaceFFCC(graphs[graphCount],randomGraph,temp)
folderName = "FFCCConjectureConflicts"
fileName = folderName + "/" + str(randomGraph.getNumVertices()) + "_" + str(temp)+ "/info" + ".txt"
f = open(fileName,'w')
f.write("C1: " + str(g.getMaxClar()) + " C2: " + str(h.getMaxClar()) + " F1: " + str(g.getMaxFries()) + " F2: " + str(h.getMaxFries()) + "\n")
f.write(str(faceGraphToInts(g.getFaceGraph())) + "\n")
f.write(str(faceGraphToInts(h.getFaceGraph())) + "\n")
f.close()
temp += 1
graphCount += 1
#only adds graphs to list if it under some number of vertices
graphList.append(h)
t2 = time.time()
counter += 1
def setMaxFries(graph):
g = graph.getFaceGraph()
v = makeVertexGraph(g)
G = Graph(g,v)
structures = assignMatching(G)
Graph.comparison = 'fries'
structures.sort()
return structures[-1].getFriesNumber()
def setMaxClar(graph):
g = graph.getFaceGraph()
v = makeVertexGraph(g)
G = Graph(g,v)
structures = assignMatching(G)
Graph.comparison = 'clars'
structures.sort()
return structures[-1].getClarsNumber()
def saveClarFaceFFCC(graph1,graph2,count):
g1 = graph1.getFaceGraph()
g2 = graph2.getFaceGraph()
v1 = makeVertexGraph(g1)
v2 = makeVertexGraph(g2)
G1 = Graph(g1,v1)
G2 = Graph(g2,v2)
structures1 = assignMatching(G1)
structures2 = assignMatching(G2)
Graph.comparison = 'clars'
structures1.sort()
structures2.sort()
h1 = structures1[-1]
h2 = structures2[-1]
if not os.path.exists("FFCCConjectureConflicts"):
os.mkdir("FFCCConjectureConflicts")
folderName = "FFCCConjectureConflicts/" + str(G1.getNumVertices()) + "_" + str(count)
#setup folder
if not os.path.exists(folderName):
os.mkdir(folderName)
#print "adding"
fileName1 = folderName + "/clar1" + ".png"
fileName2 = folderName + "/clar2" + ".png"
#print fileName1
saveSinglePNG(h1,fileName1)
saveSinglePNG(h2,fileName2)
def saveFriesFaceFFCC(graph1,graph2,count):
g1 = graph1.getFaceGraph()
g2 = graph2.getFaceGraph()
v1 = makeVertexGraph(g1)
v2 = makeVertexGraph(g2)
G1 = Graph(g1,v1)
G2 = Graph(g2,v2)
structures1 = assignMatching(G1)
structures2 = assignMatching(G2)
Graph.comparison = 'fries'
structures1.sort()
structures2.sort()
h1 = structures1[-1]
h2 = structures2[-1]
if not os.path.exists("FFCCConjectureConflicts"):
os.mkdir("FFCCConjectureConflicts")
folderName = "FFCCConjectureConflicts/" + str(G1.getNumVertices()) + "_" + str(count)
#setup folder
if not os.path.exists(folderName):
os.mkdir(folderName)
#print "adding"
fileName1 = folderName + "/fries1" + ".png"
fileName2 = folderName + "/fries2" + ".png"
#print fileName1
saveSinglePNG(h1,fileName1)
saveSinglePNG(h2,fileName2)
def testConjectureDifferentFaces(hours=0):
graphList = []
results = open("results.txt", "w")
results.write("The program actually run!")
if hours == 0:
interval = float(raw_input("How many hours would you like to run the program? "))
else:
interval = hours
timeLimit = 3600 * interval
print "limit:", timeLimit
t1 = time.time()
t2 = time.time()
counter = 0
while t2 - t1 < timeLimit:
print "graph #" + str(counter)
#creates a face graphs
randomFaces = createRandomGraph()
vertexGraph = []
#Finds connected graph
while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
perfectMatchingThm = isKekulean(randomGraph)
if perfectMatchingThm == True:
structures = assignMatching(randomGraph)
for f in randomGraph.getFaceGraph():
pairs = randomGraph.getBondedVertices(f)
print str(pairs)
#must be 'fries' or 'clars'
Graph.comparison = 'clars'
structures.sort()
h = structures[-1]
h.setNumStructures(len(structures))
#h.setString(structures[0].simpleToString())
#is the data right?
#print "Verts:", h.getNumVertices()
#print "Structures:", h.getNumStructures()
#print "Clar:", h.getFriesNumber()
for g in graphList:
if h.getNumVertices() == g.getNumVertices() :#and h.getNumVertices() <= 26:
if h.getNumStructures() < g.getNumStructures():
#first part
if h.getClarsNumber() > g.getClarsNumber():
print 'Conjecture is false:'
results.write('\ngraph H: Clars: ' + str(h.getClarsNumber()) + " Number of Structures: " + str(h.getNumStructures()) + " Number of vertices: " + str(h.getNumVertices()) + "\n")
results.write(str(h))
results.write('\ngraph G: Clars: ' + str(g.getClarsNumber()) + " Number of Structures: " + str(g.getNumStructures()) + " Number of vertices: " + str(g.getNumVertices()) + "\n")
results.write(str(g))
results.write("\n\n")
drawConflictsCC(g, h)
#second part
if h.getFriesNumber() > g.getFriesNumber():
print 'Conjecture is false:'
results.write('\ngraph H: Fries: ' + str(h.getFriesNumber()) + " Number of Structures: " + str(h.getNumStructures()) + " Number of vertices: " + str(h.getNumVertices()) + "\n")
results.write(str(h))
results.write('\ngraph G: Fries: ' + str(g.getFriesNumber()) + " Number of Structures: " + str(g.getNumStructures()) + " Number of vertices: " + str(g.getNumVertices()) + "\n")
results.write(str(g))
results.write("\n\n")
drawConflictsKKFF(g, h)
#third part
if h.getClarsNumber() > g.getClarsNumber():
if h.getFriesNumber() < g.getFriesNumber():
print 'Conjecture is false:'
results.write('\ngraph H: Clars: ' + str(h.getClarsNumber()) + "graph H: Fries: " + str(h.getFriesNumber()) + " Number of Structures: " + str(h.getNumStructures()) + " Number of vertices: " + str(h.getNumVertices()) + "\n")
results.write(str(h))
results.write('\ngraph G: Clars: ' + str(g.getClarsNumber()) + "graph G: Fries: " + str(g.getFriesNumber()) +" Number of Structures: " + str(g.getNumStructures()) + " Number of vertices: " + str(g.getNumVertices()) + "\n")
results.write(str(g))
results.write("\n\n")
drawConflictsFFCC(g, h)
#only adds graphs to list if it under some number of vertices
graphList.append(h)
t2 = time.time()
counter += 1
def findHighestClars(graphs):
clars = 0
for g in graphs:
if g.getClarsNumber() > clars:
clars = g.getClarsNumber()
return clars
def _findRequiredEdges(graphs):
masterSet = getRequiredSet(graphs)
if len(masterSet) > 0:
for edge in masterSet:
v1, v2 = edge
v1.required = True
v2.required = True
return True
else:
return False
def findRequiredEdges(hours=0):
if not os.path.exists("requiredEdges"):
os.mkdir("requiredEdges")
edgeFile = open("requiredEdges/RequiredEdges.txt", "w")
graphNumber = 0
rqNum = 0
flag = False
if hours == 0:
interval = float(raw_input("How many hours would you like to run the program? "))
else:
interval = hours
timeLimit = 3600 * interval
print "limit:", timeLimit
t1 = time.time()
t2 = time.time()
while t2 - t1 < timeLimit:
print "graph", graphNumber
flag = False
graph = _createRandomKekulean()
graphs = assignMatching(graph)
for f in graph.getFaceGraph():
pairs = graph.getBondedVertices(f)
print str(pairs)
flag = _findRequiredEdges(graphs)
if flag == True:
print "Found graph with required edges"
edgeFile.write("Graph: " + str(rqNum) + "\n")
edgeFile.write(graph.simpleToString())
edgeFile.write("\n\n")
#save PNG's
fileName = "requiredEdges/Graph" + str(rqNum) + ".png"
saveSinglePNG(graphs[0], fileName)
rqNum += 1
graphNumber += 1
t2 = time.time()
def BreakModule():
global Break
Break = True
def BreakLoop():
global BreakLoop
BreakLoop = True
def combineGraphs(root,interval):
global Break
Break = False
quit = Button(root, text ="Quit", command = BreakModule)
quit.pack(side = LEFT)
graphNumber = 0
superGraphNumber = 0
deletedCount = 0
scrollbar = Scrollbar(root)
scrollbar.pack(side = RIGHT,fill = Y)
text = Text(root,yscrollcommand = scrollbar.set)
text.pack()
scrollbar.config(command=text.yview)
storedGraphs = {}
timeLimit = 3600 * interval
t1 = time.time()
t2 = time.time()
while t2 - t1 < timeLimit:
text.insert(CURRENT,"graph: " + str(graphNumber) + "\n")
if Break == True:
Break = False
quit.destroy()
break
flag = False
#new stuff
randomFaces = createRandomGraph()
vertexGraph = []
#Finds connected graph
while len(vertexGraph) % 2 != 0 or len(vertexGraph) == 0 or countPeaksAndValleys(randomFaces) == False or isConnected(faceGraphToInts(randomFaces)) == False:
randomFaces = createRandomGraph()
vertexGraph = makeVertexGraph(randomFaces)
randomGraph = Graph(randomFaces, vertexGraph)
perfectMatchingThm = isKekulean(randomGraph)
if perfectMatchingThm == True:
structures = assignMatching(randomGraph)
#end new stuff
Graph.comparison = 'clars'
structures.sort()
randomGraph.maxClars = structures[-1].getClarsNumber()
req_edges = getRequiredSet(structures)
externalEdges = getExternalEdges(req_edges)
if len(externalEdges) > 0:
#add graph and edges to list
storedGraphs[randomGraph] = externalEdges
for g, edges in storedGraphs.items():
complements = getComplements(externalEdges, edges)
for edge, compEdge in complements:
faceA = (edge[0].getFaces() & edge[1].getFaces()).pop()
faceB = (compEdge[0].getFaces() & compEdge[1].getFaces()).pop()
x = faceA.getX() - faceB.getX()
y = faceA.getY() - faceB.getY()
if edge[2] == "TOP_RIGHT" and compEdge[2] == "BOTTOM_LEFT":
newGraph = offsetFaces(g, x, y + 1);
elif edge[2] == "RIGHT" and compEdge[2] == "LEFT":
newGraph = offsetFaces(g, x + 1, y);
elif edge[2] == "TOP_LEFT" and compEdge[2] == "BOTTOM_RIGHT":
newGraph = offsetFaces(g, x + 1, y + 1);
elif edge[2] == "BOTTOM_LEFT" and compEdge[2] == "TOP_RIGHT":
newGraph = offsetFaces(g, x, y - 1);
elif edge[2] == "LEFT" and compEdge[2] == "RIGHT":
newGraph = offsetFaces(g, x - 1, y);
elif edge[2] == "BOTTOM_RIGHT" and compEdge[2] == "TOP_LEFT":
newGraph = offsetFaces(g, x - 1, y - 1);
overlap = checkFaceOverlap(randomGraph, newGraph)
#print overlap
if overlap is False:
faceGraph = combineFaces(randomGraph, newGraph)
faceGraph = adjustForNegatives(faceGraph)
vertexGraph = makeVertexGraph(faceGraph)
superGraph = Graph(faceGraph, vertexGraph)
structures = assignMatching(superGraph)
_findRequiredEdges(structures)
#start new stuff
if len(structures) > 0:
#setup folder
folderName = "CombinedTemps"
if not os.path.exists(folderName):
os.mkdir(folderName)
fileName = folderName + "/superGraph.txt"
f = open(folderName + "/superGraph" + str(superGraphNumber) + ".txt" ,'w')
f.write(str(superGraph) + '\n')
f.close()
Graph.comparison = 'clars'
structures.sort()
if not os.path.exists("CombinedGraphs"):
os.mkdir("CombinedGraphs")
folderNameCG = "CombinedGraphs/superGraph" + str(superGraphNumber)
#setup folder
if not os.path.exists(folderNameCG):
os.mkdir(folderNameCG)
superName = folderNameCG + "/superGraph" + str(superGraphNumber) + ".png"
saveSinglePNG(structures[0], superName)
addCombinationsPNG(randomGraph, newGraph,superGraph, superGraphNumber, deletedCount)
superGraphNumber += 1
graphNumber += 1
t2 = time.time()
quit.update_idletasks()
quit.destroy()
def resetCombinedGraphs(root,appInfo,submitGraph,graphNumberEntry,view):
submitGraph.destroy()
view.destroy()
graphNumberEntry.destroy()
def analyzeCombinedGraphsSetup(root,appInfo,path = "CombinedTemps",extension = ".txt"):
runningApps = []
root.geometry("600x400")
graphNumber = IntVar()
entry = Entry(root, textvariable = graphNumber)
entry.pack()
runningApps.append(entry)
if not os.path.exists(path):
os.mkdir(path)
num_files = len([f for f in os.listdir(path)
if os.path.isfile(os.path.join(path, f))])
num_files -= 1
#for i in range(0,num_files):
#oldFilename = path + "/superGraph" + str(k+1) + extension
#os.rename(oldFilename, path + "/superGraph" + str(i) + extension)
    label = Label(root, text="There are " + str(num_files) + " files in the directory. Which would you like to look at?")
label.pack()
runningApps.append(label)
i = 0
submit = Button(root, text ="Submit", command = lambda: checkAnalyze(root,appInfo,num_files,quit,entry,label,i,graphNumber,submit,runningApps))
submit.pack(side = BOTTOM)
while i == 0:
i = graphNumber.get()
submit.update_idletasks()
entry.update_idletasks()
label.update_idletasks()
def checkAnalyze(root,appInfo,num_files,quit,entry,label,i,graphNumber,submit,runningApps):
submit.destroy()
again = Label(root, text="That file does not exist, please try again.")
submit = Button(root, text ="Submit", command = lambda: analyzeCombinedGraphs(root,appInfo,i,runningApps,submit,again,label,entry))
submit.pack(side = BOTTOM)
if i < -1 or i > num_files:
again.pack()
else:
analyzeCombinedGraphs(root,appInfo,i,runningApps,submit,again,label,entry)
while (i < -1 or i > num_files):
submit.update_idletasks()
entry.update_idletasks()
label.update_idletasks()
again.update_idletasks()
i = graphNumber.get()
def analyzeCombinedGraphs(root,appInfo,i,runningApps,submit,again,label,entry):
submit.destroy()
again.destroy()
label.destroy()
entry.destroy()
selection = StringVar()
choiceEntry = Entry(root, textvariable = selection)
choice = selection.get()
def callback(root,appInfo,i,choice,selection,choiceEntry,extension = ".txt",path = "CombinedTemps"):
loading = Label(root, text="Analyzing graph data, this may take a few minutes.")
loading.pack()
fileName = "/superGraph" + str(i) + extension
faceGraph = getInput(path + "/superGraph" + str(i) + extension)
#check for connectedness
connected = isConnected(faceGraphToInts(faceGraph))
if connected == True:
vertexGraph = makeVertexGraph(faceGraph)
superGraph = Graph(faceGraph, vertexGraph)
structures = assignMatching(superGraph)
_findRequiredEdges(structures)
loading.destroy()
choiceEntry.pack()
typeSelection = Label(root, text="Would you like to view the graphs ranked by Fries or Clars?")
typeSelection.pack()
submit = Button(root, text ="Submit", command = lambda: userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry))
submit.pack(side = BOTTOM)
while True:
choice = selection.get()
flag = False
exit = False
if choice != 'fries' and choice != 'clars' and choice != "":
                againSelection = Label(root, text="Invalid choice, please enter 'fries' or 'clars'.")
againSelection.pack()
print "again"
flag = True
while choice != 'fries' and choice != 'clars':
submit.update_idletasks()
choiceEntry.update_idletasks()
typeSelection.update_idletasks()
againSelection.update_idletasks()
choice = selection.get()
if exit == True:
againSelection.destroy()
break
submit.update_idletasks()
choiceEntry.update_idletasks()
typeSelection.update_idletasks()
t = threading.Thread(target = lambda: callback(root,appInfo,i,choice,selection,choiceEntry))
t.setDaemon(True)
appInfo.setThreads(t)
t.start()
def userInputStructures(root,appInfo,structures,choice,submit,typeSelection,choiceEntry):
structureNumber = IntVar()
submit.destroy()
typeSelection.destroy()
choiceEntry.destroy()
def displayCombinedClarFries(structureNumber,structures,choice):
structures.sort()
if choice == 'clars':
Graph.comparison = 'clars'
elif choice == 'fries':
Graph.comparison = 'fries'
structures.reverse()
structures[structureNumber].displayGraph()
    view = Label(root, text="There are " + str(len(structures)) + " distinct Kekule structures available. Which would you like to view?")
view.pack()
graphNumberEntry = Entry(root, textvariable = structureNumber)
graphNumberEntry.pack()
number = structureNumber.get()
    submitGraph = Button(root, text ="Submit Structure", command = lambda: displayCombinedClarFries(structureNumber.get(),structures,choice))
submitGraph.pack(side = BOTTOM)
def deleteB(button):
button.destroy()
reset = Button(root, text ="Quit", command = lambda: resetB(root,appInfo,submitGraph,graphNumberEntry,view))
reset.pack(side = BOTTOM)
def resetB(root,appInfo,submitGraph,graphNumberEntry,view):
deleteB(reset)
resetCombinedGraphs(root,appInfo,submitGraph,graphNumberEntry,view)
def addCombinationsPNG(graph,newGraph,superGraph,superGraphNumber,deletedCount):
new1 = graph.getFaceGraph()
new2 = newGraph.getFaceGraph()
vertexG1 = makeVertexGraph(new1)
vertexG2 = makeVertexGraph(new2)
g1 = Graph(new1,vertexG1)
g2 = Graph(new2,vertexG2)
firstStructures = assignMatching(g1)
secondStructures = assignMatching(g2)
_findRequiredEdges(firstStructures)
_findRequiredEdges(secondStructures)
Graph.comparison = 'clars'
firstStructures.sort()
secondStructures.sort()
if(isKekulean(g2) == True and isKekulean(g1) == True):
folderNameCG = "CombinedGraphs/superGraph" + str(superGraphNumber)
firstName = folderNameCG + "/Graph" + str(1) + ".png"
secondName = folderNameCG + "/Graph" + str(2) + ".png"
saveSinglePNG(firstStructures[0], firstName)
saveSinglePNG(secondStructures[0], secondName)
else:
directoryName = "CombinedDeleted"
if not os.path.exists(directoryName):
os.mkdir(directoryName)
folderName = "CombinedDeleted/superGraph" + str(superGraphNumber) + "_" + str(deletedCount)
if not os.path.exists(folderName):
os.mkdir(folderName)
f = superGraph.getFaceGraph()
v3 = makeVertexGraph(f)
g3 = Graph(f,v3)
superGraphStructure = assignMatching(g3)
fileName = folderName + "/superDeleted" + str(superGraphNumber) + ".png"
firstName = folderName + "/Graph" + str(1) + ".png"
secondName = folderName + "/Graph" + str(2) + ".png"
saveSinglePNG(superGraphStructure[0], fileName)
saveSinglePNG(firstStructures[0], firstName)
saveSinglePNG(secondStructures[0], secondName)
shutil.rmtree("CombinedGraphs/superGraph" + str(superGraphNumber))
superGraphNumber -= 1
deletedCount += 1
def removeCombinedDuplicates(path = "CombinedTemps",extension = ".txt"):
num_files = len([f for f in os.listdir(path)
if os.path.isfile(os.path.join(path, f))])
print num_files
num_files -= 7
print num_files
masterFaceGraph = []
for i in range(0,num_files):
filename = "/superGraph" + str(i) + extension
faceGraph = getInput(path + "/superGraph" + str(i) + extension)
masterFaceGraph.append(faceGraphToInts(faceGraph))
for f in range(0, len(masterFaceGraph)):
for k in range(f+1, len(masterFaceGraph)):
flag = True
for h in range(0,len(masterFaceGraph[f])):
a = masterFaceGraph[f][h]
b = masterFaceGraph[k][h]
if len(a) != len(b):
flag = False
break
for t in range(0,len(masterFaceGraph[f][h])):
c = a[t]
d = b[t]
if c != d:
flag = False
break
if flag == False:
break
if (flag == True):
masterFaceGraph.remove(masterFaceGraph[k])
shutil.rmtree("CombinedGraphs/superGraph" + str(k))
os.remove("CombinedTemps/superGraph" + str(k) + extension)
for i in range(k+1,num_files):
path1 = "CombinedGraphs"
path2 = "CombinedTemps"
oldFilename1 = path1 + "/superGraph" + str(i)
oldFilename2 = path2 + "/superGraph" + str(i) + extension
os.rename(oldFilename1 + "/superGraph" + str(i) + ".png", oldFilename1 + "/superGraph" + str(i-1) + ".png")
os.rename(oldFilename1, path1 + "/superGraph" + str(i-1))
os.rename(oldFilename2, path2 + "/superGraph" + str(i-1) + extension)
num_files -= 1<|fim▁end|> | if nelsonThm != perfectMatchingThm:
|
<|file_name|>lint-shorthand-field.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(bad_style, unused_variables)]
#![deny(non_shorthand_field_patterns)]
struct Foo {
x: isize,
y: isize,
}
fn main() {
{
let Foo {
x: x, //~ ERROR the `x:` in this pattern is redundant
y: ref y, //~ ERROR the `y:` in this pattern is redundant
} = Foo { x: 0, y: 0 };
let Foo {
x,
ref y,
} = Foo { x: 0, y: 0 };
}
{
const x: isize = 1;
<|fim▁hole|> }
{
struct Bar {
x: x,
}
struct x;
match (Bar { x: x }) {
Bar { x: x } => {},
}
}
{
struct Bar {
x: Foo,
}
enum Foo { x }
match (Bar { x: Foo::x }) {
Bar { x: Foo::x } => {},
}
}
}<|fim▁end|> | match (Foo { x: 1, y: 1 }) {
Foo { x: x, ..} => {},
_ => {},
} |
<|file_name|>Minimap-binding.js<|end_file_name|><|fim▁begin|>LeafletWidget.methods.addMiniMap =
function(tilesURL, tilesProvider, position,
width, height, collapsedWidth, collapsedHeight , zoomLevelOffset,
zoomLevelFixed, centerFixed, zoomAnimation , toggleDisplay, autoToggleDisplay,
minimized, aimingRectOptions, shadowRectOptions, strings, mapOptions) {<|fim▁hole|> }
// determine the tiles for the minimap
// default to OSM tiles
layer = new L.tileLayer('http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png');
if(tilesProvider) {
// use a custom tiles provider if specified.
layer = new L.tileLayer.provider(tilesProvider);
} else if(tilesURL) {
// else use a custom tiles URL if specified.
layer = new L.tileLayer(tilesURL);
}
this.minimap = new L.Control.MiniMap(layer, {
position: position,
width: width,
height: height,
collapsedWidth: collapsedWidth,
collapsedHeight: collapsedHeight,
zoomLevelOffset: zoomLevelOffset,
zoomLevelFixed: zoomLevelFixed,
centerFixed: centerFixed,
zoomAnimation: zoomAnimation,
toggleDisplay: toggleDisplay,
autoToggleDisplay: autoToggleDisplay,
minimized: minimized,
aimingRectOptions: aimingRectOptions,
shadowRectOptions: shadowRectOptions,
strings: strings,
mapOptions: mapOptions
});
this.minimap.addTo(this);
}).call(this);
};<|fim▁end|> |
(function() {
if(this.minimap) {
this.minimap.removeFrom( this ); |
<|file_name|>factory.py<|end_file_name|><|fim▁begin|>"""Session class factory methods."""<|fim▁hole|>
import logging
from cachecontrol import CacheControlAdapter
from cachecontrol.cache import DictCache
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
def add_cache_control(session, cache_control_config):
"""Add cache_control adapter to session object."""
adapter = CacheControlAdapter(
DictCache(),
cache_etags=cache_control_config.get('cache_etags', True),
serializer=cache_control_config.get('serializer', None),
heuristic=cache_control_config.get('heuristic', None),
)
session.mount('http://', adapter)
session.mount('https://', adapter)
session.cache_controller = adapter.controller<|fim▁end|> | from __future__ import unicode_literals |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main<|fim▁hole|>import (
"log"
"net/http"
)
func droneServer(w http.ResponseWriter, req *http.Request) {
w.Write([]byte("Built by Drone in Kubernetes!"))
}
func main() {
http.HandleFunc("/", droneServer)
err := http.ListenAndServe(":8080", nil)
if err != nil {
log.Fatal("ListenAndServe: ", err)
}
}<|fim▁end|> | |
<|file_name|>_version.py<|end_file_name|><|fim▁begin|>#! -*- coding: utf-8 -*-
"""
Retrieval of version number
This file helps to compute a version number in source trees obtained from
git-archive tarball (such as those provided by githubs download-from-tag
feature). Distribution tarballs (built by setup.py sdist) and build
directories (produced by setup.py build) will contain a much shorter file
that just contains the computed version number.
This file was generated by PyScaffold.
"""
import inspect
import os
import re
import subprocess
import sys
__location__ = os.path.join(os.getcwd(), os.path.dirname(
inspect.getfile(inspect.currentframe())))
# these strings will be replaced by git during git-archive
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
# general settings
tag_prefix = 'v' # tags are like v1.2.0
package = "bonfire"
namespace = []
root_pkg = namespace[0] if namespace else package
if namespace:
pkg_path = os.path.join(*namespace[-1].split('.') + [package])
else:
pkg_path = package
class ShellCommand(object):
def __init__(self, command, shell=True, cwd=None):
self._command = command
self._shell = shell
self._cwd = cwd
def __call__(self, *args):
command = "{cmd} {args}".format(cmd=self._command,
args=subprocess.list2cmdline(args))
output = subprocess.check_output(command,
shell=self._shell,
cwd=self._cwd,
stderr=subprocess.STDOUT,
universal_newlines=True)
return self._yield_output(output)
def _yield_output(self, msg):
for line in msg.splitlines():
yield line
def get_git_cmd(**args):
if sys.platform == "win32":
for cmd in ["git.cmd", "git.exe"]:
git = ShellCommand(cmd, **args)
try:
git("--version")
except (subprocess.CalledProcessError, OSError):
continue
return git
return None
else:
git = ShellCommand("git", **args)
try:
git("--version")
except (subprocess.CalledProcessError, OSError):
return None
return git
def version_from_git(tag_prefix, root, verbose=False):
# this runs 'git' from the root of the source tree. This only gets called
# if the git-archive 'subst' keywords were *not* expanded, and
# _version.py hasn't already been rewritten with a short version string,
# meaning we're inside a checked out source tree.
git = get_git_cmd(cwd=root)
if not git:
print("no git found")
return None
try:
tag = next(git("describe", "--tags", "--dirty", "--always"))
except subprocess.CalledProcessError:
return None
if not tag.startswith(tag_prefix):
if verbose:
print("tag '{}' doesn't start with prefix '{}'".format(tag,
tag_prefix))
return None
tag = tag[len(tag_prefix):]
sha1 = next(git("rev-parse", "HEAD"))
full = sha1.strip()
if tag.endswith("-dirty"):
full += "-dirty"
return {"version": tag, "full": full}
def get_keywords(versionfile_abs):
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = dict()
try:
with open(versionfile_abs, "r") as fh:
for line in fh.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
except EnvironmentError:
return None
return keywords
def version_from_keywords(keywords, tag_prefix, verbose=False):
if not keywords:
return None # keyword-finding function failed to find keywords
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
return None # unexpanded, so not in an unpacked git-archive tarball
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '{}', no digits".format(",".join(refs-tags)))
if verbose:
print("likely tags: {}".format(",".join(sorted(tags))))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking {}".format(r))
return {"version": r,
"full": keywords["full"].strip()}
else:
if verbose:
print("no suitable tags, using full revision id")
return {"version": keywords["full"].strip(),<|fim▁hole|> # Source tarballs conventionally unpack into a directory that includes
# both the project name and a version string.
dirname = os.path.basename(root)
if not dirname.startswith(parentdir_prefix):
if verbose:
print("guessing rootdir is '{}', but '{}' doesn't start with "
"prefix '{}'".format(root, dirname, parentdir_prefix))
return None
version = dirname[len(parentdir_prefix):].split('-')[0]
return {"version": version, "full": ""}
def git2pep440(ver_str):
dash_count = ver_str.count('-')
if dash_count == 0:
return ver_str
elif dash_count == 1:
return ver_str.split('-')[0] + "+dirty"
elif dash_count == 2:
tag, commits, sha1 = ver_str.split('-')
return "{}.post0.dev{}+{}".format(tag, commits, sha1)
elif dash_count == 3:
tag, commits, sha1, _ = ver_str.split('-')
return "{}.post0.dev{}+{}.dirty".format(tag, commits, sha1)
else:
raise RuntimeError("Invalid version string")
def get_versions(verbose=False):
vcs_kwds = {"refnames": git_refnames, "full": git_full}
parentdir = package + '-'
root = __location__
# pkg_path is the relative path from the top of the source
# tree (where the .git directory might live) to this file.
# Invert this to find the root of our package.
for _ in pkg_path.split(os.sep):
root = os.path.dirname(root)
# different version retrieval methods as (method, args, comment)
ver_retrieval = [
(version_from_keywords, (vcs_kwds, tag_prefix, verbose),
'expanded keywords'),
(version_from_parentdir, (parentdir, root, verbose), 'parentdir'),
(version_from_git, (tag_prefix, root, verbose), 'git')
]
for method, args, comment in ver_retrieval:
ver = method(*args)
if ver:
if verbose:
print("got version from {}".format(comment))
break
else:
ver = {"version": "unknown", "full": ""}
ver['version'] = git2pep440(ver['version'])
return ver<|fim▁end|> | "full": keywords["full"].strip()}
def version_from_parentdir(parentdir_prefix, root, verbose=False): |
<|file_name|>pilotTools.py<|end_file_name|><|fim▁begin|>########################################################################
# $Id$
########################################################################
""" A set of common tools to be used in pilot commands
"""
import sys
import time
import os
import pickle
import getopt
import imp
import types
import urllib2
import signal
__RCSID__ = '$Id$'
def printVersion( log ):
log.info( "Running %s" % " ".join( sys.argv ) )
try:
with open( "%s.run" % sys.argv[0], "w" ) as fd:
pickle.dump( sys.argv[1:], fd )
except OSError:
pass
log.info( "Version %s" % __RCSID__ )
def pythonPathCheck():
try:
os.umask( 18 ) # 022
pythonpath = os.getenv( 'PYTHONPATH', '' ).split( ':' )
print 'Directories in PYTHONPATH:', pythonpath
for p in pythonpath:
if p == '':
continue
try:
if os.path.normpath( p ) in sys.path:
# In case a given directory is twice in PYTHONPATH it has to removed only once
sys.path.remove( os.path.normpath( p ) )
except Exception, x:
print x
print "[EXCEPTION-info] Failing path:", p, os.path.normpath( p )
print "[EXCEPTION-info] sys.path:", sys.path
raise x
except Exception, x:
print x
print "[EXCEPTION-info] sys.executable:", sys.executable
print "[EXCEPTION-info] sys.version:", sys.version
print "[EXCEPTION-info] os.uname():", os.uname()
raise x
def alarmTimeoutHandler( *args ):
raise Exception( 'Timeout' )
def retrieveUrlTimeout( url, fileName, log, timeout = 0 ):
"""
Retrieve remote url to local file, with timeout wrapper
"""
urlData = ''
if timeout:
signal.signal( signal.SIGALRM, alarmTimeoutHandler )
# set timeout alarm
signal.alarm( timeout + 5 )
try:
remoteFD = urllib2.urlopen( url )
expectedBytes = 0
# Sometimes repositories do not return Content-Length parameter
try:
expectedBytes = long( remoteFD.info()[ 'Content-Length' ] )
except Exception as x:
expectedBytes = 0
data = remoteFD.read()
if fileName:
with open( fileName + '-local', "wb" ) as localFD:
localFD.write( data )
else:
urlData += data
remoteFD.close()
if len( data ) != expectedBytes and expectedBytes > 0:
log.error( 'URL retrieve: expected size does not match the received one' )
return False
if timeout:
signal.alarm( 0 )
if fileName:
return True
else:
return urlData
except urllib2.HTTPError, x:
if x.code == 404:
log.error( "URL retrieve: %s does not exist" % url )
if timeout:
signal.alarm( 0 )
return False
except urllib2.URLError:
log.error( 'Timeout after %s seconds on transfer request for "%s"' % ( str( timeout ), url ) )
return False
except Exception, x:
if x == 'Timeout':
log.error( 'Timeout after %s seconds on transfer request for "%s"' % ( str( timeout ), url ) )
if timeout:
signal.alarm( 0 )
raise x
class ObjectLoader( object ):
""" Simplified class for loading objects from a DIRAC installation.
Example:
ol = ObjectLoader()
object, modulePath = ol.loadObject( 'pilot', 'LaunchAgent' )
"""
def __init__( self, baseModules, log ):
""" init
"""
self.__rootModules = baseModules
self.log = log
def loadModule( self, modName, hideExceptions = False ):
""" Auto search which root module has to be used
"""
for rootModule in self.__rootModules:
impName = modName
if rootModule:
impName = "%s.%s" % ( rootModule, impName )
self.log.debug( "Trying to load %s" % impName )
module, parentPath = self.__recurseImport( impName, hideExceptions = hideExceptions )
#Error. Something cannot be imported. Return error
if module is None:
return None, None<|fim▁hole|> return module, parentPath
#Nothing found, continue
#Return nothing found
return None, None
def __recurseImport( self, modName, parentModule = None, hideExceptions = False ):
""" Internal function to load modules
"""
if type( modName ) in types.StringTypes:
modName = modName.split( '.' )
try:
if parentModule:
impData = imp.find_module( modName[0], parentModule.__path__ )
else:
impData = imp.find_module( modName[0] )
impModule = imp.load_module( modName[0], *impData )
if impData[0]:
impData[0].close()
except ImportError, excp:
if str( excp ).find( "No module named %s" % modName[0] ) == 0:
return None, None
errMsg = "Can't load %s in %s" % ( ".".join( modName ), parentModule.__path__[0] )
if not hideExceptions:
self.log.exception( errMsg )
return None, None
if len( modName ) == 1:
return impModule, parentModule.__path__[0]
return self.__recurseImport( modName[1:], impModule,
hideExceptions = hideExceptions )
def loadObject( self, package, moduleName, command ):
""" Load an object from inside a module
"""
loadModuleName = '%s.%s' % ( package, moduleName )
module, parentPath = self.loadModule( loadModuleName )
if module is None:
return None, None
try:
commandObj = getattr( module, command )
return commandObj, os.path.join( parentPath, moduleName )
except AttributeError, e:
self.log.error( 'Exception: %s' % str(e) )
return None, None
def getCommand( params, commandName, log ):
""" Get an instantiated command object for execution.
Commands are looked in the following modules in the order:
1. <CommandExtension>Commands
2. pilotCommands
3. <Extension>.WorkloadManagementSystem.PilotAgent.<CommandExtension>Commands
4. <Extension>.WorkloadManagementSystem.PilotAgent.pilotCommands
5. DIRAC.WorkloadManagementSystem.PilotAgent.<CommandExtension>Commands
6. DIRAC.WorkloadManagementSystem.PilotAgent.pilotCommands
Note that commands in 3.-6. can only be used if the DIRAC installation
has been done. DIRAC extensions are taken from -e ( --extraPackages ) option
of the pilot script.
"""
extensions = params.commandExtensions
modules = [ m + 'Commands' for m in extensions + ['pilot'] ]
commandObject = None
# Look for commands in the modules in the current directory first
for module in modules:
try:
impData = imp.find_module( module )
commandModule = imp.load_module( module, *impData )
commandObject = getattr( commandModule, commandName )
except Exception, _e:
pass
if commandObject:
return commandObject( params ), module
if params.diracInstalled:
diracExtensions = []
for ext in params.extensions:
if not ext.endswith( 'DIRAC' ):
diracExtensions.append( ext + 'DIRAC' )
else:
diracExtensions.append( ext )
diracExtensions += ['DIRAC']
ol = ObjectLoader( diracExtensions, log )
for module in modules:
commandObject, modulePath = ol.loadObject( 'WorkloadManagementSystem.PilotAgent',
module,
commandName )
if commandObject:
return commandObject( params ), modulePath
# No command could be instantiated
return None, None
class Logger( object ):
""" Basic logger object, for use inside the pilot. Just using print.
"""
def __init__( self, name = 'Pilot', debugFlag = False, pilotOutput = 'pilot.out' ):
self.debugFlag = debugFlag
self.name = name
self.out = pilotOutput
def __outputMessage( self, msg, level, header ):
if self.out:
with open( self.out, 'a' ) as outputFile:
for _line in msg.split( "\n" ):
if header:
outLine = "%s UTC %s [%s] %s" % ( time.strftime( '%Y-%m-%d %H:%M:%S', time.gmtime() ),
level,
self.name,
_line )
print outLine
if self.out:
outputFile.write( outLine + '\n' )
else:
print _line
outputFile.write( _line + '\n' )
sys.stdout.flush()
def setDebug( self ):
self.debugFlag = True
def debug( self, msg, header = True ):
if self.debugFlag:
self.__outputMessage( msg, "DEBUG", header )
def error( self, msg, header = True ):
self.__outputMessage( msg, "ERROR", header )
def warn( self, msg, header = True ):
self.__outputMessage( msg, "WARN", header )
def info( self, msg, header = True ):
self.__outputMessage( msg, "INFO", header )
class CommandBase( object ):
""" CommandBase is the base class for every command in the pilot commands toolbox
"""
def __init__( self, pilotParams, dummy='' ):
""" c'tor
Defines the logger and the pilot parameters
"""
self.pp = pilotParams
self.log = Logger( self.__class__.__name__ )
self.debugFlag = False
for o, _ in self.pp.optList:
if o == '-d' or o == '--debug':
self.log.setDebug()
self.debugFlag = True
self.log.debug( "\n\n Initialized command %s" % self.__class__ )
def executeAndGetOutput( self, cmd, environDict = None ):
""" Execute a command on the worker node and get the output
"""
self.log.info( "Executing command %s" % cmd )
try:
import subprocess # spawn new processes, connect to their input/output/error pipes, and obtain their return codes.
_p = subprocess.Popen( "%s" % cmd, shell = True, env=environDict, stdout = subprocess.PIPE,
stderr = subprocess.PIPE, close_fds = False )
# standard output
outData = _p.stdout.read().strip()
for line in outData:
sys.stdout.write( line )
sys.stdout.write( '\n' )
for line in _p.stderr:
sys.stdout.write( line )
sys.stdout.write( '\n' )
# return code
returnCode = _p.wait()
self.log.debug( "Return code of %s: %d" % ( cmd, returnCode ) )
return (returnCode, outData)
except ImportError:
self.log.error( "Error importing subprocess" )
def exitWithError( self, errorCode ):
""" Wrapper around sys.exit()
"""
self.log.info( "List of child processes of current PID:" )
retCode, _outData = self.executeAndGetOutput( "ps --forest -o pid,%%cpu,%%mem,tty,stat,time,cmd -g %d" % os.getpid() )
if retCode:
self.log.error( "Failed to issue ps [ERROR %d] " % retCode )
sys.exit( errorCode )
class PilotParams( object ):
""" Class that holds the structure with all the parameters to be used across all the commands
"""
MAX_CYCLES = 10
def __init__( self ):
""" c'tor
param names and defaults are defined here
"""
self.rootPath = os.getcwd()
self.originalRootPath = os.getcwd()
self.pilotRootPath = os.getcwd()
self.workingDir = os.getcwd()
self.optList = {}
self.keepPythonPath = False
self.debugFlag = False
self.local = False
self.commandExtensions = []
self.commands = ['GetPilotVersion', 'CheckWorkerNode', 'InstallDIRAC', 'ConfigureBasics', 'CheckCECapabilities',
'CheckWNCapabilities', 'ConfigureSite', 'ConfigureArchitecture', 'ConfigureCPURequirements',
'LaunchAgent']
self.extensions = []
self.tags = []
self.reqtags = []
self.site = ""
self.setup = ""
self.configServer = ""
self.installation = ""
self.ceName = ""
self.ceType = ''
self.queueName = ""
self.queueParameters = {}
self.platform = ""
self.minDiskSpace = 2560 #MB
self.jobCPUReq = 900
self.pythonVersion = '27'
self.userGroup = ""
self.userDN = ""
self.maxCycles = self.MAX_CYCLES
self.flavour = 'DIRAC'
self.gridVersion = ''
self.pilotReference = ''
self.releaseVersion = ''
self.releaseProject = ''
self.gateway = ""
self.useServerCertificate = False
self.pilotScriptName = ''
self.genericOption = ''
# DIRAC client installation environment
self.diracInstalled = False
self.diracExtensions = []
# Some commands can define environment necessary to execute subsequent commands
self.installEnv = os.environ
# If DIRAC is preinstalled this file will receive the updates of the local configuration
self.localConfigFile = ''
self.executeCmd = False
self.configureScript = 'dirac-configure'
self.architectureScript = 'dirac-platform'
self.certsLocation = '%s/etc/grid-security' % self.workingDir
self.pilotCFGFile = 'pilot.json'
self.pilotCFGFileLocation = 'http://lhcbproject.web.cern.ch/lhcbproject/dist/DIRAC3/defaults/'
# Pilot command options
self.cmdOpts = ( ( 'b', 'build', 'Force local compilation' ),
( 'd', 'debug', 'Set debug flag' ),
( 'e:', 'extraPackages=', 'Extra packages to install (comma separated)' ),
( 'E:', 'commandExtensions=', 'Python module with extra commands' ),
( 'X:', 'commands=', 'Pilot commands to execute commands' ),
( 'g:', 'grid=', 'lcg tools package version' ),
( 'h', 'help', 'Show this help' ),
( 'i:', 'python=', 'Use python<26|27> interpreter' ),
( 'k', 'keepPP', 'Do not clear PYTHONPATH on start' ),
( 'l:', 'project=', 'Project to install' ),
( 'p:', 'platform=', 'Use <platform> instead of local one' ),
( 'u:', 'url=', 'Use <url> to download tarballs' ),
( 'r:', 'release=', 'DIRAC release to install' ),
( 'n:', 'name=', 'Set <Site> as Site Name' ),
( 'D:', 'disk=', 'Require at least <space> MB available' ),
( 'M:', 'MaxCycles=', 'Maximum Number of JobAgent cycles to run' ),
( 'N:', 'Name=', 'CE Name' ),
( 'Q:', 'Queue=', 'Queue name' ),
( 'y:', 'CEType=', 'CE Type (normally InProcess)' ),
( 'S:', 'setup=', 'DIRAC Setup to use' ),
( 'C:', 'configurationServer=', 'Configuration servers to use' ),
( 'T:', 'CPUTime', 'Requested CPU Time' ),
( 'G:', 'Group=', 'DIRAC Group to use' ),
( 'O:', 'OwnerDN', 'Pilot OwnerDN (for private pilots)' ),
( 'U', 'Upload', 'Upload compiled distribution (if built)' ),
( 'V:', 'installation=', 'Installation configuration file' ),
( 'W:', 'gateway=', 'Configure <gateway> as DIRAC Gateway during installation' ),
( 's:', 'section=', 'Set base section for relative parsed options' ),
( 'o:', 'option=', 'Option=value to add' ),
( 'c', 'cert', 'Use server certificate instead of proxy' ),
( 'C:', 'certLocation=', 'Specify server certificate location' ),
( 'L:', 'pilotCFGLocation=', 'Specify pilot CFG location' ),
( 'F:', 'pilotCFGFile=', 'Specify pilot CFG file' ),
( 'R:', 'reference=', 'Use this pilot reference' ),
( 'x:', 'execute=', 'Execute instead of JobAgent' ),
)
self.__initOptions()
def __initOptions( self ):
""" Parses and interpret options on the command line
"""
self.optList, __args__ = getopt.getopt( sys.argv[1:],
"".join( [ opt[0] for opt in self.cmdOpts ] ),
[ opt[1] for opt in self.cmdOpts ] )
for o, v in self.optList:
if o == '-E' or o == '--commandExtensions':
self.commandExtensions = v.split( ',' )
elif o == '-X' or o == '--commands':
self.commands = v.split( ',' )
elif o == '-e' or o == '--extraPackages':
self.extensions = v.split( ',' )
elif o == '-n' or o == '--name':
self.site = v
elif o == '-N' or o == '--Name':
self.ceName = v
elif o == '-y' or o == '--CEType':
self.ceType = v
elif o == '-Q' or o == '--Queue':
self.queueName = v
elif o == '-R' or o == '--reference':
self.pilotReference = v
elif o == '-k' or o == '--keepPP':
self.keepPythonPath = True
elif o == '-d' or o == '--debug':
self.debugFlag = True
elif o in ( '-S', '--setup' ):
self.setup = v
elif o in ( '-C', '--configurationServer' ):
self.configServer = v
elif o in ( '-G', '--Group' ):
self.userGroup = v
elif o in ( '-x', '--execute' ):
self.executeCmd = v
elif o in ( '-O', '--OwnerDN' ):
self.userDN = v
elif o in ( '-V', '--installation' ):
self.installation = v
elif o == '-p' or o == '--platform':
self.platform = v
elif o == '-D' or o == '--disk':
try:
self.minDiskSpace = int( v )
except ValueError:
pass
elif o == '-r' or o == '--release':
self.releaseVersion = v.split(',',1)[0]
elif o in ( '-l', '--project' ):
self.releaseProject = v
elif o in ( '-W', '--gateway' ):
self.gateway = v
elif o == '-c' or o == '--cert':
self.useServerCertificate = True
elif o == '-C' or o == '--certLocation':
self.certsLocation = v
elif o == '-L' or o == '--pilotCFGLocation':
self.pilotCFGFileLocation = v
elif o == '-F' or o == '--pilotCFGFile':
self.pilotCFGFile = v
elif o == '-M' or o == '--MaxCycles':
try:
self.maxCycles = min( self.MAX_CYCLES, int( v ) )
except ValueError:
pass
elif o in ( '-T', '--CPUTime' ):
self.jobCPUReq = v
elif o in ( '-o', '--option' ):
self.genericOption = v<|fim▁end|> | #Huge success!
else: |
<|file_name|>QueryEditor.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import * as t from '../types';
import {logger} from '../utils/log';
import {TextEditor} from './TextEditor';
const log = logger('QueryEditor', {count: ['render']});
export interface Props extends React.ClassAttributes<any> {
query: t.Query;
sources: t.DataSource[];
updateTitle(id: string, title: string): void;
updateRaw(id: string, raw: string): void;
updateSourceId(id: string, sourceId: string): void;
}
export class QueryEditor extends React.PureComponent<Props> {
render(): JSX.Element {
log('render', this);
const {query, sources} = this.props;
return (
<div className="QueryEditor">
<div>
<select value={query.sourceId} onChange={this.doChangeSourceId}>
{sources.map(s => <option key={s.id} value={s.id}>{s.uri}</option>)}
</select>
</div>
<div>
<h3>
<input
type="text"
value={query.title}
onChange={this.doChangeTitle}
placeholder="query title"
/>
</h3>
</div>
<div style={{height: 400}}>
<TextEditor
text={query.raw}
onChange={this.doTextEditorChange}
ref={this.assignTextEditorRef}
/>
</div>
</div>
);
}<|fim▁hole|> doChangeSourceId = (e: React.ChangeEvent<HTMLSelectElement>): void => {
this.props.updateSourceId(this.props.query.id, e.target.value);
};
doChangeTitle = (e: React.ChangeEvent<HTMLInputElement>) => {
this.props.updateTitle(this.props.query.id, e.target.value);
};
textEditorRef: TextEditor;
assignTextEditorRef = (r: TextEditor) => (this.textEditorRef = r);
doTextEditorChange = (text: string): void => {
// the editor's `onChange` gets fired when we set a new value,
// so check that it doesn't already match what's in the store
// or it'll perform a redundant action/render
if (this.props.query.raw !== text) {
this.props.updateRaw(this.props.query.id, text);
}
};
}<|fim▁end|> | |
<|file_name|>pymem.rs<|end_file_name|><|fim▁begin|>use libc::{c_void, size_t};
#[cfg_attr(windows, link(name = "pythonXY"))]<|fim▁hole|> pub fn PyMem_Realloc(p: *mut c_void, n: size_t) -> *mut c_void;
pub fn PyMem_Free(p: *mut c_void);
}<|fim▁end|> | extern "C" {
pub fn PyMem_Malloc(n: size_t) -> *mut c_void; |
<|file_name|>batch.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>
package goleveldb
import (
store "github.com/blevesearch/upsidedown_store_api"
"github.com/syndtr/goleveldb/leveldb"
)
type Batch struct {
store *Store
merge *store.EmulatedMerge
batch *leveldb.Batch
}
func (b *Batch) Set(key, val []byte) {
b.batch.Put(key, val)
}
func (b *Batch) Delete(key []byte) {
b.batch.Delete(key)
}
func (b *Batch) Merge(key, val []byte) {
b.merge.Merge(key, val)
}
func (b *Batch) Reset() {
b.batch.Reset()
b.merge = store.NewEmulatedMerge(b.store.mo)
}
func (b *Batch) Close() error {
b.batch.Reset()
b.batch = nil
b.merge = nil
return nil
}<|fim▁end|> | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License. |
<|file_name|>application.js<|end_file_name|><|fim▁begin|>// animating the scroll effect
$('.screenshots').on('click', function(e){<|fim▁hole|><|fim▁end|> | e.preventDefault();
$("html, body").animate({ scrollTop: "950px", duration: 500 });
}); |
<|file_name|>TimeTextBox.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2008, The Dojo Foundation
All Rights Reserved.
Licensed under the Academic Free License version 2.1 or above OR the<|fim▁hole|> modified BSD license. For more information on Dojo licensing, see:
http://dojotoolkit.org/book/dojo-book-0-9/introduction/licensing
*/
if(!dojo._hasResource["dijit.form.TimeTextBox"]){
dojo._hasResource["dijit.form.TimeTextBox"]=true;
dojo.provide("dijit.form.TimeTextBox");
dojo.require("dijit._TimePicker");
dojo.require("dijit.form._DateTimeTextBox");
dojo.declare("dijit.form.TimeTextBox",dijit.form._DateTimeTextBox,{popupClass:"dijit._TimePicker",_selector:"time"});
}<|fim▁end|> | |
<|file_name|>General.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2017 Team Kodi
* http://kodi.tv
*
* This Program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2, or (at your option)
* any later version.
*
* This Program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with KODI; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*
*/
#include "General.h"
#include "addons/kodi-addon-dev-kit/include/kodi/General.h"
#include "Application.h"
#include "CompileInfo.h"
#include "ServiceBroker.h"
#include "addons/binary-addons/AddonDll.h"
#include "addons/binary-addons/BinaryAddonManager.h"
#include "addons/settings/GUIDialogAddonSettings.h"
#include "dialogs/GUIDialogKaiToast.h"
#include "filesystem/Directory.h"
#include "filesystem/SpecialProtocol.h"
#include "guilib/LocalizeStrings.h"
#ifdef TARGET_POSIX
#include "linux/XMemUtils.h"
#endif
#include "settings/Settings.h"
#include "utils/CharsetConverter.h"
#include "utils/log.h"
#include "utils/LangCodeExpander.h"
#include "utils/md5.h"
#include "utils/StringUtils.h"
#include "utils/URIUtils.h"
#include <string.h>
using namespace kodi; // addon-dev-kit namespace
namespace ADDON
{
void Interface_General::Init(AddonGlobalInterface* addonInterface)
{
addonInterface->toKodi->kodi = static_cast<AddonToKodiFuncTable_kodi*>(malloc(sizeof(AddonToKodiFuncTable_kodi)));
addonInterface->toKodi->kodi->get_addon_info = get_addon_info;
addonInterface->toKodi->kodi->open_settings_dialog = open_settings_dialog;
addonInterface->toKodi->kodi->get_localized_string = get_localized_string;
addonInterface->toKodi->kodi->unknown_to_utf8 = unknown_to_utf8;
addonInterface->toKodi->kodi->get_language = get_language;
addonInterface->toKodi->kodi->queue_notification = queue_notification;
addonInterface->toKodi->kodi->get_md5 = get_md5;
addonInterface->toKodi->kodi->get_temp_path = get_temp_path;
addonInterface->toKodi->kodi->get_region = get_region;
addonInterface->toKodi->kodi->get_free_mem = get_free_mem;
addonInterface->toKodi->kodi->get_global_idle_time = get_global_idle_time;
addonInterface->toKodi->kodi->get_current_skin_id = get_current_skin_id;
addonInterface->toKodi->kodi->kodi_version = kodi_version;
}
void Interface_General::DeInit(AddonGlobalInterface* addonInterface)
{
if (addonInterface->toKodi && /* <-- needed as long as the old addon way is used */
addonInterface->toKodi->kodi)
{
free(addonInterface->toKodi->kodi);
addonInterface->toKodi->kodi = nullptr;
}
}
char* Interface_General::get_addon_info(void* kodiBase, const char* id)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr || id == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p', id='%p')", __FUNCTION__, addon, id);
return nullptr;
}
std::string str;
if (strcmpi(id, "author") == 0)
str = addon->Author();
else if (strcmpi(id, "changelog") == 0)
str = addon->ChangeLog();
else if (strcmpi(id, "description") == 0)
str = addon->Description();
else if (strcmpi(id, "disclaimer") == 0)
str = addon->Disclaimer();
else if (strcmpi(id, "fanart") == 0)
str = addon->FanArt();
else if (strcmpi(id, "icon") == 0)
str = addon->Icon();
else if (strcmpi(id, "id") == 0)
str = addon->ID();
else if (strcmpi(id, "name") == 0)
str = addon->Name();
else if (strcmpi(id, "path") == 0)
str = addon->Path();
else if (strcmpi(id, "profile") == 0)
str = addon->Profile();
else if (strcmpi(id, "summary") == 0)
str = addon->Summary();
else if (strcmpi(id, "type") == 0)
str = ADDON::CAddonInfo::TranslateType(addon->Type());
else if (strcmpi(id, "version") == 0)
str = addon->Version().asString();
else
{
CLog::Log(LOGERROR, "Interface_General::%s - add-on '%s' requests invalid id '%s'",
__FUNCTION__, addon->Name().c_str(), id);
return nullptr;
}
char* buffer = strdup(str.c_str());
return buffer;
}
bool Interface_General::open_settings_dialog(void* kodiBase)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p')", __FUNCTION__, addon);
return false;
}
// show settings dialog
AddonPtr addonInfo;
if (!CAddonMgr::GetInstance().GetAddon(addon->ID(), addonInfo))
{
CLog::Log(LOGERROR, "Interface_General::%s - Could not get addon information for '%s'", __FUNCTION__, addon->ID().c_str());
return false;
}
return CGUIDialogAddonSettings::ShowForAddon(addonInfo);
}
char* Interface_General::get_localized_string(void* kodiBase, long label_id)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (!addon)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p')", __FUNCTION__, addon);
return nullptr;
}
if (g_application.m_bStop)
return nullptr;
std::string label = g_localizeStrings.GetAddonString(addon->ID(), label_id);
if (label.empty())
label = g_localizeStrings.Get(label_id);
char* buffer = strdup(label.c_str());
return buffer;
}
char* Interface_General::unknown_to_utf8(void* kodiBase, const char* source, bool* ret, bool failOnBadChar)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (!addon || !source || !ret)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p', source='%p', ret='%p')", __FUNCTION__, addon, source, ret);
return nullptr;
}
std::string string;
*ret = g_charsetConverter.unknownToUTF8(source, string, failOnBadChar);
char* buffer = strdup(string.c_str());
return buffer;
}
char* Interface_General::get_language(void* kodiBase, int format, bool region)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (!addon)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p')", __FUNCTION__, addon);
return nullptr;
}
std::string string = g_langInfo.GetEnglishLanguageName();
switch (format)
{
case LANG_FMT_ISO_639_1:
{
std::string langCode;
g_LangCodeExpander.ConvertToISO6391(string, langCode);
string = langCode;
if (region)
{
std::string region2Code;
g_LangCodeExpander.ConvertToISO6391(g_langInfo.GetRegionLocale(), region2Code);
if (!region2Code.empty())
string += "-" + region2Code;
}
break;
}
case LANG_FMT_ISO_639_2:
{
std::string langCode;
g_LangCodeExpander.ConvertToISO6392B(string, langCode);
string = langCode;
if (region)
{
std::string region3Code;
g_LangCodeExpander.ConvertToISO6392B(g_langInfo.GetRegionLocale(), region3Code);
if (!region3Code.empty())
string += "-" + region3Code;
}
break;
}
case LANG_FMT_ENGLISH_NAME:
default:
{
if (region)
string += "-" + g_langInfo.GetCurrentRegion();
break;
}
}
char* buffer = strdup(string.c_str());
return buffer;
}
bool Interface_General::queue_notification(void* kodiBase, int type, const char* header,
const char* message, const char* imageFile,
unsigned int displayTime, bool withSound,
unsigned int messageTime)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr || message == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p', message='%p')", __FUNCTION__, addon, message);
return false;
}
std::string usedHeader;
if (header && strlen(header) > 0)
usedHeader = header;
else
usedHeader = addon->Name();
QueueMsg qtype = static_cast<QueueMsg>(type);
if (qtype != QUEUE_OWN_STYLE)
{
CGUIDialogKaiToast::eMessageType usedType;
switch (qtype)
{
case QUEUE_WARNING:
usedType = CGUIDialogKaiToast::Warning;
withSound = true;
CLog::Log(LOGDEBUG, "Interface_General::%s - %s - Warning Message: '%s'", __FUNCTION__, addon->Name().c_str(), message);
break;
case QUEUE_ERROR:
usedType = CGUIDialogKaiToast::Error;
withSound = true;
CLog::Log(LOGDEBUG, "Interface_General::%s - %s - Error Message : '%s'", __FUNCTION__, addon->Name().c_str(), message);
break;
case QUEUE_INFO:
default:
usedType = CGUIDialogKaiToast::Info;
withSound = false;
CLog::Log(LOGDEBUG, "Interface_General::%s - %s - Info Message : '%s'", __FUNCTION__, addon->Name().c_str(), message);
break;
}
if (imageFile && strlen(imageFile) > 0)
{
CLog::Log(LOGERROR, "Interface_General::%s - To use given image file '%s' must be type value set to 'QUEUE_OWN_STYLE'", __FUNCTION__, imageFile);
}
CGUIDialogKaiToast::QueueNotification(usedType, usedHeader, message, 3000, withSound);
}
else
{
CGUIDialogKaiToast::QueueNotification(imageFile, usedHeader, message, displayTime, withSound, messageTime);
}
return true;
}
void Interface_General::get_md5(void* kodiBase, const char* text, char* md5)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr || text == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p', text='%p')", __FUNCTION__, addon, text);
return;
}
std::string md5Int = XBMC::XBMC_MD5::GetMD5(std::string(text));
strncpy(md5, md5Int.c_str(), 40);
}<|fim▁hole|> CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - called with empty kodi instance pointer", __FUNCTION__);
return nullptr;
}
const std::string tempPath = URIUtils::AddFileToFolder(CServiceBroker::GetBinaryAddonManager().GetTempAddonBasePath(), addon->ID());
if (!XFILE::CDirectory::Exists(tempPath))
XFILE::CDirectory::Create(tempPath);
return strdup(CSpecialProtocol::TranslatePath(tempPath).c_str());
}
char* Interface_General::get_region(void* kodiBase, const char* id)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr || id == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p', id='%p')", __FUNCTION__, addon, id);
return nullptr;
}
std::string result;
if (strcmpi(id, "datelong") == 0)
{
result = g_langInfo.GetDateFormat(true);
StringUtils::Replace(result, "DDDD", "%A");
StringUtils::Replace(result, "MMMM", "%B");
StringUtils::Replace(result, "D", "%d");
StringUtils::Replace(result, "YYYY", "%Y");
}
else if (strcmpi(id, "dateshort") == 0)
{
result = g_langInfo.GetDateFormat(false);
StringUtils::Replace(result, "MM", "%m");
StringUtils::Replace(result, "DD", "%d");
#ifdef TARGET_WINDOWS
StringUtils::Replace(result, "M", "%#m");
StringUtils::Replace(result, "D", "%#d");
#else
StringUtils::Replace(result, "M", "%-m");
StringUtils::Replace(result, "D", "%-d");
#endif
StringUtils::Replace(result, "YYYY", "%Y");
}
else if (strcmpi(id, "tempunit") == 0)
result = g_langInfo.GetTemperatureUnitString();
else if (strcmpi(id, "speedunit") == 0)
result = g_langInfo.GetSpeedUnitString();
else if (strcmpi(id, "time") == 0)
{
result = g_langInfo.GetTimeFormat();
StringUtils::Replace(result, "H", "%H");
StringUtils::Replace(result, "h", "%I");
StringUtils::Replace(result, "mm", "%M");
StringUtils::Replace(result, "ss", "%S");
StringUtils::Replace(result, "xx", "%p");
}
else if (strcmpi(id, "meridiem") == 0)
result = StringUtils::Format("%s/%s",
g_langInfo.GetMeridiemSymbol(MeridiemSymbolAM).c_str(),
g_langInfo.GetMeridiemSymbol(MeridiemSymbolPM).c_str());
else
{
CLog::Log(LOGERROR, "Interface_General::%s - add-on '%s' requests invalid id '%s'",
__FUNCTION__, addon->Name().c_str(), id);
return nullptr;
}
char* buffer = strdup(result.c_str());
return buffer;
}
void Interface_General::get_free_mem(void* kodiBase, long* free, long* total, bool as_bytes)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr || free == nullptr || total == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p', free='%p', total='%p')", __FUNCTION__, addon, free, total);
return;
}
MEMORYSTATUSEX stat;
stat.dwLength = sizeof(MEMORYSTATUSEX);
GlobalMemoryStatusEx(&stat);
*free = static_cast<long>(stat.ullAvailPhys);
*total = static_cast<long>(stat.ullTotalPhys);
if (!as_bytes)
{
*free = *free / ( 1024 * 1024 );
*total = *total / ( 1024 * 1024 );
}
}
int Interface_General::get_global_idle_time(void* kodiBase)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p')", __FUNCTION__, addon);
return -1;
}
return g_application.GlobalIdleTime();
}
char* Interface_General::get_current_skin_id(void* kodiBase)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p')", __FUNCTION__, addon);
return nullptr;
}
return strdup(CServiceBroker::GetSettings().GetString(CSettings::SETTING_LOOKANDFEEL_SKIN).c_str());
}
void Interface_General::kodi_version(void* kodiBase, char** compile_name, int* major, int* minor, char** revision, char** tag, char** tagversion)
{
CAddonDll* addon = static_cast<CAddonDll*>(kodiBase);
if (addon == nullptr || compile_name == nullptr || major == nullptr || minor == nullptr ||
revision == nullptr || tag == nullptr || tagversion == nullptr)
{
CLog::Log(LOGERROR, "Interface_General::%s - invalid data (addon='%p', compile_name='%p', major='%p', minor='%p', revision='%p', tag='%p', tagversion='%p')",
__FUNCTION__, addon, compile_name, major, minor, revision, tag, tagversion);
return;
}
*compile_name = strdup(CCompileInfo::GetAppName());
*major = CCompileInfo::GetMajor();
*minor = CCompileInfo::GetMinor();
*revision = strdup(CCompileInfo::GetSCMID());
std::string tagStr = CCompileInfo::GetSuffix();
if (StringUtils::StartsWithNoCase(tagStr, "alpha"))
{
*tag = strdup("alpha");
*tagversion = strdup(StringUtils::Mid(tagStr, 5).c_str());
}
else if (StringUtils::StartsWithNoCase(tagStr, "beta"))
{
*tag = strdup("beta");
*tagversion = strdup(StringUtils::Mid(tagStr, 4).c_str());
}
else if (StringUtils::StartsWithNoCase(tagStr, "rc"))
{
*tag = strdup("releasecandidate");
*tagversion = strdup(StringUtils::Mid(tagStr, 2).c_str());
}
else if (tagStr.empty())
*tag = strdup("stable");
else
*tag = strdup("prealpha");
}
} /* namespace ADDON */<|fim▁end|> |
char* Interface_General::get_temp_path(void* kodiBase)
{ |
<|file_name|>BatchFile.cpp<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------*
* Copyright (C) 2012 Daniel Bolaños - www.bltek.com - Boulder Language Technologies *
* *
* www.bavieca.org is the website of the Bavieca Speech Recognition Toolkit *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); *
* you may not use this file except in compliance with the License. *
* You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software *
* distributed under the License is distributed on an "AS IS" BASIS, *
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. *
* See the License for the specific language governing permissions and *
* limitations under the License. *
*---------------------------------------------------------------------------------------------*/
#include "BatchFile.h"
#include "FileInput.h"
#include "IOBase.h"
#include <stdlib.h>
#include <limits.h>
namespace Bavieca {
// constructor
BatchFile::BatchFile(const char *strFile, const char *strType) {
m_strFile = strFile;
m_strType = strType;
m_iColumns = UINT_MAX;
}
// destructor
BatchFile::~BatchFile() {
for(VBatchEntry::iterator it = m_vBatchEntry.begin() ; it != m_vBatchEntry.end() ; ++it) {
delete *it;
}
}
// load the content of the batch file
void BatchFile::load() {
// get the column names
int iColumn = 0;
char *strColumnName = new char[m_strType.length()+1];
int iIndex = 0;
const char *str = m_strType.c_str();
while(1) {
if ((*str == '|') || (*str == 0)) {
if (iIndex < 1) {
BVC_ERROR<< "wrong type" << endl;
}
strColumnName[iIndex] = 0;
m_mColumnName.insert(map<string,int>::value_type(strColumnName,iColumn++));
iIndex = 0;
} else {
strColumnName[iIndex++] = *str;
}
if (*str == 0) {
break;
}
++str;
}
delete [] strColumnName;
m_iColumns = iColumn;
FileInput file(m_strFile.c_str(),false);
file.open();
int iLine = 1;
string strLine;
while(std::getline(file.getStream(),strLine)) {
if (strLine.empty()) {
break;
}
std::stringstream s(strLine);
BatchEntry *batchEntry = new BatchEntry();
for(unsigned int i=0 ; i < m_iColumns ; ++i) {
string strField;
IOBase::readString(s,strField);
batchEntry->vStrElement.push_back(strField);
}
if (batchEntry->vStrElement.size() != m_iColumns) {
BVC_ERROR<< "wrong number of columns in line :" << iLine << endl;
}
m_vBatchEntry.push_back(batchEntry);<|fim▁hole|>
file.close();
}
// return the field in the given entry and column
const char *BatchFile::getField(unsigned int iEntry, unsigned int iColumn) {
assert(iEntry < m_vBatchEntry.size());
assert(iColumn < m_iColumns);
return m_vBatchEntry[iEntry]->vStrElement[iColumn].c_str();
}
// return the field in the given entry by its name
const char *BatchFile::getField(unsigned int iEntry, const char *strColumnName) {
// get the column by its name
map<string,int>::iterator it = m_mColumnName.find(strColumnName);
assert(it != m_mColumnName.end());
return getField(iEntry,it->second);
}
}; // end-of-namespace<|fim▁end|> | ++iLine;
} |
<|file_name|>RandomUtilTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2018.
* J. Melzer
*/
package com.jmelzer.jitty.utl;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Created by J. Melzer on 27.03.2018.
*/
public class RandomUtilTest {
@Test<|fim▁hole|>
for (int i = 0; i < 20; i++) {
int n = RandomUtil.randomIntFromInterval(0, 10);
assertTrue("must be between 0 and 10 not " + n, n > -1 && n < 11);
}
}
@Test
public void nextInt() {
RandomUtil randomUtil = new RandomUtil(0, 10);
for (int i = 0; i < 11; i++) {
assertTrue(randomUtil.hasMoreNumbers());
int n = randomUtil.nextInt();
assertTrue("must be between 0 and 10 not " + n, n > -1 && n < 11);
}
assertFalse(randomUtil.hasMoreNumbers());
}
}<|fim▁end|> | public void randomIntFromInterval() { |
<|file_name|>RequestHelper.java<|end_file_name|><|fim▁begin|>/*
* nassh-relay - Relay Server for tunneling ssh through a http endpoint
*
* Website: https://github.com/zyclonite/nassh-relay
*
* Copyright 2014-2020 zyclonite networx
* http://zyclonite.net
* Developer: Lukas Prettenthaler
*/
package net.zyclonite.nassh.util;
import io.vertx.core.http.HttpServerRequest;
public class RequestHelper {
private RequestHelper() {
//
}
public static String getHost(final HttpServerRequest request) {
if (request.headers().contains("X-Forwarded-Host")) {
return request.headers().get("X-Forwarded-Host");<|fim▁hole|>
public static String getRemoteHost(final HttpServerRequest request) {
if (request.headers().contains("X-Real-IP")) {
return request.headers().get("X-Real-IP");
} else {
return request.remoteAddress().host();
}
}
}<|fim▁end|> | } else {
return request.host();
}
} |
<|file_name|>create.js<|end_file_name|><|fim▁begin|>var middleware = require('../../middleware')
;
function handler (req, res, next) {
var profile = { };
res.send(200, res.profile);
next( );
return;
}
var endpoint = {
path: '/users/:user/create'
, method: 'get'<|fim▁hole|> , handler: handler
};
module.exports = function configure (opts, server) {
function mount (server) {
server.get(endpoint.path, endpoint.middleware, updateUser, endpoint.handler);
}
var userInfo = middleware.minUser(opts, server);
var mandatory = middleware.mandatory(opts, server);
endpoint.middleware = mandatory.concat(userInfo);
function updateUser (req, res, next) {
var profile = { };
var name = req.params.user;
var update = req.params;
server.updateUser(name, update, {save:false, create:true}, function (result) {
server.log.debug('UPDATED user', arguments);
res.profile = result;
next( );
});
}
endpoint.mount = mount;
return endpoint;
};
module.exports.endpoint = endpoint;<|fim▁end|> | |
<|file_name|>fetcher.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015-2019 Jack Morton <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import random
from bs4 import BeautifulSoup
from urllib.request import urlopen, Request
import nhlscrappo.constants as C
from nhlscrappo import GameType, ReportType
class ReportFetcher(object):
"""Responsible for fetching and validating the report fields"""
__docroot = "http://www.nhl.com/"
def __init__(self, season, game_num, game_type, report_type):
self.season = season
self.game_num = game_num
self.game_type = game_type
self.report_type = report_type
self.soup = None
def __random_user_agent(self):
user_agent_list = [ \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, " \
"like Gecko) Chrome/22.0.1207.1 Safari/537.1", \
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 " \
"(KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 "\
"(KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like " \
"Gecko) Chrome/20.0.1090.0 Safari/536.6", \
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, " \
"like Gecko) Chrome/19.77.34.5 Safari/537.1", \
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like " \
"Gecko) Chrome/19.0.1084.9 Safari/536.5", \
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like " \
"Gecko) Chrome/19.0.1084.36 Safari/536.5", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, " \
"like Gecko) Chrome/19.0.1063.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1063.0 Safari/536.3",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3" \
" (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1062.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, " \
"like Gecko) Chrome/19.0.1062.0 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1061.1 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, " \
"like Gecko) Chrome/19.0.1061.1 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1061.1 Safari/536.3", \
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like " \
"Gecko) Chrome/19.0.1061.0 Safari/536.3", \
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like " \
"Gecko) Chrome/19.0.1055.1 Safari/535.24", \
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, " \
"like Gecko) Chrome/19.0.1055.1 Safari/535.24"]
return random.choice(user_agent_list)
def __load_html(self, url):
if "http://" in url:
req = Request(url, headers = {
"User-Agent": self.__random_user_agent(), \
"Accept": "text/html,application/xhtml+xml,application/" \
"xml;q=0.9,*/*;q=0.8", \
"Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.3", \
"Accept-Encoding": "none", \
"Accept-Language": "en-US,en;q=0.8", \
"Connection": "keep-alive"})
with urlopen(req) as handle:
html = handle.read()
handle.close()
return BeautifulSoup(html.decode("utf-8"), features="lxml")
else:
with open(url, "r") as handle:
html = handle.read()
handle.close()
return BeautifulSoup(html, features="lxml")
def make_soup(self, local = None):
if local:
self.soup = self.__load_html(local)
else:
url = self.__docroot + "scores/htmlreports/" + str(self.season) + \
str(self.season + 1) + "/" + self.report_type.value + "0" + \
str(self.game_type.value) + ("%04i" % self.game_num) + ".HTM"
self.soup = self.__load_html(url)
return self.soup
@property
def season(self):
return self._season
@season.setter
def season(self, value):
if not isinstance(value, int):
raise TypeError("season must be of type int")<|fim▁hole|> str(C.MIN_SEASON) + " until " + str(C.MAX_SEASON) + \
" are supported")
self._season = int(value)
@property
def game_num(self):
return self._game_num
@game_num.setter
def game_num(self, value):
if not isinstance(value, int):
raise TypeError("game_num must be of type int")
self._game_num = value
@property
def game_type(self):
return self._game_type
@game_type.setter
def game_type(self, value):
if value in GameType:
self._game_type = value
else:
raise TypeError("game_type must be of type GameType")
@property
def report_type(self):
return self._report_type
@report_type.setter
def report_type(self, value):
if value in ReportType:
self._report_type = value
else:
raise TypeError("report_type must be of type ReportType")
@property
def soup(self):
return self._soup
@soup.setter
def soup(self, value):
if value is not None and not isinstance(value, BeautifulSoup):
raise TypeError("soup must be of type BeautifulSoup")
self._soup = value<|fim▁end|> | if value < C.MIN_SEASON or value > C.MAX_SEASON:
raise ValueError("Only seasons starting from " + \ |
<|file_name|>HTMLTableCaptionElement.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 1997 Martin Jones ([email protected])
* (C) 1997 Torben Weis ([email protected])
* (C) 1998 Waldo Bastian ([email protected])
* (C) 1999 Lars Knoll ([email protected])
* (C) 1999 Antti Koivisto ([email protected])
* Copyright (C) 2003, 2004, 2005, 2006, 2010 Apple Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public License
* along with this library; see the file COPYING.LIB. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "config.h"
#include "HTMLTableCaptionElement.h"
#include "Attribute.h"
#include "CSSPropertyNames.h"
#include "HTMLNames.h"
namespace WebCore {
using namespace HTMLNames;
inline HTMLTableCaptionElement::HTMLTableCaptionElement(const QualifiedName& tagName, Document& document)
: HTMLElement(tagName, document)
{
ASSERT(hasTagName(captionTag));
}
PassRefPtr<HTMLTableCaptionElement> HTMLTableCaptionElement::create(const QualifiedName& tagName, Document& document)
{
return adoptRef(new HTMLTableCaptionElement(tagName, document));
}
bool HTMLTableCaptionElement::isPresentationAttribute(const QualifiedName& name) const
{
if (name == alignAttr)
return true;
return HTMLElement::isPresentationAttribute(name);
}
void HTMLTableCaptionElement::collectStyleForPresentationAttribute(const QualifiedName& name, const AtomicString& value, MutableStylePropertySet* style)
{
if (name == alignAttr) {
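        // Map the legacy align attribute onto caption-side, e.g. (illustrative)
        // <caption align="bottom"> yields the presentation style "caption-side: bottom".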
if (!value.isEmpty())
addPropertyToPresentationAttributeStyle(style, CSSPropertyCaptionSide, value);
} else
HTMLElement::collectStyleForPresentationAttribute(name, value, style);<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>JustcoinBasePollingService.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2012 - 2014 Xeiam LLC http://xeiam.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.xeiam.xchange.justcoin.service.polling;
import java.io.IOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import si.mazi.rescu.RestProxyFactory;
import com.xeiam.xchange.ExchangeSpecification;
import com.xeiam.xchange.currency.CurrencyPair;
import com.xeiam.xchange.justcoin.Justcoin;
import com.xeiam.xchange.justcoin.JustcoinAdapters;
import com.xeiam.xchange.justcoin.dto.marketdata.JustcoinTicker;
import com.xeiam.xchange.service.BaseExchangeService;
import com.xeiam.xchange.utils.AuthUtils;
public class JustcoinBasePollingService<T extends Justcoin> extends BaseExchangeService {
protected final T justcoin;
private final Set<CurrencyPair> currencyPairs = new HashSet<CurrencyPair>();
/**
* Constructor
*
* @param exchangeSpecification The {@link ExchangeSpecification}
*/
public JustcoinBasePollingService(Class<T> type, ExchangeSpecification exchangeSpecification) {
super(exchangeSpecification);
this.justcoin = RestProxyFactory.createProxy(type, exchangeSpecification.getSslUri());
}
@Override
public Collection<CurrencyPair> getExchangeSymbols() throws IOException {
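    // Tickers are fetched only on the first call; afterwards the adapted pairs are served from the currencyPairs cache.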
if (currencyPairs.isEmpty()) {
for (final JustcoinTicker ticker : justcoin.getTickers()) {
final CurrencyPair currencyPair = JustcoinAdapters.adaptCurrencyPair(ticker.getId());
currencyPairs.add(currencyPair);
}
}
return currencyPairs;
}
protected String getBasicAuthentication() {
<|fim▁hole|><|fim▁end|> | return AuthUtils.getBasicAuth(exchangeSpecification.getUserName(), exchangeSpecification.getPassword());
}
} |
<|file_name|>errors.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::error::Error;
/// Error type returned by _try methods
#[derive(Debug,PartialEq)]
pub struct PatternError(pub String);
impl fmt::Display for PatternError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,"{}",self.0)
}
}
impl Error for PatternError {
fn description(&self) -> &str {<|fim▁hole|><|fim▁end|> | &self.0
}
} |
<|file_name|>celleditable.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! Interface for widgets which are used for editing cells
use gtk::{mod, ffi};
use gtk::cast::GTK_CELL_EDITABLE;
pub trait CellEditableTrait : gtk::WidgetTrait {
fn editing_done(&self) {
unsafe { ffi::gtk_cell_editable_editing_done(GTK_CELL_EDITABLE(self.get_widget())) }<|fim▁hole|>
fn remove_widget(&self) {
unsafe { ffi::gtk_cell_editable_remove_widget(GTK_CELL_EDITABLE(self.get_widget())) }
}
}<|fim▁end|> | } |
<|file_name|>collector_region_info.cpp<|end_file_name|><|fim▁begin|>#include "generator/regions/collector_region_info.hpp"
#include "generator/feature_builder.hpp"
#include "generator/osm_element.hpp"
#include "coding/file_writer.hpp"
#include "base/assert.hpp"
#include "base/logging.hpp"
#include "base/macros.hpp"
#include <cstring>
#include <map>
#include <string>
namespace generator
{
namespace regions
{
std::string const CollectorRegionInfo::kDefaultExt = ".regions.bin";
uint8_t const CollectorRegionInfo::kVersion = 0;
PlaceType EncodePlaceType(std::string const & place)
{
static std::map<std::string, PlaceType> const m = {
{"city", PlaceType::City},
{"town", PlaceType::Town},
{"village", PlaceType::Village},
{"suburb", PlaceType::Suburb},
{"neighbourhood", PlaceType::Neighbourhood},
{"hamlet", PlaceType::Hamlet},
{"isolated_dwelling", PlaceType::IsolatedDwelling}
};
auto const it = m.find(place);
return it == m.end() ? PlaceType::Unknown : it->second;<|fim▁hole|>{
switch (level)
{
case PlaceLevel::Country:
return "country";
case PlaceLevel::Region:
return "region";
  case PlaceLevel::Subregion:
return "subregion";
case PlaceLevel::Locality:
return "locality";
case PlaceLevel::Suburb:
return "suburb";
case PlaceLevel::Sublocality:
return "sublocality";
case PlaceLevel::Unknown:
return nullptr;
case PlaceLevel::Count:
UNREACHABLE();
}
UNREACHABLE();
}
CollectorRegionInfo::CollectorRegionInfo(std::string const & filename) : m_filename(filename) {}
void CollectorRegionInfo::CollectFeature(const FeatureBuilder1 &, OsmElement const & el)
{
base::GeoObjectId const osmId = GetGeoObjectId(el);
RegionData regionData;
FillRegionData(osmId, el, regionData);
m_mapRegionData.emplace(osmId, regionData);
// If the region is a country.
if (regionData.m_adminLevel == AdminLevel::Two)
{
IsoCode isoCode;
FillIsoCode(osmId, el, isoCode);
m_mapIsoCode.emplace(osmId, isoCode);
}
}
void CollectorRegionInfo::Save()
{
FileWriter writer(m_filename);
WriteToSink(writer, kVersion);
WriteMap(writer, m_mapRegionData);
WriteMap(writer, m_mapIsoCode);
}
void CollectorRegionInfo::FillRegionData(base::GeoObjectId const & osmId, OsmElement const & el,
RegionData & rd)
{
rd.m_osmId = osmId;
rd.m_place = EncodePlaceType(el.GetTag("place"));
auto const al = el.GetTag("admin_level");
if (al.empty())
return;
try
{
auto const adminLevel = std::stoi(al);
// Administrative level is in the range [1 ... 12].
// https://wiki.openstreetmap.org/wiki/Tag:boundary=administrative
rd.m_adminLevel = (adminLevel >= 1 && adminLevel <= 12) ?
static_cast<AdminLevel>(adminLevel) : AdminLevel::Unknown;
}
catch (std::exception const & e) // std::invalid_argument, std::out_of_range
{
LOG(::base::LWARNING, (e.what()));
rd.m_adminLevel = AdminLevel::Unknown;
}
}
void CollectorRegionInfo::FillIsoCode(base::GeoObjectId const & osmId, OsmElement const & el,
IsoCode & rd)
{
rd.m_osmId = osmId;
rd.SetAlpha2(el.GetTag("ISO3166-1:alpha2"));
rd.SetAlpha3(el.GetTag("ISO3166-1:alpha3"));
rd.SetNumeric(el.GetTag("ISO3166-1:numeric"));
}
void IsoCode::SetAlpha2(std::string const & alpha2)
{
CHECK_LESS_OR_EQUAL(alpha2.size() + 1, ARRAY_SIZE(m_alpha2), ());
std::strcpy(m_alpha2, alpha2.data());
}
void IsoCode::SetAlpha3(std::string const & alpha3)
{
CHECK_LESS_OR_EQUAL(alpha3.size() + 1, ARRAY_SIZE(m_alpha3), ());
std::strcpy(m_alpha3, alpha3.data());
}
void IsoCode::SetNumeric(std::string const & numeric)
{
CHECK_LESS_OR_EQUAL(numeric.size() + 1, ARRAY_SIZE(m_numeric), ());
std::strcpy(m_numeric, numeric.data());
}
} // namespace regions
} // namespace generator<|fim▁end|> | }
char const * GetLabel(PlaceLevel level) |
<|file_name|>test_medium.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
# vim: ts=4 sw=4 expandtab ai
"""Test class for Medium CLI"""
from ddt import ddt
from fauxfactory import gen_string, gen_alphanumeric
from robottelo.cli.factory import CLIFactoryError
from robottelo.test import CLITestCase
from robottelo.common.decorators import data, run_only_on
from robottelo.cli.factory import make_medium, make_os
from robottelo.cli.medium import Medium
URL = "http://mirror.fakeos.org/%s/$major.$minor/os/$arch"
OSES = [
'Archlinux',
'Debian',
'Gentoo',
'Redhat',
'Solaris',
'Suse',
'Windows',
]
@run_only_on('sat')
@ddt
class TestMedium(CLITestCase):
@data({'name': gen_string("latin1", 10)},
{'name': gen_string("utf8", 10)},
{'name': gen_string("alpha", 10)},
{'name': gen_string("alphanumeric", 10)},
{'name': gen_string("numeric", 10)},
{'name': gen_string("html", 10)})
def test_positive_create_1(self, test_data):
"""@Test: Check if Medium can be created
@Feature: Medium - Positive Create
@Assert: Medium is created
"""
new_obj = make_medium(test_data)
# Can we find the new object?
result = Medium.info({'id': new_obj['id']})
self.assertEqual(result.return_code, 0, "Failed to create object")
self.assertEqual(len(result.stderr), 0,
"There should not be an exception here")
self.assertGreater(
len(result.stdout), 0, "Failed to fetch medium")
self.assertEqual(new_obj['name'],
result.stdout['name'])
@data({'name': gen_string("latin1", 10)},
{'name': gen_string("utf8", 10)},
{'name': gen_string("alpha", 10)},
{'name': gen_string("alphanumeric", 10)},
{'name': gen_string("numeric", 10)},
{'name': gen_string("html", 10)})
def test_positive_delete_1(self, test_data):
"""@Test: Check if Medium can be deleted
@Feature: Medium - Positive Delete
@Assert: Medium is deleted
"""
new_obj = make_medium(test_data)
# Can we find the new object?
result = Medium.info({'id': new_obj['id']})
self.assertEqual(result.return_code, 0)
self.assertEqual(len(result.stderr), 0)
self.assertEqual(new_obj['name'], result.stdout['name'])
return_value = Medium.delete({'id': new_obj['id']})
self.assertEqual(return_value.return_code, 0, "Deletion failed")
self.assertEqual(
len(return_value.stderr), 0, "There should not be an error here")
# Can we find the object?
result = Medium.info({'id': new_obj['id']})
self.assertNotEqual(
result.return_code, 0, "Medium should be deleted")
self.assertGreater(len(result.stderr), 0,
"There should be an exception here")
self.assertEqual(
len(result.stdout), 0, "Output should be blank.")
def test_addoperatingsystem_medium(self):
"""@Test: Check if Medium can be associated with operating system
@Feature: Medium - Add operating system
@Assert: Operating system added
"""
try:
medium = make_medium({'name': gen_alphanumeric(6)})
os = make_os()
except CLIFactoryError as err:
self.fail(err)
args = {
'id': medium['id'],
'operatingsystem-id': os['id'],
}
result = Medium().add_operating_system(args)
self.assertEqual(result.return_code, 0,
"Could not associate the operating system to media")
self.assertEqual(len(result.stderr), 0,
"There should not be an exception here")
def test_removeoperatingsystem_medium(self):
"""@Test: Check if operating system can be removed from media
@Feature: Medium - Remove operating system
@Assert: Operating system removed
"""
try:
medium = make_medium({'name': gen_alphanumeric(6)})
os = make_os()
except CLIFactoryError as err:
self.fail(err)
args = {<|fim▁hole|> }
result = Medium().add_operating_system(args)
self.assertEqual(result.return_code, 0,
"Could not associate the operating system to media")
self.assertEqual(len(result.stderr), 0,
"There should not be an exception here")
result = Medium().info({'id': medium['id']})
self.assertIn(os['title'],
result.stdout['operating-systems'],
"Operating system is not added to the media")
result = Medium().remove_operating_system(args)
self.assertEqual(result.return_code, 0,
"Removed the operating system from media")
self.assertEqual(len(result.stderr), 0,
"There should not be an exception here")
result = Medium().info({'id': medium['id']})
self.assertNotIn(os['name'],
result.stdout['operating-systems'],
"Operating system is not removed from the media")
def test_medium_update(self):
"""@Test: Check if medium can be updated
@Feature: Medium - Update medium
@Assert: Medium updated
"""
new_name = gen_alphanumeric(6)
try:
medium = make_medium({'name': gen_alphanumeric(6)})
except CLIFactoryError as e:
self.fail(e)
args = {
'name': medium['name'],
'new-name': new_name,
}
result = Medium().update(args)
self.assertEqual(result.return_code, 0,
"Could not update media")
self.assertEqual(len(result.stderr), 0,
"There should not be an exception here")
result = Medium().info({'id': medium['id']})
self.assertEqual(result.stdout['name'], new_name,
"Medium name was not updated")<|fim▁end|> | 'id': medium['id'],
'operatingsystem-id': os['id'], |
<|file_name|>RGBToHSB.js<|end_file_name|><|fim▁begin|>//This file is automatically rebuilt by the Cesium build process.
/*global define*/
define(function() {
'use strict';
return "/**\n\
* Converts an RGB color to HSB (hue, saturation, brightness)\n\
* HSB <-> RGB conversion with minimal branching: {@link http://lolengine.net/blog/2013/07/27/rgb-to-hsv-in-glsl}\n\
*\n\
* @name czm_RGBToHSB\n\
* @glslFunction\n\<|fim▁hole|> * \n\
* @param {vec3} rgb The color in RGB.\n\
*\n\
* @returns {vec3} The color in HSB.\n\
*\n\
* @example\n\
* vec3 hsb = czm_RGBToHSB(rgb);\n\
* hsb.z *= 0.1;\n\
* rgb = czm_HSBToRGB(hsb);\n\
*/\n\
\n\
const vec4 K_RGB2HSB = vec4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);\n\
\n\
vec3 czm_RGBToHSB(vec3 rgb)\n\
{\n\
vec4 p = mix(vec4(rgb.bg, K_RGB2HSB.wz), vec4(rgb.gb, K_RGB2HSB.xy), step(rgb.b, rgb.g));\n\
vec4 q = mix(vec4(p.xyw, rgb.r), vec4(rgb.r, p.yzx), step(p.x, rgb.r));\n\
\n\
float d = q.x - min(q.w, q.y);\n\
return vec3(abs(q.z + (q.w - q.y) / (6.0 * d + czm_epsilon7)), d / (q.x + czm_epsilon7), q.x);\n\
}\n\
";
});<|fim▁end|> | |
<|file_name|>test_clean_prime_step.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016-2018 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|># along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from testtools.matchers import (
Contains,
DirExists,
FileExists,
Not
)
from tests import integration
class CleanPrimeStepTestCase(integration.TestCase):
def setUp(self):
super().setUp()
self.copy_project_to_cwd('independent-parts')
self.run_snapcraft('prime')
def test_clean_prime_step(self):
bindir = os.path.join(self.prime_dir, 'bin')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
output = self.run_snapcraft(
['clean', '--step=prime'], debug=False)
self.assertThat(self.prime_dir, Not(DirExists()))
self.assertThat(self.stage_dir, DirExists())
self.assertThat(self.parts_dir, DirExists())
# Assert that the priming area was removed wholesale, not a part at a
# time (since we didn't specify any parts).
self.assertThat(output, Contains("Cleaning up priming area"))
self.expectThat(output, Not(Contains('part1')))
self.expectThat(output, Not(Contains('part2')))
# Now try to prime again
self.run_snapcraft('prime')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
def test_clean_prime_step_single_part(self):
bindir = os.path.join(self.prime_dir, 'bin')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
self.run_snapcraft(['clean', 'part1', '--step=prime'])
self.assertThat(os.path.join(bindir, 'file1'), Not(FileExists()))
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
self.assertThat(self.stage_dir, DirExists())
self.assertThat(self.parts_dir, DirExists())
# Now try to prime again
self.run_snapcraft('prime')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
def test_clean_with_deprecated_strip_step(self):
bindir = os.path.join(self.prime_dir, 'bin')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())
self.run_snapcraft(['clean', '--step=strip'])
self.assertThat(self.prime_dir, Not(DirExists()))
self.assertThat(self.stage_dir, DirExists())
self.assertThat(self.parts_dir, DirExists())
# Now try to prime again
self.run_snapcraft('prime')
self.assertThat(os.path.join(bindir, 'file1'), FileExists())
self.assertThat(os.path.join(bindir, 'file2'), FileExists())<|fim▁end|> | # GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License |
<|file_name|>InsectNameUserAgentRequestFilter.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2017 Jonas Zeiger <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package net.talpidae.base.client;
import net.talpidae.base.insect.config.SlaveSettings;
import java.io.IOException;
import java.util.Optional;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.ext.Provider;
import lombok.extern.slf4j.Slf4j;
import lombok.val;
import static com.google.common.base.Strings.isNullOrEmpty;
/**
* Adds the insect name to the user-agent header value.
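 * <p>
 * For example (illustrative): with insect name {@code slave-1}, an outgoing header
 * {@code User-Agent: Resteasy} becomes {@code User-Agent: slave-1/Resteasy}; a request
 * without a User-Agent header is sent with {@code User-Agent: slave-1}.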
*/
@Singleton<|fim▁hole|>{
private final String insectName;
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
@Inject
public InsectNameUserAgentRequestFilter(Optional<SlaveSettings> slaveSettings)
{
insectName = slaveSettings.map(SlaveSettings::getName).orElse(null);
}
@Override
public void filter(ClientRequestContext requestContext) throws IOException
{
if (insectName != null)
{
val userAgent = requestContext.getHeaderString(HttpHeaders.USER_AGENT);
val nextUserAgent = isNullOrEmpty(userAgent) ? insectName : insectName + "/" + userAgent;
requestContext.getHeaders().putSingle(HttpHeaders.USER_AGENT, nextUserAgent);
}
}
}<|fim▁end|> | @Provider
@Slf4j
public class InsectNameUserAgentRequestFilter implements ClientRequestFilter |
<|file_name|>multiplexed.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use log::debug;
use std::collections::HashMap;
use std::convert::Into;
use std::fmt;
use std::fmt::{Debug, Formatter};
use std::sync::{Arc, Mutex};
use crate::protocol::{TInputProtocol, TMessageIdentifier, TOutputProtocol, TStoredInputProtocol};
use super::{handle_process_result, TProcessor};
const MISSING_SEPARATOR_AND_NO_DEFAULT: &str =
"missing service separator and no default processor set";
type ThreadSafeProcessor = Box<dyn TProcessor + Send + Sync>;
/// A `TProcessor` that can demux service calls to multiple underlying
/// Thrift services.
///
/// Users register service-specific `TProcessor` instances with a
/// `TMultiplexedProcessor`, and then register that processor with a server
/// implementation. Following that, all incoming service calls are automatically
/// routed to the service-specific `TProcessor`.
///
/// A `TMultiplexedProcessor` can only handle messages sent by a
/// `TMultiplexedOutputProtocol`.
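///
/// A sketch of typical usage (illustrative only; the two service processors are
/// assumed to be generated elsewhere):
///
/// ```ignore
/// let mut processor = TMultiplexedProcessor::new();
/// processor.register("calculator", Box::new(calculator_processor), false).unwrap();
/// processor.register("scanner", Box::new(scanner_processor), true).unwrap(); // also the default
/// // The server then calls `processor.process(...)`, which routes a message named
/// // "calculator:add" to the processor registered under "calculator".
/// ```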
#[derive(Default)]
pub struct TMultiplexedProcessor {
stored: Mutex<StoredProcessors>,
}
#[derive(Default)]
struct StoredProcessors {
processors: HashMap<String, Arc<ThreadSafeProcessor>>,
default_processor: Option<Arc<ThreadSafeProcessor>>,
}
impl TMultiplexedProcessor {
/// Create a new `TMultiplexedProcessor` with no registered service-specific
/// processors.
pub fn new() -> TMultiplexedProcessor {
TMultiplexedProcessor {
stored: Mutex::new(StoredProcessors {
processors: HashMap::new(),
default_processor: None,
}),
}
}
/// Register a service-specific `processor` for the service named
/// `service_name`. This implementation is also backwards-compatible with
/// non-multiplexed clients. Set `as_default` to `true` to allow
/// non-namespaced requests to be dispatched to a default processor.
///
/// Returns success if a new entry was inserted. Returns an error if:
/// * A processor exists for `service_name`
/// * You attempt to register a processor as default, and an existing default exists
#[allow(clippy::map_entry)]
pub fn register<S: Into<String>>(
&mut self,
service_name: S,
processor: Box<dyn TProcessor + Send + Sync>,
as_default: bool,
) -> crate::Result<()> {
let mut stored = self.stored.lock().unwrap();
let name = service_name.into();
if !stored.processors.contains_key(&name) {
let processor = Arc::new(processor);
if as_default {
if stored.default_processor.is_none() {
stored.processors.insert(name, processor.clone());
stored.default_processor = Some(processor.clone());
Ok(())
} else {
Err("cannot reset default processor".into())
}
} else {
stored.processors.insert(name, processor);
Ok(())
}
} else {
Err(format!("cannot overwrite existing processor for service {}", name).into())<|fim▁hole|>
fn process_message(
&self,
msg_ident: &TMessageIdentifier,
i_prot: &mut dyn TInputProtocol,
o_prot: &mut dyn TOutputProtocol,
) -> crate::Result<()> {
let (svc_name, svc_call) = split_ident_name(&msg_ident.name);
debug!("routing svc_name {:?} svc_call {}", &svc_name, &svc_call);
let processor: Option<Arc<ThreadSafeProcessor>> = {
let stored = self.stored.lock().unwrap();
if let Some(name) = svc_name {
stored.processors.get(name).cloned()
} else {
stored.default_processor.clone()
}
};
match processor {
Some(arc) => {
let new_msg_ident = TMessageIdentifier::new(
svc_call,
msg_ident.message_type,
msg_ident.sequence_number,
);
let mut proxy_i_prot = TStoredInputProtocol::new(i_prot, new_msg_ident);
(*arc).process(&mut proxy_i_prot, o_prot)
}
None => Err(missing_processor_message(svc_name).into()),
}
}
}
impl TProcessor for TMultiplexedProcessor {
fn process(
&self,
i_prot: &mut dyn TInputProtocol,
o_prot: &mut dyn TOutputProtocol,
) -> crate::Result<()> {
let msg_ident = i_prot.read_message_begin()?;
debug!("process incoming msg id:{:?}", &msg_ident);
let res = self.process_message(&msg_ident, i_prot, o_prot);
handle_process_result(&msg_ident, res, o_prot)
}
}
impl Debug for TMultiplexedProcessor {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
let stored = self.stored.lock().unwrap();
write!(
f,
"TMultiplexedProcess {{ registered_count: {:?} default: {:?} }}",
stored.processors.keys().len(),
stored.default_processor.is_some()
)
}
}
fn split_ident_name(ident_name: &str) -> (Option<&str>, &str) {
ident_name
.find(':')
.map(|pos| {
let (svc_name, svc_call) = ident_name.split_at(pos);
let (_, svc_call) = svc_call.split_at(1); // remove colon from service call name
(Some(svc_name), svc_call)
})
.or_else(|| Some((None, ident_name)))
.unwrap()
}
fn missing_processor_message(svc_name: Option<&str>) -> String {
match svc_name {
Some(name) => format!("no processor found for service {}", name),
None => MISSING_SEPARATOR_AND_NO_DEFAULT.to_owned(),
}
}
#[cfg(test)]
mod tests {
use std::convert::Into;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use crate::protocol::{
TBinaryInputProtocol, TBinaryOutputProtocol, TMessageIdentifier, TMessageType,
};
use crate::transport::{ReadHalf, TBufferChannel, TIoChannel, WriteHalf};
use crate::{ApplicationError, ApplicationErrorKind};
use super::*;
#[test]
fn should_split_name_into_proper_separator_and_service_call() {
let ident_name = "foo:bar_call";
let (serv, call) = split_ident_name(&ident_name);
assert_eq!(serv, Some("foo"));
assert_eq!(call, "bar_call");
}
#[test]
fn should_return_full_ident_if_no_separator_exists() {
let ident_name = "bar_call";
let (serv, call) = split_ident_name(&ident_name);
assert_eq!(serv, None);
assert_eq!(call, "bar_call");
}
#[test]
fn should_write_error_if_no_separator_found_and_no_default_processor_exists() {
let (mut i, mut o) = build_objects();
let sent_ident = TMessageIdentifier::new("foo", TMessageType::Call, 10);
o.write_message_begin(&sent_ident).unwrap();
o.flush().unwrap();
o.transport.copy_write_buffer_to_read_buffer();
o.transport.empty_write_buffer();
let p = TMultiplexedProcessor::new();
p.process(&mut i, &mut o).unwrap(); // at this point an error should be written out
i.transport.set_readable_bytes(&o.transport.write_bytes());
let rcvd_ident = i.read_message_begin().unwrap();
let expected_ident = TMessageIdentifier::new("foo", TMessageType::Exception, 10);
assert_eq!(rcvd_ident, expected_ident);
let rcvd_err = crate::Error::read_application_error_from_in_protocol(&mut i).unwrap();
let expected_err = ApplicationError::new(
ApplicationErrorKind::Unknown,
MISSING_SEPARATOR_AND_NO_DEFAULT,
);
assert_eq!(rcvd_err, expected_err);
}
#[test]
fn should_write_error_if_separator_exists_and_no_processor_found() {
let (mut i, mut o) = build_objects();
let sent_ident = TMessageIdentifier::new("missing:call", TMessageType::Call, 10);
o.write_message_begin(&sent_ident).unwrap();
o.flush().unwrap();
o.transport.copy_write_buffer_to_read_buffer();
o.transport.empty_write_buffer();
let p = TMultiplexedProcessor::new();
p.process(&mut i, &mut o).unwrap(); // at this point an error should be written out
i.transport.set_readable_bytes(&o.transport.write_bytes());
let rcvd_ident = i.read_message_begin().unwrap();
let expected_ident = TMessageIdentifier::new("missing:call", TMessageType::Exception, 10);
assert_eq!(rcvd_ident, expected_ident);
let rcvd_err = crate::Error::read_application_error_from_in_protocol(&mut i).unwrap();
let expected_err = ApplicationError::new(
ApplicationErrorKind::Unknown,
missing_processor_message(Some("missing")),
);
assert_eq!(rcvd_err, expected_err);
}
#[derive(Default)]
struct Service {
pub invoked: Arc<AtomicBool>,
}
impl TProcessor for Service {
fn process(
&self,
_: &mut dyn TInputProtocol,
_: &mut dyn TOutputProtocol,
) -> crate::Result<()> {
let res = self
.invoked
.compare_and_swap(false, true, Ordering::Relaxed);
if res {
Ok(())
} else {
Err("failed swap".into())
}
}
}
#[test]
fn should_route_call_to_correct_processor() {
let (mut i, mut o) = build_objects();
// build the services
let svc_1 = Service {
invoked: Arc::new(AtomicBool::new(false)),
};
let atm_1 = svc_1.invoked.clone();
let svc_2 = Service {
invoked: Arc::new(AtomicBool::new(false)),
};
let atm_2 = svc_2.invoked.clone();
// register them
let mut p = TMultiplexedProcessor::new();
p.register("service_1", Box::new(svc_1), false).unwrap();
p.register("service_2", Box::new(svc_2), false).unwrap();
// make the service call
let sent_ident = TMessageIdentifier::new("service_1:call", TMessageType::Call, 10);
o.write_message_begin(&sent_ident).unwrap();
o.flush().unwrap();
o.transport.copy_write_buffer_to_read_buffer();
o.transport.empty_write_buffer();
p.process(&mut i, &mut o).unwrap();
// service 1 should have been invoked, not service 2
assert_eq!(atm_1.load(Ordering::Relaxed), true);
assert_eq!(atm_2.load(Ordering::Relaxed), false);
}
#[test]
fn should_route_call_to_correct_processor_if_no_separator_exists_and_default_processor_set() {
let (mut i, mut o) = build_objects();
// build the services
let svc_1 = Service {
invoked: Arc::new(AtomicBool::new(false)),
};
let atm_1 = svc_1.invoked.clone();
let svc_2 = Service {
invoked: Arc::new(AtomicBool::new(false)),
};
let atm_2 = svc_2.invoked.clone();
// register them
let mut p = TMultiplexedProcessor::new();
p.register("service_1", Box::new(svc_1), false).unwrap();
p.register("service_2", Box::new(svc_2), true).unwrap(); // second processor is default
// make the service call (it's an old client, so we have to be backwards compatible)
let sent_ident = TMessageIdentifier::new("old_call", TMessageType::Call, 10);
o.write_message_begin(&sent_ident).unwrap();
o.flush().unwrap();
o.transport.copy_write_buffer_to_read_buffer();
o.transport.empty_write_buffer();
p.process(&mut i, &mut o).unwrap();
// service 2 should have been invoked, not service 1
assert_eq!(atm_1.load(Ordering::Relaxed), false);
assert_eq!(atm_2.load(Ordering::Relaxed), true);
}
fn build_objects() -> (
TBinaryInputProtocol<ReadHalf<TBufferChannel>>,
TBinaryOutputProtocol<WriteHalf<TBufferChannel>>,
) {
let c = TBufferChannel::with_capacity(128, 128);
let (r_c, w_c) = c.split().unwrap();
(
TBinaryInputProtocol::new(r_c, true),
TBinaryOutputProtocol::new(w_c, true),
)
}
}<|fim▁end|> | }
} |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import logging
from django.db.models import DateTimeField, Model, Manager
from django.db.models.query import QuerySet
from django.db.models.fields.related import \
OneToOneField, ManyToManyField, ManyToManyRel
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now
from django.core.exceptions import ObjectDoesNotExist
LOGGER = logging.getLogger(__name__)
def _unset_related_one_to_one(obj, field):
old_value = getattr(obj, field.column)
if old_value is not None:
LOGGER.debug(
'Setting %s.%s to None on object %s (old value: %s)',
obj._meta.model.__name__, field.column, obj.pk, old_value)
# Unset the fk field (e.g. Foo.baz_id)
setattr(obj, field.column, None)
# Unset the related object field (e.g. Foo.baz)
setattr(obj, field.name, None)
def _unset_related_many_to_many(obj, field):
manager = getattr(obj, field.name)
old_values = manager.values_list('pk', flat=True)
LOGGER.debug(
'Removing all objects from %s.%s on object %s (old values: %s)',
obj._meta.model.__name__, field.name, obj.pk,
', '.join(str(pk) for pk in old_values))
manager.remove(*manager.all())
def _unset_related_objects_relations(obj):
LOGGER.debug('Soft-deleting object %s %s',
obj._meta.model.__name__, obj.pk)
for field in obj._meta.get_fields():
field_type = type(field)
if field_type is OneToOneField:
_unset_related_one_to_one(obj, field)
elif field_type in (ManyToManyRel, ManyToManyField):
_unset_related_many_to_many(obj, field)
for related in obj._meta.get_all_related_objects():
# Unset related objects' relation
rel_name = related.get_accessor_name()
if related.one_to_one:
# Handle one-to-one relations.
try:
related_object = getattr(obj, rel_name)
except ObjectDoesNotExist:
pass
else:
_unset_related_one_to_one(related_object, related.field)
related_object.save()
else:
# Handle one-to-many and many-to-many relations.
related_objects = getattr(obj, rel_name)
if related_objects.count():
affected_objects_id = ', '.join(
str(pk) for pk in related_objects.values_list(
'pk', flat=True))
old_values = ', '.join(
str(val) for val in related_objects.values_list(
related.field.name, flat=True))
LOGGER.debug(
'Setting %s.%s to None on objects %s (old values: %s)',
related_objects.model.__name__, related.field.name,
affected_objects_id, old_values)
related_objects.update(**{related.field.name: None})
class SoftDeleteQuerySet(QuerySet):
"""This QuerySet subclass implements soft deletion of objects.
"""
def delete(self):
"""Soft delete all objects included in this queryset.
"""
for obj in self:
_unset_related_objects_relations(obj)
self.update(deleted=now())
def undelete(self):
"""Soft undelete all objects included in this queryset.
"""
objects = self.filter(deleted__isnull=False)
if objects.count():
LOGGER.debug(
'Soft undeleting %s objects: %s', self.model.__name__,
', '.join(str(pk)
for pk in objects.values_list('pk', flat=True)))
objects.update(deleted=None)
class SoftDeleteManager(Manager.from_queryset(SoftDeleteQuerySet)):
"""This Manager hides soft deleted objects by default,
and exposes methods to access them.
"""
def _get_base_queryset(self):
return super(SoftDeleteManager, self).get_queryset()
def get_queryset(self):
"""Return NOT DELETED objects.
"""
return self._get_base_queryset().filter(deleted__isnull=True)
def deleted(self):
"""Return DELETED objects.
"""
return self._get_base_queryset().filter(deleted__isnull=False)
def with_deleted(self):
"""Return ALL objects.
"""
return self._get_base_queryset()
class SoftDeleteModel(Model):
"""Simply inherit this class to enable soft deletion on a model.
"""
class Meta:
abstract = True
objects = SoftDeleteManager()
deleted = DateTimeField(verbose_name=_('deleted'), null=True, blank=True)
def delete(self):
"""Soft delete this object.<|fim▁hole|> self.deleted = now()
self.save()
return self
def undelete(self):
"""Undelete this soft-deleted object.
"""
if self.deleted is not None:
LOGGER.debug('Soft-undeleting object %s %s',
self._meta.model.__name__, self.pk)
self.deleted = None
self.save()
return self<|fim▁end|> | """
_unset_related_objects_relations(self) |
<|file_name|>wallet_library.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! The following document is a minimalist version of Diem Wallet. Note that this Wallet does
//! not promote security as the mnemonic is stored in unencrypted form. In future iterations,
//! we will be releasing more robust Wallet implementations. It is our intention to present a
//! foundation that is simple to understand and incrementally improve the DiemWallet
//! implementation and its security guarantees throughout testnet. For a more robust wallet
//! reference, the authors suggest auditing the file of the same name in the rust-wallet crate.
//! That file can be found here:
//!
//! https://github.com/rust-bitcoin/rust-wallet/blob/master/wallet/src/walletlibrary.rs
use crate::{
error::WalletError,
io_utils,
key_factory::{ChildNumber, KeyFactory, Seed},
mnemonic::Mnemonic,
};
use anyhow::Result;
use diem_crypto::ed25519::Ed25519PrivateKey;
use diem_types::{
account_address::AccountAddress,
transaction::{
authenticator::AuthenticationKey, helpers::TransactionSigner, RawTransaction,
SignedTransaction,
},
};
use rand::{rngs::OsRng, Rng};
use std::{collections::HashMap, path::Path};
/// WalletLibrary contains all the information needed to recreate a particular wallet
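///
/// A minimal usage sketch (illustrative only; `raw_txn` is an assumed `RawTransaction`
/// whose sender is an address held by this wallet):
///
/// ```ignore
/// let mut wallet = WalletLibrary::new();             // fresh mnemonic from OS randomness
/// let (auth_key, _child) = wallet.new_address().unwrap();
/// let sender = auth_key.derived_address();
/// let signed = wallet.sign_txn(raw_txn).unwrap();    // errors if the sender is unknown to the wallet
/// ```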
pub struct WalletLibrary {
mnemonic: Mnemonic,
key_factory: KeyFactory,
addr_map: HashMap<AccountAddress, ChildNumber>,
key_leaf: ChildNumber,
}
impl WalletLibrary {
/// Constructor that generates a Mnemonic from OS randomness and subsequently instantiates an
/// empty WalletLibrary from that Mnemonic
#[allow(clippy::new_without_default)]
pub fn new() -> Self {
let mut rng = OsRng;
let data: [u8; 32] = rng.gen();
let mnemonic = Mnemonic::mnemonic(&data).unwrap();
Self::new_from_mnemonic(mnemonic)
}
/// Constructor that instantiates a new WalletLibrary from Mnemonic
pub fn new_from_mnemonic(mnemonic: Mnemonic) -> Self {
let seed = Seed::new(&mnemonic, "DIEM");
WalletLibrary {
mnemonic,
key_factory: KeyFactory::new(&seed).unwrap(),
addr_map: HashMap::new(),
key_leaf: ChildNumber(0),
}
}
/// Function that returns the string representation of the WalletLibrary Mnemonic
/// NOTE: This is not secure, and in general the mnemonic should be stored in encrypted format
pub fn mnemonic(&self) -> String {
self.mnemonic.to_string()
}
/// Function that writes the wallet Mnemonic to file
/// NOTE: This is not secure, and in general the Mnemonic would need to be decrypted before it
/// can be written to file; otherwise the encrypted Mnemonic should be written to file
pub fn write_recovery(&self, output_file_path: &Path) -> Result<()> {
io_utils::write_recovery(&self, &output_file_path)?;
Ok(())
}
/// Recover wallet from input_file_path
pub fn recover(input_file_path: &Path) -> Result<WalletLibrary> {
io_utils::recover(&input_file_path)
}
/// Get the current ChildNumber in u64 format
pub fn key_leaf(&self) -> u64 {
self.key_leaf.0
}
/// Function that iterates from the current key_leaf until the supplied depth
pub fn generate_addresses(&mut self, depth: u64) -> Result<()> {
let current = self.key_leaf.0;
if current > depth {<|fim▁hole|> "Addresses already generated up to the supplied depth".to_string(),
)
.into());
}
while self.key_leaf != ChildNumber(depth) {
let _ = self.new_address();
}
Ok(())
}
/// Function that allows to get the address of a particular key at a certain ChildNumber
pub fn new_address_at_child_number(
&mut self,
child_number: ChildNumber,
) -> Result<AccountAddress> {
let child = self.key_factory.private_child(child_number)?;
Ok(child.get_address())
}
/// Function that generates a new key and adds it to the addr_map and subsequently returns the
/// AuthenticationKey associated to the PrivateKey, along with it's ChildNumber
pub fn new_address(&mut self) -> Result<(AuthenticationKey, ChildNumber)> {
let child = self.key_factory.private_child(self.key_leaf)?;
let authentication_key = child.get_authentication_key();
let old_key_leaf = self.key_leaf;
self.key_leaf.increment();
if self
.addr_map
.insert(authentication_key.derived_address(), old_key_leaf)
.is_none()
{
Ok((authentication_key, old_key_leaf))
} else {
Err(WalletError::DiemWalletGeneric(
"This address is already in your wallet".to_string(),
)
.into())
}
}
/// Returns a list of all addresses controlled by this wallet that are currently held by the
/// addr_map
pub fn get_addresses(&self) -> Result<Vec<AccountAddress>> {
let mut ret = Vec::with_capacity(self.addr_map.len());
let rev_map = self
.addr_map
.iter()
.map(|(&k, &v)| (v.as_ref().to_owned(), k.to_owned()))
.collect::<HashMap<_, _>>();
for i in 0..self.addr_map.len() as u64 {
match rev_map.get(&i) {
Some(account_address) => {
ret.push(*account_address);
}
None => {
return Err(WalletError::DiemWalletGeneric(format!(
"Child num {} not exist while depth is {}",
i,
self.addr_map.len()
))
.into())
}
}
}
Ok(ret)
}
/// Simple public function that allows to sign a Diem RawTransaction with the PrivateKey
/// associated to a particular AccountAddress. If the PrivateKey associated to an
/// AccountAddress is not contained in the addr_map, then this function will return an Error
pub fn sign_txn(&self, txn: RawTransaction) -> Result<SignedTransaction> {
if let Some(child) = self.addr_map.get(&txn.sender()) {
let child_key = self.key_factory.private_child(*child)?;
let signature = child_key.sign(&txn);
Ok(SignedTransaction::new(
txn,
child_key.get_public(),
signature,
))
} else {
Err(WalletError::DiemWalletGeneric(
"Well, that address is nowhere to be found... This is awkward".to_string(),
)
.into())
}
}
/// Return private key for an address in the wallet
pub fn get_private_key(&self, address: &AccountAddress) -> Result<Ed25519PrivateKey> {
if let Some(child) = self.addr_map.get(&address) {
Ok(self.key_factory.private_child(*child)?.get_private_key())
} else {
Err(WalletError::DiemWalletGeneric("missing address".to_string()).into())
}
}
/// Return authentication key (AuthenticationKey) for an address in the wallet
pub fn get_authentication_key(&self, address: &AccountAddress) -> Result<AuthenticationKey> {
if let Some(child) = self.addr_map.get(&address) {
Ok(self
.key_factory
.private_child(*child)?
.get_authentication_key())
} else {
Err(WalletError::DiemWalletGeneric("missing address".to_string()).into())
}
}
}
/// WalletLibrary naturally support TransactionSigner trait.
impl TransactionSigner for WalletLibrary {
fn sign_txn(&self, raw_txn: RawTransaction) -> Result<SignedTransaction, anyhow::Error> {
Ok(self.sign_txn(raw_txn)?)
}
}<|fim▁end|> | return Err(WalletError::DiemWalletGeneric( |
<|file_name|>core.rs<|end_file_name|><|fim▁begin|>use std::ffi::OsString;
use std::fs;
use std::fs::Permissions;
use std::io;
use std::io::Stderr;
use std::io::Stdin;
use std::io::Stdout;
use std::io::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::PathBuf;
use structopt::StructOpt;
use super::Error;
use super::Result;
enum Context {
/// The path is absolute (has root).
Absolute,
/// The `interactive` option is present.
Interactive,
}
#[derive(Debug, StructOpt)]
#[structopt(about = "Recursively change the mode of files or directories.")]
struct Options {
/// The mode to use for files.
#[structopt(short = "f", long = "file")]
file: Option<String>,
/// The mode to use for directories.
#[structopt(short = "d", long = "dir")]
dir: Option<String>,
/// Do not overwrite any files (verbose).
#[structopt(short = "D", long = "dry-run")]
dry_run: bool,
/// Prompt before overwriting each file.
#[structopt(short = "i", long = "interactive")]
interactive: bool,
/// Suppress all interaction.
#[structopt(short = "s", long = "suppress")]
suppress: bool,
/// Explain what's being done.
#[structopt(short = "V", long = "verbose")]
verbose: bool,
/// Show this message.
#[structopt(short = "h", long = "help")]
help: bool,
/// Show the version.
#[structopt(short = "v", long = "version")]
version: bool,
/// The paths to be modified by this tool.
#[structopt(name = "PATHS", parse(from_str))]
paths: Vec<PathBuf>,
}
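// Example invocations (illustrative; the binary name `chmodrt` is assumed from the
// crate and is not defined in this file). Note that `--file` and `--dir` are mutually
// exclusive and the mode is parsed as octal:
//
//     chmodrt -f 644 notes.txt src/    # recursively set files to mode 0644
//     chmodrt -d 755 src/              # recursively set directories to mode 0755
//     chmodrt -D -f 600 secrets/       # dry run: only report what would change
//     chmodrt -s -f 644 /srv/www       # never prompt, even for absolute paths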
pub struct Chmodrt {
options: Options,
stderr: Stderr,
stdout: Stdout,
stdin: Stdin,
}
impl Chmodrt {
/// Constructs this program from an iterable of arguments.
pub fn from_iter<I>(iter: I) -> Result<Self>
where
Self: Sized,
I: IntoIterator,
I::Item: Into<OsString> + Clone,
{
return Ok(
Self {
options: Options::from_iter_safe(iter)?,
stderr: io::stderr(),
stdout: io::stdout(),
stdin: io::stdin(),
}
);
}
/// Replaces the standard error stream for this program.
pub fn stderr(&mut self, stderr: Stderr) -> &mut Self {
self.stderr = stderr;
return self;
}
/// Replaces the standard output stream for this program.
pub fn stdout(&mut self, stdout: Stdout) -> &mut Self {
self.stdout = stdout;
return self;
}
/// Replaces the standard input stream for this program.
pub fn stdin(&mut self, stdin: Stdin) -> &mut Self {
self.stdin = stdin;
return self;
}
/// Runs this program and writes all errors.
pub fn run(&mut self) -> Result<()> {
match self.run_inner() {
Ok(val) => {
return Ok(val);
},
Err(err) => {
writeln!(self.stderr, "Error: {}", err)?;
return Err(err);
},
}
}
/// Runs this program.
fn run_inner(&mut self) -> Result<()> {
// Write the help or version message
if self.options.help {
return self.help();
}
if self.options.version {
return self.version();
}
// Validate the options
self.validate()?;
// Handle the paths
return self.change();
}
/// Validates the options.
fn validate(&self) -> Result<()> {
return if {
self.options.interactive && self.options.suppress ||
self.has_file() && self.has_dir()
} {
Err(Error::Conflict)
} else if {
!self.has_file() &&
!self.has_dir()
} {
Err(Error::Missing)
} else {
Ok(())
};
}
/// Writes the help message to the standard error stream.
fn help(&mut self) -> Result<()> {
Options::clap().write_help(&mut self.stderr)?;
writeln!(self.stderr, "")?;
return Ok(());
}
/// Writes the version message to the standard error stream.
fn version(&mut self) -> Result<()> {
Options::clap().write_version(&mut self.stderr)?;
writeln!(self.stderr, "")?;
return Ok(());
}
/// Authorizes directory and file access by prompting the user and reading
/// from the standard input stream.
fn auth(&mut self, path: &PathBuf, context: Context) -> Result<bool> {
// Determine the appropriate prompt
let prompt = match context {
Context::Absolute => "is absolute",
Context::Interactive => "mode will be changed",
};
let mut input = String::new();
loop {
// Prompt the user and normalize the input
write!(self.stderr, r#""{}" {} - continue? [y/n] "#, path.display(), prompt)?;
self.stdin.read_line(&mut input)?;
// The response must be `y` or `n`
match input.trim().to_lowercase().as_str() {
"n" => {
if self.options.verbose {
writeln!(self.stderr, "Skipped.")?;
}
return Ok(false);
},
"y" => {
return Ok(true);
},
_ => {
input.clear();
},
}
}
}
/// Changes all paths provided by the user. Authorization may be requested
/// if the `suppress` option is not present.
fn change(&mut self) -> Result<()> {
let mode = u32::from_str_radix(
if self.has_file() {
self.options.file.as_ref().unwrap()
} else if self.has_dir() {
self.options.dir.as_ref().unwrap()
} else {
""
},
8,
)?;
for path in self.options.paths.to_owned() {
if !self.options.suppress && path.has_root() {
// Authorize absolute paths (optional)
if let Ok(false) = self.auth(&path, Context::Absolute) {
continue;
}
}
if path.is_file() {
// The path is a file
if self.has_file() {
self.change_one(&path, mode)?;
}
} else {
// Try the path as a directory
self.change_many(&path, mode)?;
}
}
return Ok(());
}
/// Changes all entries under the given directory and writes all errors.
fn change_many(&mut self, path: &PathBuf, mode: u32) -> Result<()> {
return if let Err(err) = self.change_many_inner(path, mode) {
self.write_error("Cannot access", path, &err)
} else {
Ok(())
};
}
/// Changes all entries under the given directory.
fn change_many_inner(&mut self, path: &PathBuf, mode: u32) -> Result<()> {
if self.has_dir() {
self.change_one(path, mode)?;
}
for entry in path.read_dir()? {
let path = entry?.path();
// Recurse if the entry is a directory
if path.is_file() {
if self.has_file() {
self.change_one(&path, mode)?;
}
} else {
self.change_many(&path, mode)?;
}
}
return Ok(());
}
/// Changes the mode of the given path and writes all errors.
fn change_one(&mut self, path: &PathBuf, mode: u32) -> Result<()> {
return if let Err(err) = self.change_one_inner(path, mode) {
self.write_error("Cannot change permissions", path, &err)
} else {
Ok(())
};
}
/// Changes the mode of the given path. Authorization may be requested and
/// additional information may be written if the `interactive` and
/// `verbose` options are present. The path will not be changed during a
/// `dry-run`.
fn change_one_inner(&mut self, path: &PathBuf, mode: u32) -> Result<()> {
if self.options.interactive && !self.options.suppress {
// Authorize every path (optional)
if let Ok(false) = self.auth(path, Context::Interactive) {
return Ok(());
}
}
if !self.options.dry_run {
// Change the mode of the path
fs::set_permissions(path, Permissions::from_mode(mode))?;
if self.options.verbose {
// Write the results (optional)
self.write_result("changed", path)?;
}
} else {
// Perform a dry run (optional)
self.write_result("will be changed", path)?;
}
return Ok(());
}
/// Determines if the file option is present.
fn has_file(&self) -> bool {
return self.options.file.is_some();
}
/// Determines if the directory option is present.
fn has_dir(&self) -> bool {
return self.options.dir.is_some();
}
/// Writes a path related error to the standard error stream.
fn write_error(&mut self, msg: &str, path: &PathBuf, err: &Error) -> Result<()> {
writeln!(self.stderr, r#"Error: {} "{}": {}"#, msg, path.display(), err)?;
return Ok(());
}
/// Writes the result of an operation to the standard output stream.
fn write_result(&mut self, msg: &str, path: &PathBuf) -> Result<()> {<|fim▁hole|> return Ok(());
}
}<|fim▁end|> | writeln!(self.stdout, r#""{}": mode {}."#, path.display(), msg)?; |
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>//! Implementations of `ProtobufType` for all types.
#![doc(hidden)]
use std::marker;
#[cfg(feature = "bytes")]
use ::bytes::Bytes;
#[cfg(feature = "bytes")]
use crate::chars::Chars;
use crate::coded_input_stream::CodedInputStream;
use crate::coded_output_stream::CodedOutputStream;
use crate::enums::Enum;
use crate::error::Result;
pub use crate::reflect::type_dynamic::ProtobufTypeDynamic;
use crate::reflect::type_dynamic::ProtobufTypeDynamicImpl;
use crate::reflect::ProtobufValue;
use crate::rt;
use crate::unknown::UnknownValues;
use crate::wire_format::WireType;
use crate::zigzag::decode_zig_zag_32;
use crate::zigzag::decode_zig_zag_64;
use crate::EnumOrUnknown;
use crate::Message;
/// Encapsulate type-specific serialization and conversion logic
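///
/// For example (illustrative sketch), the `string` implementation reports the raw
/// payload size, and the length-delimited helper adds a varint length prefix:
///
/// ```ignore
/// let value = String::from("hello");
/// assert_eq!(ProtobufTypeString::compute_size(&value), 5);
/// assert_eq!(ProtobufTypeString::compute_size_with_length_delimiter(&value), 6);
/// ```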
pub trait ProtobufType: Send + Sync + Clone + Sized + 'static {
/// Rust type for this protobuf type.
type ProtobufValue: Default;
/// Dynamic version of this
fn dynamic() -> &'static dyn ProtobufTypeDynamic
where
Self::ProtobufValue: ProtobufValue,
{
&ProtobufTypeDynamicImpl::<Self>(marker::PhantomData)
}
/// Wire type for encoding objects of this type
const WIRE_TYPE: WireType;
/// Read a value from `CodedInputStream`
fn read(is: &mut CodedInputStream) -> Result<Self::ProtobufValue>;
/// Take a value from `UnknownValues`
fn get_from_unknown(_unknown_values: &UnknownValues) -> Option<Self::ProtobufValue>;
/// Compute serialized size of a value
fn compute_size(value: &Self::ProtobufValue) -> u64;
/// Compute size adding length prefix if wire type is length delimited
/// (i. e. string, bytes, message)
fn compute_size_with_length_delimiter(value: &Self::ProtobufValue) -> u64 {
let size = Self::compute_size(value);
if Self::WIRE_TYPE == WireType::LengthDelimited {
rt::compute_raw_varint64_size(size) + size
} else {
size
}
}
/// Get previously computed size
#[inline]
fn get_cached_size(value: &Self::ProtobufValue) -> u32 {
Self::compute_size(value) as u32
}
/// Get previously cached size with length prefix
#[inline]
fn get_cached_size_with_length_delimiter(value: &Self::ProtobufValue) -> u32 {
let size = Self::get_cached_size(value);
if Self::WIRE_TYPE == WireType::LengthDelimited {
rt::compute_raw_varint32_size(size) as u32 + size
} else {
size
}
}
/// Write a value with previously cached size
fn write_with_cached_size(
field_number: u32,
value: &Self::ProtobufValue,
os: &mut CodedOutputStream,
) -> Result<()>;
}
/// All fixed size types
pub trait ProtobufTypeFixed: ProtobufType {
/// Encoded size of value in bytes of this type.
///
/// E. g. it is `4` for `fixed32`
const ENCODED_SIZE: u32;
}
/// `float`
#[derive(Copy, Clone)]
pub struct ProtobufTypeFloat;
/// `double`
#[derive(Copy, Clone)]
pub struct ProtobufTypeDouble;
/// `int32`
#[derive(Copy, Clone)]
pub struct ProtobufTypeInt32;
/// `int64`
#[derive(Copy, Clone)]
pub struct ProtobufTypeInt64;
/// `uint32`
#[derive(Copy, Clone)]
pub struct ProtobufTypeUint32;
/// `uint64`
#[derive(Copy, Clone)]
pub struct ProtobufTypeUint64;
/// `sint32`
#[derive(Copy, Clone)]
pub struct ProtobufTypeSint32;
/// `sint64`
#[derive(Copy, Clone)]
pub struct ProtobufTypeSint64;
/// `fixed32`
#[derive(Copy, Clone)]
pub struct ProtobufTypeFixed32;
/// `fixed64`
#[derive(Copy, Clone)]
pub struct ProtobufTypeFixed64;
/// `sfixed32`
#[derive(Copy, Clone)]
pub struct ProtobufTypeSfixed32;
/// `sfixed64`
#[derive(Copy, Clone)]
pub struct ProtobufTypeSfixed64;
/// `bool`
#[derive(Copy, Clone)]
pub struct ProtobufTypeBool;
/// `string`
#[derive(Copy, Clone)]
pub struct ProtobufTypeString;
/// `bytes`
#[derive(Copy, Clone)]
pub struct ProtobufTypeBytes;
/// `bytes` as [`Bytes`](bytes::Bytes)
#[cfg(feature = "bytes")]
#[derive(Copy, Clone)]
pub struct ProtobufTypeTokioBytes;
/// `string` as [`Chars`](crate::Chars)
#[cfg(feature = "bytes")]
#[derive(Copy, Clone)]
pub struct ProtobufTypeTokioChars;
/// `enum` as `ProtobufEnumOrUnknown`
#[derive(Copy, Clone)]
pub struct ProtobufTypeEnumOrUnknown<E: Enum>(marker::PhantomData<E>);
/// `message`
#[derive(Copy, Clone)]
pub struct ProtobufTypeMessage<M: Message>(marker::PhantomData<M>);
impl ProtobufType for ProtobufTypeFloat {
type ProtobufValue = f32;
const WIRE_TYPE: WireType = WireType::Fixed32;
fn read(is: &mut CodedInputStream) -> Result<f32> {
is.read_float()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<f32> {
unknown_values
.fixed32
.iter()
.rev()
.next()
.map(|&bits| f32::from_bits(bits))
}
fn compute_size(_value: &f32) -> u64 {
Self::ENCODED_SIZE as u64
}
fn write_with_cached_size(
field_number: u32,
value: &f32,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_float(field_number, *value)
}
}
impl ProtobufTypeFixed for ProtobufTypeFloat {
const ENCODED_SIZE: u32 = 4;
}
impl ProtobufType for ProtobufTypeDouble {
type ProtobufValue = f64;
const WIRE_TYPE: WireType = WireType::Fixed64;
fn read(is: &mut CodedInputStream) -> Result<f64> {
is.read_double()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<f64> {
unknown_values
.fixed64
.iter()
.rev()
.next()
.map(|&bits| f64::from_bits(bits))
}
fn compute_size(_value: &f64) -> u64 {
Self::ENCODED_SIZE as u64
}
fn write_with_cached_size(
field_number: u32,
value: &f64,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_double(field_number, *value)
}
}
impl ProtobufTypeFixed for ProtobufTypeDouble {
const ENCODED_SIZE: u32 = 8;
}
impl ProtobufType for ProtobufTypeInt32 {
type ProtobufValue = i32;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<i32> {
is.read_int32()
}
fn compute_size(value: &i32) -> u64 {
// See also: https://github.com/protocolbuffers/protobuf/blob/bd00671b924310c0353a730bf8fa77c44e0a9c72/src/google/protobuf/io/coded_stream.h#L1300-L1306
if *value < 0 {
return 10;
}
rt::compute_raw_varint32_size(*value as u32)
}
fn write_with_cached_size(
field_number: u32,
value: &i32,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_int32(field_number, *value)
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<i32> {
unknown_values.varint.iter().rev().next().map(|&v| v as i32)
}
}
impl ProtobufType for ProtobufTypeInt64 {
type ProtobufValue = i64;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<i64> {
is.read_int64()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<i64> {
unknown_values.varint.iter().rev().next().map(|&v| v as i64)
}
fn compute_size(value: &i64) -> u64 {
rt::compute_raw_varint64_size(*value as u64)
}
fn write_with_cached_size(
field_number: u32,
value: &i64,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_int64(field_number, *value)
}
}
impl ProtobufType for ProtobufTypeUint32 {
type ProtobufValue = u32;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<u32> {
is.read_uint32()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<u32> {
unknown_values.varint.iter().rev().next().map(|&v| v as u32)
}
fn compute_size(value: &u32) -> u64 {
rt::compute_raw_varint32_size(*value)
}
fn write_with_cached_size(
field_number: u32,
value: &u32,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_uint32(field_number, *value)
}
}
impl ProtobufType for ProtobufTypeUint64 {
type ProtobufValue = u64;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<u64> {
is.read_uint64()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<u64> {
unknown_values.varint.iter().cloned().rev().next()
}
fn compute_size(value: &u64) -> u64 {
rt::compute_raw_varint64_size(*value)
}
fn write_with_cached_size(
field_number: u32,
value: &u64,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_uint64(field_number, *value)
}
}
impl ProtobufType for ProtobufTypeSint32 {
type ProtobufValue = i32;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<i32> {
is.read_sint32()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<i32> {
ProtobufTypeUint32::get_from_unknown(unknown_values).map(decode_zig_zag_32)
}
fn compute_size(value: &i32) -> u64 {
rt::value_varint_zigzag_size_no_tag(*value)
}
fn write_with_cached_size(
field_number: u32,
value: &i32,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_sint32(field_number, *value)
}
}
impl ProtobufType for ProtobufTypeSint64 {
type ProtobufValue = i64;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<i64> {
is.read_sint64()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<i64> {
ProtobufTypeUint64::get_from_unknown(unknown_values).map(decode_zig_zag_64)
}
fn compute_size(value: &i64) -> u64 {
rt::value_varint_zigzag_size_no_tag(*value)
}
fn write_with_cached_size(
field_number: u32,
value: &i64,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_sint64(field_number, *value)
}
}
<|fim▁hole|> const WIRE_TYPE: WireType = WireType::Fixed32;
fn read(is: &mut CodedInputStream) -> Result<u32> {
is.read_fixed32()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<u32> {
unknown_values.fixed32.iter().cloned().rev().next()
}
fn compute_size(_value: &u32) -> u64 {
Self::ENCODED_SIZE as u64
}
fn write_with_cached_size(
field_number: u32,
value: &u32,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_fixed32(field_number, *value)
}
}
impl ProtobufTypeFixed for ProtobufTypeFixed32 {
const ENCODED_SIZE: u32 = 4;
}
impl ProtobufType for ProtobufTypeFixed64 {
type ProtobufValue = u64;
const WIRE_TYPE: WireType = WireType::Fixed64;
fn read(is: &mut CodedInputStream) -> Result<u64> {
is.read_fixed64()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<u64> {
unknown_values.fixed64.iter().cloned().rev().next()
}
fn compute_size(_value: &u64) -> u64 {
Self::ENCODED_SIZE as u64
}
fn write_with_cached_size(
field_number: u32,
value: &u64,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_fixed64(field_number, *value)
}
}
impl ProtobufTypeFixed for ProtobufTypeFixed64 {
const ENCODED_SIZE: u32 = 8;
}
impl ProtobufType for ProtobufTypeSfixed32 {
type ProtobufValue = i32;
const WIRE_TYPE: WireType = WireType::Fixed32;
fn read(is: &mut CodedInputStream) -> Result<i32> {
is.read_sfixed32()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<i32> {
ProtobufTypeFixed32::get_from_unknown(unknown_values).map(|u| u as i32)
}
fn compute_size(_value: &i32) -> u64 {
Self::ENCODED_SIZE as u64
}
fn write_with_cached_size(
field_number: u32,
value: &i32,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_sfixed32(field_number, *value)
}
}
impl ProtobufTypeFixed for ProtobufTypeSfixed32 {
const ENCODED_SIZE: u32 = 4;
}
impl ProtobufType for ProtobufTypeSfixed64 {
type ProtobufValue = i64;
const WIRE_TYPE: WireType = WireType::Fixed64;
fn read(is: &mut CodedInputStream) -> Result<i64> {
is.read_sfixed64()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<i64> {
ProtobufTypeFixed64::get_from_unknown(unknown_values).map(|u| u as i64)
}
fn compute_size(_value: &i64) -> u64 {
8
}
fn write_with_cached_size(
field_number: u32,
value: &i64,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_sfixed64(field_number, *value)
}
}
impl ProtobufTypeFixed for ProtobufTypeSfixed64 {
const ENCODED_SIZE: u32 = 8;
}
impl ProtobufType for ProtobufTypeBool {
type ProtobufValue = bool;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<bool> {
is.read_bool()
}
fn get_from_unknown(unknown: &UnknownValues) -> Option<bool> {
unknown.varint.iter().rev().next().map(|&v| v != 0)
}
fn compute_size(_value: &bool) -> u64 {
1
}
fn write_with_cached_size(
field_number: u32,
value: &bool,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_bool(field_number, *value)
}
}
impl ProtobufType for ProtobufTypeString {
type ProtobufValue = String;
const WIRE_TYPE: WireType = WireType::LengthDelimited;
fn read(is: &mut CodedInputStream) -> Result<String> {
is.read_string()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<String> {
// TODO: should not panic
ProtobufTypeBytes::get_from_unknown(unknown_values)
.map(|b| String::from_utf8(b).expect("not a valid string"))
}
fn compute_size(value: &String) -> u64 {
value.len() as u64
}
fn write_with_cached_size(
field_number: u32,
value: &String,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_string(field_number, &value)
}
}
impl ProtobufType for ProtobufTypeBytes {
type ProtobufValue = Vec<u8>;
const WIRE_TYPE: WireType = WireType::LengthDelimited;
fn read(is: &mut CodedInputStream) -> Result<Vec<u8>> {
is.read_bytes()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<Vec<u8>> {
unknown_values.length_delimited.iter().cloned().rev().next()
}
fn compute_size(value: &Vec<u8>) -> u64 {
value.len() as u64
}
fn write_with_cached_size(
field_number: u32,
value: &Vec<u8>,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_bytes(field_number, &value)
}
}
#[cfg(feature = "bytes")]
impl ProtobufType for ProtobufTypeTokioBytes {
type ProtobufValue = bytes::Bytes;
const WIRE_TYPE: WireType = ProtobufTypeBytes::WIRE_TYPE;
fn read(is: &mut CodedInputStream) -> Result<Bytes> {
is.read_tokio_bytes()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<Bytes> {
ProtobufTypeBytes::get_from_unknown(unknown_values).map(Bytes::from)
}
fn compute_size(value: &Bytes) -> u64 {
value.len() as u64
}
fn write_with_cached_size(
field_number: u32,
value: &Bytes,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_bytes(field_number, &value)
}
}
#[cfg(feature = "bytes")]
impl ProtobufType for ProtobufTypeTokioChars {
type ProtobufValue = Chars;
const WIRE_TYPE: WireType = ProtobufTypeBytes::WIRE_TYPE;
fn read(is: &mut CodedInputStream) -> Result<Chars> {
is.read_tokio_chars()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<Chars> {
ProtobufTypeString::get_from_unknown(unknown_values).map(Chars::from)
}
fn compute_size(value: &Chars) -> u64 {
value.len() as u64
}
fn write_with_cached_size(
field_number: u32,
value: &Chars,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_string(field_number, &value)
}
}
impl<E: Enum> ProtobufType for ProtobufTypeEnumOrUnknown<E> {
type ProtobufValue = EnumOrUnknown<E>;
const WIRE_TYPE: WireType = WireType::Varint;
fn read(is: &mut CodedInputStream) -> Result<EnumOrUnknown<E>> {
is.read_enum_or_unknown()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<EnumOrUnknown<E>> {
ProtobufTypeInt32::get_from_unknown(unknown_values).map(|i| EnumOrUnknown::from_i32(i))
}
fn compute_size(value: &EnumOrUnknown<E>) -> u64 {
rt::compute_raw_varint32_size(value.value() as u32) // TODO: wrap
}
fn write_with_cached_size(
field_number: u32,
value: &EnumOrUnknown<E>,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_enum_or_unknown(field_number, *value)
}
}
impl<M: Message + Clone + Default> ProtobufType for ProtobufTypeMessage<M> {
type ProtobufValue = M;
const WIRE_TYPE: WireType = WireType::LengthDelimited;
fn read(is: &mut CodedInputStream) -> Result<M> {
is.read_message()
}
fn get_from_unknown(unknown_values: &UnknownValues) -> Option<M> {
// TODO: do not panic
unknown_values
.length_delimited
.iter()
.rev()
.next()
.map(|bytes| M::parse_from_bytes(bytes).expect("cannot parse message"))
}
fn compute_size(value: &M) -> u64 {
value.compute_size()
}
fn get_cached_size(value: &M) -> u32 {
value.cached_size()
}
fn write_with_cached_size(
field_number: u32,
value: &Self::ProtobufValue,
os: &mut CodedOutputStream,
) -> Result<()> {
os.write_tag(field_number, WireType::LengthDelimited)?;
os.write_raw_varint32(value.cached_size())?;
value.write_to_with_cached_sizes(os)?;
Ok(())
}
}<|fim▁end|> | impl ProtobufType for ProtobufTypeFixed32 {
type ProtobufValue = u32;
|
<|file_name|>test_types.py<|end_file_name|><|fim▁begin|># Python test set -- part 6, built-in types
from test_support import *
print '6. Built-in types'
print '6.1 Truth value testing'
if None: raise TestFailed, 'None is true instead of false'
if 0: raise TestFailed, '0 is true instead of false'
if 0L: raise TestFailed, '0L is true instead of false'
if 0.0: raise TestFailed, '0.0 is true instead of false'
if '': raise TestFailed, '\'\' is true instead of false'
if (): raise TestFailed, '() is true instead of false'
if []: raise TestFailed, '[] is true instead of false'
if {}: raise TestFailed, '{} is true instead of false'
if not 1: raise TestFailed, '1 is false instead of true'
if not 1L: raise TestFailed, '1L is false instead of true'
if not 1.0: raise TestFailed, '1.0 is false instead of true'
if not 'x': raise TestFailed, '\'x\' is false instead of true'
if not (1, 1): raise TestFailed, '(1, 1) is false instead of true'
if not [1]: raise TestFailed, '[1] is false instead of true'
if not {'x': 1}: raise TestFailed, '{\'x\': 1} is false instead of true'
def f(): pass
class C: pass
import sys
x = C()
if not f: raise TestFailed, 'f is false instead of true'
if not C: raise TestFailed, 'C is false instead of true'
if not sys: raise TestFailed, 'sys is false instead of true'
if not x: raise TestFailed, 'x is false instead of true'
print '6.2 Boolean operations'
if 0 or 0: raise TestFailed, '0 or 0 is true instead of false'
if 1 and 1: pass
else: raise TestFailed, '1 and 1 is false instead of true'
if not 1: raise TestFailed, 'not 1 is true instead of false'
print '6.3 Comparisons'
if 0 < 1 <= 1 == 1 >= 1 > 0 != 1: pass
else: raise TestFailed, 'int comparisons failed'
if 0L < 1L <= 1L == 1L >= 1L > 0L != 1L: pass
else: raise TestFailed, 'long int comparisons failed'
if 0.0 < 1.0 <= 1.0 == 1.0 >= 1.0 > 0.0 != 1.0: pass
else: raise TestFailed, 'float comparisons failed'
if '' < 'a' <= 'a' == 'a' < 'abc' < 'abd' < 'b': pass
else: raise TestFailed, 'string comparisons failed'
if 0 in [0] and 0 not in [1]: pass
else: raise TestFailed, 'membership test failed'
if None is None and [] is not []: pass
else: raise TestFailed, 'identity test failed'
print '6.4 Numeric types (mostly conversions)'
if 0 != 0L or 0 != 0.0 or 0L != 0.0: raise TestFailed, 'mixed comparisons'
if 1 != 1L or 1 != 1.0 or 1L != 1.0: raise TestFailed, 'mixed comparisons'
if -1 != -1L or -1 != -1.0 or -1L != -1.0:
raise TestFailed, 'int/long/float value not equal'
if int(1.9) == 1 == int(1.1) and int(-1.1) == -1 == int(-1.9): pass
else: raise TestFailed, 'int() does not round properly'
if long(1.9) == 1L == long(1.1) and long(-1.1) == -1L == long(-1.9): pass
else: raise TestFailed, 'long() does not round properly'
if float(1) == 1.0 and float(-1) == -1.0 and float(0) == 0.0: pass
else: raise TestFailed, 'float() does not work properly'
print '6.4.1 32-bit integers'
if 12 + 24 != 36: raise TestFailed, 'int op'
if 12 + (-24) != -12: raise TestFailed, 'int op'
if (-12) + 24 != 12: raise TestFailed, 'int op'
if (-12) + (-24) != -36: raise TestFailed, 'int op'
if not 12 < 24: raise TestFailed, 'int op'
if not -24 < -12: raise TestFailed, 'int op'
# Test for a particular bug in integer multiply
xsize, ysize, zsize = 238, 356, 4
if not (xsize*ysize*zsize == zsize*xsize*ysize == 338912):
raise TestFailed, 'int mul commutativity'
# And another.
m = -sys.maxint - 1
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor
prod = divisor * j
if prod != m:
raise TestFailed, "%r * %r == %r != %r" % (divisor, j, prod, m)
if type(prod) is not int:
raise TestFailed, ("expected type(prod) to be int, not %r" %
type(prod))
# Check for expected * overflow to long.
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor - 1
prod = divisor * j
if type(prod) is not long:
raise TestFailed, ("expected type(%r) to be long, not %r" %
(prod, type(prod)))
# Check for expected * overflow to long.
m = sys.maxint
for divisor in 1, 2, 4, 8, 16, 32:
j = m // divisor + 1
prod = divisor * j
if type(prod) is not long:
raise TestFailed, ("expected type(%r) to be long, not %r" %
(prod, type(prod)))
print '6.4.2 Long integers'
if 12L + 24L != 36L: raise TestFailed, 'long op'
if 12L + (-24L) != -12L: raise TestFailed, 'long op'
if (-12L) + 24L != 12L: raise TestFailed, 'long op'
if (-12L) + (-24L) != -36L: raise TestFailed, 'long op'
if not 12L < 24L: raise TestFailed, 'long op'
if not -24L < -12L: raise TestFailed, 'long op'
x = sys.maxint
if int(long(x)) != x: raise TestFailed, 'long op'
try: int(long(x)+1L)
except OverflowError: pass
else: raise TestFailed, 'long op'
x = -x
if int(long(x)) != x: raise TestFailed, 'long op'
x = x-1
if int(long(x)) != x: raise TestFailed, 'long op'
try: int(long(x)-1L)
except OverflowError: pass
else: raise TestFailed, 'long op'
print '6.4.3 Floating point numbers'
if 12.0 + 24.0 != 36.0: raise TestFailed, 'float op'
if 12.0 + (-24.0) != -12.0: raise TestFailed, 'float op'
if (-12.0) + 24.0 != 12.0: raise TestFailed, 'float op'
if (-12.0) + (-24.0) != -36.0: raise TestFailed, 'float op'
if not 12.0 < 24.0: raise TestFailed, 'float op'
if not -24.0 < -12.0: raise TestFailed, 'float op'
print '6.5 Sequence types'
print '6.5.1 Strings'
if len('') != 0: raise TestFailed, 'len(\'\')'
if len('a') != 1: raise TestFailed, 'len(\'a\')'
if len('abcdef') != 6: raise TestFailed, 'len(\'abcdef\')'
if 'xyz' + 'abcde' != 'xyzabcde': raise TestFailed, 'string concatenation'
if 'xyz'*3 != 'xyzxyzxyz': raise TestFailed, 'string repetition *3'
if 0*'abcde' != '': raise TestFailed, 'string repetition 0*'
if min('abc') != 'a' or max('abc') != 'c': raise TestFailed, 'min/max string'
if 'a' in 'abc' and 'b' in 'abc' and 'c' in 'abc' and 'd' not in 'abc': pass
else: raise TestFailed, 'in/not in string'
x = 'x'*103
if '%s!'%x != x+'!': raise TestFailed, 'nasty string formatting bug'
print '6.5.2 Tuples'
if len(()) != 0: raise TestFailed, 'len(())'
if len((1,)) != 1: raise TestFailed, 'len((1,))'
if len((1,2,3,4,5,6)) != 6: raise TestFailed, 'len((1,2,3,4,5,6))'
if (1,2)+(3,4) != (1,2,3,4): raise TestFailed, 'tuple concatenation'
if (1,2)*3 != (1,2,1,2,1,2): raise TestFailed, 'tuple repetition *3'
if 0*(1,2,3) != (): raise TestFailed, 'tuple repetition 0*'
if min((1,2)) != 1 or max((1,2)) != 2: raise TestFailed, 'min/max tuple'
if 0 in (0,1,2) and 1 in (0,1,2) and 2 in (0,1,2) and 3 not in (0,1,2): pass
else: raise TestFailed, 'in/not in tuple'
print '6.5.3 Lists'
if len([]) != 0: raise TestFailed, 'len([])'
if len([1,]) != 1: raise TestFailed, 'len([1,])'
if len([1,2,3,4,5,6]) != 6: raise TestFailed, 'len([1,2,3,4,5,6])'
if [1,2]+[3,4] != [1,2,3,4]: raise TestFailed, 'list concatenation'
if [1,2]*3 != [1,2,1,2,1,2]: raise TestFailed, 'list repetition *3'
if [1,2]*3L != [1,2,1,2,1,2]: raise TestFailed, 'list repetition *3L'
if 0*[1,2,3] != []: raise TestFailed, 'list repetition 0*'
if 0L*[1,2,3] != []: raise TestFailed, 'list repetition 0L*'<|fim▁hole|>else: raise TestFailed, 'in/not in list'
a = [1, 2, 3, 4, 5]
a[:-1] = a
if a != [1, 2, 3, 4, 5, 5]:
raise TestFailed, "list self-slice-assign (head)"
a = [1, 2, 3, 4, 5]
a[1:] = a
if a != [1, 1, 2, 3, 4, 5]:
raise TestFailed, "list self-slice-assign (tail)"
a = [1, 2, 3, 4, 5]
a[1:-1] = a
if a != [1, 1, 2, 3, 4, 5, 5]:
raise TestFailed, "list self-slice-assign (center)"
print '6.5.3a Additional list operations'
a = [0,1,2,3,4]
a[0L] = 1
a[1L] = 2
a[2L] = 3
if a != [1,2,3,3,4]: raise TestFailed, 'list item assignment [0L], [1L], [2L]'
a[0] = 5
a[1] = 6
a[2] = 7
if a != [5,6,7,3,4]: raise TestFailed, 'list item assignment [0], [1], [2]'
a[-2L] = 88
a[-1L] = 99
if a != [5,6,7,88,99]: raise TestFailed, 'list item assignment [-2L], [-1L]'
a[-2] = 8
a[-1] = 9
if a != [5,6,7,8,9]: raise TestFailed, 'list item assignment [-2], [-1]'
a[:2] = [0,4]
a[-3:] = []
a[1:1] = [1,2,3]
if a != [0,1,2,3,4]: raise TestFailed, 'list slice assignment'
a[ 1L : 4L] = [7,8,9]
if a != [0,7,8,9,4]: raise TestFailed, 'list slice assignment using long ints'
del a[1:4]
if a != [0,4]: raise TestFailed, 'list slice deletion'
del a[0]
if a != [4]: raise TestFailed, 'list item deletion [0]'
del a[-1]
if a != []: raise TestFailed, 'list item deletion [-1]'
a=range(0,5)
del a[1L:4L]
if a != [0,4]: raise TestFailed, 'list slice deletion'
del a[0L]
if a != [4]: raise TestFailed, 'list item deletion [0]'
del a[-1L]
if a != []: raise TestFailed, 'list item deletion [-1]'
a.append(0)
a.append(1)
a.append(2)
if a != [0,1,2]: raise TestFailed, 'list append'
a.insert(0, -2)
a.insert(1, -1)
a.insert(2,0)
if a != [-2,-1,0,0,1,2]: raise TestFailed, 'list insert'
if a.count(0) != 2: raise TestFailed, 'list count'
if a.index(0) != 2: raise TestFailed, 'list index'
a.remove(0)
if a != [-2,-1,0,1,2]: raise TestFailed, 'list remove'
a.reverse()
if a != [2,1,0,-1,-2]: raise TestFailed, 'list reverse'
a.sort()
if a != [-2,-1,0,1,2]: raise TestFailed, 'list sort'
def revcmp(a, b): return cmp(b, a)
a.sort(revcmp)
if a != [2,1,0,-1,-2]: raise TestFailed, 'list sort with cmp func'
# The following dumps core in unpatched Python 1.5:
def myComparison(x,y):
return cmp(x%3, y%7)
z = range(12)
z.sort(myComparison)
# Test extreme cases with long ints
a = [0,1,2,3,4]
if a[ -pow(2,128L): 3 ] != [0,1,2]:
raise TestFailed, "list slicing with too-small long integer"
if a[ 3: pow(2,145L) ] != [3,4]:
raise TestFailed, "list slicing with too-large long integer"
print '6.6 Mappings == Dictionaries'
d = {}
if d.keys() != []: raise TestFailed, '{}.keys()'
if d.has_key('a') != 0: raise TestFailed, '{}.has_key(\'a\')'
if ('a' in d) != 0: raise TestFailed, "'a' in {}"
if ('a' not in d) != 1: raise TestFailed, "'a' not in {}"
if len(d) != 0: raise TestFailed, 'len({})'
d = {'a': 1, 'b': 2}
if len(d) != 2: raise TestFailed, 'len(dict)'
k = d.keys()
k.sort()
if k != ['a', 'b']: raise TestFailed, 'dict keys()'
if d.has_key('a') and d.has_key('b') and not d.has_key('c'): pass
else: raise TestFailed, 'dict keys()'
if 'a' in d and 'b' in d and 'c' not in d: pass
else: raise TestFailed, 'dict keys() # in/not in version'
if d['a'] != 1 or d['b'] != 2: raise TestFailed, 'dict item'
d['c'] = 3
d['a'] = 4
if d['c'] != 3 or d['a'] != 4: raise TestFailed, 'dict item assignment'
del d['b']
if d != {'a': 4, 'c': 3}: raise TestFailed, 'dict item deletion'
# dict.clear()
d = {1:1, 2:2, 3:3}
d.clear()
if d != {}: raise TestFailed, 'dict clear'
# dict.update()
d.update({1:100})
d.update({2:20})
d.update({1:1, 2:2, 3:3})
if d != {1:1, 2:2, 3:3}: raise TestFailed, 'dict update'
d.clear()
try: d.update(None)
except AttributeError: pass
else: raise TestFailed, 'dict.update(None), AttributeError expected'
class SimpleUserDict:
def __init__(self):
self.d = {1:1, 2:2, 3:3}
def keys(self):
return self.d.keys()
def __getitem__(self, i):
return self.d[i]
d.update(SimpleUserDict())
if d != {1:1, 2:2, 3:3}: raise TestFailed, 'dict.update(instance)'
d.clear()
class FailingUserDict:
def keys(self):
raise ValueError
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'dict.keys() expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __iter__(self):
raise ValueError
return BogonIter()
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'iter(dict.keys()) expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = 1
def __iter__(self):
return self
def next(self):
if self.i:
self.i = 0
return 'a'
raise ValueError
return BogonIter()
def __getitem__(self, key):
return key
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'iter(dict.keys()).next() expected ValueError'
class FailingUserDict:
def keys(self):
class BogonIter:
def __init__(self):
self.i = ord('a')
def __iter__(self):
return self
def next(self):
if self.i <= ord('z'):
rtn = chr(self.i)
self.i += 1
return rtn
raise StopIteration
return BogonIter()
def __getitem__(self, key):
raise ValueError
try: d.update(FailingUserDict())
except ValueError: pass
else: raise TestFailed, 'dict.update(), __getitem__ expected ValueError'
# dict.copy()
d = {1:1, 2:2, 3:3}
if d.copy() != {1:1, 2:2, 3:3}: raise TestFailed, 'dict copy'
if {}.copy() != {}: raise TestFailed, 'empty dict copy'
# dict.get()
d = {}
if d.get('c') is not None: raise TestFailed, 'missing {} get, no 2nd arg'
if d.get('c', 3) != 3: raise TestFailed, 'missing {} get, w/ 2nd arg'
d = {'a' : 1, 'b' : 2}
if d.get('c') is not None: raise TestFailed, 'missing dict get, no 2nd arg'
if d.get('c', 3) != 3: raise TestFailed, 'missing dict get, w/ 2nd arg'
if d.get('a') != 1: raise TestFailed, 'present dict get, no 2nd arg'
if d.get('a', 3) != 1: raise TestFailed, 'present dict get, w/ 2nd arg'
# dict.setdefault()
d = {}
if d.setdefault('key0') is not None:
raise TestFailed, 'missing {} setdefault, no 2nd arg'
if d.setdefault('key0') is not None:
raise TestFailed, 'present {} setdefault, no 2nd arg'
d.setdefault('key', []).append(3)
if d['key'][0] != 3:
raise TestFailed, 'missing {} setdefault, w/ 2nd arg'
d.setdefault('key', []).append(4)
if len(d['key']) != 2:
raise TestFailed, 'present {} setdefault, w/ 2nd arg'
# dict.popitem()
for copymode in -1, +1:
# -1: b has same structure as a
# +1: b is a.copy()
for log2size in range(12):
size = 2**log2size
a = {}
b = {}
for i in range(size):
a[`i`] = i
if copymode < 0:
b[`i`] = i
if copymode > 0:
b = a.copy()
for i in range(size):
ka, va = ta = a.popitem()
if va != int(ka): raise TestFailed, "a.popitem: %s" % str(ta)
kb, vb = tb = b.popitem()
if vb != int(kb): raise TestFailed, "b.popitem: %s" % str(tb)
if copymode < 0 and ta != tb:
raise TestFailed, "a.popitem != b.popitem: %s, %s" % (
str(ta), str(tb))
if a: raise TestFailed, 'a not empty after popitems: %s' % str(a)
if b: raise TestFailed, 'b not empty after popitems: %s' % str(b)
try: type(1, 2)
except TypeError: pass
else: raise TestFailed, 'type(), w/2 args expected TypeError'
try: type(1, 2, 3, 4)
except TypeError: pass
else: raise TestFailed, 'type(), w/4 args expected TypeError'<|fim▁end|> | if min([1,2]) != 1 or max([1,2]) != 2: raise TestFailed, 'min/max list'
if 0 in [0,1,2] and 1 in [0,1,2] and 2 in [0,1,2] and 3 not in [0,1,2]: pass |
<|file_name|>add_video.py<|end_file_name|><|fim▁begin|># encoding: utf-8
from django.core.management.base import NoArgsCommand
from optparse import make_option
from video.management.commands.sub_commands.AddVideo import AddVideo
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--video-link',action='store',dest='video-link',
help="link to the video, use --list-types to see a list of supported link types"),
make_option('--list-types',action='store_true',dest='list-types',<|fim▁hole|> make_option('--object-type',action='store',dest='object-type',
help="set the object type, currently only member is supported"),
make_option('--object-id',action='store',dest='object-id',
help="set the object id that the video will be related to"),
make_option('--sticky',action='store_true',dest='is_sticky',
help="set the video as sticky"),
)
def handle_noargs(self, **options):
if options.get('list-types',False):
print """Supported link formats:
youtube - http://www.youtube.com/watch?v=2sASREICzqY"""
else:
av=AddVideo(options)
av.run()
print av.ans<|fim▁end|> | help="list supported video link types and formats"), |
<|file_name|>webpack.dev.config.js<|end_file_name|><|fim▁begin|>const path = require('path');
const webpack = require('webpack');
const webpackMerge = require('webpack-merge');
const commonConfig = require('./webpack.common.config.js');
<|fim▁hole|> devtool: 'cheap-module-source-map',
// plugins: [
// new webpack.optimize.CommonsChunkPlugin({
// name: "common",
// })
// ],
devServer: {
contentBase: __dirname + "/public/",
port: 8080,
watchContentBase: true
}
})
};<|fim▁end|> | module.exports = function () {
return webpackMerge(commonConfig, {
watch: true, |
<|file_name|>sct_get_centerline_from_labels.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import commands, sys
# Get path of the toolbox
status, path_sct = commands.getstatusoutput('echo $SCT_DIR')
# Append path that contains scripts, to be able to load modules
sys.path.append(path_sct + '/scripts')
from msct_parser import Parser
from nibabel import load, save, Nifti1Image
import os
import time
import sct_utils as sct
from sct_process_segmentation import extract_centerline
from sct_orientation import get_orientation
# DEFAULT PARAMETERS
class Param:
## The constructor
def __init__(self):
self.debug = 0
self.verbose = 1 # verbose
self.remove_temp_files = 1
self.type_window = 'hanning' # for smooth_centerline @sct_straighten_spinalcord
self.window_length = 80 # for smooth_centerline @sct_straighten_spinalcord
self.algo_fitting = 'nurbs'
# self.parameter = "binary_centerline"
self.list_file = []
self.output_file_name = ''
def main(list_file, param, output_file_name=None, remove_temp_files = 1, verbose = 0):
path, file, ext = sct.extract_fname(list_file[0])
# create temporary folder
path_tmp = 'tmp.'+time.strftime("%y%m%d%H%M%S")
sct.run('mkdir '+path_tmp)
# copy files into tmp folder
sct.printv('\nCopy files into tmp folder...', verbose)
for i in range(len(list_file)):
file_temp = os.path.abspath(list_file[i])
sct.run('cp '+file_temp+' '+path_tmp)
# go to tmp folder
os.chdir(path_tmp)
## Concatenation of the files
# Concatenation : sum of matrices
file_0 = load(file+ext)
data_concatenation = file_0.get_data()
hdr_0 = file_0.get_header()
orientation_file_0 = get_orientation(list_file[0])
if len(list_file)>0:
for i in range(1, len(list_file)):
orientation_file_temp = get_orientation(list_file[i])
if orientation_file_0 != orientation_file_temp :
print "ERROR: The files ", list_file[0], " and ", list_file[i], " are not in the same orientation. Use sct_orientation to change the orientation of a file."
sys.exit(2)<|fim▁hole|> data_temp = file_temp.get_data()
data_concatenation = data_concatenation + data_temp
# Save concatenation as a file
print '\nWrite NIFTI volumes...'
img = Nifti1Image(data_concatenation, None, hdr_0)
save(img,'concatenation_file.nii.gz')
# Applying nurbs to the concatenation and save file as binary file
fname_output = extract_centerline('concatenation_file.nii.gz', remove_temp_files = remove_temp_files, verbose = verbose, algo_fitting=param.algo_fitting, type_window=param.type_window, window_length=param.window_length)
# Rename files after processing
    if output_file_name is None:
        output_file_name = "generated_centerline.nii.gz"
os.rename(fname_output, output_file_name)
path_binary, file_binary, ext_binary = sct.extract_fname(output_file_name)
os.rename('concatenation_file_centerline.txt', file_binary+'.txt')
# Process for a binary file as output:
sct.run('cp '+output_file_name+' ../')
# Process for a text file as output:
sct.run('cp '+file_binary+ '.txt'+ ' ../')
os.chdir('../')
# Remove temporary files
if remove_temp_files:
print('\nRemove temporary files...')
sct.run('rm -rf '+path_tmp)
# Display results
    # The concatenated centerline and its fitted curve are displayed within extract_centerline
#=======================================================================================================================
# Start program
#=======================================================================================================================
if __name__ == "__main__":
# initialize parameters
# Initialize the parser
parser = Parser(__file__)
parser.usage.set_description('Compute a centerline from a list of segmentation and label files. It concatenates the parts, then extract the centerline. The output is a NIFTI image and a text file with the float coordinates (z, x, y) of the centerline.')
parser.add_option(name="-i",
type_value=[[','],'file'],
description="List containing segmentation NIFTI file and label NIFTI files. They must be 3D. Names must be separated by commas without spaces.",
mandatory=True,
example= "data_seg.nii.gz,label1.nii.gz,label2.nii.gz")
parser.add_option(name="-o",
type_value="file_output",
description="Name of the output NIFTI image with the centerline and of the output text file with the coordinates (z, x, y) (but text file will have '.txt' extension).",
mandatory=False,
default_value='generated_centerline.nii.gz')
parser.add_option(name="-r",
type_value="multiple_choice",
description="Remove temporary files. Specify 0 to get access to temporary files.",
mandatory=False,
example=['0','1'],
default_value="1")
parser.add_option(name="-v",
type_value="multiple_choice",
description="Verbose. 0: nothing. 1: basic. 2: extended.",
mandatory=False,
default_value='0',
example=['0', '1', '2'])
arguments = parser.parse(sys.argv[1:])
remove_temp_files = int(arguments["-r"])
verbose = int(arguments["-v"])
if "-i" in arguments:
list_file = arguments["-i"]
else: list_file = None
if "-o" in arguments:
output_file_name = arguments["-o"]
else: output_file_name = None
param = Param()
param.verbose = verbose
param.remove_temp_files =remove_temp_files
main(list_file, param, output_file_name, remove_temp_files, verbose)<|fim▁end|> | file_temp = load(list_file[i]) |
<|file_name|>bool.go<|end_file_name|><|fim▁begin|>package functools
// All returns false if any predicate is false. All() is True.
func All(bools ...bool) bool {
for _, b := range bools {
if !b {
return false
}
}
return true
}
// Any returns true if any predicate is true. Any() is False.
func Any(bools ...bool) bool {
for _, b := range bools {
if b {
return true
}
}
return false
}
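// Editor's note (illustrative usage, not part of the original file; the call
// sites below are hypothetical):
//
//	ok := All(x > 0, y > 0)      // true only if every predicate holds
//	anyNeg := Any(x < 0, y < 0)  // true if at least one predicate holds
//
// Note that arguments are evaluated eagerly before the call, so these helpers
// are not a short-circuiting replacement for && and ||.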
// None returns true if all predicates are false. None() is True.
func None(bools ...bool) bool {
for _, b := range bools {
if b {
return false
}<|fim▁hole|><|fim▁end|> | }
return true
} |
<|file_name|>mem.go<|end_file_name|><|fim▁begin|>/*
* MinIO Cloud Storage, (C) 2019 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mem<|fim▁hole|> "time"
)
// historicUsage holds the rolling average of memory used by
// minio server
var historicUsage *Usage
// memUsageMeasureInterval is the window of time between
// two measurements of memory usage
const memUsageMeasureInterval = 5 * time.Second
// triggers the collection of historic stats about the memory
// utilized by minio server
func init() {
historicUsage = &Usage{}
var cycles uint64
go func() {
for {
time.Sleep(memUsageMeasureInterval)
currUsage := GetUsage()
currSum := cycles * historicUsage.Mem
cycles = cycles + 1
historicUsage.Mem = (currSum + currUsage.Mem) / cycles
}
}()
}
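// Editor's note (descriptive comment, not part of the original file): the loop
// above maintains an incremental mean, mean_n = (mean_(n-1)*(n-1) + x_n) / n,
// so with samples 100, 200, 300 the rolling value becomes 100, 150, then 200
// (integer division, since the fields are uint64).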
// Usage holds memory utilization information in human readable format
type Usage struct {
Mem uint64 `json:"mem"`
Error string `json:"error,omitempty"`
}
// GetHistoricUsage measures the historic average of memory utilized by
// current process
func GetHistoricUsage() Usage {
return *historicUsage
}
// GetUsage measures the total memory provisioned for the current process
// from the OS
func GetUsage() Usage {
memStats := new(runtime.MemStats)
runtime.ReadMemStats(memStats)
return Usage{
Mem: memStats.Sys,
}
}<|fim▁end|> |
import (
"runtime" |
<|file_name|>cloudresourcemanager_v3_generated_folders_delete_folder_async.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteFolder
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-resourcemanager
# [START cloudresourcemanager_v3_generated_Folders_DeleteFolder_async]
from google.cloud import resourcemanager_v3
async def sample_delete_folder():
# Create a client<|fim▁hole|> name="name_value",
)
# Make the request
operation = client.delete_folder(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END cloudresourcemanager_v3_generated_Folders_DeleteFolder_async]<|fim▁end|> | client = resourcemanager_v3.FoldersAsyncClient()
# Initialize request argument(s)
request = resourcemanager_v3.DeleteFolderRequest( |
<|file_name|>rack.js<|end_file_name|><|fim▁begin|>var hat = require('../');
var assert = require('assert');
exports.rack = function () {
var rack = hat.rack(4);
var seen = {};
for (var i = 0; i < 8; i++) {
var id = rack();
assert.ok(!seen[id], 'seen this id');
seen[id] = true;
assert.ok(id.match(/^[0-9a-f]$/));
}
assert.throws(function () {
for (var i = 0; i < 10; i++) rack()
});
};
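// Editor's note (descriptive comment, not part of the original test): rack(4)
// draws 4-bit ids, so only 16 distinct values exist; after the 8 ids created
// above, at most 8 further draws can succeed, which is why the 10-iteration
// loop is expected to throw.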
exports.data = function () {
var rack = hat.rack(64);
var a = rack('a!');
    var b = rack("it's a b!");
var c = rack([ 'c', 'c', 'c' ]);
assert.equal(rack.get(a), 'a!');
assert.equal(rack.get(b), "it's a b!");
assert.deepEqual(rack.get(c), [ 'c', 'c', 'c' ]);
assert.equal(rack.hats[a], 'a!');
assert.equal(rack.hats[b], "it's a b!");
assert.deepEqual(rack.hats[c], [ 'c', 'c', 'c' ]);
rack.set(a, 'AAA');<|fim▁hole|> assert.equal(rack.get(a), 'AAA');
};
exports.expandBy = function () {
var rack = hat.rack(4, 16, 4);
var seen = {};
for (var i = 0; i < 8; i++) {
var id = rack();
assert.ok(!seen[id], 'seen this id');
seen[id] = true;
assert.ok(id.match(/^[0-9a-f]$/));
}
for (var i = 0; i < 8; i++) {
var id = rack();
assert.ok(!seen[id], 'seen this id');
seen[id] = true;
assert.ok(id.match(/^[0-9a-f]{1,2}$/));
}
for (var i = 0; i < 8; i++) {
var id = rack();
assert.ok(!seen[id], 'seen this id');
seen[id] = true;
assert.ok(id.match(/^[0-9a-f]{2}$/));
}
};<|fim▁end|> | |
<|file_name|>defs.py<|end_file_name|><|fim▁begin|>"""
x86 definitions.
Commonly used definitions.
"""
from __future__ import absolute_import
from cdsl.isa import TargetISA, CPUMode
import base.instructions
from . import instructions as x86
from base.immediates import floatcc
ISA = TargetISA('x86', [base.instructions.GROUP, x86.GROUP]) # type: TargetISA
# CPU modes for 32-bit and 64-bit operation.
X86_64 = CPUMode('I64', ISA)
X86_32 = CPUMode('I32', ISA)
# The set of floating point condition codes that are directly supported.
# Other condition codes need to be reversed or expressed as two tests.
supported_floatccs = [
floatcc.ord,
floatcc.uno,
floatcc.one,
floatcc.ueq,
floatcc.gt,
floatcc.ge,<|fim▁hole|><|fim▁end|> | floatcc.ult,
floatcc.ule] |
<|file_name|>layer_artist.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function
import numpy as np
from matplotlib.colors import Normalize
from matplotlib.collections import LineCollection
from mpl_scatter_density import ScatterDensityArtist
from astropy.visualization import (ImageNormalize, LinearStretch, SqrtStretch,
AsinhStretch, LogStretch)
from glue.utils import defer_draw, broadcast_to
from glue.viewers.scatter.state import ScatterLayerState
from glue.viewers.matplotlib.layer_artist import MatplotlibLayerArtist
from glue.core.exceptions import IncompatibleAttribute
STRETCHES = {'linear': LinearStretch,
'sqrt': SqrtStretch,
'arcsinh': AsinhStretch,
'log': LogStretch}
CMAP_PROPERTIES = set(['cmap_mode', 'cmap_att', 'cmap_vmin', 'cmap_vmax', 'cmap'])
MARKER_PROPERTIES = set(['size_mode', 'size_att', 'size_vmin', 'size_vmax', 'size_scaling', 'size'])
LINE_PROPERTIES = set(['linewidth', 'linestyle'])
DENSITY_PROPERTIES = set(['dpi', 'stretch', 'density_contrast'])
VISUAL_PROPERTIES = (CMAP_PROPERTIES | MARKER_PROPERTIES | DENSITY_PROPERTIES |
LINE_PROPERTIES | set(['color', 'alpha', 'zorder', 'visible']))
DATA_PROPERTIES = set(['layer', 'x_att', 'y_att', 'cmap_mode', 'size_mode', 'density_map',
'xerr_att', 'yerr_att', 'xerr_visible', 'yerr_visible',
'vector_visible', 'vx_att', 'vy_att', 'vector_arrowhead', 'vector_mode',
'vector_origin', 'line_visible', 'markers_visible', 'vector_scaling'])
class InvertedNormalize(Normalize):
def __call__(self, *args, **kwargs):
return 1 - super(InvertedNormalize, self).__call__(*args, **kwargs)
class DensityMapLimits(object):
contrast = 1
def min(self, array):
return 0
def max(self, array):
return 10. ** (np.log10(np.nanmax(array)) * self.contrast)
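        # Editor's note (illustrative, not part of the original module): a
        # contrast below 1 compresses the upper limit, e.g. nanmax(array) == 1e4
        # with contrast == 0.5 gives 10 ** (4 * 0.5) == 100.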
def set_mpl_artist_cmap(artist, values, state):
vmin = state.cmap_vmin
vmax = state.cmap_vmax
cmap = state.cmap
if isinstance(artist, ScatterDensityArtist):
artist.set_c(values)
else:
artist.set_array(values)
artist.set_cmap(cmap)
if vmin > vmax:
artist.set_clim(vmax, vmin)
artist.set_norm(InvertedNormalize(vmax, vmin))
else:
artist.set_clim(vmin, vmax)
artist.set_norm(Normalize(vmin, vmax))
class ScatterLayerArtist(MatplotlibLayerArtist):
_layer_state_cls = ScatterLayerState
def __init__(self, axes, viewer_state, layer_state=None, layer=None):
super(ScatterLayerArtist, self).__init__(axes, viewer_state,
layer_state=layer_state, layer=layer)
# Watch for changes in the viewer state which would require the
# layers to be redrawn
self._viewer_state.add_global_callback(self._update_scatter)
self.state.add_global_callback(self._update_scatter)
# Scatter
self.scatter_artist = self.axes.scatter([], [])
self.plot_artist = self.axes.plot([], [], 'o', mec='none')[0]
self.errorbar_artist = self.axes.errorbar([], [], fmt='none')
self.vector_artist = None
self.line_collection = LineCollection(np.zeros((0, 2, 2)))
self.axes.add_collection(self.line_collection)
# Scatter density
self.density_auto_limits = DensityMapLimits()
self.density_artist = ScatterDensityArtist(self.axes, [], [], color='white',
vmin=self.density_auto_limits.min,
vmax=self.density_auto_limits.max)
self.axes.add_artist(self.density_artist)
self.mpl_artists = [self.scatter_artist, self.plot_artist,
self.errorbar_artist, self.vector_artist,
self.line_collection, self.density_artist]
self.errorbar_index = 2
self.vector_index = 3
self.reset_cache()
def reset_cache(self):
self._last_viewer_state = {}
self._last_layer_state = {}
@defer_draw
def _update_data(self, changed):
# Layer artist has been cleared already
if len(self.mpl_artists) == 0:
return
try:
x = self.layer[self._viewer_state.x_att].ravel()
except (IncompatibleAttribute, IndexError):
# The following includes a call to self.clear()
self.disable_invalid_attributes(self._viewer_state.x_att)
return
else:
self.enable()
try:
y = self.layer[self._viewer_state.y_att].ravel()
except (IncompatibleAttribute, IndexError):
# The following includes a call to self.clear()
self.disable_invalid_attributes(self._viewer_state.y_att)
return
else:
self.enable()
if self.state.markers_visible:
if self.state.density_map:
self.density_artist.set_xy(x, y)
self.plot_artist.set_data([], [])
self.scatter_artist.set_offsets(np.zeros((0, 2)))
else:
if self.state.cmap_mode == 'Fixed' and self.state.size_mode == 'Fixed':
# In this case we use Matplotlib's plot function because it has much
# better performance than scatter.
self.plot_artist.set_data(x, y)<|fim▁hole|> else:
self.plot_artist.set_data([], [])
offsets = np.vstack((x, y)).transpose()
self.scatter_artist.set_offsets(offsets)
self.density_artist.set_xy([], [])
else:
self.plot_artist.set_data([], [])
self.scatter_artist.set_offsets(np.zeros((0, 2)))
self.density_artist.set_xy([], [])
if self.state.line_visible:
if self.state.cmap_mode == 'Fixed':
points = np.array([x, y]).transpose()
self.line_collection.set_segments([points])
else:
# In the case where we want to color the line, we need to over
# sample the line by a factor of two so that we can assign the
# correct colors to segments - if we didn't do this, then
# segments on one side of a point would be a different color
# from the other side. With oversampling, we can have half a
# segment on either side of a point be the same color as a
# point
x_fine = np.zeros(len(x) * 2 - 1, dtype=float)
y_fine = np.zeros(len(y) * 2 - 1, dtype=float)
x_fine[::2] = x
x_fine[1::2] = 0.5 * (x[1:] + x[:-1])
y_fine[::2] = y
y_fine[1::2] = 0.5 * (y[1:] + y[:-1])
points = np.array([x_fine, y_fine]).transpose().reshape(-1, 1, 2)
segments = np.concatenate([points[:-1], points[1:]], axis=1)
self.line_collection.set_segments(segments)
else:
self.line_collection.set_segments(np.zeros((0, 2, 2)))
for eartist in list(self.errorbar_artist[2]):
if eartist is not None:
try:
eartist.remove()
except ValueError:
pass
except AttributeError: # Matplotlib < 1.5
pass
if self.vector_artist is not None:
self.vector_artist.remove()
self.vector_artist = None
if self.state.vector_visible:
if self.state.vx_att is not None and self.state.vy_att is not None:
vx = self.layer[self.state.vx_att].ravel()
vy = self.layer[self.state.vy_att].ravel()
if self.state.vector_mode == 'Polar':
ang = vx
length = vy
# assume ang is anti clockwise from the x axis
vx = length * np.cos(np.radians(ang))
vy = length * np.sin(np.radians(ang))
else:
vx = None
vy = None
if self.state.vector_arrowhead:
hw = 3
hl = 5
else:
hw = 1
hl = 0
v = np.hypot(vx, vy)
vmax = np.nanmax(v)
vx = vx / vmax
vy = vy / vmax
self.vector_artist = self.axes.quiver(x, y, vx, vy, units='width',
pivot=self.state.vector_origin,
headwidth=hw, headlength=hl,
scale_units='width',
scale=10 / self.state.vector_scaling)
self.mpl_artists[self.vector_index] = self.vector_artist
if self.state.xerr_visible or self.state.yerr_visible:
if self.state.xerr_visible and self.state.xerr_att is not None:
xerr = self.layer[self.state.xerr_att].ravel()
else:
xerr = None
if self.state.yerr_visible and self.state.yerr_att is not None:
yerr = self.layer[self.state.yerr_att].ravel()
else:
yerr = None
self.errorbar_artist = self.axes.errorbar(x, y, fmt='none',
xerr=xerr, yerr=yerr)
self.mpl_artists[self.errorbar_index] = self.errorbar_artist
@defer_draw
def _update_visual_attributes(self, changed, force=False):
if not self.enabled:
return
if self.state.markers_visible:
if self.state.density_map:
if self.state.cmap_mode == 'Fixed':
if force or 'color' in changed or 'cmap_mode' in changed:
self.density_artist.set_color(self.state.color)
self.density_artist.set_c(None)
self.density_artist.set_clim(self.density_auto_limits.min,
self.density_auto_limits.max)
elif force or any(prop in changed for prop in CMAP_PROPERTIES):
c = self.layer[self.state.cmap_att].ravel()
set_mpl_artist_cmap(self.density_artist, c, self.state)
if force or 'stretch' in changed:
self.density_artist.set_norm(ImageNormalize(stretch=STRETCHES[self.state.stretch]()))
if force or 'dpi' in changed:
self.density_artist.set_dpi(self._viewer_state.dpi)
if force or 'density_contrast' in changed:
self.density_auto_limits.contrast = self.state.density_contrast
self.density_artist.stale = True
else:
if self.state.cmap_mode == 'Fixed' and self.state.size_mode == 'Fixed':
if force or 'color' in changed:
self.plot_artist.set_color(self.state.color)
if force or 'size' in changed or 'size_scaling' in changed:
self.plot_artist.set_markersize(self.state.size *
self.state.size_scaling)
else:
# TEMPORARY: Matplotlib has a bug that causes set_alpha to
# change the colors back: https://github.com/matplotlib/matplotlib/issues/8953
if 'alpha' in changed:
force = True
if self.state.cmap_mode == 'Fixed':
if force or 'color' in changed or 'cmap_mode' in changed:
self.scatter_artist.set_facecolors(self.state.color)
self.scatter_artist.set_edgecolor('none')
elif force or any(prop in changed for prop in CMAP_PROPERTIES):
c = self.layer[self.state.cmap_att].ravel()
set_mpl_artist_cmap(self.scatter_artist, c, self.state)
self.scatter_artist.set_edgecolor('none')
if force or any(prop in changed for prop in MARKER_PROPERTIES):
if self.state.size_mode == 'Fixed':
s = self.state.size * self.state.size_scaling
s = broadcast_to(s, self.scatter_artist.get_sizes().shape)
else:
s = self.layer[self.state.size_att].ravel()
s = ((s - self.state.size_vmin) /
(self.state.size_vmax - self.state.size_vmin)) * 30
s *= self.state.size_scaling
# Note, we need to square here because for scatter, s is actually
# proportional to the marker area, not radius.
self.scatter_artist.set_sizes(s ** 2)
if self.state.line_visible:
if self.state.cmap_mode == 'Fixed':
if force or 'color' in changed or 'cmap_mode' in changed:
self.line_collection.set_array(None)
self.line_collection.set_color(self.state.color)
elif force or any(prop in changed for prop in CMAP_PROPERTIES):
# Higher up we oversampled the points in the line so that
# half a segment on either side of each point has the right
# color, so we need to also oversample the color here.
c = self.layer[self.state.cmap_att].ravel()
cnew = np.zeros((len(c) - 1) * 2)
cnew[::2] = c[:-1]
cnew[1::2] = c[1:]
set_mpl_artist_cmap(self.line_collection, cnew, self.state)
if force or 'linewidth' in changed:
self.line_collection.set_linewidth(self.state.linewidth)
if force or 'linestyle' in changed:
self.line_collection.set_linestyle(self.state.linestyle)
if self.state.vector_visible and self.vector_artist is not None:
if self.state.cmap_mode == 'Fixed':
if force or 'color' in changed or 'cmap_mode' in changed:
self.vector_artist.set_array(None)
self.vector_artist.set_color(self.state.color)
elif force or any(prop in changed for prop in CMAP_PROPERTIES):
c = self.layer[self.state.cmap_att].ravel()
set_mpl_artist_cmap(self.vector_artist, c, self.state)
if self.state.xerr_visible or self.state.yerr_visible:
for eartist in list(self.errorbar_artist[2]):
if eartist is None:
continue
if self.state.cmap_mode == 'Fixed':
if force or 'color' in changed or 'cmap_mode' in changed:
eartist.set_color(self.state.color)
elif force or any(prop in changed for prop in CMAP_PROPERTIES):
c = self.layer[self.state.cmap_att].ravel()
set_mpl_artist_cmap(eartist, c, self.state)
if force or 'alpha' in changed:
eartist.set_alpha(self.state.alpha)
if force or 'visible' in changed:
eartist.set_visible(self.state.visible)
if force or 'zorder' in changed:
eartist.set_zorder(self.state.zorder)
for artist in [self.scatter_artist, self.plot_artist,
self.vector_artist, self.line_collection,
self.density_artist]:
if artist is None:
continue
if force or 'alpha' in changed:
artist.set_alpha(self.state.alpha)
if force or 'zorder' in changed:
artist.set_zorder(self.state.zorder)
if force or 'visible' in changed:
artist.set_visible(self.state.visible)
self.redraw()
@defer_draw
def _update_scatter(self, force=False, **kwargs):
if (self._viewer_state.x_att is None or
self._viewer_state.y_att is None or
self.state.layer is None):
return
# Figure out which attributes are different from before. Ideally we shouldn't
# need this but currently this method is called multiple times if an
        # attribute is changed due to, e.g., x_att changing and then the limits.
        # If we can solve this so that _update_scatter is really only called once
# then we could consider simplifying this. Until then, we manually keep track
# of which properties have changed.
changed = set()
if not force:
for key, value in self._viewer_state.as_dict().items():
if value != self._last_viewer_state.get(key, None):
changed.add(key)
for key, value in self.state.as_dict().items():
if value != self._last_layer_state.get(key, None):
changed.add(key)
self._last_viewer_state.update(self._viewer_state.as_dict())
self._last_layer_state.update(self.state.as_dict())
if force or len(changed & DATA_PROPERTIES) > 0:
self._update_data(changed)
force = True
if force or len(changed & VISUAL_PROPERTIES) > 0:
self._update_visual_attributes(changed, force=force)
def get_layer_color(self):
if self.state.cmap_mode == 'Fixed':
return self.state.color
else:
return self.state.cmap
@defer_draw
def update(self):
self._update_scatter(force=True)
self.redraw()<|fim▁end|> | self.scatter_artist.set_offsets(np.zeros((0, 2)))
self.density_artist.set_xy([], []) |
<|file_name|>mock_test.go<|end_file_name|><|fim▁begin|>// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// AUTO-GENERATED CODE. DO NOT EDIT.
package videointelligence
import (
videointelligencepb "google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1"
longrunningpb "google.golang.org/genproto/googleapis/longrunning"
)
import (
"flag"
"fmt"
"io"
"log"
"net"
"os"
"strings"
"testing"
"github.com/golang/protobuf/proto"
"github.com/golang/protobuf/ptypes"
"golang.org/x/net/context"
"google.golang.org/api/option"
status "google.golang.org/genproto/googleapis/rpc/status"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/metadata"
gstatus "google.golang.org/grpc/status"
)
var _ = io.EOF
var _ = ptypes.MarshalAny
var _ status.Status
type mockVideoIntelligenceServer struct {
// Embed for forward compatibility.
// Tests will keep working if more methods are added
// in the future.
videointelligencepb.VideoIntelligenceServiceServer
reqs []proto.Message
// If set, all calls return this error.
err error
<|fim▁hole|> resps []proto.Message
}
func (s *mockVideoIntelligenceServer) AnnotateVideo(ctx context.Context, req *videointelligencepb.AnnotateVideoRequest) (*longrunningpb.Operation, error) {
md, _ := metadata.FromIncomingContext(ctx)
if xg := md["x-goog-api-client"]; len(xg) == 0 || !strings.Contains(xg[0], "gl-go/") {
return nil, fmt.Errorf("x-goog-api-client = %v, expected gl-go key", xg)
}
s.reqs = append(s.reqs, req)
if s.err != nil {
return nil, s.err
}
return s.resps[0].(*longrunningpb.Operation), nil
}
// clientOpt is the option tests should use to connect to the test server.
// It is initialized by TestMain.
var clientOpt option.ClientOption
var (
mockVideoIntelligence mockVideoIntelligenceServer
)
func TestMain(m *testing.M) {
flag.Parse()
serv := grpc.NewServer()
videointelligencepb.RegisterVideoIntelligenceServiceServer(serv, &mockVideoIntelligence)
lis, err := net.Listen("tcp", "localhost:0")
if err != nil {
log.Fatal(err)
}
go serv.Serve(lis)
conn, err := grpc.Dial(lis.Addr().String(), grpc.WithInsecure())
if err != nil {
log.Fatal(err)
}
clientOpt = option.WithGRPCConn(conn)
os.Exit(m.Run())
}
func TestVideoIntelligenceServiceAnnotateVideo(t *testing.T) {
var expectedResponse *videointelligencepb.AnnotateVideoResponse = &videointelligencepb.AnnotateVideoResponse{}
mockVideoIntelligence.err = nil
mockVideoIntelligence.reqs = nil
any, err := ptypes.MarshalAny(expectedResponse)
if err != nil {
t.Fatal(err)
}
mockVideoIntelligence.resps = append(mockVideoIntelligence.resps[:0], &longrunningpb.Operation{
Name: "longrunning-test",
Done: true,
Result: &longrunningpb.Operation_Response{Response: any},
})
var inputUri string = "gs://demomaker/cat.mp4"
var featuresElement videointelligencepb.Feature = videointelligencepb.Feature_LABEL_DETECTION
var features = []videointelligencepb.Feature{featuresElement}
var request = &videointelligencepb.AnnotateVideoRequest{
InputUri: inputUri,
Features: features,
}
c, err := NewClient(context.Background(), clientOpt)
if err != nil {
t.Fatal(err)
}
respLRO, err := c.AnnotateVideo(context.Background(), request)
if err != nil {
t.Fatal(err)
}
resp, err := respLRO.Wait(context.Background())
if err != nil {
t.Fatal(err)
}
if want, got := request, mockVideoIntelligence.reqs[0]; !proto.Equal(want, got) {
t.Errorf("wrong request %q, want %q", got, want)
}
if want, got := expectedResponse, resp; !proto.Equal(want, got) {
t.Errorf("wrong response %q, want %q)", got, want)
}
}
func TestVideoIntelligenceServiceAnnotateVideoError(t *testing.T) {
errCode := codes.PermissionDenied
mockVideoIntelligence.err = nil
mockVideoIntelligence.resps = append(mockVideoIntelligence.resps[:0], &longrunningpb.Operation{
Name: "longrunning-test",
Done: true,
Result: &longrunningpb.Operation_Error{
Error: &status.Status{
Code: int32(errCode),
Message: "test error",
},
},
})
var inputUri string = "gs://demomaker/cat.mp4"
var featuresElement videointelligencepb.Feature = videointelligencepb.Feature_LABEL_DETECTION
var features = []videointelligencepb.Feature{featuresElement}
var request = &videointelligencepb.AnnotateVideoRequest{
InputUri: inputUri,
Features: features,
}
c, err := NewClient(context.Background(), clientOpt)
if err != nil {
t.Fatal(err)
}
respLRO, err := c.AnnotateVideo(context.Background(), request)
if err != nil {
t.Fatal(err)
}
resp, err := respLRO.Wait(context.Background())
if st, ok := gstatus.FromError(err); !ok {
t.Errorf("got error %v, expected grpc error", err)
} else if c := st.Code(); c != errCode {
t.Errorf("got error code %q, want %q", c, errCode)
}
_ = resp
}<|fim▁end|> | // responses to return if err == nil |
<|file_name|>main.cc<|end_file_name|><|fim▁begin|>#include "app.h"
<|fim▁hole|>int main(int argc, char *argv[])
{
return App{}.run(argc, argv);
}<|fim▁end|> | |
<|file_name|>cannabisdarkcoin_bs.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="bs" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About cannabisdarkcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>cannabisdarkcoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
        <source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2014 CannabisDarkcoin team
Copyright © 2014 The cannabisdarkcoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.<|fim▁hole|></context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your cannabisdarkcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a cannabisdarkcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified cannabisdarkcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when the OS account is compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-58"/>
<source>cannabisdarkcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CannabisDarkcoinGUI</name>
<message>
<location filename="../cannabisdarkcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show information about cannabisdarkcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a cannabisdarkcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for cannabisdarkcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>cannabisdarkcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+180"/>
<source>&About cannabisdarkcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>cannabisdarkcoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to cannabisdarkcoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About cannabisdarkcoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about cannabisdarkcoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI cannot be parsed! This can be caused by an invalid cannabisdarkcoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../cannabisdarkcoin.cpp" line="+109"/>
<source>A fatal error occurred. cannabisdarkcoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kB is required.
Can vary +/- 1 byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority are more likely to be included in a block.
This label turns red if the priority is smaller than &quot;medium&quot;.
This means a fee of at least %1 per kB is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid cannabisdarkcoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>cannabisdarkcoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start cannabisdarkcoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start cannabisdarkcoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the cannabisdarkcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the cannabisdarkcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize the application instead of exiting it when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting cannabisdarkcoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show cannabisdarkcoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting cannabisdarkcoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the cannabisdarkcoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that were staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid; please check it.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>The resulting URI is too long; try to reduce the text for the label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the cannabisdarkcoin-Qt help message to get a list of the available cannabisdarkcoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>cannabisdarkcoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>cannabisdarkcoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the cannabisdarkcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the cannabisdarkcoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>123.456 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a cannabisdarkcoin address (e.g. EakqhrmwJuHG22WirpfBvQMMUuisWZNzrP)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid; please recheck it.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found; you can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid cannabisdarkcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. EakqhrmwJuHG22WirpfBvQMMUuisWZNzrP)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a cannabisdarkcoin address (e.g. EakqhrmwJuHG22WirpfBvQMMUuisWZNzrP)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. EakqhrmwJuHG22WirpfBvQMMUuisWZNzrP)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this cannabisdarkcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. EakqhrmwJuHG22WirpfBvQMMUuisWZNzrP)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified cannabisdarkcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a cannabisdarkcoin address (e.g. EakqhrmwJuHG22WirpfBvQMMUuisWZNzrP)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter cannabisdarkcoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Sve</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Danas</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Ovaj mjesec</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Prošli mjesec</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Ove godine</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>cannabisdarkcoin-core</name>
<message>
<location filename="../cannabisdarkcoinstrings.cpp" line="+33"/>
<source>cannabisdarkcoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or cannabisdarkcoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: cannabisdarkcoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: cannabisdarkcoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong cannabisdarkcoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the CannabisDarkcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=cannabisdarkcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "cannabisdarkcoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. cannabisdarkcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>cannabisdarkcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of cannabisdarkcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart cannabisdarkcoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. cannabisdarkcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> |
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message> |
<|file_name|>messages_spec.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {findEndOfBlock, parseMessage, parseMetadata, splitBlock} from '../../src/utils/messages';
import {makeTemplateObject} from '../../src/utils/translations';
describe('messages utils', () => {
describe('parseMessage', () => {
it('should use the message-id parsed from the metadata if available', () => {
const message = parseMessage(
makeTemplateObject(
[':@@custom-message-id:a', ':one:b', ':two:c'],
[':@@custom-message-id:a', ':one:b', ':two:c']),
[1, 2]);
expect(message.messageId).toEqual('custom-message-id');
});
it('should compute the translation key if no metadata', () => {
const message = parseMessage(
makeTemplateObject(['a', ':one:b', ':two:c'], ['a', ':one:b', ':two:c']), [1, 2]);
expect(message.messageId).toEqual('8865273085679272414');
});
it('should compute the translation key if no id in the metadata', () => {
const message = parseMessage(
makeTemplateObject(
[':description:a', ':one:b', ':two:c'], [':description:a', ':one:b', ':two:c']),
[1, 2]);
expect(message.messageId).toEqual('8865273085679272414');
});
it('should compute a different id if the meaning changes', () => {
const message1 = parseMessage(makeTemplateObject(['abc'], ['abc']), []);
const message2 = parseMessage(makeTemplateObject([':meaning1|:abc'], [':meaning1|:abc']), []);
const message3 = parseMessage(makeTemplateObject([':meaning2|:abc'], [':meaning2|:abc']), []);
expect(message1.messageId).not.toEqual(message2.messageId);
expect(message2.messageId).not.toEqual(message3.messageId);
expect(message3.messageId).not.toEqual(message1.messageId);
});
it('should compute the translation key, inferring placeholder names if not given', () => {
const message = parseMessage(makeTemplateObject(['a', 'b', 'c'], ['a', 'b', 'c']), [1, 2]);
expect(message.messageId).toEqual('8107531564991075946');
});
it('should compute the translation key, ignoring escaped placeholder names', () => {
const message = parseMessage(
makeTemplateObject(['a', ':one:b', ':two:c'], ['a', '\\:one:b', '\\:two:c']), [1, 2]);
expect(message.messageId).toEqual('2623373088949454037');
});
it('should compute the translation key, handling empty raw values', () => {
const message =
parseMessage(makeTemplateObject(['a', ':one:b', ':two:c'], ['', '', '']), [1, 2]);
expect(message.messageId).toEqual('8865273085679272414');
});
it('should build a map of named placeholders to expressions', () => {
const message = parseMessage(
makeTemplateObject(['a', ':one:b', ':two:c'], ['a', ':one:b', ':two:c']), [1, 2]);
expect(message.substitutions).toEqual({one: 1, two: 2});
});
it('should build a map of implied placeholders to expressions', () => {
const message = parseMessage(makeTemplateObject(['a', 'b', 'c'], ['a', 'b', 'c']), [1, 2]);
expect(message.substitutions).toEqual({PH: 1, PH_1: 2});
});
});
describe('splitBlock()', () => {
it('should return just the text if there is no block',
() => { expect(splitBlock('abc def', 'abc def')).toEqual({text: 'abc def'}); });
it('should return just the text and block if there is one', () => {
expect(splitBlock(':block info:abc def', ':block info:abc def'))
.toEqual({text: 'abc def', block: 'block info'});
});
it('should handle an empty block if there is one', () => {
expect(splitBlock('::abc def', '::abc def')).toEqual({text: 'abc def', block: ''});
});
it('should error on an unterminated block', () => {
expect(() => splitBlock(':abc def', ':abc def'))
.toThrowError('Unterminated $localize metadata block in ":abc def".');
});
it('should handle escaped block markers', () => {
expect(splitBlock(':part of the message:abc def', '\\:part of the message:abc def')).toEqual({
text: ':part of the message:abc def'
});
expect(splitBlock(
':block with escaped : in it:abc def', ':block with escaped \\: in it:abc def'))
.toEqual({text: 'abc def', block: 'block with escaped : in it'});
});
it('should handle the empty raw part', () => {
expect(splitBlock(':block info:abc def', '')).toEqual({text: 'abc def', block: 'block info'});
});
});
describe('findEndOfBlock()', () => {
it('should throw error if there is no end of block marker', () => {
expect(() => findEndOfBlock(':some text', ':some text'))
.toThrowError('Unterminated $localize metadata block in ":some text".');
expect(() => findEndOfBlock(':escaped colon:', ':escaped colon\\:'))
.toThrowError('Unterminated $localize metadata block in ":escaped colon\\:".');
});
it('should return index of the end of block marker', () => {
expect(findEndOfBlock(':block:', ':block:')).toEqual(6);
expect(findEndOfBlock(':block::', ':block::')).toEqual(6);
expect(findEndOfBlock(':block:some text', ':block:some text')).toEqual(6);
expect(findEndOfBlock(':block:some text:more text', ':block:some text:more text')).toEqual(6);
expect(findEndOfBlock('::::', ':\\:\\::')).toEqual(3);
expect(findEndOfBlock(':block::', ':block\\::')).toEqual(7);
expect(findEndOfBlock(':block:more:some text', ':block\\:more:some text')).toEqual(11);
expect(findEndOfBlock(':block:more:and-more:some text', ':block\\:more\\:and-more:some text'))
.toEqual(20);
});
});
describe('parseMetadata()', () => {
it('should return just the text if there is no block', () => {
expect(parseMetadata('abc def', 'abc def'))
.toEqual({text: 'abc def', meaning: undefined, description: undefined, id: undefined});
});
it('should extract the metadata if provided', () => {
expect(parseMetadata(':description:abc def', ':description:abc def'))
.toEqual(
{text: 'abc def', description: 'description', meaning: undefined, id: undefined});
expect(parseMetadata(':meaning|:abc def', ':meaning|:abc def'))
.toEqual({text: 'abc def', description: undefined, meaning: 'meaning', id: undefined});
expect(parseMetadata(':@@message-id:abc def', ':@@message-id:abc def'))
.toEqual({text: 'abc def', description: undefined, meaning: undefined, id: 'message-id'});
expect(parseMetadata(':meaning|description:abc def', ':meaning|description:abc def'))
.toEqual(
{text: 'abc def', description: 'description', meaning: 'meaning', id: undefined});
expect(parseMetadata(':description@@message-id:abc def', ':description@@message-id:abc def'))
.toEqual(
{text: 'abc def', description: 'description', meaning: undefined, id: 'message-id'});
expect(parseMetadata(':meaning|@@message-id:abc def', ':meaning|@@message-id:abc def'))
.toEqual({text: 'abc def', description: undefined, meaning: 'meaning', id: 'message-id'});
});
it('should handle an empty block if there is one', () => {
expect(parseMetadata('::abc def', '::abc def'))
.toEqual({text: 'abc def', meaning: undefined, description: undefined, id: undefined});
});<|fim▁hole|>
it('should handle escaped block markers', () => {
expect(parseMetadata(':part of the message:abc def', '\\:part of the message:abc def'))
.toEqual({
text: ':part of the message:abc def',
meaning: undefined,
description: undefined,
id: undefined
});
});
it('should handle the empty raw part', () => {
expect(parseMetadata(':description:abc def', ''))
.toEqual(
{text: 'abc def', meaning: undefined, description: 'description', id: undefined});
});
});
});<|fim▁end|> | |
<|file_name|>html.js<|end_file_name|><|fim▁begin|>if(typeof define !== 'function')
var define = require('amdefine')(module);
define(["require","deep/deep", "./ajax"],function (require, deep, Ajax)
{
deep.store.jqueryajax = deep.store.jqueryajax || {};
deep.store.jqueryajax.HTML = deep.compose.Classes(Ajax, {
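	// HTML flavour of the jQuery Ajax store: it accepts text/html responses,
	// serialises outgoing bodies to plain strings and returns responses as raw markup text.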
headers:{
"Accept" : "text/html; charset=utf-8"
},
dataType:"html",
bodyParser : function(data){
if(typeof data === 'string')
return data;
if(data.toString())
return data.toString();
return String(data);
},
responseParser : function(data, msg, jqXHR){
return data.toString();
}
});
//__________________________________________________
deep.extensions.push({
extensions:[
/(\.(html|htm|xhtm|xhtml)(\?.*)?)$/gi
],
store:deep.store.jqueryajax.HTML
});<|fim▁hole|>
});<|fim▁end|> | deep.store.jqueryajax.HTML.createDefault = function(){
new deep.store.jqueryajax.HTML("html");
};
return deep.store.jqueryajax.HTML; |
<|file_name|>AvailabilityNodes.py<|end_file_name|><|fim▁begin|>from Child import Child
from Node import Node # noqa: I201
AVAILABILITY_NODES = [
# availability-spec-list -> availability-entry availability-spec-list?
Node('AvailabilitySpecList', kind='SyntaxCollection',
element='AvailabilityArgument'),
# Wrapper for all the different entries that may occur inside @available
# availability-entry -> '*' ','?
# | identifier ','?
# | availability-version-restriction ','?
# | availability-versioned-argument ','?
Node('AvailabilityArgument', kind='Syntax',
description='''
        A single argument to an `@available` attribute like `*`, `iOS 10.1`, \
or `message: "This has been deprecated"`.<|fim▁hole|> children=[
Child('Entry', kind='Syntax',
description='The actual argument',
node_choices=[
Child('Star', kind='SpacedBinaryOperatorToken',
text_choices=['*']),
Child('IdentifierRestriction',
kind='IdentifierToken'),
Child('AvailabilityVersionRestriction',
kind='AvailabilityVersionRestriction'),
Child('AvailabilityLabeledArgument',
kind='AvailabilityLabeledArgument'),
]),
Child('TrailingComma', kind='CommaToken', is_optional=True,
description='''
A trailing comma if the argument is followed by another \
argument
'''),
]),
# Representation of 'deprecated: 2.3', 'message: "Hello world"' etc.
# availability-versioned-argument -> identifier ':' version-tuple
Node('AvailabilityLabeledArgument', kind='Syntax',
description='''
A argument to an `@available` attribute that consists of a label and \
a value, e.g. `message: "This has been deprecated"`.
''',
children=[
Child('Label', kind='IdentifierToken',
description='The label of the argument'),
Child('Colon', kind='ColonToken',
description='The colon separating label and value'),
Child('Value', kind='Syntax',
node_choices=[
Child('String', 'StringLiteralToken'),
Child('Version', 'VersionTuple'),
], description='The value of this labeled argument',),
]),
# Representation for 'iOS 10', 'swift 3.4' etc.
# availability-version-restriction -> identifier version-tuple
Node('AvailabilityVersionRestriction', kind='Syntax',
description='''
An argument to `@available` that restricts the availability on a \
certain platform to a version, e.g. `iOS 10` or `swift 3.4`.
''',
children=[
Child('Platform', kind='IdentifierToken',
classification='Keyword',
description='''
The name of the OS on which the availability should be \
restricted or 'swift' if the availability should be \
restricted based on a Swift version.
'''),
Child('Version', kind='VersionTuple'),
]),
# version-tuple -> integer-literal
# | float-literal
# | float-literal '.' integer-literal
Node('VersionTuple', kind='Syntax',
description='''
A version number of the form major.minor.patch in which the minor \
        and patch part may be omitted.
''',
children=[
Child('MajorMinor', kind='Syntax',
node_choices=[
Child('Major', kind='IntegerLiteralToken'),
Child('MajorMinor', kind='FloatingLiteralToken')
], description='''
In case the version consists only of the major version, an \
integer literal that specifies the major version. In case \
the version consists of major and minor version number, a \
floating literal in which the decimal part is interpreted \
as the minor version.
'''),
Child('PatchPeriod', kind='PeriodToken', is_optional=True,
description='''
If the version contains a patch number, the period \
separating the minor from the patch number.
'''),
Child('PatchVersion', kind='IntegerLiteralToken',
is_optional=True, description='''
The patch version if specified.
'''),
]),
]<|fim▁end|> | ''', |
<|file_name|>checks.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package preflight
import (
"bufio"
"bytes"
"fmt"
"io"
"net"
"net/http"
"os"
"os/exec"
"path/filepath"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
kubeadmapi "k8s.io/kubernetes/cmd/kubeadm/app/apis/kubeadm"
kubeadmconstants "k8s.io/kubernetes/cmd/kubeadm/app/constants"
"k8s.io/kubernetes/pkg/api/validation"
authzmodes "k8s.io/kubernetes/pkg/kubeapiserver/authorizer/modes"
"k8s.io/kubernetes/pkg/util/initsystem"
"k8s.io/kubernetes/pkg/util/node"
"k8s.io/kubernetes/test/e2e_node/system"
)
const bridgenf string = "/proc/sys/net/bridge/bridge-nf-call-iptables"
type Error struct {
Msg string
}
func (e *Error) Error() string {
return fmt.Sprintf("[preflight] Some fatal errors occurred:\n%s%s", e.Msg, "[preflight] If you know what you are doing, you can skip pre-flight checks with `--skip-preflight-checks`")
}
// Checker validates the state of the system to ensure kubeadm will be
// successful as often as possible.
type Checker interface {
Check() (warnings, errors []error)
}
// ServiceCheck verifies that the given service is enabled and active. If we do not
// detect a supported init system however, all checks are skipped and a warning is
// returned.
type ServiceCheck struct {
Service string
CheckIfActive bool
}
func (sc ServiceCheck) Check() (warnings, errors []error) {
initSystem, err := initsystem.GetInitSystem()
if err != nil {
return []error{err}, nil
}
warnings = []error{}
if !initSystem.ServiceExists(sc.Service) {
warnings = append(warnings, fmt.Errorf("%s service does not exist", sc.Service))
return warnings, nil
}
if !initSystem.ServiceIsEnabled(sc.Service) {
warnings = append(warnings,
fmt.Errorf("%s service is not enabled, please run 'systemctl enable %s.service'",
sc.Service, sc.Service))
}
if sc.CheckIfActive && !initSystem.ServiceIsActive(sc.Service) {
errors = append(errors,
fmt.Errorf("%s service is not active, please run 'systemctl start %s.service'",
sc.Service, sc.Service))
}
return warnings, errors
}
// FirewalldCheck checks if firewalld is enabled or active, and if so outputs a warning.
type FirewalldCheck struct {
ports []int
}
func (fc FirewalldCheck) Check() (warnings, errors []error) {
initSystem, err := initsystem.GetInitSystem()
if err != nil {
return []error{err}, nil
}
warnings = []error{}
if !initSystem.ServiceExists("firewalld") {
return nil, nil
}
if initSystem.ServiceIsActive("firewalld") {
warnings = append(warnings,
fmt.Errorf("firewalld is active, please ensure ports %v are open or your cluster may not function correctly",
fc.ports))
}
return warnings, errors
}
// PortOpenCheck ensures the given port is available for use.
type PortOpenCheck struct {
port int
}
func (poc PortOpenCheck) Check() (warnings, errors []error) {
errors = []error{}
// TODO: Get IP from KubeadmConfig
ln, err := net.Listen("tcp", fmt.Sprintf(":%d", poc.port))
if err != nil {
errors = append(errors, fmt.Errorf("Port %d is in use", poc.port))
}
if ln != nil {
ln.Close()
}
return nil, errors
}
// IsRootCheck verifies user is root
type IsRootCheck struct{}
func (irc IsRootCheck) Check() (warnings, errors []error) {
errors = []error{}
if os.Getuid() != 0 {
errors = append(errors, fmt.Errorf("user is not running as root"))
}
return nil, errors
}
// DirAvailableCheck checks if the given directory either does not exist, or is empty.
type DirAvailableCheck struct {
Path string
}
func (dac DirAvailableCheck) Check() (warnings, errors []error) {
errors = []error{}
// If it doesn't exist we are good:
if _, err := os.Stat(dac.Path); os.IsNotExist(err) {
return nil, nil
}
f, err := os.Open(dac.Path)
if err != nil {
errors = append(errors, fmt.Errorf("unable to check if %s is empty: %s", dac.Path, err))
return nil, errors
}
defer f.Close()
_, err = f.Readdirnames(1)
if err != io.EOF {
errors = append(errors, fmt.Errorf("%s is not empty", dac.Path))
}
return nil, errors
}
// FileAvailableCheck checks that the given file does not already exist.
type FileAvailableCheck struct {
Path string
}
func (fac FileAvailableCheck) Check() (warnings, errors []error) {
errors = []error{}
if _, err := os.Stat(fac.Path); err == nil {
errors = append(errors, fmt.Errorf("%s already exists", fac.Path))
}
return nil, errors
}
// FileExistingCheck checks that the given file exists.
type FileExistingCheck struct {
Path string
}
func (fac FileExistingCheck) Check() (warnings, errors []error) {
errors = []error{}
if _, err := os.Stat(fac.Path); err != nil {
errors = append(errors, fmt.Errorf("%s doesn't exist", fac.Path))
}
return nil, errors
}
// FileContentCheck checks that the given file contains the string Content.
type FileContentCheck struct {
Path string
Content []byte
}
func (fcc FileContentCheck) Check() (warnings, errors []error) {
f, err := os.Open(fcc.Path)
if err != nil {
return nil, []error{fmt.Errorf("%s does not exist", fcc.Path)}
}
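	// Read at most len(fcc.Content) bytes from the file and compare them
	// byte-for-byte with the expected content.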
lr := io.LimitReader(f, int64(len(fcc.Content)))
defer f.Close()
buf := &bytes.Buffer{}
_, err = io.Copy(buf, lr)
if err != nil {
return nil, []error{fmt.Errorf("%s could not be read", fcc.Path)}
}
if !bytes.Equal(buf.Bytes(), fcc.Content) {
return nil, []error{fmt.Errorf("%s contents are not set to %s", fcc.Path, fcc.Content)}
}
return nil, []error{}
}
// InPathCheck checks if the given executable is present in the path
type InPathCheck struct {
executable string
mandatory bool
}
func (ipc InPathCheck) Check() (warnings, errors []error) {
_, err := exec.LookPath(ipc.executable)
if err != nil {
if ipc.mandatory {
// Return as an error:
return nil, []error{fmt.Errorf("%s not found in system path", ipc.executable)}
}
// Return as a warning:
return []error{fmt.Errorf("%s not found in system path", ipc.executable)}, nil
}
return nil, nil
}
// HostnameCheck checks if the hostname matches the DNS subdomain regex.
// If the hostname doesn't match this regex, the kubelet will not launch static pods like kube-apiserver/kube-controller-manager and so on.
type HostnameCheck struct{}
func (hc HostnameCheck) Check() (warnings, errors []error) {
errors = []error{}
warnings = []error{}
hostname := node.GetHostname("")
for _, msg := range validation.ValidateNodeName(hostname, false) {
errors = append(errors, fmt.Errorf("hostname \"%s\" %s", hostname, msg))
}
addr, err := net.LookupHost(hostname)
if addr == nil {
warnings = append(warnings, fmt.Errorf("hostname \"%s\" could not be reached", hostname))
}
if err != nil {
warnings = append(warnings, fmt.Errorf("hostname \"%s\" %s", hostname, err))
}
return warnings, errors
}
// HTTPProxyCheck checks whether an https connection to a specific host will
// be made directly or through a proxy. If a proxy is detected, it returns a warning.
type HTTPProxyCheck struct {
Proto string
Host string
Port int
}
func (hst HTTPProxyCheck) Check() (warnings, errors []error) {
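	// Build the URL kubeadm will talk to and ask the default transport which
	// proxy, if any, the standard library would use for that request.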
url := fmt.Sprintf("%s://%s:%d", hst.Proto, hst.Host, hst.Port)
req, err := http.NewRequest("GET", url, nil)
if err != nil {
return nil, []error{err}
}
proxy, err := http.DefaultTransport.(*http.Transport).Proxy(req)
if err != nil {
return nil, []error{err}
}
if proxy != nil {
return []error{fmt.Errorf("Connection to %q uses proxy %q. If that is not intended, adjust your proxy settings", url, proxy)}, nil
}
return nil, nil<|fim▁hole|>func (sysver SystemVerificationCheck) Check() (warnings, errors []error) {
	// Create a buffered writer with a fairly large buffer (1M), assuming the output from the system verification test won't exceed that limit
// Run the system verification check, but write to out buffered writer instead of stdout
bufw := bufio.NewWriterSize(os.Stdout, 1*1024*1024)
reporter := &system.StreamReporter{WriteStream: bufw}
var errs []error
// All the validators we'd like to run:
var validators = []system.Validator{
&system.OSValidator{Reporter: reporter},
&system.KernelValidator{Reporter: reporter},
&system.CgroupsValidator{Reporter: reporter},
&system.DockerValidator{Reporter: reporter},
}
// Run all validators
for _, v := range validators {
errs = append(errs, v.Validate(system.DefaultSysSpec))
}
err := utilerrors.NewAggregate(errs)
if err != nil {
// Only print the output from the system verification check if the check failed
fmt.Println("[preflight] The system verification failed. Printing the output from the verification:")
bufw.Flush()
return nil, []error{err}
}
return nil, nil
}
func RunInitMasterChecks(cfg *kubeadmapi.MasterConfiguration) error {
checks := []Checker{
SystemVerificationCheck{},
IsRootCheck{},
HostnameCheck{},
ServiceCheck{Service: "kubelet", CheckIfActive: false},
ServiceCheck{Service: "docker", CheckIfActive: true},
FirewalldCheck{ports: []int{int(cfg.API.Port), 10250}},
PortOpenCheck{port: int(cfg.API.Port)},
PortOpenCheck{port: 8080},
PortOpenCheck{port: 10250},
PortOpenCheck{port: 10251},
PortOpenCheck{port: 10252},
HTTPProxyCheck{Proto: "https", Host: cfg.API.AdvertiseAddresses[0], Port: int(cfg.API.Port)},
DirAvailableCheck{Path: filepath.Join(kubeadmapi.GlobalEnvParams.KubernetesDir, "manifests")},
DirAvailableCheck{Path: "/var/lib/kubelet"},
FileContentCheck{Path: bridgenf, Content: []byte{'1'}},
InPathCheck{executable: "ip", mandatory: true},
InPathCheck{executable: "iptables", mandatory: true},
InPathCheck{executable: "mount", mandatory: true},
InPathCheck{executable: "nsenter", mandatory: true},
InPathCheck{executable: "ebtables", mandatory: false},
InPathCheck{executable: "ethtool", mandatory: false},
InPathCheck{executable: "socat", mandatory: false},
InPathCheck{executable: "tc", mandatory: false},
InPathCheck{executable: "touch", mandatory: false},
}
if len(cfg.Etcd.Endpoints) == 0 {
// Only do etcd related checks when no external endpoints were specified
checks = append(checks,
PortOpenCheck{port: 2379},
DirAvailableCheck{Path: "/var/lib/etcd"},
)
}
// Check the config for authorization mode
switch cfg.AuthorizationMode {
case authzmodes.ModeABAC:
checks = append(checks, FileExistingCheck{Path: kubeadmconstants.AuthorizationPolicyPath})
case authzmodes.ModeWebhook:
checks = append(checks, FileExistingCheck{Path: kubeadmconstants.AuthorizationWebhookConfigPath})
}
return RunChecks(checks, os.Stderr)
}
func RunJoinNodeChecks(cfg *kubeadmapi.NodeConfiguration) error {
checks := []Checker{
SystemVerificationCheck{},
IsRootCheck{},
HostnameCheck{},
ServiceCheck{Service: "kubelet", CheckIfActive: false},
ServiceCheck{Service: "docker", CheckIfActive: true},
PortOpenCheck{port: 10250},
DirAvailableCheck{Path: filepath.Join(kubeadmapi.GlobalEnvParams.KubernetesDir, "manifests")},
DirAvailableCheck{Path: "/var/lib/kubelet"},
FileAvailableCheck{Path: filepath.Join(kubeadmapi.GlobalEnvParams.HostPKIPath, kubeadmconstants.CACertName)},
FileAvailableCheck{Path: filepath.Join(kubeadmapi.GlobalEnvParams.KubernetesDir, kubeadmconstants.KubeletKubeConfigFileName)},
FileContentCheck{Path: bridgenf, Content: []byte{'1'}},
InPathCheck{executable: "ip", mandatory: true},
InPathCheck{executable: "iptables", mandatory: true},
InPathCheck{executable: "mount", mandatory: true},
InPathCheck{executable: "nsenter", mandatory: true},
InPathCheck{executable: "ebtables", mandatory: false},
InPathCheck{executable: "ethtool", mandatory: false},
InPathCheck{executable: "socat", mandatory: false},
InPathCheck{executable: "tc", mandatory: false},
InPathCheck{executable: "touch", mandatory: false},
}
return RunChecks(checks, os.Stderr)
}
func RunRootCheckOnly() error {
checks := []Checker{
IsRootCheck{},
}
return RunChecks(checks, os.Stderr)
}
// RunChecks runs each check, displays it's warnings/errors, and once all
// are processed will exit if any errors occurred.
func RunChecks(checks []Checker, ww io.Writer) error {
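	// Run every check, stream its warnings to ww as they appear, and collect
	// all errors so they can be reported together once every check has run.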
found := []error{}
for _, c := range checks {
warnings, errs := c.Check()
for _, w := range warnings {
io.WriteString(ww, fmt.Sprintf("[preflight] WARNING: %s\n", w))
}
found = append(found, errs...)
}
if len(found) > 0 {
var errs bytes.Buffer
for _, i := range found {
errs.WriteString("\t" + i.Error() + "\n")
}
return &Error{Msg: errs.String()}
}
return nil
}
func TryStartKubelet() {
// If we notice that the kubelet service is inactive, try to start it
initSystem, err := initsystem.GetInitSystem()
if err != nil {
fmt.Println("[preflight] No supported init system detected, won't ensure kubelet is running.")
} else if initSystem.ServiceExists("kubelet") && !initSystem.ServiceIsActive("kubelet") {
fmt.Println("[preflight] Starting the kubelet service")
if err := initSystem.ServiceStart("kubelet"); err != nil {
fmt.Printf("[preflight] WARNING: Unable to start the kubelet service: [%v]\n", err)
fmt.Println("[preflight] WARNING: Please ensure kubelet is running manually.")
}
}
}<|fim▁end|> | }
type SystemVerificationCheck struct{}
|
<|file_name|>searchControllerSpec.js<|end_file_name|><|fim▁begin|>'use strict';
describe('test search controller', function() {
beforeEach(module('mapTweetInfoApp'));<|fim▁hole|> beforeEach(module('latLngServices'));
describe('searchCtrl', function(){
var scope, ctrl, $httpBackend, $browser, $location;
beforeEach(inject(function(_$httpBackend_, $rootScope, $controller) {
// $httpBackend = _$httpBackend_;
// $httpBackend.expectGET('phones/phones.json').
// respond([{name: 'Nexus S'}, {name: 'Motorola DROID'}]);
scope = $rootScope.$new();
$location = scope.$service('$location');
$browser = scope.$service('$browser');
ctrl = $controller('searchCtrl', {$scope: scope});
}));
it('should have default variables set', function() {
// expect(scope.phones).toEqualData([]);
// $httpBackend.flush();
expect(scope.counts).toEqualData(
[{label: '25 Tweets', value: '25'},{label: '50 Tweets', value: '50'},{label: '75 Tweets', value: '75'},{label: '150 Tweets', value: '150'}]
);
});
// it('should set the default value of orderProp model', function() {
// expect(scope.orderProp).toBe('age');
// });
});
});<|fim▁end|> | beforeEach(module('twitterSearchServices')); |
<|file_name|>level.ts<|end_file_name|><|fim▁begin|>class Level {
private game: Phaser.Game;
private myInput: Input;
public player: Player;
private data: LevelData;
private sprites: Phaser.Sprite[];
private orderedFadingRenderGroup: Phaser.Group;
private floorbodies: Phaser.Group;
private isCurrentlyRotating: boolean = false;
private TOTAL_FRAMES_TO_CHANGE_STATE: number = 10;
private DEGREES_PER_STEP: number = 90 / this.TOTAL_FRAMES_TO_CHANGE_STATE;
private RADIANS_PER_STEP: number = Math.PI / 2 / this.TOTAL_FRAMES_TO_CHANGE_STATE;
private FRAMES_TO_CHANGE_STATE: number = this.TOTAL_FRAMES_TO_CHANGE_STATE;
private FADE_AMOUNT: number = 0.05;
private MAX_FADE_AMOUNT: number = 0.3;
constructor(game: Phaser.Game, data: LevelData){
this.game = game;
this.data = data;
this.myInput = new Input(this.game);
this.orderedFadingRenderGroup = this.game.add.group();
this.player = new Player(this.game, this.myInput);
this.player.body.x = 200;
this.player.body.y = 175;
this.orderedFadingRenderGroup.add(this.player.sprite);
this.game.camera.follow(this.player.sprite);
this.sprites = [];
this.data.floorplan.floor.forEach((s)=>{
s.sprite = this.game.add.sprite(s.x, s.y, s.img);
s.sprite.width = s.width;
s.sprite.height = s.height;
s.sprite.rotation = Phaser.Math.degToRad(s.rotation);
this.sprites.push(s.sprite);
this.game.world.sendToBack(s.sprite);
});
this.data.getScenery().forEach((s)=>{
s.sprite = this.game.add.sprite(s.x, s.y, s.img, 0, this.orderedFadingRenderGroup);
s.sprite.width = s.width;
s.sprite.height = s.height;
s.sprite.rotation = Phaser.Math.degToRad(s.rotation);
s.sprite.alpha = 0;
this.sprites.push(s.sprite);
});
this.floorbodies = this.game.add.physicsGroup(Phaser.Physics.P2JS);
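        // Turn each edge between consecutive outline points into a thin static
        // P2 rectangle so the floor outline behaves as solid walls.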
this.data.floorplan.outline.forEach((lastPoint, index)=>{
var curPoint = (index + 1 === this.data.floorplan.outline.length ? this.data.floorplan.outline[0] : this.data.floorplan.outline[index + 1]);
var distance = Phaser.Math.distance(lastPoint.x, lastPoint.y, curPoint.x, curPoint.y);
var zeroedCurPoint = curPoint.clone().subtract(lastPoint.x, lastPoint.y);
var midPoint = zeroedCurPoint.clone().multiply(0.5, 0.5).add(lastPoint.x, lastPoint.y);
var dirNorm = zeroedCurPoint.clone().normalize();
var rotation = Phaser.Math.radToDeg(Phaser.Math.angleBetween(0, 0, dirNorm.x, dirNorm.y));
var body = this.floorbodies.create(midPoint.x, midPoint.y);
body.body.setRectangle(distance, 10, 0, 0);
body.body.angle = rotation;
//body.body.debug = true;
body.body.static = true;
});
}
changeFacing(){
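        // Rotate every sprite, the player body and the wall bodies one
        // DEGREES_PER_STEP increment around the screen centre; after
        // TOTAL_FRAMES_TO_CHANGE_STATE frames the world has turned 90 degrees.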
var center = new Phaser.Point(this.game.width/2, this.game.height/2);
this.sprites.forEach((sprite)=>{
var p = new Phaser.Point(sprite.x, sprite.y);
p.rotate(center.x, center.y, this.DEGREES_PER_STEP, true);
sprite.x = p.x;
sprite.y = p.y;
sprite.rotation += this.RADIANS_PER_STEP;
});<|fim▁hole|>
var p = new Phaser.Point(this.player.body.x, this.player.body.y);
p.rotate(center.x, center.y, this.DEGREES_PER_STEP, true);
this.player.body.x = p.x;
this.player.body.y = p.y;
this.floorbodies.forEach((body:any)=>{
var p = new Phaser.Point(body.body.x, body.body.y);
p.rotate(center.x, center.y, this.DEGREES_PER_STEP, true);
console.log("Rotate body: " + Object.keys(body));
body.body.x = p.x;
body.body.y = p.y;
body.body.rotation += this.RADIANS_PER_STEP;
}, this);
this.FRAMES_TO_CHANGE_STATE -= 1;
if (this.FRAMES_TO_CHANGE_STATE <= 0){
this.FRAMES_TO_CHANGE_STATE = this.TOTAL_FRAMES_TO_CHANGE_STATE;
this.isCurrentlyRotating = false;
}
}
update(){
this.myInput.update();
this.player.update();
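        // Fade out scenery that overlaps the player and sits visually in front
        // of them (its base is below the player's feet), and fade scenery that
        // belongs to the other facing directions out completely.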
var playerBottomEdge = this.player.sprite.y + (this.player.sprite.height * (1 - this.player.sprite.anchor.y));
this.data.getCurrentDirection().scenery.forEach((s)=>{
var sBottomEdge = s.sprite.y + (s.sprite.height * (1 - s.sprite.anchor.y));
if (playerBottomEdge < sBottomEdge && this.player.sprite.overlap(s.sprite)){
s.sprite.alpha -= this.FADE_AMOUNT;
if (s.sprite.alpha <= this.MAX_FADE_AMOUNT) s.sprite.alpha = this.MAX_FADE_AMOUNT;
} else {
s.sprite.alpha += this.FADE_AMOUNT;
if (s.sprite.alpha >= 1) s.sprite.alpha = 1;
}
});
this.data.getOtherDirections().forEach((dir)=>{dir.scenery.forEach((s)=>{
s.sprite.alpha -= this.FADE_AMOUNT;
if (s.sprite.alpha < 0) s.sprite.alpha = 0;
})});
this.orderedFadingRenderGroup.customSort((a, b)=>{
var aBottomEdge = a.y + (a.height * (1 - a.anchor.y));
var bBottomEdge = b.y + (b.height * (1 - b.anchor.y));
return (aBottomEdge < bBottomEdge ? -1 : 1);
});
if (!this.isCurrentlyRotating && this.myInput.isJustDown(InputType.ACTION)) {
this.isCurrentlyRotating = true;
this.data.rotateRight();
}
if (this.isCurrentlyRotating){
this.changeFacing();
}
}
}<|fim▁end|> | |
<|file_name|>bson_test.go<|end_file_name|><|fim▁begin|>// Copyright 2012, Google Inc. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package bson
import (
"bytes"
"testing"
"time"
"github.com/youtube/vitess/go/bytes2"
)
func TestVariety(t *testing.T) {
in := map[string]string{"Val": "test"}
encoded := VerifyMarshal(t, in)
expected := []byte("\x13\x00\x00\x00\x05Val\x00\x04\x00\x00\x00\x00test\x00")
compare(t, encoded, expected)
out := make(map[string]interface{})
err := Unmarshal(encoded, &out)
if in["Val"] != string(out["Val"].([]byte)) {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%v\n", err, in, out)
}
var out1 string
err = Unmarshal(encoded, &out1)
if out1 != "test" {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%v\n", err, in, out1)
}
var out2 interface{}
err = Unmarshal(encoded, &out2)
if string(out2.(map[string]interface{})["Val"].([]byte)) != "test" {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%v\n", err, in, out2)
}
type mystruct struct {
Val string
}
var out3 mystruct
err = Unmarshal(encoded, &out3)
if out3.Val != "test" {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%v\n", err, in, out3)
}
}
type alltypes struct {
Bytes []byte
Float64 float64
String string
Bool bool
Time time.Time
Int32 int32
Int int
Int64 int64
Uint64 uint64
Strings []string
Nil interface{}
}
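// UnmarshalBson decodes the document field by field, verifying the wire kind
// of each known key and panicking on unrecognized tags.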
func (a *alltypes) UnmarshalBson(buf *bytes.Buffer) {
Next(buf, 4)
kind := NextByte(buf)
for kind != EOO {
key := ReadCString(buf)
switch key {
case "Bytes":
verifyKind("Bytes", Binary, kind)
a.Bytes = DecodeBytes(buf, kind)
case "Float64":
verifyKind("Float64", Number, kind)
a.Float64 = DecodeFloat64(buf, kind)
case "String":
verifyKind("String", Binary, kind)
a.String = DecodeString(buf, kind)
case "Bool":
verifyKind("Bool", Boolean, kind)
a.Bool = DecodeBool(buf, kind)
case "Time":
verifyKind("Time", Datetime, kind)
a.Time = DecodeTime(buf, kind)
case "Int32":
verifyKind("Int32", Int, kind)
a.Int32 = DecodeInt32(buf, kind)
case "Int":
verifyKind("Int", Long, kind)
a.Int = DecodeInt(buf, kind)
case "Int64":
verifyKind("Int64", Long, kind)
a.Int64 = DecodeInt64(buf, kind)
case "Uint64":
verifyKind("Uint64", Ulong, kind)
a.Uint64 = DecodeUint64(buf, kind)
case "Strings":
verifyKind("Strings", Array, kind)
a.Strings = DecodeStringArray(buf, kind)
case "Nil":
verifyKind("Nil", Null, kind)
default:
panic(NewBsonError("Unrecognized tag %s", key))
}
kind = NextByte(buf)
}
}
func verifyKind(tag string, expecting, actual byte) {
if expecting != actual {
panic(NewBsonError("Decode %s: expecting %v, actual %v", tag, expecting, actual))
}
}
// TestCustom tests custom unmarshalling
func TestCustom(t *testing.T) {
a := alltypes{
Bytes: []byte("bytes"),<|fim▁hole|> Time: time.Unix(1136243045, 0),
Int32: int32(-0x80000000),
Int: int(-0x80000000),
Int64: int64(-0x8000000000000000),
Uint64: uint64(0xFFFFFFFFFFFFFFFF),
Strings: []string{"a", "b"},
Nil: nil,
}
encoded := VerifyMarshal(t, a)
var out alltypes
err := Unmarshal(encoded, &out)
if err != nil {
t.Fatalf("unmarshal fail: %v\n", err)
}
if string(out.Bytes) != "bytes" {
t.Errorf("bytes fail: %s", out.Bytes)
}
if out.Float64 != 64 {
t.Error("float fail: %v", out.Float64)
}
if out.String != "string" {
t.Errorf("string fail: %v", out.String)
}
if !out.Bool {
t.Errorf("bool fail: %v", out.Bool)
}
if out.Time.Unix() != 1136243045 {
t.Errorf("time fail: %v", out.Time)
}
if out.Int32 != -0x80000000 {
t.Errorf("int32 fail: %v", out.Int32)
}
if out.Int != -0x80000000 {
t.Errorf("int fail: %v", out.Int)
}
if out.Int64 != -0x8000000000000000 {
t.Errorf("int64 fail: %v", out.Int64)
}
if out.Uint64 != 0xFFFFFFFFFFFFFFFF {
t.Errorf("uint64 fail: %v", out.Uint64)
}
if out.Strings[0] != "a" || out.Strings[1] != "b" {
t.Errorf("strings fail: %v", out.Strings)
}
b := alltypes{Bytes: []byte(""), Strings: []string{"a"}}
encoded = VerifyMarshal(t, b)
var outb alltypes
err = Unmarshal(encoded, &outb)
if err != nil {
t.Fatalf("unmarshal fail: %v\n", err)
}
if outb.Bytes == nil || len(outb.Bytes) != 0 {
t.Errorf("nil bytes fail: %s", outb.Bytes)
}
}
func TestTypes(t *testing.T) {
in := make(map[string]interface{})
in["bytes"] = []byte("bytes")
in["float64"] = float64(64)
in["string"] = "string"
in["bool"] = true
in["time"] = time.Unix(1136243045, 0)
in["int32"] = int32(-0x80000000)
in["int"] = int(-0x80000000)
in["int64"] = int64(-0x8000000000000000)
in["uint"] = uint(0xFFFFFFFF)
in["uint32"] = uint32(0xFFFFFFFF)
in["uint64"] = uint64(0xFFFFFFFFFFFFFFFF)
in["slice"] = []interface{}{1, nil}
in["nil"] = nil
encoded := VerifyMarshal(t, in)
out := make(map[string]interface{})
err := Unmarshal(encoded, &out)
if err != nil {
t.Fatalf("unmarshal fail: %v\n", err)
}
if string(in["bytes"].([]byte)) != "bytes" {
t.Errorf("bytes fail")
}
if out["float64"].(float64) != float64(64) {
t.Errorf("float fail")
}
if string(out["string"].([]byte)) != "string" {
t.Errorf("string fail")
}
if out["bool"].(bool) == false {
t.Errorf("bool fail")
}
tm, ok := out["time"].(time.Time)
if !ok {
t.Errorf("time type failed")
}
if tm.Unix() != 1136243045 {
t.Error("time failed")
}
if v := out["int32"].(int32); v != int32(-0x80000000) {
t.Errorf("int32 fail: %v", v)
}
if v := out["int"].(int64); v != int64(-0x80000000) {
t.Errorf("int fail: %v", v)
}
if v := out["int64"].(int64); v != int64(-0x8000000000000000) {
t.Errorf("int64 fail: %v", v)
}
if v := out["uint"].(uint64); v != uint64(0xFFFFFFFF) {
t.Errorf("uint fail: %v", v)
}
if v := out["uint32"].(uint64); v != uint64(0xFFFFFFFF) {
t.Errorf("uint32 fail: %v", v)
}
if v := out["uint64"].(uint64); v != uint64(0xFFFFFFFFFFFFFFFF) {
t.Errorf("uint64 fail: %v", v)
}
if v := out["slice"].([]interface{})[0].(int64); v != 1 {
t.Errorf("slice fail: %v", v)
}
if v := out["slice"].([]interface{})[1]; v != nil {
t.Errorf("slice fail: %v", v)
}
if nilval, ok := out["nil"]; !ok || nilval != nil {
t.Errorf("nil fail")
}
}
func TestBinary(t *testing.T) {
in := map[string][]byte{"Val": []byte("test")}
encoded := VerifyMarshal(t, in)
expected := []byte("\x13\x00\x00\x00\x05Val\x00\x04\x00\x00\x00\x00test\x00")
compare(t, encoded, expected)
out := make(map[string]interface{})
err := Unmarshal(encoded, &out)
if string(out["Val"].([]byte)) != "test" {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%v\n", err, in, out)
}
var out1 []byte
err = Unmarshal(encoded, &out1)
if string(out1) != "test" {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%v\n", err, in, out1)
}
}
func TestInt(t *testing.T) {
in := map[string]int{"Val": 20}
encoded := VerifyMarshal(t, in)
expected := []byte("\x12\x00\x00\x00\x12Val\x00\x14\x00\x00\x00\x00\x00\x00\x00\x00")
compare(t, encoded, expected)
out := make(map[string]interface{})
err := Unmarshal(encoded, &out)
if out["Val"].(int64) != 20 {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%v\n", err, in, out)
}
var out1 int
err = Unmarshal(encoded, &out1)
if out1 != 20 {
t.Errorf("unmarshal doesn't match input: %v\n%v\n%vn", err, in, out1)
}
}
// test that we are calling the right encoding method
// if we use the reflection code, this will fail as reflection
// cannot access the non-exported field
type PrivateStruct struct {
veryPrivate uint64
}
// an array can use non-pointers for custom marshaler
type PrivateStructList struct {
List []PrivateStruct
}
// the map has to be using pointers, so the custom marshaler is used
type PrivateStructMap struct {
Map map[string]*PrivateStruct
}
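// MarshalBson hand-encodes the unexported field, so encoding succeeds only
// when this custom marshaler is invoked instead of reflection.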
func (ps *PrivateStruct) MarshalBson(buf *bytes2.ChunkedWriter) {
lenWriter := NewLenWriter(buf)
EncodePrefix(buf, Long, "Type")
EncodeUint64(buf, ps.veryPrivate)
buf.WriteByte(0)
lenWriter.RecordLen()
}
func TestCustomMarshaler(t *testing.T) {
s := &PrivateStruct{1}
_, err := Marshal(s)
if err != nil {
t.Errorf("Marshal error 1: %s\n", err)
}
sl := &PrivateStructList{make([]PrivateStruct, 1)}
sl.List[0] = *s
_, err = Marshal(sl)
if err != nil {
t.Errorf("Marshal error 2: %s\n", err)
}
sm := &PrivateStructMap{make(map[string]*PrivateStruct)}
sm.Map["first"] = s
_, err = Marshal(sm)
if err != nil {
t.Errorf("Marshal error 3: %s\n", err)
}
}
func VerifyMarshal(t *testing.T, Val interface{}) []byte {
encoded, err := Marshal(Val)
if err != nil {
t.Errorf("marshal2 error: %s\n", err)
}
return encoded
}
type HasPrivate struct {
private string
Public string
}
func TestIgnorePrivateFields(t *testing.T) {
v := HasPrivate{private: "private", Public: "public"}
marshaled := VerifyMarshal(t, v)
unmarshaled := new(HasPrivate)
Unmarshal(marshaled, unmarshaled)
	if unmarshaled.Public != "public" || unmarshaled.private != "" {
t.Errorf("private fields were not ignored: %#v", unmarshaled)
}
}
func compare(t *testing.T, encoded []byte, expected []byte) {
if len(encoded) != len(expected) {
t.Errorf("encoding mismatch:\n%#v\n%#v\n", string(encoded), string(expected))
} else {
for i := range encoded {
if encoded[i] != expected[i] {
t.Errorf("encoding mismatch:\n%#v\n%#v\n", string(encoded), string(expected))
break
}
}
}
}
var testMap map[string]interface{}
var testBlob []byte
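// init builds a representative document and its encoded blob for the benchmarks below.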
func init() {
in := make(map[string]interface{})
in["bytes"] = []byte("bytes")
in["float64"] = float64(64)
in["string"] = "string"
in["bool"] = true
in["time"] = time.Unix(1136243045, 0)
in["int32"] = int32(-0x80000000)
in["int"] = int(-0x80000000)
in["int64"] = int64(-0x8000000000000000)
in["uint"] = uint(0xFFFFFFFF)
in["uint32"] = uint32(0xFFFFFFFF)
in["uint64"] = uint64(0xFFFFFFFFFFFFFFFF)
in["slice"] = []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, nil}
in["nil"] = nil
testMap = in
testBlob, _ = Marshal(testMap)
}
func BenchmarkMarshal(b *testing.B) {
for i := 0; i < b.N; i++ {
_, err := Marshal(testMap)
if err != nil {
panic(err)
}
}
}
func BenchmarkUnmarshal(b *testing.B) {
for i := 0; i < b.N; i++ {
err := Unmarshal(testBlob, map[string]interface{}{})
if err != nil {
panic(err)
}
}
}<|fim▁end|> | Float64: float64(64),
String: "string",
Bool: true, |
<|file_name|>buildings.py<|end_file_name|><|fim▁begin|>import blinker
from concurrency.fields import IntegerVersionField
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from game.apps.core.models.planet.models import Planet
from game.utils.models import ResourceContainer
from game.utils.polymorph import PolymorphicBase
from jsonfield import JSONField
import game.apps.core.signals
class Building(PolymorphicBase):
<|fim▁hole|> version = IntegerVersionField()
user = models.ForeignKey(User, related_name="buildings")
def save(self, *args, **kwargs):
signal = blinker.signal(game.apps.core.signals.building % self.id)
signal.send(self, building=self)
super().save(*args, **kwargs)
class Meta:
app_label = 'core'
ordering = ('id', )
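# Citadel produces raw resources each turn and deposits them in the owner's Warehouse.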
class Citadel(Building):
class Meta:
proxy = True
def process_turn(self):
        warehouse = self.user.buildings.get(type='Warehouse')
warehouse.add_resource("Aluminium", 10)
warehouse.add_resource("Steel", 10)
warehouse.save()
class Warehouse(Building, ResourceContainer):
class Meta:
proxy = True
class Terminal(Building):
class Meta:
proxy = True
class Mine(Building):
class Meta:
proxy = True
#TODO use Django ready()
@receiver(post_save, sender=User, dispatch_uid="create_default_buildings")
def create_default_buildings(sender, **kwargs):
if kwargs['created']:
Citadel.objects.create(user=kwargs['instance'], planet_id=1) # TODO don't hard-code planet id
Warehouse.objects.create(user=kwargs['instance'], planet_id=1) # TODO don't hard-code planet id
def get_base(self):
#TODO cache
return self.buildings.get(type="Base")
User.base = property(get_base)<|fim▁end|> | level = models.IntegerField(default=1)
data = JSONField(default={})
planet = models.ForeignKey(Planet, related_name="buildings")
|
<|file_name|>ping.go<|end_file_name|><|fim▁begin|>// SPDX-License-Identifier: Apache-2.0
// Copyright 2017 Authors of Cilium
package cmd
import (
"fmt"
"github.com/spf13/cobra"
)
// pingCmd represents the ping command
var pingCmd = &cobra.Command{
Use: "ping",
Short: "Check whether the cilium-health API is up",
Run: func(cmd *cobra.Command, args []string) {
_, err := client.Restapi.GetHealthz(nil)<|fim▁hole|> Fatalf("Cannot ping: %s\n", err)
}
fmt.Println("OK")
},
}
func init() {
rootCmd.AddCommand(pingCmd)
}<|fim▁end|> | if err != nil { |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>SECRET_KEY = 'spam'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ROOT_URLCONF = 'tests.urls'
INSTALLED_APPS = ['tests']
DATABASES = {'default': {'NAME': 'db.sqlite',
'ENGINE': 'django.db.backends.sqlite3'}}
<|fim▁hole|># Django < 1.8
TEMPLATE_CONTEXT_PROCESSORS = [
'django_settings_export.settings_export'
]
# Django 1.8+
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django_settings_export.settings_export',
],
},
},
]
FOO = 'foo'
BAR = 'bar'
SETTINGS_EXPORT = [
'FOO',
'BAR',
]<|fim▁end|> | |
<|file_name|>unknown_payment_method.py<|end_file_name|><|fim▁begin|>import braintree
from braintree.resource import Resource
class UnknownPaymentMethod(Resource):<|fim▁hole|> def image_url(self):
return "https://assets.braintreegateway.com/payment_method_logo/unknown.png"<|fim▁end|> | |
<|file_name|>testjsbeautifier.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import unittest
import jsbeautifier
class TestJSBeautifier(unittest.TestCase):
def test_unescape(self):
# Test cases contributed by <chrisjshull on GitHub.com>
test_fragment = self.decodesto
bt = self.bt
bt('"\\\\s"'); # == "\\s" in the js source
bt("'\\\\s'"); # == '\\s' in the js source
bt("'\\\\\\s'"); # == '\\\s' in the js source
bt("'\\s'"); # == '\s' in the js source
bt('"•"');
bt('"—"');
bt('"\\x41\\x42\\x43\\x01"', '"\\x41\\x42\\x43\\x01"');
bt('"\\u2022"', '"\\u2022"');
bt('a = /\s+/')
#bt('a = /\\x41/','a = /A/')
bt('"\\u2022";a = /\s+/;"\\x41\\x42\\x43\\x01".match(/\\x41/);','"\\u2022";\na = /\s+/;\n"\\x41\\x42\\x43\\x01".match(/\\x41/);')
bt('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"', '"\\x22\\x27", \'\\x22\\x27\', "\\x5c", \'\\x5c\', "\\xff and \\xzz", "unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"');
self.options.unescape_strings = True
bt('"\\x41\\x42\\x43\\x01"', '"ABC\\x01"');
bt('"\\u2022"', '"\\u2022"');
bt('a = /\s+/')
bt('"\\u2022";a = /\s+/;"\\x41\\x42\\x43\\x01".match(/\\x41/);','"\\u2022";\na = /\s+/;\n"ABC\\x01".match(/\\x41/);')
bt('"\\x22\\x27",\'\\x22\\x27\',"\\x5c",\'\\x5c\',"\\xff and \\xzz","unicode \\u0000 \\u0022 \\u0027 \\u005c \\uffff \\uzzzz"', '"\\"\'", \'"\\\'\', "\\\\", \'\\\\\', "\\xff and \\xzz", "unicode \\u0000 \\" \' \\\\ \\uffff \\uzzzz"');
self.options.unescape_strings = False
def test_beautifier(self):
test_fragment = self.decodesto
bt = self.bt
bt('');
bt('return .5');
test_fragment(' return .5');
bt('a = 1', 'a = 1');
bt('a=1', 'a = 1');
bt("a();\n\nb();", "a();\n\nb();");
bt('var a = 1 var b = 2', "var a = 1\nvar b = 2");
bt('var a=1, b=c[d], e=6;', 'var a = 1,\n b = c[d],\n e = 6;');
bt('a = " 12345 "');
bt("a = ' 12345 '");
bt('if (a == 1) b = 2;', "if (a == 1) b = 2;");
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}");
bt('if(1||2);', 'if (1 || 2);');
bt('(a==1)||(b==2)', '(a == 1) || (b == 2)');
bt('var a = 1 if (2) 3;', "var a = 1\nif (2) 3;");
bt('a = a + 1');
bt('a = a == 1');
bt('/12345[^678]*9+/.match(a)');
bt('a /= 5');
bt('a = 0.5 * 3');
bt('a *= 10.55');
bt('a < .5');
bt('a <= .5');
bt('a<.5', 'a < .5');
bt('a<=.5', 'a <= .5');
bt('a = 0xff;');
bt('a=0xff+4', 'a = 0xff + 4');
bt('a = [1, 2, 3, 4]');
bt('F*(g/=f)*g+b', 'F * (g /= f) * g + b');
bt('a.b({c:d})', "a.b({\n c: d\n})");
bt('a.b\n(\n{\nc:\nd\n}\n)', "a.b({\n c: d\n})");
bt('a=!b', 'a = !b');
bt('a?b:c', 'a ? b : c');
bt('a?1:2', 'a ? 1 : 2');
bt('a?(b):c', 'a ? (b) : c');
bt('x={a:1,b:w=="foo"?x:y,c:z}', 'x = {\n a: 1,\n b: w == "foo" ? x : y,\n c: z\n}');
bt('x=a?b?c?d:e:f:g;', 'x = a ? b ? c ? d : e : f : g;');
bt('x=a?b?c?d:{e1:1,e2:2}:f:g;', 'x = a ? b ? c ? d : {\n e1: 1,\n e2: 2\n} : f : g;');
bt('function void(void) {}');
bt('if(!a)foo();', 'if (!a) foo();');
bt('a=~a', 'a = ~a');
bt('a;/*comment*/b;', "a; /*comment*/\nb;");
bt('a;/* comment */b;', "a; /* comment */\nb;");
test_fragment('a;/*\ncomment\n*/b;', "a;\n/*\ncomment\n*/\nb;"); # simple comments don't get touched at all
bt('a;/**\n* javadoc\n*/b;', "a;\n/**\n * javadoc\n */\nb;");
test_fragment('a;/**\n\nno javadoc\n*/b;', "a;\n/**\n\nno javadoc\n*/\nb;");
bt('a;/*\n* javadoc\n*/b;', "a;\n/*\n * javadoc\n */\nb;"); # comment blocks detected and reindented even w/o javadoc starter
bt('if(a)break;', "if (a) break;");
bt('if(a){break}', "if (a) {\n break\n}");
bt('if((a))foo();', 'if ((a)) foo();');
bt('for(var i=0;;) a', 'for (var i = 0;;) a');
bt('for(var i=0;;)\na', 'for (var i = 0;;)\n a');
bt('a++;', 'a++;');
bt('for(;;i++)a()', 'for (;; i++) a()');
bt('for(;;i++)\na()', 'for (;; i++)\n a()');
bt('for(;;++i)a', 'for (;; ++i) a');
bt('return(1)', 'return (1)');
bt('try{a();}catch(b){c();}finally{d();}', "try {\n a();\n} catch (b) {\n c();\n} finally {\n d();\n}");
bt('(xx)()'); # magic function call
bt('a[1]()'); # another magic function call
bt('if(a){b();}else if(c) foo();', "if (a) {\n b();\n} else if (c) foo();");
bt('switch(x) {case 0: case 1: a(); break; default: break}', "switch (x) {\n case 0:\n case 1:\n a();\n break;\n default:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}', 'switch (x) {\n case -1:\n break;\n case !y:\n break;\n}');
bt('a !== b');
bt('if (a) b(); else c();', "if (a) b();\nelse c();");
bt("// comment\n(function something() {})"); # typical greasemonkey start
bt("{\n\n x();\n\n}"); # was: duplicating newlines
bt('if (a in b) foo();');
bt('var a, b;');
# bt('var a, b');
bt('{a:1, b:2}', "{\n a: 1,\n b: 2\n}");
bt('a={1:[-1],2:[+1]}', 'a = {\n 1: [-1],\n 2: [+1]\n}');
bt('var l = {\'a\':\'1\', \'b\':\'2\'}', "var l = {\n 'a': '1',\n 'b': '2'\n}");
bt('if (template.user[n] in bk) foo();');
bt('{{}/z/}', "{\n {}\n /z/\n}");
bt('return 45', "return 45");
bt('If[1]', "If[1]");
bt('Then[1]', "Then[1]");
bt('a = 1e10', "a = 1e10");
bt('a = 1.3e10', "a = 1.3e10");
bt('a = 1.3e-10', "a = 1.3e-10");
bt('a = -1.3e-10', "a = -1.3e-10");
bt('a = 1e-10', "a = 1e-10");
bt('a = e - 10', "a = e - 10");
bt('a = 11-10', "a = 11 - 10");
bt("a = 1;// comment", "a = 1; // comment");
bt("a = 1; // comment", "a = 1; // comment");
bt("a = 1;\n // comment", "a = 1;\n// comment");
bt('a = [-1, -1, -1]');
# The exact formatting these should have is open for discussion, but they are at least reasonable
bt('a = [ // comment\n -1, -1, -1\n]');
bt('var a = [ // comment\n -1, -1, -1\n]');
bt('a = [ // comment\n -1, // comment\n -1, -1\n]');
bt('var a = [ // comment\n -1, // comment\n -1, -1\n]');
bt('o = [{a:b},{c:d}]', 'o = [{\n a: b\n}, {\n c: d\n}]');
bt("if (a) {\n do();\n}"); # was: extra space appended
bt("if (a) {\n// comment\n}else{\n// comment\n}", "if (a) {\n // comment\n} else {\n // comment\n}"); # if/else statement with empty body
bt("if (a) {\n// comment\n// comment\n}", "if (a) {\n // comment\n // comment\n}"); # multiple comments indentation
bt("if (a) b() else c();", "if (a) b()\nelse c();");
bt("if (a) b() else if c() d();", "if (a) b()\nelse if c() d();");
bt("{}");
bt("{\n\n}");
bt("do { a(); } while ( 1 );", "do {\n a();\n} while (1);");
bt("do {} while (1);");
bt("do {\n} while (1);", "do {} while (1);");
bt("do {\n\n} while (1);");
bt("var a = x(a, b, c)");
bt("delete x if (a) b();", "delete x\nif (a) b();");
bt("delete x[x] if (a) b();", "delete x[x]\nif (a) b();");
bt("for(var a=1,b=2)d", "for (var a = 1, b = 2) d");
bt("for(var a=1,b=2,c=3) d", "for (var a = 1, b = 2, c = 3) d");
bt("for(var a=1,b=2,c=3;d<3;d++)\ne", "for (var a = 1, b = 2, c = 3; d < 3; d++)\n e");
bt("function x(){(a||b).c()}", "function x() {\n (a || b).c()\n}");
bt("function x(){return - 1}", "function x() {\n return -1\n}");
bt("function x(){return ! a}", "function x() {\n return !a\n}");
# a common snippet in jQuery plugins
bt("settings = $.extend({},defaults,settings);", "settings = $.extend({}, defaults, settings);");
bt('{xxx;}()', '{\n xxx;\n}()');
bt("a = 'a'\nb = 'b'");
bt("a = /reg/exp");
bt("a = /reg/");
bt('/abc/.test()');
bt('/abc/i.test()');
bt("{/abc/i.test()}", "{\n /abc/i.test()\n}");
bt('var x=(a)/a;', 'var x = (a) / a;');
bt('x != -1', 'x != -1');
bt('for (; s-->0;)t', 'for (; s-- > 0;) t');
bt('for (; s++>0;)u', 'for (; s++ > 0;) u');
bt('a = s++>s--;', 'a = s++ > s--;');
bt('a = s++>--s;', 'a = s++ > --s;');
bt('{x=#1=[]}', '{\n x = #1=[]\n}');
bt('{a:#1={}}', '{\n a: #1={}\n}');
bt('{a:#1#}', '{\n a: #1#\n}');
test_fragment('"incomplete-string');
test_fragment("'incomplete-string");
test_fragment('/incomplete-regex');
test_fragment('{a:1},{a:2}', '{\n a: 1\n}, {\n a: 2\n}');
test_fragment('var ary=[{a:1}, {a:2}];', 'var ary = [{\n a: 1\n}, {\n a: 2\n}];');
test_fragment('{a:#1', '{\n a: #1'); # incomplete
test_fragment('{a:#', '{\n a: #'); # incomplete
test_fragment('}}}', '}\n}\n}'); # incomplete
test_fragment('<!--\nvoid();\n// -->', '<!--\nvoid();\n// -->');
test_fragment('a=/regexp', 'a = /regexp'); # incomplete regexp
bt('{a:#1=[],b:#1#,c:#999999#}', '{\n a: #1=[],\n b: #1#,\n c: #999999#\n}');
bt("a = 1e+2");
bt("a = 1e-2");
bt("do{x()}while(a>1)", "do {\n x()\n} while (a > 1)");
bt("x(); /reg/exp.match(something)", "x();\n/reg/exp.match(something)");
test_fragment("something();(", "something();\n(");
test_fragment("#!she/bangs, she bangs\nf=1", "#!she/bangs, she bangs\n\nf = 1");
test_fragment("#!she/bangs, she bangs\n\nf=1", "#!she/bangs, she bangs\n\nf = 1");
test_fragment("#!she/bangs, she bangs\n\n/* comment */", "#!she/bangs, she bangs\n\n/* comment */");
test_fragment("#!she/bangs, she bangs\n\n\n/* comment */", "#!she/bangs, she bangs\n\n\n/* comment */");
test_fragment("#", "#");
test_fragment("#!", "#!");
bt("function namespace::something()");
test_fragment("<!--\nsomething();\n-->", "<!--\nsomething();\n-->");
test_fragment("<!--\nif(i<0){bla();}\n-->", "<!--\nif (i < 0) {\n bla();\n}\n-->");
bt('{foo();--bar;}', '{\n foo();\n --bar;\n}');
bt('{foo();++bar;}', '{\n foo();\n ++bar;\n}');
bt('{--bar;}', '{\n --bar;\n}');
bt('{++bar;}', '{\n ++bar;\n}');
# Handling of newlines around unary ++ and -- operators
bt('{foo\n++bar;}', '{\n foo\n ++bar;\n}');
bt('{foo++\nbar;}', '{\n foo++\n bar;\n}');
# This is invalid, but harder to guard against. Issue #203.
bt('{foo\n++\nbar;}', '{\n foo\n ++\n bar;\n}');
# regexps
bt('a(/abc\\/\\/def/);b()', "a(/abc\\/\\/def/);\nb()");
bt('a(/a[b\\[\\]c]d/);b()', "a(/a[b\\[\\]c]d/);\nb()");
test_fragment('a(/a[b\\[', "a(/a[b\\["); # incomplete char class
# allow unescaped / in char classes
bt('a(/[a/b]/);b()', "a(/[a/b]/);\nb()");
bt('a=[[1,2],[4,5],[7,8]]', "a = [\n [1, 2],\n [4, 5],\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n function() {},\n [7, 8]\n]");
bt('a=[[1,2],[4,5],function(){},[7,8]]',
"a = [\n [1, 2],\n [4, 5],\n function() {},\n [7, 8]\n]");
bt('a=[b,c,function(){},function(){},d]',
"a = [b, c,\n function() {},\n function() {},\n d\n]");
bt('a=[a[1],b[4],c[d[7]]]', "a = [a[1], b[4], c[d[7]]]");
bt('[1,2,[3,4,[5,6],7],8]', "[1, 2, [3, 4, [5, 6], 7], 8]");
bt('[[["1","2"],["3","4"]],[["5","6","7"],["8","9","0"]],[["1","2","3"],["4","5","6","7"],["8","9","0"]]]',
'[\n [\n ["1", "2"],\n ["3", "4"]\n ],\n [\n ["5", "6", "7"],\n ["8", "9", "0"]\n ],\n [\n ["1", "2", "3"],\n ["4", "5", "6", "7"],\n ["8", "9", "0"]\n ]\n]');
bt('{[x()[0]];indent;}', '{\n [x()[0]];\n indent;\n}');
bt('return ++i', 'return ++i');
bt('return !!x', 'return !!x');
bt('return !x', 'return !x');
bt('return [1,2]', 'return [1, 2]');
bt('return;', 'return;');
bt('return\nfunc', 'return\nfunc');
bt('catch(e)', 'catch (e)');
bt('var a=1,b={foo:2,bar:3},{baz:4,wham:5},c=4;',
'var a = 1,\n b = {\n foo: 2,\n bar: 3\n }, {\n baz: 4,\n wham: 5\n }, c = 4;');
bt('var a=1,b={foo:2,bar:3},{baz:4,wham:5},\nc=4;',
'var a = 1,\n b = {\n foo: 2,\n bar: 3\n }, {\n baz: 4,\n wham: 5\n },\n c = 4;');
# inline comment
bt('function x(/*int*/ start, /*string*/ foo)', 'function x( /*int*/ start, /*string*/ foo)');
# javadoc comment
bt('/**\n* foo\n*/', '/**\n * foo\n */');
bt('{\n/**\n* foo\n*/\n}', '{\n /**\n * foo\n */\n}');
bt('var a,b,c=1,d,e,f=2;', 'var a, b, c = 1,\n d, e, f = 2;');
bt('var a,b,c=[],d,e,f=2;', 'var a, b, c = [],\n d, e, f = 2;');
bt('function() {\n var a, b, c, d, e = [],\n f;\n}');
bt('do/regexp/;\nwhile(1);', 'do /regexp/;\nwhile (1);'); # hmmm
bt('var a = a,\na;\nb = {\nb\n}', 'var a = a,\n a;\nb = {\n b\n}');
bt('var a = a,\n /* c */\n b;');
bt('var a = a,\n // c\n b;');
bt('foo.("bar");'); # weird element referencing
bt('if (a) a()\nelse b()\nnewline()');
bt('if (a) a()\nnewline()');
bt('a=typeof(x)', 'a = typeof(x)');
bt('var a = function() {\n return null;\n},\n b = false;');
bt('var a = function() {\n func1()\n}');
bt('var a = function() {\n func1()\n}\nvar b = function() {\n func2()\n}');
self.options.jslint_happy = True
bt('x();\n\nfunction(){}', 'x();\n\nfunction () {}');
bt('function () {\n var a, b, c, d, e = [],\n f;\n}');
bt('switch(x) {case 0: case 1: a(); break; default: break}',
"switch (x) {\ncase 0:\ncase 1:\n a();\n break;\ndefault:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}',
'switch (x) {\ncase -1:\n break;\ncase !y:\n break;\n}');
test_fragment("// comment 1\n(function()", "// comment 1\n(function ()"); # typical greasemonkey start
bt('var o1=$.extend(a);function(){alert(x);}', 'var o1 = $.extend(a);\n\nfunction () {\n alert(x);\n}');
bt('a=typeof(x)', 'a = typeof (x)');
self.options.jslint_happy = False
bt('switch(x) {case 0: case 1: a(); break; default: break}',
"switch (x) {\n case 0:\n case 1:\n a();\n break;\n default:\n break\n}");
bt('switch(x){case -1:break;case !y:break;}',
'switch (x) {\n case -1:\n break;\n case !y:\n break;\n}');
test_fragment("// comment 2\n(function()", "// comment 2\n(function()"); # typical greasemonkey start
bt("var a2, b2, c2, d2 = 0, c = function() {}, d = '';", "var a2, b2, c2, d2 = 0,\n c = function() {}, d = '';");
bt("var a2, b2, c2, d2 = 0, c = function() {},\nd = '';", "var a2, b2, c2, d2 = 0,\n c = function() {},\n d = '';");
bt('var o2=$.extend(a);function(){alert(x);}', 'var o2 = $.extend(a);\n\nfunction() {\n alert(x);\n}');
bt('{"x":[{"a":1,"b":3},7,8,8,8,8,{"b":99},{"a":11}]}', '{\n "x": [{\n "a": 1,\n "b": 3\n },\n 7, 8, 8, 8, 8, {\n "b": 99\n }, {\n "a": 11\n }\n ]\n}');
bt('{"1":{"1a":"1b"},"2"}', '{\n "1": {\n "1a": "1b"\n },\n "2"\n}');
bt('{a:{a:b},c}', '{\n a: {\n a: b\n },\n c\n}');
bt('{[y[a]];keep_indent;}', '{\n [y[a]];\n keep_indent;\n}');
bt('if (x) {y} else { if (x) {y}}', 'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}');
bt('if (foo) one()\ntwo()\nthree()');
bt('if (1 + foo() && bar(baz()) / 2) one()\ntwo()\nthree()');
bt('if (1 + foo() && bar(baz()) / 2) one();\ntwo();\nthree();');
self.options.indent_size = 1;
self.options.indent_char = ' ';
bt('{ one_char() }', "{\n one_char()\n}");
bt('var a,b=1,c=2', 'var a, b = 1,\n c = 2');
self.options.indent_size = 4;
self.options.indent_char = ' ';
bt('{ one_char() }', "{\n one_char()\n}");
self.options.indent_size = 1;
self.options.indent_char = "\t";
bt('{ one_char() }', "{\n\tone_char()\n}");
bt('x = a ? b : c; x;', 'x = a ? b : c;\nx;');
self.options.indent_size = 4;
self.options.indent_char = ' ';
self.options.preserve_newlines = False;
bt('var\na=dont_preserve_newlines;', 'var a = dont_preserve_newlines;');
# make sure the blank line between function definitions stays
# even when preserve_newlines = False
bt('function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}');
bt('function foo() {\n return 1;\n}\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
);
bt('function foo() {\n return 1;\n}\n\n\nfunction foo() {\n return 1;\n}',
'function foo() {\n return 1;\n}\n\nfunction foo() {\n return 1;\n}'
);
self.options.preserve_newlines = True;
bt('var\na=do_preserve_newlines;', 'var\na = do_preserve_newlines;')
bt('// a\n// b\n\n// c\n// d')
bt('if (foo) // comment\n{\n bar();\n}')
self.options.keep_array_indentation = False;
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f'\n]");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i'\n]");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']",
"a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i'\n]");
bt('var x = [{}\n]', 'var x = [{}]');
bt('var x = [{foo:bar}\n]', 'var x = [{\n foo: bar\n}]');
bt("a = ['something',\n 'completely',\n 'different'];\nif (x);",
"a = ['something',\n 'completely',\n 'different'\n];\nif (x);");
bt("a = ['a','b','c']", "a = ['a', 'b', 'c']");
bt("a = ['a', 'b','c']", "a = ['a', 'b', 'c']");
bt("x = [{'a':0}]",
"x = [{\n 'a': 0\n}]");
bt('{a([[a1]], {b;});}',
'{\n a([\n [a1]\n ], {\n b;\n });\n}');
bt("a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();",
"a();\n[\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n].toString();");
bt("function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}",
"function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}");
self.options.keep_array_indentation = True;
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f']");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']");
bt("a = ['a', 'b', 'c',\n 'd', 'e', 'f',\n 'g', 'h', 'i']");
bt('var x = [{}\n]', 'var x = [{}\n]');
bt('var x = [{foo:bar}\n]', 'var x = [{\n foo: bar\n }\n]');
bt("a = ['something',\n 'completely',\n 'different'];\nif (x);");
bt("a = ['a','b','c']", "a = ['a', 'b', 'c']");
bt("a = ['a', 'b','c']", "a = ['a', 'b', 'c']");
bt("x = [{'a':0}]",
"x = [{\n 'a': 0\n}]");
bt('{a([[a1]], {b;});}',
'{\n a([[a1]], {\n b;\n });\n}');
bt("a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();",
"a();\n [\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ].toString();");
bt("function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}",
"function() {\n Foo([\n ['sdfsdfsd'],\n ['sdfsdfsdf']\n ]);\n}");
self.options.keep_array_indentation = False;
bt('a = //comment\n/regex/;');
test_fragment('/*\n * X\n */');
test_fragment('/*\r\n * X\r\n */', '/*\n * X\n */');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n} else {\n c;\n}');
bt('var a = new function();');
test_fragment('new function');
self.options.brace_style = 'expand';
bt('//case 1\nif (a == 1)\n{}\n//case 2\nelse if (a == 2)\n{}');
bt('if(1){2}else{3}', "if (1)\n{\n 2\n}\nelse\n{\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try\n{\n a();\n}\ncatch (b)\n{\n c();\n}\ncatch (d)\n{}\nfinally\n{\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a)\n{\n b();\n}\nelse if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a)\n{\n // comment\n}\nelse\n{\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x)\n{\n y\n}\nelse\n{\n if (x)\n {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a)\n{\n b;\n}\nelse\n{\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo)\n {\n bar();\n }');
bt('if (foo)\n{}\nelse /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a)\n{\n b;\n}\nelse\n{\n c;\n}');
test_fragment('if (foo) {', 'if (foo)\n{');
test_fragment('foo {', 'foo\n{');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return;\n{');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x()\n{\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a()\n {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo(\n {\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i)\n{\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i)\n{\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = 'collapse';
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}');
bt('if(1){2}else{3}', "if (1) {\n 2\n} else {\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n} catch (b) {\n c();\n} catch (d) {} finally {\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n} else if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n} else {\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n} else {\n if (x) {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n} else {\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }');
bt('if (foo) {} else /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n} else {\n c;\n}');
test_fragment('if (foo) {', 'if (foo) {');
test_fragment('foo {', 'foo {');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return; {');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a() {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = "end-expand";
bt('//case 1\nif (a == 1) {}\n//case 2\nelse if (a == 2) {}');
bt('if(1){2}else{3}', "if (1) {\n 2\n}\nelse {\n 3\n}");
bt('try{a();}catch(b){c();}catch(d){}finally{e();}',
"try {\n a();\n}\ncatch (b) {\n c();\n}\ncatch (d) {}\nfinally {\n e();\n}");
bt('if(a){b();}else if(c) foo();',
"if (a) {\n b();\n}\nelse if (c) foo();");
bt("if (a) {\n// comment\n}else{\n// comment\n}",
"if (a) {\n // comment\n}\nelse {\n // comment\n}"); # if/else statement with empty body
bt('if (x) {y} else { if (x) {y}}',
'if (x) {\n y\n}\nelse {\n if (x) {\n y\n }\n}');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}',
'if (a) {\n b;\n}\nelse {\n c;\n}');
test_fragment(' /*\n* xx\n*/\n// xx\nif (foo) {\n bar();\n}',
' /*\n * xx\n */\n // xx\n if (foo) {\n bar();\n }');
bt('if (foo) {}\nelse /regex/.test();');
bt('if (foo) /regex/.test();');
bt('if (a)\n{\nb;\n}\nelse\n{\nc;\n}', 'if (a) {\n b;\n}\nelse {\n c;\n}');
test_fragment('if (foo) {', 'if (foo) {');
test_fragment('foo {', 'foo {');
test_fragment('return {', 'return {'); # return needs the brace.
test_fragment('return /* inline */ {', 'return /* inline */ {');
# test_fragment('return\n{', 'return\n{'); # can't support this?, but that's an improbable and extreme case anyway.
test_fragment('return;\n{', 'return; {');
bt("throw {}");
bt("throw {\n foo;\n}");
bt('var foo = {}');
bt('if (foo) bar();\nelse break');
bt('function x() {\n foo();\n}zzz', 'function x() {\n foo();\n}\nzzz');
bt('a: do {} while (); xxx', 'a: do {} while ();\nxxx');
bt('var a = new function();');
bt('var a = new function() {};');
bt('var a = new function a() {};');
test_fragment('new function');
bt("foo({\n 'a': 1\n},\n10);",
"foo({\n 'a': 1\n },\n 10);");
bt('(["foo","bar"]).each(function(i) {return i;});',
'(["foo", "bar"]).each(function(i) {\n return i;\n});');
bt('(function(i) {return i;})();',
'(function(i) {\n return i;\n})();');
bt( "test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test(\n" +
"/*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
"},\n" +
"/*Argument 2\n" +
" */ {\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test(\n" +
" /*Argument 1*/\n" +
" {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
bt( "test( /*Argument 1*/\n" +
"{\n" +
" 'Value1': '1'\n" +
"}, /*Argument 2\n" +
" */\n" +
"{\n" +
" 'Value2': '2'\n" +
"});",
# expected
"test( /*Argument 1*/ {\n" +
" 'Value1': '1'\n" +
" },\n" +
" /*Argument 2\n" +
" */\n" +
" {\n" +
" 'Value2': '2'\n" +
" });");
self.options.brace_style = 'collapse';
        bt('a = <?= external() ?> ;'); # not the most perfect thing in the world, but you're the weirdo beautifying php mix-ins with javascript beautifier
bt('a = <%= external() %> ;');
test_fragment('roo = {\n /*\n ****\n FOO\n ****\n */\n BAR: 0\n};');
test_fragment("if (zz) {\n // ....\n}\n(function");
self.options.preserve_newlines = True;
bt('var a = 42; // foo\n\nvar b;')
bt('var a = 42; // foo\n\n\nvar b;')
bt("var a = 'foo' +\n 'bar';");
bt("var a = \"foo\" +\n \"bar\";");
bt('"foo""bar""baz"', '"foo"\n"bar"\n"baz"')
bt("'foo''bar''baz'", "'foo'\n'bar'\n'baz'")
bt("{\n get foo() {}\n}")
bt("{\n var a = get\n foo();\n}")
bt("{\n set foo() {}\n}")
bt("{\n var a = set\n foo();\n}")
bt("var x = {\n get function()\n}")
bt("var x = {\n set function()\n}")
bt("var x = set\n\nfunction() {}", "var x = set\n\n function() {}")
bt('<!-- foo\nbar();\n-->')
bt('<!-- dont crash')
bt('for () /abc/.test()')
bt('if (k) /aaa/m.test(v) && l();')
bt('switch (true) {\n case /swf/i.test(foo):\n bar();\n}')
bt('createdAt = {\n type: Date,\n default: Date.now\n}')
bt('switch (createdAt) {\n case a:\n Date,\n default:\n Date.now\n}')
bt('return function();')
bt('var a = function();')
bt('var a = 5 + function();')
bt('{\n foo // something\n ,\n bar // something\n baz\n}')
bt('function a(a) {} function b(b) {} function c(c) {}', 'function a(a) {}\n\nfunction b(b) {}\n\nfunction c(c) {}')
bt('3.*7;', '3. * 7;')
bt('import foo.*;', 'import foo.*;') # actionscript's import
test_fragment('function f(a: a, b: b)') # actionscript
bt('foo(a, function() {})');
bt('foo(a, /regex/)');
bt('/* foo */\n"x"');
self.options.break_chained_methods = False
self.options.preserve_newlines = False
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat);\nfoo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo.bar().baz().cucumber(fat)\nfoo.bar().baz().cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this.something = foo.bar().baz().cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this.something.xxx = foo.moo.bar()');
self.options.break_chained_methods = False
self.options.preserve_newlines = True
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat);\nfoo.bar().baz().cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz().cucumber(fat)\nfoo.bar().baz().cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this\n .something = foo.bar()\n .baz().cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this\n .something\n .xxx = foo.moo\n .bar()');
self.options.break_chained_methods = True
self.options.preserve_newlines = False
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat);\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo.bar()\n .baz()\n .cucumber(fat)\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this.something = foo.bar()\n .baz()\n .cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this.something.xxx = foo.moo.bar()');
self.options.break_chained_methods = True
self.options.preserve_newlines = True
bt('foo\n.bar()\n.baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat); foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat);\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('foo\n.bar()\n.baz().cucumber(fat)\n foo.bar().baz().cucumber(fat)', 'foo\n .bar()\n .baz()\n .cucumber(fat)\nfoo.bar()\n .baz()\n .cucumber(fat)');
bt('this\n.something = foo.bar()\n.baz().cucumber(fat)', 'this\n .something = foo.bar()\n .baz()\n .cucumber(fat)');
bt('this.something.xxx = foo.moo.bar()');
bt('this\n.something\n.xxx = foo.moo\n.bar()', 'this\n .something\n .xxx = foo.moo\n .bar()');
self.options.break_chained_methods = False
self.options.preserve_newlines = False
self.options.preserve_newlines = False
self.options.wrap_line_length = 0
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();')
self.options.wrap_line_length = 70
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 40
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat &&\n' +
' "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 41
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();');
self.options.wrap_line_length = 45
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('{\n' +
' foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
'}',
# expected #
'{\n' +
' foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_.okay();\n' +
'}');
self.options.preserve_newlines = True
self.options.wrap_line_length = 0
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 70
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 40
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat &&\n' +
' "sassy") || (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 41
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
'if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();',
# expected #
'foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
'Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
'if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();');
self.options.wrap_line_length = 45
# NOTE: wrap is only best effort - line continues until next wrap point is found.
#..............---------1---------2---------3---------4---------5---------6---------7
#..............1234567890123456789012345678901234567890123456789012345678901234567890
test_fragment('{\n' +
' foo.bar().baz().cucumber((fat && "sassy") || (leans\n&& mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n.but_this_can\n' +
' if (wraps_can_occur && inside_an_if_block) that_is_\n.okay();\n' +
'}',
# expected #
'{\n' +
' foo.bar().baz().cucumber((fat && "sassy") ||\n' +
' (leans && mean));\n' +
' Test_very_long_variable_name_this_should_never_wrap\n' +
' .but_this_can\n' +
' if (wraps_can_occur &&\n' +
' inside_an_if_block) that_is_\n' +
' .okay();\n' +
'}');
self.options.wrap_line_length = 0
self.options.preserve_newlines = False
bt('if (foo) // comment\n bar();');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n /asdf/;');
bt('this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');',
'this.oa = new OAuth(_requestToken, _accessToken, consumer_key);');
bt('foo = {\n x: y, // #44\n w: z // #44\n}');
bt('switch (x) {\n case "a":\n // comment on newline\n break;\n case "b": // comment on same line\n break;\n}');
# these aren't ready yet.
#bt('if (foo) // comment\n bar() /*i*/ + baz() /*j\n*/ + asdf();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\nelse\na();',
'if (foo)\n if (bar)\n if (baz) whee();\n else a();');
bt('if (foo)\nbar();\nelse\ncar();',
'if (foo) bar();\nelse car();');
bt('if (foo) if (bar) if (baz);\na();',
'if (foo)\n if (bar)\n if (baz);\na();');
bt('if (foo) if (bar) if (baz) whee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if (foo) a()\nif (bar) if (baz) whee();\na();',
'if (foo) a()\nif (bar)\n if (baz) whee();\na();');
bt('if (foo);\nif (bar) if (baz) whee();\na();',
'if (foo);\nif (bar)\n if (baz) whee();\na();');
bt('if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];',
'if (options)\n'+
' for (var p in options) this[p] = options[p];');
bt('if (options) for (var p in options) this[p] = options[p];',
'if (options)\n for (var p in options) this[p] = options[p];');
bt('if (options) do q(); while (b());',
'if (options)\n do q(); while (b());');
bt('if (options) while (b()) q();',
'if (options)\n while (b()) q();');
bt('if (options) do while (b()) q(); while (a());',
'if (options)\n do\n while (b()) q(); while (a());');
bt('function f(a, b, c,\nd, e) {}',
'function f(a, b, c, d, e) {}');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('function f(a,b) {if(a) b()}\n\n\n\nfunction g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
# This is not valid syntax, but still want to behave reasonably and not side-effect
bt('(if(a) b())(if(a) b())',
'(\n if (a) b())(\n if (a) b())');
bt('(if(a) b())\n\n\n(if(a) b())',
'(\n if (a) b())\n(\n if (a) b())');
bt("if\n(a)\nb();", "if (a) b();");
bt('var a =\nfoo', 'var a = foo');
bt('var a = {\n"a":1,\n"b":2}', "var a = {\n \"a\": 1,\n \"b\": 2\n}");
bt("var a = {\n'a':1,\n'b':2}", "var a = {\n 'a': 1,\n 'b': 2\n}");
bt('var a = /*i*/ "b";');
bt('var a = /*i*/\n"b";', 'var a = /*i*/ "b";');
bt('var a = /*i*/\nb;', 'var a = /*i*/ b;');
bt('{\n\n\n"x"\n}', '{\n "x"\n}');
bt('if(a &&\nb\n||\nc\n||d\n&&\ne) e = f', 'if (a && b || c || d && e) e = f');
bt('if(a &&\n(b\n||\nc\n||d)\n&&\ne) e = f', 'if (a && (b || c || d) && e) e = f');
test_fragment('\n\n"x"', '"x"');
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\nb = 2;');
self.options.preserve_newlines = True
bt('if (foo) // comment\n bar();');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n (bar());');
bt('if (foo) // comment\n /asdf/;');
bt('this.oa = new OAuth(\n' +
' _requestToken,\n' +
' _accessToken,\n' +
' consumer_key\n' +
');');
bt('foo = {\n x: y, // #44\n w: z // #44\n}');
bt('switch (x) {\n case "a":\n // comment on newline\n break;\n case "b": // comment on same line\n break;\n}');
# these aren't ready yet.
# bt('if (foo) // comment\n bar() /*i*/ + baz() /*j\n*/ + asdf();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\na();',
'if (foo)\n if (bar)\n if (baz)\n whee();\na();');
bt('if\n(foo)\nif\n(bar)\nif\n(baz)\nwhee();\nelse\na();',
'if (foo)\n if (bar)\n if (baz)\n whee();\n else\n a();');
bt('if (foo) bar();\nelse\ncar();',
'if (foo) bar();\nelse\n car();');
bt('if (foo) if (bar) if (baz);\na();',
'if (foo)\n if (bar)\n if (baz);\na();');
bt('if (foo) if (bar) if (baz) whee();\na();',
'if (foo)\n if (bar)\n if (baz) whee();\na();');
bt('if (foo) a()\nif (bar) if (baz) whee();\na();',
'if (foo) a()\nif (bar)\n if (baz) whee();\na();');
bt('if (foo);\nif (bar) if (baz) whee();\na();',
'if (foo);\nif (bar)\n if (baz) whee();\na();');
bt('if (options)\n' +
' for (var p in options)\n' +
' this[p] = options[p];');
bt('if (options) for (var p in options) this[p] = options[p];',
'if (options)\n for (var p in options) this[p] = options[p];');
bt('if (options) do q(); while (b());',
'if (options)\n do q(); while (b());');
bt('if (options) do; while (b());',
'if (options)\n do; while (b());');
bt('if (options) while (b()) q();',
'if (options)\n while (b()) q();');
bt('if (options) do while (b()) q(); while (a());',
'if (options)\n do\n while (b()) q(); while (a());');
bt('function f(a, b, c,\nd, e) {}',
'function f(a, b, c,\n d, e) {}');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('function f(a,b) {if(a) b()}\n\n\n\nfunction g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\n\n\nfunction g(a, b) {\n if (!a) b()\n}');
# This is not valid syntax, but still want to behave reasonably and not side-effect
bt('(if(a) b())(if(a) b())',
'(\n if (a) b())(\n if (a) b())');
bt('(if(a) b())\n\n\n(if(a) b())',
'(\n if (a) b())\n\n\n(\n if (a) b())');
bt("if\n(a)\nb();", "if (a)\n b();");
bt('var a =\nfoo', 'var a =\n foo');
bt('var a = {\n"a":1,\n"b":2}', "var a = {\n \"a\": 1,\n \"b\": 2\n}");
bt("var a = {\n'a':1,\n'b':2}", "var a = {\n 'a': 1,\n 'b': 2\n}");
bt('var a = /*i*/ "b";');
bt('var a = /*i*/\n"b";', 'var a = /*i*/\n "b";');
bt('var a = /*i*/\nb;', 'var a = /*i*/\n b;');
bt('{\n\n\n"x"\n}', '{\n\n\n "x"\n}');
bt('if(a &&\nb\n||\nc\n||d\n&&\ne) e = f', 'if (a &&\n b ||\n c || d &&\n e) e = f');
bt('if(a &&\n(b\n||\nc\n||d)\n&&\ne) e = f', 'if (a &&\n (b ||\n c || d) &&\n e) e = f');
test_fragment('\n\n"x"', '"x"');
        # this behavior differs between js and python: it defaults to unlimited in js and 10 in python
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\n\n\n\n\n\n\n\n\n\nb = 2;');
self.options.max_preserve_newlines = 8;
bt('a = 1;\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\nb = 2;',
'a = 1;\n\n\n\n\n\n\n\nb = 2;');
# Test the option to have spaces within parens
self.options.space_in_paren = False
bt('if(p) foo(a,b)', 'if (p) foo(a, b)');
bt('try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
'try {\n while (true) {\n willThrow()\n }\n} catch (result) switch (result) {\n case 1:\n ++result\n}');
bt('((e/((a+(b)*c)-d))^2)*5;', '((e / ((a + (b) * c) - d)) ^ 2) * 5;');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f(a, b) {\n if (a) b()\n}\n\nfunction g(a, b) {\n if (!a) b()\n}');
bt('a=[];',
'a = [];');
bt('a=[b,c,d];',
'a = [b, c, d];');
bt('a= f[b];',
'a = f[b];');
self.options.space_in_paren = True
bt('if(p) foo(a,b)', 'if ( p ) foo( a, b )');
bt('try{while(true){willThrow()}}catch(result)switch(result){case 1:++result }',
'try {\n while ( true ) {\n willThrow( )\n }\n} catch ( result ) switch ( result ) {\n case 1:\n ++result\n}');
bt('((e/((a+(b)*c)-d))^2)*5;', '( ( e / ( ( a + ( b ) * c ) - d ) ) ^ 2 ) * 5;');
bt('function f(a,b) {if(a) b()}function g(a,b) {if(!a) b()}',
'function f( a, b ) {\n if ( a ) b( )\n}\n\nfunction g( a, b ) {\n if ( !a ) b( )\n}');
bt('a=[ ];',
'a = [ ];');
bt('a=[b,c,d];',
'a = [ b, c, d ];');
bt('a= f[b];',
'a = f[ b ];');
self.options.space_in_paren = False
# Test that e4x literals passed through when e4x-option is enabled
bt('xml=<a b="c"><d/><e>\n foo</e>x</a>;', 'xml = < a b = "c" > < d / > < e >\n foo < /e>x</a > ;');
self.options.e4x = True
bt('xml=<a b="c"><d/><e>\n foo</e>x</a>;', 'xml = <a b="c"><d/><e>\n foo</e>x</a>;');
bt('<a b=\'This is a quoted "c".\'/>', '<a b=\'This is a quoted "c".\'/>');
bt('<a b="This is a quoted \'c\'."/>', '<a b="This is a quoted \'c\'."/>');
bt('<a b="A quote \' inside string."/>', '<a b="A quote \' inside string."/>');
bt('<a b=\'A quote " inside string.\'/>', '<a b=\'A quote " inside string.\'/>');
bt('<a b=\'Some """ quotes "" inside string.\'/>', '<a b=\'Some """ quotes "" inside string.\'/>');
# Handles inline expressions
bt('xml=<{a} b="c"><d/><e v={z}>\n foo</e>x</{a}>;', 'xml = <{a} b="c"><d/><e v={z}>\n foo</e>x</{a}>;');
# Handles CDATA
bt('xml=<a b="c"><![CDATA[d/>\n</a></{}]]></a>;', 'xml = <a b="c"><![CDATA[d/>\n</a></{}]]></a>;');
bt('xml=<![CDATA[]]>;', 'xml = <![CDATA[]]>;');
bt('xml=<![CDATA[ b="c"><d/><e v={z}>\n foo</e>x/]]>;', 'xml = <![CDATA[ b="c"><d/><e v={z}>\n foo</e>x/]]>;');
# Handles messed up tags, as long as it isn't the same name
# as the root tag. Also handles tags of same name as root tag
# as long as nesting matches.
bt('xml=<a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;',
'xml = <a x="jn"><c></b></f><a><d jnj="jnn"><f></a ></nj></a>;');
# If xml is not terminated, the remainder of the file is treated
# as part of the xml-literal (passed through unaltered)
test_fragment('xml=<a></b>\nc<b;', 'xml = <a></b>\nc<b;');
self.options.e4x = False
# START tests for issue 241
bt('obj\n' +
' .last({\n' +
' foo: 1,\n' +
' bar: 2\n' +
' });\n' +
'var test = 1;');
bt('obj\n' +
' .last(a, function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;');
bt('obj.first()\n' +
' .second()\n' +
' .last(function(err, response) {\n' +
' console.log(err);\n' +
' });');
# END tests for issue 241
# START tests for issue 268 and 275
bt('obj.last(a, function() {\n' +
' var test;\n' +
'});\n' +
'var test = 1;');
bt('obj.last(a,\n' +
' function() {\n' +
' var test;\n' +
' });\n' +
'var test = 1;');
bt('(function() {if (!window.FOO) window.FOO || (window.FOO = function() {var b = {bar: "zort"};});})();',
'(function() {\n' +
' if (!window.FOO) window.FOO || (window.FOO = function() {\n' +
' var b = {\n' +
' bar: "zort"\n' +
' };\n' +
' });\n' +
'})();');
# END tests for issue 268 and 275
# START tests for issue 281
bt('define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
'], function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
' onClick: lang.hitch(this, function() {\n' +
' new Deferred().then(lang.hitch(this, function() {\n' +
' this.salary * 0.25;\n' +
' }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
'});');
bt('define(["dojo/_base/declare", "my/Employee", "dijit/form/Button",\n' +
' "dojo/_base/lang", "dojo/Deferred"\n' +
' ],\n' +
' function(declare, Employee, Button, lang, Deferred) {\n' +
' return declare(Employee, {\n' +
' constructor: function() {\n' +
' new Button({\n' +
            '                    onClick: lang.hitch(this, function() {\n' +
            '                        new Deferred().then(lang.hitch(this, function() {\n' +
            '                            this.salary * 0.25;\n' +
            '                        }));\n' +
' })\n' +
' });\n' +
' }\n' +
' });\n' +
' });');
# END tests for issue 281
        # This is what I think these should look like (related to issue #256)
# we don't have the ability yet
#bt('var a=1,b={bang:2},c=3;',
# 'var a = 1,\n b = {\n bang: 2\n },\n c = 3;');
#bt('var a={bing:1},b=2,c=3;',
# 'var a = {\n bing: 1\n },\n b = 2,\n c = 3;');
def decodesto(self, input, expectation=None):
self.assertEqual(
jsbeautifier.beautify(input, self.options), expectation or input)
# if the expected is different from input, run it again
# expected output should be unchanged when run twice.
        if expectation is not None:
self.assertEqual(
jsbeautifier.beautify(expectation, self.options), expectation)
def wrap(self, text):
return self.wrapregex.sub(' \\1', text)
def bt(self, input, expectation=None):
expectation = expectation or input
self.decodesto(input, expectation)
if self.options.indent_size == 4 and input:
wrapped_input = '{\n%s\nfoo=bar;}' % self.wrap(input)
wrapped_expect = '{\n%s\n foo = bar;\n}' % self.wrap(expectation)
self.decodesto(wrapped_input, wrapped_expect)
@classmethod
def setUpClass(cls):
options = jsbeautifier.default_options()
options.indent_size = 4
options.indent_char = ' '
options.preserve_newlines = True
options.jslint_happy = False
options.keep_array_indentation = False
options.brace_style = 'collapse'
options.indent_level = 0
options.break_chained_methods = False
cls.options = options
cls.wrapregex = re.compile('^(.+)$', re.MULTILINE)
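    # Illustrative sketch (hypothetical snippet, not an upstream test case) of what
    # bt()/decodesto() above exercise: beautify a fragment with the same options object.
    #   opts = jsbeautifier.default_options()
    #   opts.indent_size = 4
    #   jsbeautifier.beautify('if(x){y()}', opts)   # -> 'if (x) {\n    y()\n}'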
if __name__ == '__main__':
    unittest.main()
<|file_name|>bootstrap.js<|end_file_name|><|fim▁begin|>/*!
* Bootstrap v3.3.5 (http://getbootstrap.com)
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
*/
/*!
* Generated using the Bootstrap Customizer (http://getbootstrap.com/customize/?id=8a41afce6c65a5315ae2)
* Config saved to config.json and https://gist.github.com/8a41afce6c65a5315ae2
*/
if (typeof jQuery === 'undefined') {
throw new Error('Bootstrap\'s JavaScript requires jQuery')
}
+function ($) {
'use strict';
var version = $.fn.jquery.split(' ')[0].split('.')
if ((version[0] < 2 && version[1] < 9) || (version[0] == 1 && version[1] == 9 && version[2] < 1)) {
throw new Error('Bootstrap\'s JavaScript requires jQuery version 1.9.1 or higher')
}
}(jQuery);
/* ========================================================================
* Bootstrap: alert.js v3.3.5
* http://getbootstrap.com/javascript/#alerts
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// ALERT CLASS DEFINITION
// ======================
var dismiss = '[data-dismiss="alert"]'
var Alert = function (el) {
$(el).on('click', dismiss, this.close)
}
Alert.VERSION = '3.3.5'
Alert.TRANSITION_DURATION = 150
Alert.prototype.close = function (e) {
var $this = $(this)
var selector = $this.attr('data-target')
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
var $parent = $(selector)
if (e) e.preventDefault()
if (!$parent.length) {
$parent = $this.closest('.alert')
}
$parent.trigger(e = $.Event('close.bs.alert'))
if (e.isDefaultPrevented()) return
$parent.removeClass('in')
function removeElement() {
// detach from parent, fire event then clean up data
$parent.detach().trigger('closed.bs.alert').remove()
}
$.support.transition && $parent.hasClass('fade') ?
$parent
.one('bsTransitionEnd', removeElement)
.emulateTransitionEnd(Alert.TRANSITION_DURATION) :
removeElement()
}
// ALERT PLUGIN DEFINITION
// =======================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.alert')
if (!data) $this.data('bs.alert', (data = new Alert(this)))
if (typeof option == 'string') data[option].call($this)
})
}
var old = $.fn.alert
$.fn.alert = Plugin
$.fn.alert.Constructor = Alert
// ALERT NO CONFLICT
// =================
$.fn.alert.noConflict = function () {
$.fn.alert = old
return this
}
// ALERT DATA-API
// ==============
$(document).on('click.bs.alert.data-api', dismiss, Alert.prototype.close)
}(jQuery);
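// Usage sketch (not part of the upstream file; '#myAlert' is a hypothetical selector).
// The plugin above dispatches string options to Alert methods, so an alert can be
// dismissed programmatically:
//   $('#myAlert').alert('close')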
/* ========================================================================
* Bootstrap: button.js v3.3.5
* http://getbootstrap.com/javascript/#buttons
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// BUTTON PUBLIC CLASS DEFINITION
// ==============================
var Button = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, Button.DEFAULTS, options)
this.isLoading = false
}
Button.VERSION = '3.3.5'
Button.DEFAULTS = {
loadingText: 'loading...'
}
Button.prototype.setState = function (state) {
var d = 'disabled'
var $el = this.$element
var val = $el.is('input') ? 'val' : 'html'
var data = $el.data()
state += 'Text'
if (data.resetText == null) $el.data('resetText', $el[val]())
// push to event loop to allow forms to submit
setTimeout($.proxy(function () {
$el[val](data[state] == null ? this.options[state] : data[state])
if (state == 'loadingText') {
this.isLoading = true
$el.addClass(d).attr(d, d)
} else if (this.isLoading) {
this.isLoading = false
$el.removeClass(d).removeAttr(d)
}
}, this), 0)
}
Button.prototype.toggle = function () {
var changed = true
var $parent = this.$element.closest('[data-toggle="buttons"]')
if ($parent.length) {
var $input = this.$element.find('input')
if ($input.prop('type') == 'radio') {
if ($input.prop('checked')) changed = false
$parent.find('.active').removeClass('active')
this.$element.addClass('active')
} else if ($input.prop('type') == 'checkbox') {
if (($input.prop('checked')) !== this.$element.hasClass('active')) changed = false
this.$element.toggleClass('active')
}
$input.prop('checked', this.$element.hasClass('active'))
if (changed) $input.trigger('change')
} else {
this.$element.attr('aria-pressed', !this.$element.hasClass('active'))
this.$element.toggleClass('active')
}
}
// BUTTON PLUGIN DEFINITION
// ========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.button')
var options = typeof option == 'object' && option
if (!data) $this.data('bs.button', (data = new Button(this, options)))
if (option == 'toggle') data.toggle()
else if (option) data.setState(option)
})
}
var old = $.fn.button
$.fn.button = Plugin
$.fn.button.Constructor = Button
// BUTTON NO CONFLICT
// ==================
$.fn.button.noConflict = function () {
$.fn.button = old
return this
}
// BUTTON DATA-API
// ===============
$(document)
.on('click.bs.button.data-api', '[data-toggle^="button"]', function (e) {
var $btn = $(e.target)
if (!$btn.hasClass('btn')) $btn = $btn.closest('.btn')
Plugin.call($btn, 'toggle')
if (!($(e.target).is('input[type="radio"]') || $(e.target).is('input[type="checkbox"]'))) e.preventDefault()
})
.on('focus.bs.button.data-api blur.bs.button.data-api', '[data-toggle^="button"]', function (e) {
$(e.target).closest('.btn').toggleClass('focus', /^focus(in)?$/.test(e.type))
})
}(jQuery);
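// Usage sketch (hypothetical '#loadBtn' / '.btn-toggle' selectors): setState() and
// toggle() are reachable through the plugin defined above.
//   $('#loadBtn').button('loading')    // swap in data-loading-text and disable
//   $('#loadBtn').button('reset')      // restore the original text
//   $('.btn-toggle').button('toggle')  // flip the active state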
/* ========================================================================
* Bootstrap: carousel.js v3.3.5
* http://getbootstrap.com/javascript/#carousel
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// CAROUSEL CLASS DEFINITION
// =========================
var Carousel = function (element, options) {
this.$element = $(element)
this.$indicators = this.$element.find('.carousel-indicators')
this.options = options
this.paused = null
this.sliding = null
this.interval = null
this.$active = null
this.$items = null
this.options.keyboard && this.$element.on('keydown.bs.carousel', $.proxy(this.keydown, this))
this.options.pause == 'hover' && !('ontouchstart' in document.documentElement) && this.$element
.on('mouseenter.bs.carousel', $.proxy(this.pause, this))
.on('mouseleave.bs.carousel', $.proxy(this.cycle, this))
}
Carousel.VERSION = '3.3.5'
Carousel.TRANSITION_DURATION = 600
Carousel.DEFAULTS = {
interval: 5000,
pause: 'hover',
wrap: true,
keyboard: true
}
Carousel.prototype.keydown = function (e) {
if (/input|textarea/i.test(e.target.tagName)) return
switch (e.which) {
case 37: this.prev(); break
case 39: this.next(); break
default: return
}
e.preventDefault()
}
Carousel.prototype.cycle = function (e) {
e || (this.paused = false)
this.interval && clearInterval(this.interval)
this.options.interval
&& !this.paused
&& (this.interval = setInterval($.proxy(this.next, this), this.options.interval))
return this
}
Carousel.prototype.getItemIndex = function (item) {
this.$items = item.parent().children('.item')
return this.$items.index(item || this.$active)
}
Carousel.prototype.getItemForDirection = function (direction, active) {
var activeIndex = this.getItemIndex(active)
var willWrap = (direction == 'prev' && activeIndex === 0)
|| (direction == 'next' && activeIndex == (this.$items.length - 1))
if (willWrap && !this.options.wrap) return active
var delta = direction == 'prev' ? -1 : 1
var itemIndex = (activeIndex + delta) % this.$items.length
return this.$items.eq(itemIndex)
}
Carousel.prototype.to = function (pos) {
var that = this
var activeIndex = this.getItemIndex(this.$active = this.$element.find('.item.active'))
if (pos > (this.$items.length - 1) || pos < 0) return
if (this.sliding) return this.$element.one('slid.bs.carousel', function () { that.to(pos) }) // yes, "slid"
if (activeIndex == pos) return this.pause().cycle()
return this.slide(pos > activeIndex ? 'next' : 'prev', this.$items.eq(pos))
}
Carousel.prototype.pause = function (e) {
e || (this.paused = true)
if (this.$element.find('.next, .prev').length && $.support.transition) {
this.$element.trigger($.support.transition.end)
this.cycle(true)
}
this.interval = clearInterval(this.interval)
return this
}
Carousel.prototype.next = function () {
if (this.sliding) return
return this.slide('next')
}
Carousel.prototype.prev = function () {
if (this.sliding) return
return this.slide('prev')
}
Carousel.prototype.slide = function (type, next) {
var $active = this.$element.find('.item.active')
var $next = next || this.getItemForDirection(type, $active)
var isCycling = this.interval
var direction = type == 'next' ? 'left' : 'right'
var that = this
if ($next.hasClass('active')) return (this.sliding = false)
var relatedTarget = $next[0]
var slideEvent = $.Event('slide.bs.carousel', {
relatedTarget: relatedTarget,
direction: direction
})
this.$element.trigger(slideEvent)
if (slideEvent.isDefaultPrevented()) return
this.sliding = true
isCycling && this.pause()
if (this.$indicators.length) {
this.$indicators.find('.active').removeClass('active')
var $nextIndicator = $(this.$indicators.children()[this.getItemIndex($next)])
$nextIndicator && $nextIndicator.addClass('active')
}
var slidEvent = $.Event('slid.bs.carousel', { relatedTarget: relatedTarget, direction: direction }) // yes, "slid"
if ($.support.transition && this.$element.hasClass('slide')) {
$next.addClass(type)
$next[0].offsetWidth // force reflow
$active.addClass(direction)
$next.addClass(direction)
$active
.one('bsTransitionEnd', function () {
$next.removeClass([type, direction].join(' ')).addClass('active')
$active.removeClass(['active', direction].join(' '))
that.sliding = false
setTimeout(function () {
that.$element.trigger(slidEvent)
}, 0)
})
.emulateTransitionEnd(Carousel.TRANSITION_DURATION)
} else {
$active.removeClass('active')
$next.addClass('active')
this.sliding = false
this.$element.trigger(slidEvent)
}
isCycling && this.cycle()
return this
}
// CAROUSEL PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
      var $this = $(this)
      var data = $this.data('bs.carousel')
      var options = $.extend({}, Carousel.DEFAULTS, $this.data(), typeof option == 'object' && option)
var action = typeof option == 'string' ? option : options.slide
if (!data) $this.data('bs.carousel', (data = new Carousel(this, options)))
if (typeof option == 'number') data.to(option)
else if (action) data[action]()
else if (options.interval) data.pause().cycle()
})
}
var old = $.fn.carousel
$.fn.carousel = Plugin
$.fn.carousel.Constructor = Carousel
// CAROUSEL NO CONFLICT
// ====================
$.fn.carousel.noConflict = function () {
$.fn.carousel = old
return this
}
// CAROUSEL DATA-API
// =================
var clickHandler = function (e) {
var href
var $this = $(this)
var $target = $($this.attr('data-target') || (href = $this.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '')) // strip for ie7
if (!$target.hasClass('carousel')) return
var options = $.extend({}, $target.data(), $this.data())
var slideIndex = $this.attr('data-slide-to')
if (slideIndex) options.interval = false
Plugin.call($target, options)
if (slideIndex) {
$target.data('bs.carousel').to(slideIndex)
}
e.preventDefault()
}
$(document)
.on('click.bs.carousel.data-api', '[data-slide]', clickHandler)
.on('click.bs.carousel.data-api', '[data-slide-to]', clickHandler)
$(window).on('load', function () {
$('[data-ride="carousel"]').each(function () {
var $carousel = $(this)
Plugin.call($carousel, $carousel.data())
})
})
}(jQuery);
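// Usage sketch (hypothetical '#myCarousel'): Plugin() above accepts an options object,
// a numeric slide index, or a method name.
//   $('#myCarousel').carousel({ interval: 2000 })  // start cycling every 2s
//   $('#myCarousel').carousel(1)                   // go to the second slide
//   $('#myCarousel').carousel('pause')             // stop cycling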
/* ========================================================================
* Bootstrap: dropdown.js v3.3.5
* http://getbootstrap.com/javascript/#dropdowns
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// DROPDOWN CLASS DEFINITION
// =========================
var backdrop = '.dropdown-backdrop'
var toggle = '[data-toggle="dropdown"]'
var Dropdown = function (element) {
$(element).on('click.bs.dropdown', this.toggle)
}
Dropdown.VERSION = '3.3.5'
function getParent($this) {
var selector = $this.attr('data-target')
if (!selector) {
selector = $this.attr('href')
selector = selector && /#[A-Za-z]/.test(selector) && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
var $parent = selector && $(selector)
return $parent && $parent.length ? $parent : $this.parent()
}
function clearMenus(e) {
if (e && e.which === 3) return
$(backdrop).remove()
$(toggle).each(function () {
var $this = $(this)
var $parent = getParent($this)
var relatedTarget = { relatedTarget: this }
if (!$parent.hasClass('open')) return
if (e && e.type == 'click' && /input|textarea/i.test(e.target.tagName) && $.contains($parent[0], e.target)) return
$parent.trigger(e = $.Event('hide.bs.dropdown', relatedTarget))
if (e.isDefaultPrevented()) return
$this.attr('aria-expanded', 'false')
$parent.removeClass('open').trigger('hidden.bs.dropdown', relatedTarget)
})
}
Dropdown.prototype.toggle = function (e) {
var $this = $(this)
if ($this.is('.disabled, :disabled')) return
var $parent = getParent($this)
var isActive = $parent.hasClass('open')
clearMenus()
if (!isActive) {
if ('ontouchstart' in document.documentElement && !$parent.closest('.navbar-nav').length) {
// if mobile we use a backdrop because click events don't delegate
$(document.createElement('div'))
.addClass('dropdown-backdrop')
.insertAfter($(this))
.on('click', clearMenus)
}
var relatedTarget = { relatedTarget: this }
$parent.trigger(e = $.Event('show.bs.dropdown', relatedTarget))
if (e.isDefaultPrevented()) return
$this
.trigger('focus')
.attr('aria-expanded', 'true')
$parent
.toggleClass('open')
.trigger('shown.bs.dropdown', relatedTarget)
}
return false
}
Dropdown.prototype.keydown = function (e) {
if (!/(38|40|27|32)/.test(e.which) || /input|textarea/i.test(e.target.tagName)) return
var $this = $(this)
e.preventDefault()
e.stopPropagation()
if ($this.is('.disabled, :disabled')) return
var $parent = getParent($this)
var isActive = $parent.hasClass('open')
if (!isActive && e.which != 27 || isActive && e.which == 27) {
if (e.which == 27) $parent.find(toggle).trigger('focus')
return $this.trigger('click')
}
var desc = ' li:not(.disabled):visible a'
var $items = $parent.find('.dropdown-menu' + desc)
if (!$items.length) return
var index = $items.index(e.target)
if (e.which == 38 && index > 0) index-- // up
if (e.which == 40 && index < $items.length - 1) index++ // down
if (!~index) index = 0
$items.eq(index).trigger('focus')
}
// DROPDOWN PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.dropdown')
if (!data) $this.data('bs.dropdown', (data = new Dropdown(this)))
if (typeof option == 'string') data[option].call($this)
})
}
var old = $.fn.dropdown
$.fn.dropdown = Plugin
$.fn.dropdown.Constructor = Dropdown
// DROPDOWN NO CONFLICT
// ====================
$.fn.dropdown.noConflict = function () {
$.fn.dropdown = old
return this
}
// APPLY TO STANDARD DROPDOWN ELEMENTS
// ===================================
$(document)
.on('click.bs.dropdown.data-api', clearMenus)
.on('click.bs.dropdown.data-api', '.dropdown form', function (e) { e.stopPropagation() })
.on('click.bs.dropdown.data-api', toggle, Dropdown.prototype.toggle)
.on('keydown.bs.dropdown.data-api', toggle, Dropdown.prototype.keydown)
.on('keydown.bs.dropdown.data-api', '.dropdown-menu', Dropdown.prototype.keydown)
}(jQuery);
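// Usage sketch (hypothetical '.dropdown-toggle' selector): string options are forwarded
// to Dropdown methods by Plugin() above.
//   $('.dropdown-toggle').dropdown('toggle')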
/* ========================================================================
* Bootstrap: modal.js v3.3.5
* http://getbootstrap.com/javascript/#modals
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// MODAL CLASS DEFINITION
// ======================
var Modal = function (element, options) {
this.options = options
this.$body = $(document.body)
this.$element = $(element)
this.$dialog = this.$element.find('.modal-dialog')
this.$backdrop = null
this.isShown = null
this.originalBodyPad = null
this.scrollbarWidth = 0
this.ignoreBackdropClick = false
if (this.options.remote) {
this.$element
.find('.modal-content')
.load(this.options.remote, $.proxy(function () {
this.$element.trigger('loaded.bs.modal')
}, this))
}
}
Modal.VERSION = '3.3.5'
Modal.TRANSITION_DURATION = 300
Modal.BACKDROP_TRANSITION_DURATION = 150
Modal.DEFAULTS = {
backdrop: true,
keyboard: true,
show: true
}
Modal.prototype.toggle = function (_relatedTarget) {
return this.isShown ? this.hide() : this.show(_relatedTarget)
}
Modal.prototype.show = function (_relatedTarget) {
var that = this
var e = $.Event('show.bs.modal', { relatedTarget: _relatedTarget })
this.$element.trigger(e)
if (this.isShown || e.isDefaultPrevented()) return
this.isShown = true
this.checkScrollbar()
this.setScrollbar()
this.$body.addClass('modal-open')
this.escape()
this.resize()
this.$element.on('click.dismiss.bs.modal', '[data-dismiss="modal"]', $.proxy(this.hide, this))
this.$dialog.on('mousedown.dismiss.bs.modal', function () {
that.$element.one('mouseup.dismiss.bs.modal', function (e) {
if ($(e.target).is(that.$element)) that.ignoreBackdropClick = true
})
})
this.backdrop(function () {
var transition = $.support.transition && that.$element.hasClass('fade')
if (!that.$element.parent().length) {
that.$element.appendTo(that.$body) // don't move modals dom position
}
that.$element
.show()
.scrollTop(0)
that.adjustDialog()
if (transition) {
that.$element[0].offsetWidth // force reflow
}
that.$element.addClass('in')
that.enforceFocus()
var e = $.Event('shown.bs.modal', { relatedTarget: _relatedTarget })
transition ?
that.$dialog // wait for modal to slide in
.one('bsTransitionEnd', function () {
that.$element.trigger('focus').trigger(e)
})
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
that.$element.trigger('focus').trigger(e)
})
}
Modal.prototype.hide = function (e) {
if (e) e.preventDefault()
e = $.Event('hide.bs.modal')
this.$element.trigger(e)
if (!this.isShown || e.isDefaultPrevented()) return
this.isShown = false
this.escape()
this.resize()
$(document).off('focusin.bs.modal')
this.$element
.removeClass('in')
.off('click.dismiss.bs.modal')
.off('mouseup.dismiss.bs.modal')
this.$dialog.off('mousedown.dismiss.bs.modal')
$.support.transition && this.$element.hasClass('fade') ?
this.$element
.one('bsTransitionEnd', $.proxy(this.hideModal, this))
.emulateTransitionEnd(Modal.TRANSITION_DURATION) :
this.hideModal()
}
Modal.prototype.enforceFocus = function () {
$(document)
.off('focusin.bs.modal') // guard against infinite focus loop
.on('focusin.bs.modal', $.proxy(function (e) {
if (this.$element[0] !== e.target && !this.$element.has(e.target).length) {
this.$element.trigger('focus')
}
}, this))
}
Modal.prototype.escape = function () {
if (this.isShown && this.options.keyboard) {
this.$element.on('keydown.dismiss.bs.modal', $.proxy(function (e) {
e.which == 27 && this.hide()
}, this))
} else if (!this.isShown) {
this.$element.off('keydown.dismiss.bs.modal')
}
}
Modal.prototype.resize = function () {
if (this.isShown) {
$(window).on('resize.bs.modal', $.proxy(this.handleUpdate, this))
} else {
$(window).off('resize.bs.modal')
}
}
Modal.prototype.hideModal = function () {
var that = this
this.$element.hide()
this.backdrop(function () {
that.$body.removeClass('modal-open')
that.resetAdjustments()
that.resetScrollbar()
that.$element.trigger('hidden.bs.modal')
})
}
Modal.prototype.removeBackdrop = function () {
this.$backdrop && this.$backdrop.remove()
this.$backdrop = null
}
Modal.prototype.backdrop = function (callback) {
var that = this
var animate = this.$element.hasClass('fade') ? 'fade' : ''
if (this.isShown && this.options.backdrop) {
var doAnimate = $.support.transition && animate
this.$backdrop = $(document.createElement('div'))
.addClass('modal-backdrop ' + animate)
.appendTo(this.$body)
this.$element.on('click.dismiss.bs.modal', $.proxy(function (e) {
if (this.ignoreBackdropClick) {
this.ignoreBackdropClick = false
return
}
if (e.target !== e.currentTarget) return
this.options.backdrop == 'static'
? this.$element[0].focus()
: this.hide()
}, this))
if (doAnimate) this.$backdrop[0].offsetWidth // force reflow
this.$backdrop.addClass('in')
if (!callback) return
doAnimate ?
this.$backdrop
.one('bsTransitionEnd', callback)
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
callback()
} else if (!this.isShown && this.$backdrop) {
this.$backdrop.removeClass('in')
var callbackRemove = function () {
that.removeBackdrop()
callback && callback()
}
$.support.transition && this.$element.hasClass('fade') ?
this.$backdrop
.one('bsTransitionEnd', callbackRemove)
.emulateTransitionEnd(Modal.BACKDROP_TRANSITION_DURATION) :
callbackRemove()
} else if (callback) {
callback()
}
}
// these following methods are used to handle overflowing modals
Modal.prototype.handleUpdate = function () {
this.adjustDialog()
}
Modal.prototype.adjustDialog = function () {
var modalIsOverflowing = this.$element[0].scrollHeight > document.documentElement.clientHeight
this.$element.css({
paddingLeft: !this.bodyIsOverflowing && modalIsOverflowing ? this.scrollbarWidth : '',
paddingRight: this.bodyIsOverflowing && !modalIsOverflowing ? this.scrollbarWidth : ''
})
}
Modal.prototype.resetAdjustments = function () {
this.$element.css({
paddingLeft: '',
paddingRight: ''
})
}
Modal.prototype.checkScrollbar = function () {
var fullWindowWidth = window.innerWidth
if (!fullWindowWidth) { // workaround for missing window.innerWidth in IE8
var documentElementRect = document.documentElement.getBoundingClientRect()
fullWindowWidth = documentElementRect.right - Math.abs(documentElementRect.left)
}
this.bodyIsOverflowing = document.body.clientWidth < fullWindowWidth
this.scrollbarWidth = this.measureScrollbar()
}
Modal.prototype.setScrollbar = function () {
var bodyPad = parseInt((this.$body.css('padding-right') || 0), 10)
this.originalBodyPad = document.body.style.paddingRight || ''
if (this.bodyIsOverflowing) this.$body.css('padding-right', bodyPad + this.scrollbarWidth)
}
Modal.prototype.resetScrollbar = function () {
this.$body.css('padding-right', this.originalBodyPad)
}
Modal.prototype.measureScrollbar = function () { // thx walsh
var scrollDiv = document.createElement('div')
scrollDiv.className = 'modal-scrollbar-measure'
this.$body.append(scrollDiv)
var scrollbarWidth = scrollDiv.offsetWidth - scrollDiv.clientWidth
this.$body[0].removeChild(scrollDiv)
return scrollbarWidth
}
// MODAL PLUGIN DEFINITION
// =======================
function Plugin(option, _relatedTarget) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.modal')
var options = $.extend({}, Modal.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data) $this.data('bs.modal', (data = new Modal(this, options)))
if (typeof option == 'string') data[option](_relatedTarget)
else if (options.show) data.show(_relatedTarget)
})
}
var old = $.fn.modal
$.fn.modal = Plugin
$.fn.modal.Constructor = Modal
// MODAL NO CONFLICT
// =================
$.fn.modal.noConflict = function () {
$.fn.modal = old
return this
}
// MODAL DATA-API
// ==============
$(document).on('click.bs.modal.data-api', '[data-toggle="modal"]', function (e) {
var $this = $(this)
var href = $this.attr('href')
var $target = $($this.attr('data-target') || (href && href.replace(/.*(?=#[^\s]+$)/, ''))) // strip for ie7
var option = $target.data('bs.modal') ? 'toggle' : $.extend({ remote: !/#/.test(href) && href }, $target.data(), $this.data())
if ($this.is('a')) e.preventDefault()
$target.one('show.bs.modal', function (showEvent) {
if (showEvent.isDefaultPrevented()) return // only register focus restorer if modal will actually get shown
$target.one('hidden.bs.modal', function () {
$this.is(':visible') && $this.trigger('focus')
})
})
Plugin.call($target, option, this)
})
}(jQuery);
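// Usage sketch (hypothetical '#myModal'): an options object initializes and shows the
// modal (DEFAULTS.show is true); string options drive the instance afterwards.
//   $('#myModal').modal({ backdrop: 'static', keyboard: false })
//   $('#myModal').modal('hide')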
/* ========================================================================
* Bootstrap: tooltip.js v3.3.5
* http://getbootstrap.com/javascript/#tooltip
* Inspired by the original jQuery.tipsy by Jason Frame
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// TOOLTIP PUBLIC CLASS DEFINITION
// ===============================
var Tooltip = function (element, options) {
this.type = null
this.options = null
this.enabled = null
this.timeout = null
this.hoverState = null
this.$element = null
this.inState = null
this.init('tooltip', element, options)
}
Tooltip.VERSION = '3.3.5'
Tooltip.TRANSITION_DURATION = 150
Tooltip.DEFAULTS = {
animation: true,
placement: 'top',
selector: false,
template: '<div class="tooltip" role="tooltip"><div class="tooltip-arrow"></div><div class="tooltip-inner"></div></div>',
trigger: 'hover focus',
title: '',
delay: 0,
html: false,
container: false,
viewport: {
selector: 'body',
padding: 0
}
}
Tooltip.prototype.init = function (type, element, options) {
this.enabled = true
this.type = type
this.$element = $(element)
this.options = this.getOptions(options)
this.$viewport = this.options.viewport && $($.isFunction(this.options.viewport) ? this.options.viewport.call(this, this.$element) : (this.options.viewport.selector || this.options.viewport))
this.inState = { click: false, hover: false, focus: false }
if (this.$element[0] instanceof document.constructor && !this.options.selector) {
throw new Error('`selector` option must be specified when initializing ' + this.type + ' on the window.document object!')
}
var triggers = this.options.trigger.split(' ')
for (var i = triggers.length; i--;) {
var trigger = triggers[i]
if (trigger == 'click') {
this.$element.on('click.' + this.type, this.options.selector, $.proxy(this.toggle, this))
} else if (trigger != 'manual') {
var eventIn = trigger == 'hover' ? 'mouseenter' : 'focusin'
var eventOut = trigger == 'hover' ? 'mouseleave' : 'focusout'
this.$element.on(eventIn + '.' + this.type, this.options.selector, $.proxy(this.enter, this))
this.$element.on(eventOut + '.' + this.type, this.options.selector, $.proxy(this.leave, this))
}
}
this.options.selector ?
(this._options = $.extend({}, this.options, { trigger: 'manual', selector: '' })) :
this.fixTitle()
}
Tooltip.prototype.getDefaults = function () {
return Tooltip.DEFAULTS
}
Tooltip.prototype.getOptions = function (options) {
options = $.extend({}, this.getDefaults(), this.$element.data(), options)
if (options.delay && typeof options.delay == 'number') {
options.delay = {
show: options.delay,
hide: options.delay
}
}
return options
}
Tooltip.prototype.getDelegateOptions = function () {
var options = {}
var defaults = this.getDefaults()
this._options && $.each(this._options, function (key, value) {
if (defaults[key] != value) options[key] = value
})
return options
}
Tooltip.prototype.enter = function (obj) {
var self = obj instanceof this.constructor ?
obj : $(obj.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
$(obj.currentTarget).data('bs.' + this.type, self)
}
if (obj instanceof $.Event) {
self.inState[obj.type == 'focusin' ? 'focus' : 'hover'] = true
}
if (self.tip().hasClass('in') || self.hoverState == 'in') {
self.hoverState = 'in'
return
}
clearTimeout(self.timeout)
self.hoverState = 'in'
if (!self.options.delay || !self.options.delay.show) return self.show()
self.timeout = setTimeout(function () {
if (self.hoverState == 'in') self.show()
}, self.options.delay.show)
}
Tooltip.prototype.isInStateTrue = function () {
for (var key in this.inState) {
if (this.inState[key]) return true
}
return false
}
Tooltip.prototype.leave = function (obj) {
var self = obj instanceof this.constructor ?
obj : $(obj.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(obj.currentTarget, this.getDelegateOptions())
$(obj.currentTarget).data('bs.' + this.type, self)
}
if (obj instanceof $.Event) {
self.inState[obj.type == 'focusout' ? 'focus' : 'hover'] = false
}
if (self.isInStateTrue()) return
clearTimeout(self.timeout)
self.hoverState = 'out'
if (!self.options.delay || !self.options.delay.hide) return self.hide()
self.timeout = setTimeout(function () {
if (self.hoverState == 'out') self.hide()
}, self.options.delay.hide)
}
Tooltip.prototype.show = function () {
var e = $.Event('show.bs.' + this.type)
if (this.hasContent() && this.enabled) {
this.$element.trigger(e)
var inDom = $.contains(this.$element[0].ownerDocument.documentElement, this.$element[0])
if (e.isDefaultPrevented() || !inDom) return
var that = this
var $tip = this.tip()
var tipId = this.getUID(this.type)
this.setContent()
$tip.attr('id', tipId)
this.$element.attr('aria-describedby', tipId)
if (this.options.animation) $tip.addClass('fade')
var placement = typeof this.options.placement == 'function' ?
this.options.placement.call(this, $tip[0], this.$element[0]) :
this.options.placement
var autoToken = /\s?auto?\s?/i
var autoPlace = autoToken.test(placement)
if (autoPlace) placement = placement.replace(autoToken, '') || 'top'
$tip
.detach()
.css({ top: 0, left: 0, display: 'block' })
.addClass(placement)
.data('bs.' + this.type, this)
this.options.container ? $tip.appendTo(this.options.container) : $tip.insertAfter(this.$element)
this.$element.trigger('inserted.bs.' + this.type)
var pos = this.getPosition()
var actualWidth = $tip[0].offsetWidth
var actualHeight = $tip[0].offsetHeight
if (autoPlace) {
var orgPlacement = placement
var viewportDim = this.getPosition(this.$viewport)
placement = placement == 'bottom' && pos.bottom + actualHeight > viewportDim.bottom ? 'top' :
placement == 'top' && pos.top - actualHeight < viewportDim.top ? 'bottom' :
placement == 'right' && pos.right + actualWidth > viewportDim.width ? 'left' :
placement == 'left' && pos.left - actualWidth < viewportDim.left ? 'right' :
placement
$tip
.removeClass(orgPlacement)
.addClass(placement)
}
var calculatedOffset = this.getCalculatedOffset(placement, pos, actualWidth, actualHeight)
this.applyPlacement(calculatedOffset, placement)
var complete = function () {
var prevHoverState = that.hoverState
that.$element.trigger('shown.bs.' + that.type)
that.hoverState = null
if (prevHoverState == 'out') that.leave(that)
}
$.support.transition && this.$tip.hasClass('fade') ?
$tip
.one('bsTransitionEnd', complete)
.emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
complete()
}
}
Tooltip.prototype.applyPlacement = function (offset, placement) {
var $tip = this.tip()
var width = $tip[0].offsetWidth
var height = $tip[0].offsetHeight
// manually read margins because getBoundingClientRect includes difference
var marginTop = parseInt($tip.css('margin-top'), 10)
var marginLeft = parseInt($tip.css('margin-left'), 10)
// we must check for NaN for ie 8/9
if (isNaN(marginTop)) marginTop = 0
if (isNaN(marginLeft)) marginLeft = 0
offset.top += marginTop
offset.left += marginLeft
// $.fn.offset doesn't round pixel values
// so we use setOffset directly with our own function B-0
$.offset.setOffset($tip[0], $.extend({
using: function (props) {
$tip.css({
top: Math.round(props.top),
left: Math.round(props.left)
})
}
}, offset), 0)
$tip.addClass('in')
// check to see if placing tip in new offset caused the tip to resize itself
var actualWidth = $tip[0].offsetWidth
var actualHeight = $tip[0].offsetHeight
if (placement == 'top' && actualHeight != height) {
offset.top = offset.top + height - actualHeight
}
var delta = this.getViewportAdjustedDelta(placement, offset, actualWidth, actualHeight)
if (delta.left) offset.left += delta.left
else offset.top += delta.top
var isVertical = /top|bottom/.test(placement)
var arrowDelta = isVertical ? delta.left * 2 - width + actualWidth : delta.top * 2 - height + actualHeight
var arrowOffsetPosition = isVertical ? 'offsetWidth' : 'offsetHeight'
$tip.offset(offset)
this.replaceArrow(arrowDelta, $tip[0][arrowOffsetPosition], isVertical)
}
Tooltip.prototype.replaceArrow = function (delta, dimension, isVertical) {
this.arrow()
.css(isVertical ? 'left' : 'top', 50 * (1 - delta / dimension) + '%')
.css(isVertical ? 'top' : 'left', '')
}
Tooltip.prototype.setContent = function () {
var $tip = this.tip()
var title = this.getTitle()
$tip.find('.tooltip-inner')[this.options.html ? 'html' : 'text'](title)
$tip.removeClass('fade in top bottom left right')
}
Tooltip.prototype.hide = function (callback) {
var that = this
var $tip = $(this.$tip)
var e = $.Event('hide.bs.' + this.type)
function complete() {
if (that.hoverState != 'in') $tip.detach()
that.$element
.removeAttr('aria-describedby')
.trigger('hidden.bs.' + that.type)
callback && callback()
}
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
$tip.removeClass('in')
$.support.transition && $tip.hasClass('fade') ?
$tip
.one('bsTransitionEnd', complete)
.emulateTransitionEnd(Tooltip.TRANSITION_DURATION) :
complete()
this.hoverState = null
return this
}
Tooltip.prototype.fixTitle = function () {
var $e = this.$element
if ($e.attr('title') || typeof $e.attr('data-original-title') != 'string') {
$e.attr('data-original-title', $e.attr('title') || '').attr('title', '')
}
}
Tooltip.prototype.hasContent = function () {
return this.getTitle()
}
Tooltip.prototype.getPosition = function ($element) {
$element = $element || this.$element
var el = $element[0]
var isBody = el.tagName == 'BODY'
var elRect = el.getBoundingClientRect()
if (elRect.width == null) {
// width and height are missing in IE8, so compute them manually; see https://github.com/twbs/bootstrap/issues/14093
elRect = $.extend({}, elRect, { width: elRect.right - elRect.left, height: elRect.bottom - elRect.top })
}
var elOffset = isBody ? { top: 0, left: 0 } : $element.offset()
var scroll = { scroll: isBody ? document.documentElement.scrollTop || document.body.scrollTop : $element.scrollTop() }
var outerDims = isBody ? { width: $(window).width(), height: $(window).height() } : null
return $.extend({}, elRect, scroll, outerDims, elOffset)
}
Tooltip.prototype.getCalculatedOffset = function (placement, pos, actualWidth, actualHeight) {
return placement == 'bottom' ? { top: pos.top + pos.height, left: pos.left + pos.width / 2 - actualWidth / 2 } :
placement == 'top' ? { top: pos.top - actualHeight, left: pos.left + pos.width / 2 - actualWidth / 2 } :
placement == 'left' ? { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left - actualWidth } :
/* placement == 'right' */ { top: pos.top + pos.height / 2 - actualHeight / 2, left: pos.left + pos.width }
}
Tooltip.prototype.getViewportAdjustedDelta = function (placement, pos, actualWidth, actualHeight) {
var delta = { top: 0, left: 0 }
if (!this.$viewport) return delta
var viewportPadding = this.options.viewport && this.options.viewport.padding || 0
var viewportDimensions = this.getPosition(this.$viewport)
if (/right|left/.test(placement)) {
var topEdgeOffset = pos.top - viewportPadding - viewportDimensions.scroll
var bottomEdgeOffset = pos.top + viewportPadding - viewportDimensions.scroll + actualHeight
if (topEdgeOffset < viewportDimensions.top) { // top overflow
delta.top = viewportDimensions.top - topEdgeOffset
} else if (bottomEdgeOffset > viewportDimensions.top + viewportDimensions.height) { // bottom overflow
delta.top = viewportDimensions.top + viewportDimensions.height - bottomEdgeOffset
}
} else {
var leftEdgeOffset = pos.left - viewportPadding
var rightEdgeOffset = pos.left + viewportPadding + actualWidth
if (leftEdgeOffset < viewportDimensions.left) { // left overflow
delta.left = viewportDimensions.left - leftEdgeOffset
} else if (rightEdgeOffset > viewportDimensions.right) { // right overflow
delta.left = viewportDimensions.left + viewportDimensions.width - rightEdgeOffset
}
}
return delta
}
Tooltip.prototype.getTitle = function () {
var title
var $e = this.$element
var o = this.options
title = $e.attr('data-original-title')
|| (typeof o.title == 'function' ? o.title.call($e[0]) : o.title)
return title
}
Tooltip.prototype.getUID = function (prefix) {
do prefix += ~~(Math.random() * 1000000)
while (document.getElementById(prefix))
return prefix
}
Tooltip.prototype.tip = function () {
if (!this.$tip) {
this.$tip = $(this.options.template)
if (this.$tip.length != 1) {
throw new Error(this.type + ' `template` option must consist of exactly 1 top-level element!')
}
}
return this.$tip
}
Tooltip.prototype.arrow = function () {
return (this.$arrow = this.$arrow || this.tip().find('.tooltip-arrow'))
}
Tooltip.prototype.enable = function () {
this.enabled = true
}
Tooltip.prototype.disable = function () {
this.enabled = false
}
Tooltip.prototype.toggleEnabled = function () {
this.enabled = !this.enabled
}
Tooltip.prototype.toggle = function (e) {
var self = this
if (e) {
self = $(e.currentTarget).data('bs.' + this.type)
if (!self) {
self = new this.constructor(e.currentTarget, this.getDelegateOptions())
$(e.currentTarget).data('bs.' + this.type, self)
}
}
if (e) {
self.inState.click = !self.inState.click
if (self.isInStateTrue()) self.enter(self)
else self.leave(self)
} else {
self.tip().hasClass('in') ? self.leave(self) : self.enter(self)
}
}
Tooltip.prototype.destroy = function () {
var that = this
clearTimeout(this.timeout)
this.hide(function () {
that.$element.off('.' + that.type).removeData('bs.' + that.type)
if (that.$tip) {
that.$tip.detach()
}
that.$tip = null
that.$arrow = null
that.$viewport = null
})
}
// TOOLTIP PLUGIN DEFINITION
// =========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.tooltip')
var options = typeof option == 'object' && option
if (!data && /destroy|hide/.test(option)) return
if (!data) $this.data('bs.tooltip', (data = new Tooltip(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.tooltip
$.fn.tooltip = Plugin
$.fn.tooltip.Constructor = Tooltip
// TOOLTIP NO CONFLICT
// ===================
$.fn.tooltip.noConflict = function () {
$.fn.tooltip = old
return this
}
}(jQuery);
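// Usage sketch (hypothetical selectors): initialize with an options object, then drive
// the instance with string options via Plugin() above.
//   $('[data-toggle="tooltip"]').tooltip({ placement: 'bottom', container: 'body' })
//   $('#hint').tooltip('show')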
/* ========================================================================
* Bootstrap: popover.js v3.3.5
* http://getbootstrap.com/javascript/#popovers
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// POPOVER PUBLIC CLASS DEFINITION
// ===============================
var Popover = function (element, options) {
this.init('popover', element, options)
}
if (!$.fn.tooltip) throw new Error('Popover requires tooltip.js')
Popover.VERSION = '3.3.5'
Popover.DEFAULTS = $.extend({}, $.fn.tooltip.Constructor.DEFAULTS, {
placement: 'right',
trigger: 'click',
content: '',
template: '<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-title"></h3><div class="popover-content"></div></div>'
})
// NOTE: POPOVER EXTENDS tooltip.js
// ================================
Popover.prototype = $.extend({}, $.fn.tooltip.Constructor.prototype)
Popover.prototype.constructor = Popover
Popover.prototype.getDefaults = function () {
return Popover.DEFAULTS
}
Popover.prototype.setContent = function () {
var $tip = this.tip()
var title = this.getTitle()
var content = this.getContent()
$tip.find('.popover-title')[this.options.html ? 'html' : 'text'](title)
$tip.find('.popover-content').children().detach().end()[ // we use append for html objects to maintain js events
this.options.html ? (typeof content == 'string' ? 'html' : 'append') : 'text'
](content)
$tip.removeClass('fade top bottom left right in')
// IE8 doesn't accept hiding via the `:empty` pseudo selector, we have to do
// this manually by checking the contents.
if (!$tip.find('.popover-title').html()) $tip.find('.popover-title').hide()
}
Popover.prototype.hasContent = function () {
return this.getTitle() || this.getContent()
}
Popover.prototype.getContent = function () {
var $e = this.$element
var o = this.options
return $e.attr('data-content')
|| (typeof o.content == 'function' ?
o.content.call($e[0]) :
o.content)
}
Popover.prototype.arrow = function () {
return (this.$arrow = this.$arrow || this.tip().find('.arrow'))
}
// POPOVER PLUGIN DEFINITION
// =========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.popover')
var options = typeof option == 'object' && option
if (!data && /destroy|hide/.test(option)) return
if (!data) $this.data('bs.popover', (data = new Popover(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.popover
$.fn.popover = Plugin
$.fn.popover.Constructor = Popover
// POPOVER NO CONFLICT
// ===================
$.fn.popover.noConflict = function () {
$.fn.popover = old
return this
}
}(jQuery);
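// Usage sketch (hypothetical '#help'): Popover reuses the tooltip machinery above, so
// the same plugin pattern applies.
//   $('#help').popover({ trigger: 'hover', content: 'Contextual help text' })
//   $('#help').popover('toggle')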
/* ========================================================================
* Bootstrap: tab.js v3.3.5
* http://getbootstrap.com/javascript/#tabs
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// TAB CLASS DEFINITION
// ====================
var Tab = function (element) {
// jscs:disable requireDollarBeforejQueryAssignment
this.element = $(element)
// jscs:enable requireDollarBeforejQueryAssignment
}
Tab.VERSION = '3.3.5'
Tab.TRANSITION_DURATION = 150
Tab.prototype.show = function () {
var $this = this.element
var $ul = $this.closest('ul:not(.dropdown-menu)')
var selector = $this.data('target')
if (!selector) {
selector = $this.attr('href')
selector = selector && selector.replace(/.*(?=#[^\s]*$)/, '') // strip for ie7
}
if ($this.parent('li').hasClass('active')) return
var $previous = $ul.find('.active:last a')
var hideEvent = $.Event('hide.bs.tab', {
relatedTarget: $this[0]
})
var showEvent = $.Event('show.bs.tab', {
relatedTarget: $previous[0]
})
$previous.trigger(hideEvent)
$this.trigger(showEvent)
if (showEvent.isDefaultPrevented() || hideEvent.isDefaultPrevented()) return
var $target = $(selector)
this.activate($this.closest('li'), $ul)
this.activate($target, $target.parent(), function () {
$previous.trigger({
type: 'hidden.bs.tab',
relatedTarget: $this[0]
})
$this.trigger({
type: 'shown.bs.tab',
relatedTarget: $previous[0]
})
})
}
Tab.prototype.activate = function (element, container, callback) {
var $active = container.find('> .active')
var transition = callback
&& $.support.transition
&& ($active.length && $active.hasClass('fade') || !!container.find('> .fade').length)
function next() {
$active
.removeClass('active')
.find('> .dropdown-menu > .active')
.removeClass('active')
.end()
.find('[data-toggle="tab"]')
.attr('aria-expanded', false)
element
.addClass('active')
.find('[data-toggle="tab"]')
.attr('aria-expanded', true)
if (transition) {
element[0].offsetWidth // reflow for transition
element.addClass('in')
} else {
element.removeClass('fade')
}
if (element.parent('.dropdown-menu').length) {
element
.closest('li.dropdown')
.addClass('active')
.end()
.find('[data-toggle="tab"]')
.attr('aria-expanded', true)
}
callback && callback()
}
$active.length && transition ?
$active
.one('bsTransitionEnd', next)
.emulateTransitionEnd(Tab.TRANSITION_DURATION) :
next()
$active.removeClass('in')
}
// TAB PLUGIN DEFINITION
// =====================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.tab')
if (!data) $this.data('bs.tab', (data = new Tab(this)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.tab
$.fn.tab = Plugin
$.fn.tab.Constructor = Tab
// TAB NO CONFLICT
// ===============
$.fn.tab.noConflict = function () {
$.fn.tab = old
return this
}
// TAB DATA-API
// ============
var clickHandler = function (e) {
e.preventDefault()
Plugin.call($(this), 'show')
}
$(document)
.on('click.bs.tab.data-api', '[data-toggle="tab"]', clickHandler)
.on('click.bs.tab.data-api', '[data-toggle="pill"]', clickHandler)
}(jQuery);
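// Usage sketch (hypothetical '#myTabs' markup): a tab is shown programmatically through
// Plugin() -> Tab.prototype.show().
//   $('#myTabs a[href="#profile"]').tab('show')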
/* ========================================================================
* Bootstrap: affix.js v3.3.5
* http://getbootstrap.com/javascript/#affix
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// AFFIX CLASS DEFINITION
// ======================
var Affix = function (element, options) {
this.options = $.extend({}, Affix.DEFAULTS, options)
this.$target = $(this.options.target)
.on('scroll.bs.affix.data-api', $.proxy(this.checkPosition, this))
.on('click.bs.affix.data-api', $.proxy(this.checkPositionWithEventLoop, this))
this.$element = $(element)
this.affixed = null
this.unpin = null
this.pinnedOffset = null
this.checkPosition()
}
Affix.VERSION = '3.3.5'
Affix.RESET = 'affix affix-top affix-bottom'
Affix.DEFAULTS = {
offset: 0,
target: window
}
Affix.prototype.getState = function (scrollHeight, height, offsetTop, offsetBottom) {
var scrollTop = this.$target.scrollTop()
var position = this.$element.offset()
var targetHeight = this.$target.height()
if (offsetTop != null && this.affixed == 'top') return scrollTop < offsetTop ? 'top' : false
if (this.affixed == 'bottom') {
if (offsetTop != null) return (scrollTop + this.unpin <= position.top) ? false : 'bottom'
return (scrollTop + targetHeight <= scrollHeight - offsetBottom) ? false : 'bottom'
}
var initializing = this.affixed == null
var colliderTop = initializing ? scrollTop : position.top
var colliderHeight = initializing ? targetHeight : height
if (offsetTop != null && scrollTop <= offsetTop) return 'top'
if (offsetBottom != null && (colliderTop + colliderHeight >= scrollHeight - offsetBottom)) return 'bottom'
return false
}
Affix.prototype.getPinnedOffset = function () {
if (this.pinnedOffset) return this.pinnedOffset
this.$element.removeClass(Affix.RESET).addClass('affix')
var scrollTop = this.$target.scrollTop()
var position = this.$element.offset()
return (this.pinnedOffset = position.top - scrollTop)
}
Affix.prototype.checkPositionWithEventLoop = function () {
setTimeout($.proxy(this.checkPosition, this), 1)
}
Affix.prototype.checkPosition = function () {
if (!this.$element.is(':visible')) return
var height = this.$element.height()
var offset = this.options.offset
var offsetTop = offset.top
var offsetBottom = offset.bottom
var scrollHeight = Math.max($(document).height(), $(document.body).height())
if (typeof offset != 'object') offsetBottom = offsetTop = offset
if (typeof offsetTop == 'function') offsetTop = offset.top(this.$element)
if (typeof offsetBottom == 'function') offsetBottom = offset.bottom(this.$element)
var affix = this.getState(scrollHeight, height, offsetTop, offsetBottom)
if (this.affixed != affix) {
if (this.unpin != null) this.$element.css('top', '')
var affixType = 'affix' + (affix ? '-' + affix : '')
var e = $.Event(affixType + '.bs.affix')
this.$element.trigger(e)
if (e.isDefaultPrevented()) return
this.affixed = affix
this.unpin = affix == 'bottom' ? this.getPinnedOffset() : null
this.$element
.removeClass(Affix.RESET)
.addClass(affixType)
.trigger(affixType.replace('affix', 'affixed') + '.bs.affix')
}
if (affix == 'bottom') {
this.$element.offset({
top: scrollHeight - height - offsetBottom
})
}
}
// AFFIX PLUGIN DEFINITION
// =======================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.affix')
var options = typeof option == 'object' && option
if (!data) $this.data('bs.affix', (data = new Affix(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.affix
$.fn.affix = Plugin
$.fn.affix.Constructor = Affix
// AFFIX NO CONFLICT
// =================
$.fn.affix.noConflict = function () {
$.fn.affix = old
return this
}
// AFFIX DATA-API
// ==============
$(window).on('load', function () {
$('[data-spy="affix"]').each(function () {
var $spy = $(this)
var data = $spy.data()
data.offset = data.offset || {}
if (data.offsetBottom != null) data.offset.bottom = data.offsetBottom
if (data.offsetTop != null) data.offset.top = data.offsetTop
Plugin.call($spy, data)
})
})
}(jQuery);
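// Usage sketch (hypothetical '#sidebar'): offset values may be numbers or functions, as
// resolved in checkPosition() above.
//   $('#sidebar').affix({ offset: { top: 100, bottom: function () { return 50 } } })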
/* ========================================================================
* Bootstrap: collapse.js v3.3.5
* http://getbootstrap.com/javascript/#collapse
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// COLLAPSE PUBLIC CLASS DEFINITION
// ================================
var Collapse = function (element, options) {
this.$element = $(element)
this.options = $.extend({}, Collapse.DEFAULTS, options)
this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' +
'[data-toggle="collapse"][data-target="#' + element.id + '"]')
this.transitioning = null
if (this.options.parent) {
this.$parent = this.getParent()
} else {
this.addAriaAndCollapsedClass(this.$element, this.$trigger)
}
if (this.options.toggle) this.toggle()
}
Collapse.VERSION = '3.3.5'
Collapse.TRANSITION_DURATION = 350
Collapse.DEFAULTS = {
toggle: true
}
Collapse.prototype.dimension = function () {
var hasWidth = this.$element.hasClass('width')
return hasWidth ? 'width' : 'height'
}
Collapse.prototype.show = function () {
if (this.transitioning || this.$element.hasClass('in')) return
var activesData
var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing')
if (actives && actives.length) {
activesData = actives.data('bs.collapse')
if (activesData && activesData.transitioning) return
}
var startEvent = $.Event('show.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
if (actives && actives.length) {
Plugin.call(actives, 'hide')
activesData || actives.data('bs.collapse', null)
}
var dimension = this.dimension()
this.$element
.removeClass('collapse')
.addClass('collapsing')[dimension](0)
.attr('aria-expanded', true)
this.$trigger
.removeClass('collapsed')
.attr('aria-expanded', true)
this.transitioning = 1
var complete = function () {
this.$element
.removeClass('collapsing')
.addClass('collapse in')[dimension]('')
this.transitioning = 0
this.$element
.trigger('shown.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
var scrollSize = $.camelCase(['scroll', dimension].join('-'))
this.$element
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize])
}
Collapse.prototype.hide = function () {
if (this.transitioning || !this.$element.hasClass('in')) return
var startEvent = $.Event('hide.bs.collapse')
this.$element.trigger(startEvent)
if (startEvent.isDefaultPrevented()) return
var dimension = this.dimension()
this.$element[dimension](this.$element[dimension]())[0].offsetHeight
this.$element
.addClass('collapsing')
.removeClass('collapse in')
.attr('aria-expanded', false)
this.$trigger
.addClass('collapsed')
.attr('aria-expanded', false)
this.transitioning = 1
var complete = function () {
this.transitioning = 0
this.$element
.removeClass('collapsing')
.addClass('collapse')
.trigger('hidden.bs.collapse')
}
if (!$.support.transition) return complete.call(this)
this.$element
[dimension](0)
.one('bsTransitionEnd', $.proxy(complete, this))
.emulateTransitionEnd(Collapse.TRANSITION_DURATION)
}
Collapse.prototype.toggle = function () {
this[this.$element.hasClass('in') ? 'hide' : 'show']()
}
Collapse.prototype.getParent = function () {
return $(this.options.parent)
.find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
.each($.proxy(function (i, element) {
var $element = $(element)
this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element)
}, this))
.end()
}
Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) {
var isOpen = $element.hasClass('in')
$element.attr('aria-expanded', isOpen)
$trigger
.toggleClass('collapsed', !isOpen)
.attr('aria-expanded', isOpen)
}
function getTargetFromTrigger($trigger) {
var href
var target = $trigger.attr('data-target')
|| (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
return $(target)
}
// COLLAPSE PLUGIN DEFINITION
// ==========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.collapse')
var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option)
if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false
if (!data) $this.data('bs.collapse', (data = new Collapse(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.collapse
$.fn.collapse = Plugin
$.fn.collapse.Constructor = Collapse
// COLLAPSE NO CONFLICT
// ====================
$.fn.collapse.noConflict = function () {
$.fn.collapse = old
return this
}
// COLLAPSE DATA-API
// =================
$(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) {
var $this = $(this)
if (!$this.attr('data-target')) e.preventDefault()
var $target = getTargetFromTrigger($this)
var data = $target.data('bs.collapse')
var option = data ? 'toggle' : $this.data()
Plugin.call($target, option)
})
}(jQuery);
/* ========================================================================
* Bootstrap: scrollspy.js v3.3.5
* http://getbootstrap.com/javascript/#scrollspy
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// SCROLLSPY CLASS DEFINITION
// ==========================
function ScrollSpy(element, options) {
this.$body = $(document.body)
this.$scrollElement = $(element).is(document.body) ? $(window) : $(element)
this.options = $.extend({}, ScrollSpy.DEFAULTS, options)
this.selector = (this.options.target || '') + ' .nav li > a'
this.offsets = []
this.targets = []
this.activeTarget = null
this.scrollHeight = 0
this.$scrollElement.on('scroll.bs.scrollspy', $.proxy(this.process, this))
this.refresh()
this.process()
}
ScrollSpy.VERSION = '3.3.5'
ScrollSpy.DEFAULTS = {
offset: 10
}
ScrollSpy.prototype.getScrollHeight = function () {
return this.$scrollElement[0].scrollHeight || Math.max(this.$body[0].scrollHeight, document.documentElement.scrollHeight)
}
ScrollSpy.prototype.refresh = function () {
var that = this
var offsetMethod = 'offset'
var offsetBase = 0
this.offsets = []
this.targets = []
this.scrollHeight = this.getScrollHeight()
if (!$.isWindow(this.$scrollElement[0])) {
offsetMethod = 'position'
offsetBase = this.$scrollElement.scrollTop()
}
this.$body
.find(this.selector)
.map(function () {
var $el = $(this)
var href = $el.data('target') || $el.attr('href')
var $href = /^#./.test(href) && $(href)
return ($href
&& $href.length
&& $href.is(':visible')
&& [[$href[offsetMethod]().top + offsetBase, href]]) || null
})
.sort(function (a, b) { return a[0] - b[0] })
.each(function () {
that.offsets.push(this[0])
that.targets.push(this[1])
})
}
ScrollSpy.prototype.process = function () {
var scrollTop = this.$scrollElement.scrollTop() + this.options.offset
var scrollHeight = this.getScrollHeight()
var maxScroll = this.options.offset + scrollHeight - this.$scrollElement.height()
var offsets = this.offsets
var targets = this.targets
var activeTarget = this.activeTarget
var i
if (this.scrollHeight != scrollHeight) {
this.refresh()
}
if (scrollTop >= maxScroll) {
return activeTarget != (i = targets[targets.length - 1]) && this.activate(i)
}
if (activeTarget && scrollTop < offsets[0]) {
this.activeTarget = null
return this.clear()
}
for (i = offsets.length; i--;) {
activeTarget != targets[i]
&& scrollTop >= offsets[i]
&& (offsets[i + 1] === undefined || scrollTop < offsets[i + 1])
&& this.activate(targets[i])
}
}
ScrollSpy.prototype.activate = function (target) {
this.activeTarget = target
this.clear()
var selector = this.selector +
'[data-target="' + target + '"],' +
this.selector + '[href="' + target + '"]'
var active = $(selector)
.parents('li')
.addClass('active')
if (active.parent('.dropdown-menu').length) {
active = active
.closest('li.dropdown')
.addClass('active')
}
active.trigger('activate.bs.scrollspy')
}
ScrollSpy.prototype.clear = function () {
$(this.selector)
.parentsUntil(this.options.target, '.active')
.removeClass('active')
}
// SCROLLSPY PLUGIN DEFINITION
// ===========================
function Plugin(option) {
return this.each(function () {
var $this = $(this)
var data = $this.data('bs.scrollspy')
var options = typeof option == 'object' && option
if (!data) $this.data('bs.scrollspy', (data = new ScrollSpy(this, options)))
if (typeof option == 'string') data[option]()
})
}
var old = $.fn.scrollspy
$.fn.scrollspy = Plugin
$.fn.scrollspy.Constructor = ScrollSpy
// SCROLLSPY NO CONFLICT
// =====================
$.fn.scrollspy.noConflict = function () {
$.fn.scrollspy = old
return this
}
// SCROLLSPY DATA-API
// ==================
$(window).on('load.bs.scrollspy.data-api', function () {
$('[data-spy="scroll"]').each(function () {
var $spy = $(this)
Plugin.call($spy, $spy.data())
})
})
}(jQuery);
/* ========================================================================
* Bootstrap: transition.js v3.3.5
* http://getbootstrap.com/javascript/#transitions
* ========================================================================
* Copyright 2011-2015 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
+function ($) {
'use strict';
// CSS TRANSITION SUPPORT (Shoutout: http://www.modernizr.com/)
// ============================================================
function transitionEnd() {
var el = document.createElement('bootstrap')
var transEndEventNames = {
WebkitTransition : 'webkitTransitionEnd',
MozTransition : 'transitionend',
OTransition : 'oTransitionEnd otransitionend',
transition : 'transitionend'
}
for (var name in transEndEventNames) {
if (el.style[name] !== undefined) {
return { end: transEndEventNames[name] }
}
}
return false // explicit for ie8 ( ._.)
}
// http://blog.alexmaccaw.com/css-transitions
$.fn.emulateTransitionEnd = function (duration) {
var called = false
var $el = this
$(this).one('bsTransitionEnd', function () { called = true })
var callback = function () { if (!called) $($el).trigger($.support.transition.end) }
setTimeout(callback, duration)
return this
}
$(function () {
$.support.transition = transitionEnd()
if (!$.support.transition) return
$.event.special.bsTransitionEnd = {
bindType: $.support.transition.end,
delegateType: $.support.transition.end,
handle: function (e) {
if ($(e.target).is(this)) return e.handleObj.handler.apply(this, arguments)
}
}
})
}(jQuery);<|fim▁end|> | var data = $this.data('bs.carousel') |
<|file_name|>bewerber.service.ts<|end_file_name|><|fim▁begin|>import {Injectable} from '@angular/core';
import {db, model} from 'baqend';
/**
 * This service provides the Bewerber (applicant) for the currently logged-in user (e.g. for the applicant profile)
*/<|fim▁hole|> public getBewerber(): Promise<model.Bewerber> {
return db.Bewerber.find().equal('user', db.User.me).singleResult();
}
public getNewBewerber(): model.Bewerber {
const bewerber = new db.Bewerber();
bewerber.user = db.User.me;
bewerber.sprachen = [];
bewerber.vertragsarten = [];
bewerber.email = db.User.me.username;
return bewerber;
}
}<|fim▁end|> | @Injectable()
export class BewerberService {
|
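For context, the bewerber.service.ts sample above would normally be consumed through Angular's dependency injection. The snippet below is a minimal usage sketch and is not part of the original dataset row; the component name, selector, template, and the assumption that BewerberService is registered as a provider are all hypothetical, while the getBewerber/getNewBewerber calls match the methods shown above.

// Hypothetical consumer of the BewerberService sample above (illustrative only).
import {Component, OnInit} from '@angular/core';
import {model} from 'baqend';
import {BewerberService} from './bewerber.service';

@Component({
  selector: 'app-bewerber-profil',
  template: '<p>{{ bewerber?.email }}</p>'
})
export class BewerberProfilComponent implements OnInit {
  bewerber: model.Bewerber;

  // Assumes BewerberService is listed in the module's providers.
  constructor(private bewerberService: BewerberService) {}

  ngOnInit(): void {
    // Load the applicant linked to the logged-in user, or fall back to a fresh one.
    this.bewerberService.getBewerber().then(bewerber => {
      this.bewerber = bewerber || this.bewerberService.getNewBewerber();
    });
  }
}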