prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k)
---|---
<|file_name|>Data_CSV.py<|end_file_name|><|fim▁begin|># File to read the data from MySQL and push it into CSV files.
# Python imports
import datetime as dt
import csv
import copy
import os
import pickle
# 3rd party imports
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# QSTK imports
from QSTK.qstkutil import qsdateutil as du
import QSTK.qstkutil.DataEvolved as de
def get_data(ls_symbols, ls_keys):
    '''
    @summary: Gets a data chunk for backtesting
    @param ls_symbols: symbols to use
    @param ls_keys: data keys to fetch (e.g. actual_open, volume, close)
    @note: The date range is currently hardcoded below. More data will be
           pulled from before and after the limits to ensure valid data on
           the start/end dates, which requires lookback/lookforward
    @return: data dictionary
    '''
print "Getting Data from MySQL"
# Modify dates to ensure enough data for all features
dt_start = dt.datetime(2005,1,1)
dt_end = dt.datetime(2012, 8, 31)
ldt_timestamps = du.getNYSEdays( dt_start, dt_end, dt.timedelta(hours=16) )
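    # The 16-hour offset stamps each NYSE trading day at the 4pm market close.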
c_da = de.DataAccess('mysql')
ldf_data = c_da.get_data(ldt_timestamps, ls_symbols, ls_keys)
d_data = dict(zip(ls_keys, ldf_data))
return d_data
def read_symbols(s_symbols_file):
    ls_symbols = []
    f_in = open(s_symbols_file, 'r')
    for line in f_in.readlines():
        # strip() drops the trailing newline, even on a last line without one
        ls_symbols.append(line.strip())
    f_in.close()
    return ls_symbols
def csv_sym(sym, d_data, ls_keys, s_directory):
bool_first_iter = True
for key in ls_keys:
        if bool_first_iter:
df_sym = d_data[key].reindex(columns = [sym])
df_sym = df_sym.rename(columns = {sym : key})
bool_first_iter = False
else:
df_temp = d_data[key].reindex(columns = [sym])
df_temp = df_temp.rename(columns = {sym : key})
df_sym = df_sym.join(df_temp, how= 'outer')
symfilename = sym.split('-')[0]
sym_file = open(s_directory + symfilename + '.csv', 'w')
    sym_file.write("Date,Open,High,Low,Close,Volume,Adj Close\n")
ldt_timestamps = list(df_sym.index)
ldt_timestamps.reverse()
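    # Rows are written newest-first, matching the Yahoo Finance historical CSV layout.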
for date in ldt_timestamps:
date_to_csv = '{:%Y-%m-%d}'.format(date)
string_to_csv = date_to_csv
for key in ls_keys:<|fim▁hole|> string_to_csv = string_to_csv + ',' + str(df_sym[key][date])
string_to_csv = string_to_csv + '\n'
sym_file.write(string_to_csv)
def main(s_directory, s_symbols_file):
#ls_symbols = read_symbols(s_symbols_file)
ls_symbols = ['ACS-201002','BDK-201003','BJS-201004','BSC-201108','CCT-201111','EQ-200907','JAVA-201002','NCC-200901','NOVL-201104','PBG-201003','PTV-201011','ROH-200904','SGP-200911','SII-201008','WB-200901','WYE-200910','XTO-201006']
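    # Delisted tickers; the '-YYYYMM' suffix (apparently the month trading
    # stopped) is stripped by csv_sym() when naming the output file.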
ls_keys = ['actual_open', 'actual_high', 'actual_low', 'actual_close', 'volume', 'close']
d_data = get_data(ls_symbols, ls_keys)
# print d_data
print "Creating CSV files now"
for sym in ls_symbols:
print sym
csv_sym(sym,d_data, ls_keys, s_directory)
print "Created all CSV files"
if __name__ == '__main__' :
s_directory = 'MLTData/'
s_directory = os.environ['QSDATA'] + '/Yahoo/'
s_symbols_file1 = 'MLTData/sp5002012.txt'
s_symbols_file2 = 'MLTData/index.txt'
s_symbols_file3 = 'MLTData/sp5002008.txt'
main(s_directory, s_symbols_file3)<|fim▁end|> | |
<|file_name|>unwind-uninitialized.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:fail
fn f() {
fail!();
}<|fim▁hole|>}<|fim▁end|> |
fn main() {
f();
let _a = @0; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras Applications are canned architectures with pre-trained weights."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function<|fim▁hole|>
from tensorflow.python.keras.applications import inception_v3
from tensorflow.python.keras.applications import mobilenet
from tensorflow.python.keras.applications import resnet50
from tensorflow.python.keras.applications import vgg16
from tensorflow.python.keras.applications import vgg19
from tensorflow.python.keras.applications import xception
from tensorflow.python.keras.applications.inception_v3 import InceptionV3
from tensorflow.python.keras.applications.mobilenet import MobileNet
from tensorflow.python.keras.applications.resnet50 import ResNet50
from tensorflow.python.keras.applications.vgg16 import VGG16
from tensorflow.python.keras.applications.vgg19 import VGG19
from tensorflow.python.keras.applications.xception import Xception
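# Remove the __future__ names so they are not re-exported from this module.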
del absolute_import
del division
del print_function<|fim▁end|> | |
<|file_name|>app-stats.state.ts<|end_file_name|><|fim▁begin|>import {summary, stats} from '../../cloud/app-stats/app-stats.interface';<|fim▁hole|>import {App} from '../../cloud/apps/app.interface';
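// Default state slice for app statistics; its shape is described by appStatsState below.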
export const appStats = {
isFetchingAppStats: true,
isRefreshingAppStats: false,
isScalingApp: false,
showScaleDialog: false,
stats: <stats>{},
summary: <summary>{},
serviceBinding: [],
environmentVariables: {},
app: {} as App
};
export interface appStatsState {
isFetchingAppStats: boolean;
isRefreshingAppStats: boolean;
isScalingApp: boolean;
showScaleDialog: boolean;
stats?: stats;
summary?: summary;
serviceBinding: Array<any>;
environmentVariables: any;
app: App;
}<|fim▁end|> | |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># pylint: disable=invalid-unary-operand-type
from collections import OrderedDict
import json
import logging
from copy import deepcopy
from datetime import datetime, timedelta
from six import string_types
import requests
import sqlalchemy as sa
from sqlalchemy import (
Column, Integer, String, ForeignKey, Text, Boolean,
DateTime,
)
from sqlalchemy.orm import backref, relationship
from dateutil.parser import parse as dparse
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.postaggregator import (
Postaggregator, Quantile, Quantiles, Field, Const, HyperUniqueCardinality,
)
from pydruid.utils.having import Aggregation
from flask import Markup, escape
from flask_appbuilder.models.decorators import renders
from flask_appbuilder import Model<|fim▁hole|>
from flask_babel import lazy_gettext as _
from superset import conf, db, import_util, utils, sm, get_session
from superset.utils import (
flasher, MetricPermException, DimSelector, DTTM_ALIAS
)
from superset.connectors.base.models import BaseDatasource, BaseColumn, BaseMetric
from superset.models.helpers import AuditMixinNullable, QueryResult, set_perm
DRUID_TZ = conf.get("DRUID_TZ")
class JavascriptPostAggregator(Postaggregator):
def __init__(self, name, field_names, function):
self.post_aggregator = {
'type': 'javascript',
'fieldNames': field_names,
'name': name,
'function': function,
}
self.name = name
class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""
def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator
class DruidCluster(Model, AuditMixinNullable):
"""ORM object referencing the Druid clusters"""
__tablename__ = 'clusters'
type = "druid"
id = Column(Integer, primary_key=True)
verbose_name = Column(String(250), unique=True)
# short unique name, used in permissions
cluster_name = Column(String(250), unique=True)
coordinator_host = Column(String(255))
coordinator_port = Column(Integer, default=8081)
coordinator_endpoint = Column(
String(255), default='druid/coordinator/v1/metadata')
broker_host = Column(String(255))
broker_port = Column(Integer, default=8082)
broker_endpoint = Column(String(255), default='druid/v2')
metadata_last_refreshed = Column(DateTime)
cache_timeout = Column(Integer)
def __repr__(self):
return self.verbose_name if self.verbose_name else self.cluster_name
def get_pydruid_client(self):
cli = PyDruid(
"http://{0}:{1}/".format(self.broker_host, self.broker_port),
self.broker_endpoint)
return cli
def get_datasources(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/"
"{obj.coordinator_endpoint}/datasources"
).format(obj=self)
return json.loads(requests.get(endpoint).text)
def get_druid_version(self):
endpoint = (
"http://{obj.coordinator_host}:{obj.coordinator_port}/status"
).format(obj=self)
return json.loads(requests.get(endpoint).text)['version']
def refresh_datasources(self, datasource_name=None, merge_flag=False):
"""Refresh metadata of all datasources in the cluster
If ``datasource_name`` is specified, only that datasource is updated
"""
self.druid_version = self.get_druid_version()
for datasource in self.get_datasources():
if datasource not in conf.get('DRUID_DATA_SOURCE_BLACKLIST', []):
if not datasource_name or datasource_name == datasource:
DruidDatasource.sync_to_db(datasource, self, merge_flag)
@property
def perm(self):
return "[{obj.cluster_name}].(id:{obj.id})".format(obj=self)
def get_perm(self):
return self.perm
@property
def name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
@property
def unique_name(self):
return self.verbose_name if self.verbose_name else self.cluster_name
class DruidColumn(Model, BaseColumn):
"""ORM model for storing Druid datasource column metadata"""
__tablename__ = 'columns'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('columns', cascade='all, delete-orphan'),
enable_typechecks=False)
dimension_spec_json = Column(Text)
export_fields = (
'datasource_name', 'column_name', 'is_active', 'type', 'groupby',
'count_distinct', 'sum', 'avg', 'max', 'min', 'filterable',
'description', 'dimension_spec_json'
)
def __repr__(self):
return self.column_name
@property
def expression(self):
return self.dimension_spec_json
@property
def dimension_spec(self):
if self.dimension_spec_json:
return json.loads(self.dimension_spec_json)
def generate_metrics(self):
"""Generate metrics based on the column metadata"""
M = DruidMetric # noqa
metrics = []
metrics.append(DruidMetric(
metric_name='count',
verbose_name='COUNT(*)',
metric_type='count',
json=json.dumps({'type': 'count', 'name': 'count'})
))
        # Druid's numeric UDAF aggregators only distinguish LONG and DOUBLE,
        # so coerce FLOAT to DOUBLE when building the aggregator type names.
if self.type in ('DOUBLE', 'FLOAT'):
corrected_type = 'DOUBLE'
else:
corrected_type = self.type
if self.sum and self.is_num:
mt = corrected_type.lower() + 'Sum'
name = 'sum__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='sum',
verbose_name='SUM({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.avg and self.is_num:
mt = corrected_type.lower() + 'Avg'
name = 'avg__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='avg',
verbose_name='AVG({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.min and self.is_num:
mt = corrected_type.lower() + 'Min'
name = 'min__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='min',
verbose_name='MIN({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.max and self.is_num:
mt = corrected_type.lower() + 'Max'
name = 'max__' + self.column_name
metrics.append(DruidMetric(
metric_name=name,
metric_type='max',
verbose_name='MAX({})'.format(self.column_name),
json=json.dumps({
'type': mt, 'name': name, 'fieldName': self.column_name})
))
if self.count_distinct:
name = 'count_distinct__' + self.column_name
if self.type == 'hyperUnique' or self.type == 'thetaSketch':
metrics.append(DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type=self.type,
json=json.dumps({
'type': self.type,
'name': name,
'fieldName': self.column_name
})
))
else:
mt = 'count_distinct'
metrics.append(DruidMetric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
metric_type='count_distinct',
json=json.dumps({
'type': 'cardinality',
'name': name,
'fieldNames': [self.column_name]})
))
session = get_session()
new_metrics = []
for metric in metrics:
m = (
session.query(M)
.filter(M.metric_name == metric.metric_name)
.filter(M.datasource_name == self.datasource_name)
.filter(DruidCluster.cluster_name == self.datasource.cluster_name)
.first()
)
metric.datasource_name = self.datasource_name
if not m:
new_metrics.append(metric)
session.add(metric)
session.flush()
@classmethod
def import_obj(cls, i_column):
def lookup_obj(lookup_column):
return db.session.query(DruidColumn).filter(
DruidColumn.datasource_name == lookup_column.datasource_name,
DruidColumn.column_name == lookup_column.column_name).first()
return import_util.import_simple_obj(db.session, i_column, lookup_obj)
class DruidMetric(Model, BaseMetric):
"""ORM object referencing Druid metrics for a datasource"""
__tablename__ = 'metrics'
datasource_name = Column(
String(255),
ForeignKey('datasources.datasource_name'))
# Setting enable_typechecks=False disables polymorphic inheritance.
datasource = relationship(
'DruidDatasource',
backref=backref('metrics', cascade='all, delete-orphan'),
enable_typechecks=False)
json = Column(Text)
export_fields = (
'metric_name', 'verbose_name', 'metric_type', 'datasource_name',
'json', 'description', 'is_restricted', 'd3format'
)
@property
def expression(self):
return self.json
@property
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception:
obj = {}
return obj
@property
def perm(self):
return (
"{parent_name}.[{obj.metric_name}](id:{obj.id})"
).format(obj=self,
parent_name=self.datasource.full_name
) if self.datasource else None
@classmethod
def import_obj(cls, i_metric):
def lookup_obj(lookup_metric):
return db.session.query(DruidMetric).filter(
DruidMetric.datasource_name == lookup_metric.datasource_name,
DruidMetric.metric_name == lookup_metric.metric_name).first()
return import_util.import_simple_obj(db.session, i_metric, lookup_obj)
class DruidDatasource(Model, BaseDatasource):
"""ORM object referencing Druid datasources (tables)"""
__tablename__ = 'datasources'
type = "druid"
    query_language = "json"
cluster_class = DruidCluster
metric_class = DruidMetric
column_class = DruidColumn
baselink = "druiddatasourcemodelview"
# Columns
datasource_name = Column(String(255), unique=True)
is_hidden = Column(Boolean, default=False)
fetch_values_from = Column(String(100))
cluster_name = Column(
String(250), ForeignKey('clusters.cluster_name'))
cluster = relationship(
'DruidCluster', backref='datasources', foreign_keys=[cluster_name])
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship(
sm.user_model,
backref=backref('datasources', cascade='all, delete-orphan'),
foreign_keys=[user_id])
export_fields = (
'datasource_name', 'is_hidden', 'description', 'default_endpoint',
'cluster_name', 'offset', 'cache_timeout', 'params'
)
@property
def database(self):
return self.cluster
@property
def connection(self):
return str(self.database)
@property
def num_cols(self):
return [c.column_name for c in self.columns if c.is_num]
@property
def name(self):
return self.datasource_name
@property
def schema(self):
ds_name = self.datasource_name or ''
name_pieces = ds_name.split('.')
if len(name_pieces) > 1:
return name_pieces[0]
else:
return None
@property
def schema_perm(self):
"""Returns schema permission if present, cluster one otherwise."""
return utils.get_schema_perm(self.cluster, self.schema)
def get_perm(self):
return (
"[{obj.cluster_name}].[{obj.datasource_name}]"
"(id:{obj.id})").format(obj=self)
@property
def link(self):
name = escape(self.datasource_name)
return Markup('<a href="{self.url}">{name}</a>').format(**locals())
@property
def full_name(self):
return utils.get_datasource_full_name(
self.cluster_name, self.datasource_name)
@property
def time_column_grains(self):
return {
"time_columns": [
'all', '5 seconds', '30 seconds', '1 minute',
'5 minutes', '1 hour', '6 hour', '1 day', '7 days',
'week', 'week_starting_sunday', 'week_ending_saturday',
'month',
],
"time_grains": ['now']
}
def __repr__(self):
return self.datasource_name
@renders('datasource_name')
def datasource_link(self):
url = "/superset/explore/{obj.type}/{obj.id}/".format(obj=self)
name = escape(self.datasource_name)
return Markup('<a href="{url}">{name}</a>'.format(**locals()))
def get_metric_obj(self, metric_name):
return [
m.json_obj for m in self.metrics
if m.metric_name == metric_name
][0]
@classmethod
def import_obj(cls, i_datasource, import_time=None):
"""Imports the datasource from the object to the database.
Metrics and columns and datasource will be overridden if exists.
This function can be used to import/export dashboards between multiple
superset instances. Audit metadata isn't copies over.
"""
def lookup_datasource(d):
return db.session.query(DruidDatasource).join(DruidCluster).filter(
DruidDatasource.datasource_name == d.datasource_name,
DruidCluster.cluster_name == d.cluster_name,
).first()
def lookup_cluster(d):
return db.session.query(DruidCluster).filter_by(
cluster_name=d.cluster_name).one()
return import_util.import_datasource(
db.session, i_datasource, lookup_cluster, lookup_datasource,
import_time)
@staticmethod
def version_higher(v1, v2):
"""is v1 higher than v2
>>> DruidDatasource.version_higher('0.8.2', '0.9.1')
False
>>> DruidDatasource.version_higher('0.8.2', '0.6.1')
True
>>> DruidDatasource.version_higher('0.8.2', '0.8.2')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9.BETA')
False
>>> DruidDatasource.version_higher('0.8.2', '0.9')
False
"""
def int_or_0(v):
try:
v = int(v)
except (TypeError, ValueError):
v = 0
return v
v1nums = [int_or_0(n) for n in v1.split('.')]
v2nums = [int_or_0(n) for n in v2.split('.')]
v1nums = (v1nums + [0, 0, 0])[:3]
v2nums = (v2nums + [0, 0, 0])[:3]
return v1nums[0] > v2nums[0] or \
(v1nums[0] == v2nums[0] and v1nums[1] > v2nums[1]) or \
(v1nums[0] == v2nums[0] and v1nums[1] == v2nums[1] and v1nums[2] > v2nums[2])
def latest_metadata(self):
"""Returns segment metadata from the latest segment"""
client = self.cluster.get_pydruid_client()
results = client.time_boundary(datasource=self.datasource_name)
if not results:
return
max_time = results[0]['result']['maxTime']
max_time = dparse(max_time)
        # Query segmentMetadata for the last 7 days. For Druid versions before
        # 0.8.2 the interval must end more than 1 day ago, to exclude realtime
        # segments, which triggered a bug (fixed in druid 0.8.2).
        # https://groups.google.com/forum/#!topic/druid-user/gVCqqspHqOQ
lbound = (max_time - timedelta(days=7)).isoformat()
rbound = max_time.isoformat()
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = (max_time - timedelta(1)).isoformat()
segment_metadata = None
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=conf.get('DRUID_ANALYSIS_TYPES'))
except Exception as e:
logging.warning("Failed first attempt to get latest segment")
logging.exception(e)
if not segment_metadata:
# if no segments in the past 7 days, look at all segments
lbound = datetime(1901, 1, 1).isoformat()[:10]
rbound = datetime(2050, 1, 1).isoformat()[:10]
if not self.version_higher(self.cluster.druid_version, '0.8.2'):
rbound = datetime.now().isoformat()
try:
segment_metadata = client.segment_metadata(
datasource=self.datasource_name,
intervals=lbound + '/' + rbound,
merge=self.merge_flag,
analysisTypes=conf.get('DRUID_ANALYSIS_TYPES'))
except Exception as e:
logging.warning("Failed 2nd attempt to get latest segment")
logging.exception(e)
if segment_metadata:
return segment_metadata[-1]['columns']
def generate_metrics(self):
for col in self.columns:
col.generate_metrics()
@classmethod
def sync_to_db_from_config(cls, druid_config, user, cluster):
"""Merges the ds config from druid_config into one stored in the db."""
session = db.session()
datasource = (
session.query(cls)
.filter_by(
datasource_name=druid_config['name'])
.first()
)
# Create a new datasource.
if not datasource:
datasource = cls(
datasource_name=druid_config['name'],
cluster=cluster,
owner=user,
changed_by_fk=user.id,
created_by_fk=user.id,
)
session.add(datasource)
dimensions = druid_config['dimensions']
for dim in dimensions:
col_obj = (
session.query(DruidColumn)
.filter_by(
datasource_name=druid_config['name'],
column_name=dim)
.first()
)
if not col_obj:
col_obj = DruidColumn(
datasource_name=druid_config['name'],
column_name=dim,
groupby=True,
filterable=True,
# TODO: fetch type from Hive.
type="STRING",
datasource=datasource,
)
session.add(col_obj)
# Import Druid metrics
for metric_spec in druid_config["metrics_spec"]:
metric_name = metric_spec["name"]
metric_type = metric_spec["type"]
metric_json = json.dumps(metric_spec)
if metric_type == "count":
metric_type = "longSum"
metric_json = json.dumps({
"type": "longSum",
"name": metric_name,
"fieldName": metric_name,
})
metric_obj = (
session.query(DruidMetric)
.filter_by(
datasource_name=druid_config['name'],
metric_name=metric_name)
).first()
if not metric_obj:
metric_obj = DruidMetric(
metric_name=metric_name,
metric_type=metric_type,
verbose_name="%s(%s)" % (metric_type, metric_name),
datasource=datasource,
json=metric_json,
description=(
"Imported from the airolap config dir for %s" %
druid_config['name']),
)
session.add(metric_obj)
session.commit()
@classmethod
def sync_to_db(cls, name, cluster, merge):
"""Fetches metadata for that datasource and merges the Superset db"""
logging.info("Syncing Druid datasource [{}]".format(name))
session = get_session()
datasource = session.query(cls).filter_by(datasource_name=name).first()
if not datasource:
datasource = cls(datasource_name=name)
session.add(datasource)
flasher("Adding new datasource [{}]".format(name), "success")
else:
flasher("Refreshing datasource [{}]".format(name), "info")
session.flush()
datasource.cluster = cluster
datasource.merge_flag = merge
session.flush()
cols = datasource.latest_metadata()
if not cols:
logging.error("Failed at fetching the latest segment")
return
for col in cols:
# Skip the time column
if col == "__time":
continue
col_obj = (
session
.query(DruidColumn)
.filter_by(datasource_name=name, column_name=col)
.first()
)
datatype = cols[col]['type']
if not col_obj:
col_obj = DruidColumn(datasource_name=name, column_name=col)
session.add(col_obj)
if datatype == "STRING":
col_obj.groupby = True
col_obj.filterable = True
if datatype == "hyperUnique" or datatype == "thetaSketch":
col_obj.count_distinct = True
# If long or double, allow sum/min/max
if datatype == "LONG" or datatype == "DOUBLE":
col_obj.sum = True
col_obj.min = True
col_obj.max = True
if col_obj:
col_obj.type = cols[col]['type']
session.flush()
col_obj.datasource = datasource
col_obj.generate_metrics()
session.flush()
@staticmethod
def time_offset(granularity):
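        # 'week_ending_saturday' results are shifted forward 6 days so Druid's
        # Sunday-anchored weekly buckets are labelled by the Saturday that
        # closes the week.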
if granularity == 'week_ending_saturday':
return 6 * 24 * 3600 * 1000 # 6 days
return 0
# uses https://en.wikipedia.org/wiki/ISO_8601
# http://druid.io/docs/0.8.0/querying/granularities.html
# TODO: pass origin from the UI
@staticmethod
def granularity(period_name, timezone=None, origin=None):
if not period_name or period_name == 'all':
return 'all'
iso_8601_dict = {
'5 seconds': 'PT5S',
'30 seconds': 'PT30S',
'1 minute': 'PT1M',
'5 minutes': 'PT5M',
'1 hour': 'PT1H',
'6 hour': 'PT6H',
'one day': 'P1D',
'1 day': 'P1D',
'7 days': 'P7D',
'week': 'P1W',
'week_starting_sunday': 'P1W',
'week_ending_saturday': 'P1W',
'month': 'P1M',
}
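        # e.g. granularity('1 day', timezone='UTC', origin='2016-01-01') builds
        # {'type': 'period', 'timeZone': 'UTC',
        #  'origin': '2016-01-01T00:00:00', 'period': 'P1D'}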
granularity = {'type': 'period'}
if timezone:
granularity['timeZone'] = timezone
if origin:
dttm = utils.parse_human_datetime(origin)
granularity['origin'] = dttm.isoformat()
if period_name in iso_8601_dict:
granularity['period'] = iso_8601_dict[period_name]
if period_name in ('week_ending_saturday', 'week_starting_sunday'):
# use Sunday as start of the week
granularity['origin'] = '2016-01-03T00:00:00'
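                # 2016-01-03 was a Sunday, so P1W periods anchored here start on Sundays.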
elif not isinstance(period_name, string_types):
granularity['type'] = 'duration'
granularity['duration'] = period_name
elif period_name.startswith('P'):
# identify if the string is the iso_8601 period
granularity['period'] = period_name
else:
granularity['type'] = 'duration'
granularity['duration'] = utils.parse_human_timedelta(
period_name).total_seconds() * 1000
return granularity
@staticmethod
def _metrics_and_post_aggs(metrics, metrics_dict):
all_metrics = []
post_aggs = {}
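        # Plain metrics pass through by name; 'postagg' metrics are rebuilt as
        # pydruid post-aggregator objects, and every field they reference is
        # added to all_metrics so the underlying aggregations are computed.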
def recursive_get_fields(_conf):
_type = _conf.get('type')
_field = _conf.get('field')
_fields = _conf.get('fields')
field_names = []
if _type in ['fieldAccess', 'hyperUniqueCardinality',
'quantile', 'quantiles']:
field_names.append(_conf.get('fieldName', ''))
if _field:
field_names += recursive_get_fields(_field)
if _fields:
for _f in _fields:
field_names += recursive_get_fields(_f)
return list(set(field_names))
for metric_name in metrics:
metric = metrics_dict[metric_name]
if metric.metric_type != 'postagg':
all_metrics.append(metric_name)
else:
mconf = metric.json_obj
all_metrics += recursive_get_fields(mconf)
all_metrics += mconf.get('fieldNames', [])
if mconf.get('type') == 'javascript':
post_aggs[metric_name] = JavascriptPostAggregator(
name=mconf.get('name', ''),
field_names=mconf.get('fieldNames', []),
function=mconf.get('function', ''))
elif mconf.get('type') == 'quantile':
post_aggs[metric_name] = Quantile(
mconf.get('name', ''),
mconf.get('probability', ''),
)
elif mconf.get('type') == 'quantiles':
post_aggs[metric_name] = Quantiles(
mconf.get('name', ''),
mconf.get('probabilities', ''),
)
elif mconf.get('type') == 'fieldAccess':
post_aggs[metric_name] = Field(mconf.get('name'))
elif mconf.get('type') == 'constant':
post_aggs[metric_name] = Const(
mconf.get('value'),
output_name=mconf.get('name', '')
)
elif mconf.get('type') == 'hyperUniqueCardinality':
post_aggs[metric_name] = HyperUniqueCardinality(
mconf.get('name')
)
elif mconf.get('type') == 'arithmetic':
post_aggs[metric_name] = Postaggregator(
mconf.get('fn', "/"),
mconf.get('fields', []),
mconf.get('name', ''))
else:
post_aggs[metric_name] = CustomPostAggregator(
mconf.get('name', ''),
mconf)
return all_metrics, post_aggs
def values_for_column(self,
column_name,
limit=10000):
"""Retrieve some values for the given column"""
# TODO: Use Lexicographic TopNMetricSpec once supported by PyDruid
if self.fetch_values_from:
from_dttm = utils.parse_human_datetime(self.fetch_values_from)
else:
from_dttm = datetime(1970, 1, 1)
qry = dict(
datasource=self.datasource_name,
granularity="all",
intervals=from_dttm.isoformat() + '/' + datetime.now().isoformat(),
aggregations=dict(count=count("count")),
dimension=column_name,
metric="count",
threshold=limit,
)
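        # A topN query on the synthetic 'count' metric returns the most
        # frequent values of the dimension, capped at `limit`.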
client = self.cluster.get_pydruid_client()
client.topn(**qry)
df = client.export_pandas()
return [row[column_name] for row in df.to_records(index=False)]
def get_query_str(self, query_obj, phase=1, client=None):
return self.run_query(client=client, phase=phase, **query_obj)
def run_query( # noqa / druid
self,
groupby, metrics,
granularity,
from_dttm, to_dttm,
filter=None, # noqa
is_timeseries=True,
timeseries_limit=None,
timeseries_limit_metric=None,
row_limit=None,
inner_from_dttm=None, inner_to_dttm=None,
orderby=None,
extras=None, # noqa
select=None, # noqa
columns=None, phase=2, client=None, form_data=None):
"""Runs a query against Druid and returns a dataframe.
"""
# TODO refactor into using a TBD Query object
client = client or self.cluster.get_pydruid_client()
if not is_timeseries:
granularity = 'all'
inner_from_dttm = inner_from_dttm or from_dttm
inner_to_dttm = inner_to_dttm or to_dttm
# add tzinfo to native datetime with config
from_dttm = from_dttm.replace(tzinfo=DRUID_TZ)
to_dttm = to_dttm.replace(tzinfo=DRUID_TZ)
timezone = from_dttm.tzname()
query_str = ""
metrics_dict = {m.metric_name: m for m in self.metrics}
columns_dict = {c.column_name: c for c in self.columns}
all_metrics, post_aggs = self._metrics_and_post_aggs(metrics, metrics_dict)
aggregations = OrderedDict()
for m in self.metrics:
if m.metric_name in all_metrics:
aggregations[m.metric_name] = m.json_obj
rejected_metrics = [
m.metric_name for m in self.metrics
if m.is_restricted and
m.metric_name in aggregations.keys() and
not sm.has_access('metric_access', m.perm)
]
if rejected_metrics:
raise MetricPermException(
"Access to the metrics denied: " + ', '.join(rejected_metrics)
)
# the dimensions list with dimensionSpecs expanded
dimensions = []
groupby = [gb for gb in groupby if gb in columns_dict]
for column_name in groupby:
col = columns_dict.get(column_name)
dim_spec = col.dimension_spec
if dim_spec:
dimensions.append(dim_spec)
else:
dimensions.append(column_name)
qry = dict(
datasource=self.datasource_name,
dimensions=dimensions,
aggregations=aggregations,
granularity=DruidDatasource.granularity(
granularity,
timezone=timezone,
origin=extras.get('druid_time_origin'),
),
post_aggregations=post_aggs,
intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = self.get_filters(filter)
if filters:
qry['filter'] = filters
having_filters = self.get_having_filters(extras.get('having_druid'))
if having_filters:
qry['having'] = having_filters
orig_filters = filters
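        # Dispatch to the cheapest Druid query type that can answer the
        # request: no dimensions -> timeseries, a single dimension -> topN,
        # multiple dimensions or a having filter -> groupBy.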
if len(groupby) == 0 and not having_filters:
del qry['dimensions']
client.timeseries(**qry)
if not having_filters and len(groupby) == 1:
qry['threshold'] = timeseries_limit or 1000
if row_limit and granularity == 'all':
qry['threshold'] = row_limit
qry['dimension'] = list(qry.get('dimensions'))[0]
del qry['dimensions']
qry['metric'] = list(qry['aggregations'].keys())[0]
client.topn(**qry)
elif len(groupby) > 1 or having_filters:
# If grouping on multiple fields or using a having filter
# we have to force a groupby query
if timeseries_limit and is_timeseries:
order_by = metrics[0] if metrics else self.metrics[0]
if timeseries_limit_metric:
order_by = timeseries_limit_metric
                # Limit on the number of timeseries by doing a two-phase query
pre_qry = deepcopy(qry)
pre_qry['granularity'] = "all"
pre_qry['limit_spec'] = {
"type": "default",
"limit": timeseries_limit,
'intervals': (
inner_from_dttm.isoformat() + '/' +
inner_to_dttm.isoformat()),
"columns": [{
"dimension": order_by,
"direction": "descending",
}],
}
client.groupby(**pre_qry)
query_str += "// Two phase query\n// Phase 1\n"
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
query_str += "\n"
if phase == 1:
return query_str
                query_str += (
                    "//\n// Phase 2 (built based on phase one's results)\n")
df = client.export_pandas()
if df is not None and not df.empty:
dims = qry['dimensions']
filters = []
for unused, row in df.iterrows():
fields = []
for dim in dims:
f = Dimension(dim) == row[dim]
fields.append(f)
if len(fields) > 1:
filt = Filter(type="and", fields=fields)
filters.append(filt)
elif fields:
filters.append(fields[0])
if filters:
ff = Filter(type="or", fields=filters)
if not orig_filters:
qry['filter'] = ff
else:
qry['filter'] = Filter(type="and", fields=[
ff,
orig_filters])
qry['limit_spec'] = None
if row_limit:
qry['limit_spec'] = {
"type": "default",
"limit": row_limit,
"columns": [{
"dimension": (
metrics[0] if metrics else self.metrics[0]),
"direction": "descending",
}],
}
client.groupby(**qry)
query_str += json.dumps(
client.query_builder.last_query.query_dict, indent=2)
return query_str
def query(self, query_obj):
qry_start_dttm = datetime.now()
client = self.cluster.get_pydruid_client()
query_str = self.get_query_str(
client=client, query_obj=query_obj, phase=2)
df = client.export_pandas()
if df is None or df.size == 0:
raise Exception(_("No data was returned."))
df.columns = [
DTTM_ALIAS if c == 'timestamp' else c for c in df.columns]
is_timeseries = query_obj['is_timeseries'] \
if 'is_timeseries' in query_obj else True
if (
not is_timeseries and
DTTM_ALIAS in df.columns):
del df[DTTM_ALIAS]
# Reordering columns
cols = []
if DTTM_ALIAS in df.columns:
cols += [DTTM_ALIAS]
cols += [col for col in query_obj['groupby'] if col in df.columns]
cols += [col for col in query_obj['metrics'] if col in df.columns]
df = df[cols]
time_offset = DruidDatasource.time_offset(query_obj['granularity'])
def increment_timestamp(ts):
dt = utils.parse_human_datetime(ts).replace(
tzinfo=DRUID_TZ)
return dt + timedelta(milliseconds=time_offset)
if DTTM_ALIAS in df.columns and time_offset:
df[DTTM_ALIAS] = df[DTTM_ALIAS].apply(increment_timestamp)
return QueryResult(
df=df,
query=query_str,
duration=datetime.now() - qry_start_dttm)
def get_filters(self, raw_filters): # noqa
filters = None
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ('in', 'not in'):
eq = [
types.replace("'", '').strip()
if isinstance(types, string_types)
else types
for types in eq]
elif not isinstance(flt['val'], string_types):
eq = eq[0] if len(eq) > 0 else ''
if col in self.num_cols:
if op in ('in', 'not in'):
eq = [utils.string_to_num(v) for v in eq]
else:
eq = utils.string_to_num(eq)
if op == '==':
cond = Dimension(col) == eq
elif op == '!=':
cond = ~(Dimension(col) == eq)
elif op in ('in', 'not in'):
fields = []
if len(eq) > 1:
for s in eq:
fields.append(Dimension(col) == s)
cond = Filter(type="or", fields=fields)
elif len(eq) == 1:
cond = Dimension(col) == eq[0]
if op == 'not in':
cond = ~cond
elif op == 'regex':
cond = Filter(type="regex", pattern=eq, dimension=col)
elif op == '>=':
cond = Dimension(col) >= eq
elif op == '<=':
cond = Dimension(col) <= eq
elif op == '>':
cond = Dimension(col) > eq
elif op == '<':
cond = Dimension(col) < eq
if filters:
filters = Filter(type="and", fields=[
cond,
filters
])
else:
filters = cond
return filters
def _get_having_obj(self, col, op, eq):
cond = None
if op == '==':
if col in self.column_names:
cond = DimSelector(dimension=col, value=eq)
else:
cond = Aggregation(col) == eq
elif op == '>':
cond = Aggregation(col) > eq
elif op == '<':
cond = Aggregation(col) < eq
return cond
def get_having_filters(self, raw_filters):
filters = None
reversed_op_map = {
'!=': '==',
'>=': '<',
'<=': '>'
}
for flt in raw_filters:
if not all(f in flt for f in ['col', 'op', 'val']):
continue
col = flt['col']
op = flt['op']
eq = flt['val']
cond = None
if op in ['==', '>', '<']:
cond = self._get_having_obj(col, op, eq)
elif op in reversed_op_map:
cond = ~self._get_having_obj(col, reversed_op_map[op], eq)
if filters:
filters = filters & cond
else:
filters = cond
return filters
@classmethod
def query_datasources_by_name(
cls, session, database, datasource_name, schema=None):
return (
session.query(cls)
.filter_by(cluster_name=database.id)
.filter_by(datasource_name=datasource_name)
.all()
)
sa.event.listen(DruidDatasource, 'after_insert', set_perm)
sa.event.listen(DruidDatasource, 'after_update', set_perm)<|fim▁end|> | |
<|file_name|>anim-scroll.js<|end_file_name|><|fim▁begin|>/*
YUI 3.8.0 (build 5744)
Copyright 2012 Yahoo! Inc. All rights reserved.
Licensed under the BSD License.
http://yuilibrary.com/license/
*/
YUI.add('anim-scroll', function (Y, NAME) {
/**
* Adds support for the <code>scroll</code> property in <code>to</code>
* and <code>from</code> attributes.
* @module anim
* @submodule anim-scroll
*/
var NUM = Number;
//TODO: deprecate for scrollTop/Left properties?
Y.Anim.behaviors.scroll = {
set: function(anim, att, from, to, elapsed, duration, fn) {
var
node = anim._node,
val = ([
fn(elapsed, NUM(from[0]), NUM(to[0]) - NUM(from[0]), duration),
fn(elapsed, NUM(from[1]), NUM(to[1]) - NUM(from[1]), duration)
]);
<|fim▁hole|> if (val[1]) {
node.set('scrollTop', val[1]);
}
},
get: function(anim) {
var node = anim._node;
return [node.get('scrollLeft'), node.get('scrollTop')];
}
};
}, '3.8.0', {"requires": ["anim-base"]});<|fim▁end|> | if (val[0]) {
node.set('scrollLeft', val[0]);
}
|
<|file_name|>Colors_test.py<|end_file_name|><|fim▁begin|># Tests for Pmw color handling.
import Tkinter
import Test
import Pmw
Test.initialise()
testData = ()
defaultPalette = Pmw.Color.getdefaultpalette(Test.root)
c = Tkinter.Button
colors = ('red', 'orange', 'yellow', 'green', 'blue', 'purple', 'white')
normalcolors = map(Pmw.Color.changebrightness,
(Test.root,) * len(colors), colors, (0.85,) * len(colors))
kw = {}
tests = (
(Pmw.Color.setscheme, (Test.root, normalcolors[0]), {'foreground' : 'white'}),
)
testData = testData + ((c, ((tests, kw),)),)
for color in normalcolors[1:]:
kw = {'text' : color}
tests = (
(c.pack, ()),
('state', 'active'),
)
testData = testData + ((c, ((tests, kw),)),)
kw = {}
tests = (
(Pmw.Color.setscheme, (Test.root, color), {'foreground' : 'red'}),
)
testData = testData + ((c, ((tests, kw),)),)
<|fim▁hole|>tests = (
(Pmw.Color.setscheme, (Test.root,), defaultPalette),
)
testData = testData + ((c, ((tests, kw),)),)
if __name__ == '__main__':
Test.runTests(testData)<|fim▁end|> | # Restore the default colors.
kw = {} |
<|file_name|>MorsHtmlHighlighter.cpp<|end_file_name|><|fim▁begin|>#include "MorsHtmlHighlighter.h"
enum Construct {
DocType,
Entity,
Tag,
Comment,
AttributeName,
AttributeValue
};
enum State {
State_Text = -1,
State_DocType,
State_Comment,
State_TagStart,
State_TagName,
State_InsideTag,
State_AttributeName,
State_SingleQuote,
State_DoubleQuote,
State_AttributeValue,
};
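// The highlighter is a per-line state machine: the State reached at the end of
// a block is saved via setCurrentBlockState() so multi-line constructs (e.g.
// comments) resume correctly from previousBlockState() on the next block.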
MorsHtmlHighlighter::MorsHtmlHighlighter(QTextDocument *document) : QSyntaxHighlighter(document)
{
colors_[DocType] = QColor(192, 192, 192);
colors_[Entity] = QColor(128, 128, 128);
colors_[Tag] = QColor(136, 18, 128);
colors_[Comment] = QColor( 35, 110, 37);
colors_[AttributeName] = QColor(153, 69, 0);
colors_[AttributeValue] = QColor( 36, 36, 170);
}
void MorsHtmlHighlighter::highlightBlock(const QString &text)
{
int state = previousBlockState();
int len = text.length();
int start = 0;
int pos = 0;
while (pos < len)
{
switch (state) {
case State_Text:
default:
while (pos < len) {
QChar ch = text.at(pos);
if (ch == '<') {
if (text.mid(pos, 4) == "<!--") {
state = State_Comment;
} else {
if (text.mid(pos, 9).toUpper() == "<!DOCTYPE")
state = State_DocType;
else
state = State_TagStart;
}
break;
} else if (ch == '&') {
start = pos;
while (pos < len
&& text.at(pos++) != ';')
;
setFormat(start, pos - start, colors_[Entity]);
} else {
++pos;
}
}
break;
case State_Comment:
start = pos;
while (pos < len) {
if (text.mid(pos, 3) == "-->") {
pos += 3;
state = State_Text;
break;
} else {
++pos;
}
}
setFormat(start, pos - start, colors_[Comment]);
break;
case State_DocType:
start = pos;
while (pos < len) {
QChar ch = text.at(pos);
++pos;
if (ch == '>') {
state = State_Text;
break;
}
}
setFormat(start, pos - start, colors_[DocType]);
break;
// at '<' in e.g. "<span>foo</span>"
case State_TagStart:
start = pos + 1;
while (pos < len) {
QChar ch = text.at(pos);
++pos;
if (ch == '>') {
state = State_Text;
break;
}
if (!ch.isSpace()) {
--pos;
state = State_TagName;
break;
}
}
break;
// at 'b' in e.g "<blockquote>foo</blockquote>"
case State_TagName:
start = pos;
while (pos < len) {
QChar ch = text.at(pos);
++pos;
if (ch.isSpace()) {
--pos;
state = State_InsideTag;
break;
}
if (ch == '>') {
state = State_Text;
break;
}
}
setFormat(start, pos - start, colors_[Tag]);
break;
// anywhere after tag name and before tag closing ('>')
case State_InsideTag:
start = pos;
while (pos < len) {
QChar ch = text.at(pos);
++pos;
if (ch == '/')
continue;
if (ch == '>') {
state = State_Text;
break;
}
if (!ch.isSpace()) {
--pos;
state = State_AttributeName;
break;
}
}
break;
// at 's' in e.g. <img src=bla.png/>
case State_AttributeName:
start = pos;
while (pos < len) {
QChar ch = text.at(pos);
++pos;
if (ch == '=') {
state = State_AttributeValue;
break;
}
if (ch == '>' || ch == '/') {
state = State_InsideTag;
break;
}
}
setFormat(start, pos - start, colors_[AttributeName]);
break;
// after '=' in e.g. <img src=bla.png/>
case State_AttributeValue:
start = pos;
// find first non-space character
while (pos < len) {
QChar ch = text.at(pos);
++pos;
// handle opening single quote
if (ch == '\'') {
state = State_SingleQuote;
break;
}
// handle opening double quote
if (ch == '"') {
state = State_DoubleQuote;
break;
}
if (!ch.isSpace())
break;
}
if (state == State_AttributeValue) {
// attribute value without quote
// just stop at non-space or tag delimiter
start = pos;
while (pos < len) {
QChar ch = text.at(pos);
if (ch.isSpace())
break;
if (ch == '>' || ch == '/')
break;
++pos;
}
state = State_InsideTag;
setFormat(start, pos - start, colors_[AttributeValue]);
}
break;
// after the opening single quote in an attribute value
case State_SingleQuote:
start = pos;
while (pos < len) {
QChar ch = text.at(pos);
++pos;
if (ch == '\'')
break;
}
state = State_InsideTag;
setFormat(start, pos - start, colors_[AttributeValue]);<|fim▁hole|> break;
// after the opening double quote in an attribute value
case State_DoubleQuote:
start = pos;
while (pos < len) {
QChar ch = text.at(pos);
++pos;
if (ch == '"')
break;
}
state = State_InsideTag;
setFormat(start, pos - start, colors_[AttributeValue]);
break;
}
}
setCurrentBlockState(state);
}<|fim▁end|> | |
<|file_name|>smallcommitmetadata.py<|end_file_name|><|fim▁begin|># Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the<|fim▁hole|># GNU General Public License version 2.
# smallcommitmetadata.py - stores a small amount of metadata associated with a commit
from . import json
from .node import bin, hex
from .util import altsortdict
# Stores a mapping of (node, category) -> data, with a FIFO-limited number of entries
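# altsortdict preserves insertion order, so popitem(last=False) in store()
# evicts the oldest entry once the limit is exceeded.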
class smallcommitmetadata(object):
def __init__(self, vfs, entrylimit):
self.vfs = vfs
self.limit = entrylimit
self.contents = altsortdict()
self.reload()
def reload(self):
"""Read the database from disk."""
if not self.vfs.exists("commit_metadata"):
self.contents = altsortdict()
return
try:
entries = json.loads(self.vfs.tryreadutf8("commit_metadata"))[-self.limit :]
except ValueError:
entries = []
for entry in entries:
self.contents[(bin(entry["node"]), entry["category"])] = entry["data"]
def write(self):
"""Write the database to disk."""
with self.vfs("commit_metadata", "w", atomictemp=True) as f:
entries = [
{"node": hex(node), "category": category, "data": data}
for ((node, category), data) in self.contents.items()
]
json.dump(entries, f)
def store(self, node, category, data):
"""Adds a new entry with the specified node and category, and updates the data on disk. Returns the removed entry, if any."""
self.contents[(node, category)] = data
popped = None
while len(self.contents) > self.limit:
popped = self.contents.popitem(last=False)
self.write()
return popped
def delete(self, node, category):
"""Removes the entry with matching node and category and returns its value."""
value = self.contents[(node, category)]
del self.contents[(node, category)]
return value
def read(self, node, category):
"""Returns the value of the entry with specified node and category."""
return self.contents[(node, category)]
def find(self, node=None, category=None):
"""Returns a map of all entries with matching node and/or category. If both are None, returns all entries."""
return altsortdict(
(
((node_, category_), data)
for ((node_, category_), data) in self.contents.items()
if node is None or node == node_
if category is None or category == category_
)
)
def finddelete(self, node=None, category=None):
"""Removes and returns any entries with matching node and/or category."""
entriestoremove = [
((node_, category_), data_)
for ((node_, category_), data_) in self.contents.items()
if node is None or node == node_
if category is None or category == category_
]
for (key, _value) in entriestoremove:
del self.contents[key]
return altsortdict(entriestoremove)
def clear(self):
"""Removes and returns all entries."""
deleted = self.contents
self.contents = altsortdict()
return deleted<|fim▁end|> | |
<|file_name|>chip-grid-harness.spec.ts<|end_file_name|><|fim▁begin|>import {HarnessLoader} from '@angular/cdk-experimental/testing';
import {TestbedHarnessEnvironment} from '@angular/cdk-experimental/testing/testbed';
import {Component} from '@angular/core';<|fim▁hole|>import {MatChipsModule} from '../index';
import {MatChipGridHarness} from './chip-grid-harness';
let fixture: ComponentFixture<ChipGridHarnessTest>;
let loader: HarnessLoader;
describe('MatChipGridHarness', () => {
beforeEach(async () => {
await TestBed.configureTestingModule({
imports: [MatChipsModule],
declarations: [ChipGridHarnessTest],
}).compileComponents();
fixture = TestBed.createComponent(ChipGridHarnessTest);
fixture.detectChanges();
loader = TestbedHarnessEnvironment.loader(fixture);
});
it('should get correct number of grid harnesses', async () => {
const harnesses = await loader.getAllHarnesses(MatChipGridHarness);
expect(harnesses.length).toBe(1);
});
it('should get correct number of rows', async () => {
const harnesses = await loader.getAllHarnesses(MatChipGridHarness);
const rows = await harnesses[0].getRows();
expect(rows.length).toBe(3);
});
it('should get the chip input harness', async () => {
const harnesses = await loader.getAllHarnesses(MatChipGridHarness);
const input = await harnesses[0].getTextInput();
expect(input).not.toBe(null);
});
});
@Component({
template: `
<mat-chip-grid #grid>
<mat-chip-row> Chip A </mat-chip-row>
<mat-chip-row> Chip B </mat-chip-row>
<mat-chip-row> Chip C </mat-chip-row>
<input [matChipInputFor]="grid" />
</mat-chip-grid>
`
})
class ChipGridHarnessTest {}<|fim▁end|> | import {ComponentFixture, TestBed} from '@angular/core/testing'; |
<|file_name|>runConfigs.py<|end_file_name|><|fim▁begin|>#Constants<|fim▁hole|>PROD = 'prod'
LOCAL = 'local'
NOPI = 'nopi'
#Set configs here
ENV = PROD
loggingEnabled = True<|fim▁end|> | |
<|file_name|>DOThumbnailPriority.java<|end_file_name|><|fim▁begin|>/*
* PS3 Media Server, for streaming any medias to your PS3.
* Copyright (C) 2012 Ph.Waeber
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 2
* of the License only.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package net.pms.medialibrary.commons.dataobjects;
import net.pms.medialibrary.commons.enumarations.ThumbnailPrioType;
public class DOThumbnailPriority {
private long id;
private ThumbnailPrioType thumbnailPriorityType;
private String picturePath;
private int seekPosition;
private int priorityIndex;
public DOThumbnailPriority(){
this(-1, ThumbnailPrioType.THUMBNAIL, "", 0);
}
public DOThumbnailPriority(long id, ThumbnailPrioType thumbnailPriorityType, String picturePath, int priorityIndex){
this(id, thumbnailPriorityType, -1, picturePath, priorityIndex);
}
public DOThumbnailPriority(long id, ThumbnailPrioType thumbnailPriorityType, int seekPosition, int priorityIndex){
this(id, thumbnailPriorityType, seekPosition, "", priorityIndex);
}
public DOThumbnailPriority(long id, ThumbnailPrioType thumbnailPriorityType, int seekPosition, String picturePath, int priorityIndex){
setId(id);
setThumbnailPriorityType(thumbnailPriorityType);
setSeekPosition(seekPosition);
setPicturePath(picturePath);
setPriorityIndex(priorityIndex);
}
public void setThumbnailPriorityType(ThumbnailPrioType thumbnailPriorityType) {
this.thumbnailPriorityType = thumbnailPriorityType;
}
public ThumbnailPrioType getThumbnailPriorityType() {
return thumbnailPriorityType;
}
public void setPicturePath(String picturePath) {
this.picturePath = picturePath;
}
public String getPicturePath() {
return picturePath;
}
public void setSeekPosition(int seekPosition) {
this.seekPosition = seekPosition;
}<|fim▁hole|> public int getSeekPosition() {
return seekPosition;
}
public void setPriorityIndex(int priorityIndex) {
this.priorityIndex = priorityIndex;
}
public int getPriorityIndex() {
return priorityIndex;
}
public void setId(long id) {
this.id = id;
}
public long getId() {
return id;
}
@Override
public boolean equals(Object obj){
if(!(obj instanceof DOThumbnailPriority)){
return false;
}
DOThumbnailPriority compObj = (DOThumbnailPriority) obj;
if(getId() == compObj.getId()
&& getThumbnailPriorityType() == compObj.getThumbnailPriorityType()
&& getPicturePath().equals(compObj.getPicturePath())
&& getSeekPosition() == compObj.getSeekPosition()
&& getPriorityIndex() == compObj.getPriorityIndex()){
return true;
}
return false;
}
@Override
public int hashCode(){
int hashCode = 24 + String.valueOf(getId()).hashCode();
hashCode *= 24 + getPicturePath().hashCode();
hashCode *= 24 + getSeekPosition();
hashCode *= 24 + getPriorityIndex();
return hashCode;
}
@Override
public DOThumbnailPriority clone(){
return new DOThumbnailPriority(getId(), getThumbnailPriorityType(), getSeekPosition(), getPicturePath(), getPriorityIndex());
}
@Override
public String toString(){
return String.format("id=%s, prioIndex=%s, type=%s, seekPos=%s, picPath=%s", getId(), getPriorityIndex(), getThumbnailPriorityType(), getSeekPosition(), getPicturePath());
}
}<|fim▁end|> | |
<|file_name|>test_compat.py<|end_file_name|><|fim▁begin|>from morepath import compat
def test_text_type():
assert isinstance(u'foo', compat.text_type)
assert not isinstance(b'foo', compat.text_type)
def test_string_types():
assert isinstance('foo', compat.string_types)
assert isinstance(u'foo', compat.string_types)
if compat.PY3:
assert not isinstance(b'foo', compat.string_types)
else:
assert isinstance(b'foo', compat.string_types)
def test_bytes_():
text = u'Z\N{latin small letter u with diaeresis}rich'
code = compat.bytes_(text)
assert isinstance(code, bytes)
assert code == compat.bytes_(code)
def test_withclass():
class Meta(type):<|fim▁hole|> pass
class Class(compat.with_metaclass(Meta)):
pass
assert type(Class) == Meta
assert Class.__bases__ == (object,)<|fim▁end|> | |
<|file_name|>Crypto.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Crypto.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
from .JSClass import JSClass
class Crypto(JSClass):
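    # Appears to emulate Mozilla's legacy window.crypto object; the no-op
    # methods mirror the original API surface for scripts that probe it.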
def __init__(self):<|fim▁hole|> return False
@property
def version(self):
return "2.4"
def disableRightClick(self):
pass
def importUserCertificates(self, nickname, cmmfResponse, forceToBackUp): # pylint:disable=unused-argument
return ""
def logout(self):
pass<|fim▁end|> | pass
@property
def enableSmartCardEvents(self): |
<|file_name|>keyframes.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Keyframes: https://drafts.csswg.org/css-animations/#keyframes
#![deny(missing_docs)]
use cssparser::{AtRuleParser, Parser, QualifiedRuleParser, RuleListParser};
use cssparser::{DeclarationListParser, DeclarationParser, parse_one_rule};
use error_reporting::NullReporter;
use parser::{LengthParsingMode, ParserContext, log_css_error};
use properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock, PropertyId};
use properties::{PropertyDeclarationId, LonghandId, ParsedDeclaration};
use properties::LonghandIdSet;
use properties::animated_properties::TransitionProperty;
use properties::longhands::transition_timing_function::single_value::SpecifiedValue as SpecifiedTimingFunction;
use shared_lock::{SharedRwLock, SharedRwLockReadGuard, Locked, ToCssWithGuard};
use std::fmt;
use std::sync::Arc;
use style_traits::ToCss;
use stylesheets::{CssRuleType, Stylesheet, VendorPrefix};
/// A number from 0 to 1, indicating the percentage of the animation when this
/// keyframe should run.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl ToCss for KeyframePercentage {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
write!(dest, "{}%", self.0 * 100.0)
}
}
impl KeyframePercentage {
/// Trivially constructs a new `KeyframePercentage`.
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage >= 0. && percentage <= 1. {
KeyframePercentage::new(percentage)
} else {
return Err(());
}
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
#[derive(Debug, PartialEq)]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
/// Return the list of percentages this selector contains.
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
/// Parse a keyframe selector from CSS input.
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug)]
pub struct Keyframe {
/// The selector this keyframe was specified from.
pub selector: KeyframeSelector,
/// The declaration block that was declared inside this keyframe.
///
/// Note that `!important` rules in keyframes don't apply, but we keep this
/// `Arc` just for convenience.
pub block: Arc<Locked<PropertyDeclarationBlock>>,
}
impl ToCssWithGuard for Keyframe {
fn to_css<W>(&self, guard: &SharedRwLockReadGuard, dest: &mut W) -> fmt::Result
where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(iter.next().unwrap().to_css(dest));
for percentage in iter {
try!(write!(dest, ", "));
try!(percentage.to_css(dest));
}
try!(dest.write_str(" { "));
try!(self.block.read_with(guard).to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
/// Parse a CSS keyframe.
pub fn parse(css: &str, parent_stylesheet: &Stylesheet)
-> Result<Arc<Locked<Self>>, ()> {
let error_reporter = NullReporter;
let context = ParserContext::new(parent_stylesheet.origin,
&parent_stylesheet.url_data,
&error_reporter,
Some(CssRuleType::Keyframe),
LengthParsingMode::Default,
parent_stylesheet.quirks_mode);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
shared_lock: &parent_stylesheet.shared_lock,
};
parse_one_rule(&mut input, &mut rule_parser)<|fim▁hole|>/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
///
/// TODO: Find a better name for this?
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// A step formed by a declaration block specified by the CSS.
Declarations {
/// The declaration block per se.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<Locked<PropertyDeclarationBlock>>
},
/// A synthetic step computed from the current computed values at the time
/// of the animation.
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
    /// Whether an animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue,
guard: &SharedRwLockReadGuard) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read_with(guard).declarations().iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
    /// Returns the specified TransitionTimingFunction if this KeyframesStep has an 'animation-timing-function'.
pub fn get_animation_timing_function(&self, guard: &SharedRwLockReadGuard)
-> Option<SpecifiedTimingFunction> {
if !self.declared_timing_function {
return None;
}
match self.value {
KeyframesStepValue::Declarations { ref block } => {
let guard = block.read_with(guard);
let &(ref declaration, _) =
guard.get(PropertyDeclarationId::Longhand(LonghandId::AnimationTimingFunction)).unwrap();
match *declaration {
PropertyDeclaration::AnimationTimingFunction(ref value) => {
// Use the first value.
Some(value.0[0])
},
PropertyDeclaration::CSSWideKeyword(..) => None,
PropertyDeclaration::WithVariables(..) => None,
_ => panic!(),
}
},
KeyframesStepValue::ComputedValues => {
panic!("Shouldn't happen to set animation-timing-function in missing keyframes")
},
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animatable properties.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
    /// The different steps of the animation.
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
/// Vendor prefix type the @keyframes has.
pub vendor_prefix: Option<VendorPrefix>,
}
/// Get all the animated properties in a keyframes animation.
fn get_animated_properties(keyframes: &[Arc<Locked<Keyframe>>], guard: &SharedRwLockReadGuard)
-> Vec<TransitionProperty> {
let mut ret = vec![];
let mut seen = LonghandIdSet::new();
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for keyframe in keyframes {
let keyframe = keyframe.read_with(&guard);
let block = keyframe.block.read_with(guard);
for &(ref declaration, importance) in block.declarations().iter() {
assert!(!importance.important());
if let Some(property) = TransitionProperty::from_declaration(declaration) {
if !seen.has_transition_property_bit(&property) {
seen.set_transition_property_bit(&property);
ret.push(property);
}
}
}
}
ret
}
impl KeyframesAnimation {
/// Create a keyframes animation from a given list of keyframes.
///
/// This will return a keyframe animation with empty steps and
/// properties_changed if the list of keyframes is empty, or there are no
    /// animated properties obtained from the keyframes.
///
/// Otherwise, this will compute and sort the steps used for the animation,
/// and return the animation object.
pub fn from_keyframes(keyframes: &[Arc<Locked<Keyframe>>],
vendor_prefix: Option<VendorPrefix>,
guard: &SharedRwLockReadGuard)
-> Self {
let mut result = KeyframesAnimation {
steps: vec![],
properties_changed: vec![],
vendor_prefix: vendor_prefix,
};
if keyframes.is_empty() {
return result;
}
result.properties_changed = get_animated_properties(keyframes, guard);
if result.properties_changed.is_empty() {
return result;
}
for keyframe in keyframes {
let keyframe = keyframe.read_with(&guard);
for percentage in keyframe.selector.0.iter() {
result.steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}, guard));
}
}
// Sort by the start percentage, so we can easily find a frame.
result.steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if result.steps[0].start_percentage.0 != 0. {
result.steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues,
guard));
}
if result.steps.last().unwrap().start_percentage.0 != 1. {
result.steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues,
guard));
}
result
}
}
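// Worked example (illustrative, derived from the logic above): for keyframes
// declared at 30% and 70%, `from_keyframes` yields four sorted steps: a
// synthesized ComputedValues step at 0%, the two declaration-backed steps,
// and another synthesized step at 100%, so animation sampling always has
// both endpoints available to blend from.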
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
shared_lock: &'a SharedRwLock,
}
/// Parses a keyframe list from CSS input.
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser, shared_lock: &SharedRwLock)
-> Vec<Arc<Locked<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser {
context: context,
shared_lock: shared_lock,
}).filter_map(Result::ok).collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<Locked<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
type Prelude = KeyframeSelector;
type QualifiedRule = Arc<Locked<Keyframe>>;
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Self::Prelude, ()> {
let start = input.position();
match KeyframeSelector::parse(input) {
Ok(sel) => Ok(sel),
Err(()) => {
let message = format!("Invalid keyframe rule: '{}'", input.slice_from(start));
log_css_error(input, start, &message, self.context);
Err(())
}
}
}
fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser)
-> Result<Self::QualifiedRule, ()> {
let context = ParserContext::new_with_rule_type(self.context, Some(CssRuleType::Keyframe));
let parser = KeyframeDeclarationParser {
context: &context,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut block = PropertyDeclarationBlock::new();
while let Some(declaration) = iter.next() {
match declaration {
Ok(parsed) => parsed.expand_push_into(&mut block, Importance::Normal),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported keyframe property declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, &context);
}
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
Ok(Arc::new(self.shared_lock.wrap(Keyframe {
selector: prelude,
block: Arc::new(self.shared_lock.wrap(block)),
})))
}
}
struct KeyframeDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for KeyframeDeclarationParser<'a, 'b> {
type Prelude = ();
type AtRule = ParsedDeclaration;
}
impl<'a, 'b> DeclarationParser for KeyframeDeclarationParser<'a, 'b> {
type Declaration = ParsedDeclaration;
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<ParsedDeclaration, ()> {
let id = try!(PropertyId::parse(name.into()));
match ParsedDeclaration::parse(id, self.context, input) {
Ok(parsed) => {
// In case there is still unparsed text in the declaration, we should roll back.
if !input.is_exhausted() {
Err(())
} else {
Ok(parsed)
}
}
Err(_) => Err(())
}
}
}<|fim▁end|> | }
}
/// A keyframes step value. This can be a synthesized keyframes animation, that
<|file_name|>htmltableheadercellelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLTableHeaderCellElementBinding;
use dom::bindings::js::Root;
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::htmltablecellelement::HTMLTableCellElement;
use dom::node::Node;
use string_cache::Atom;
#[dom_struct]
pub struct HTMLTableHeaderCellElement {
htmltablecellelement: HTMLTableCellElement,
}
impl HTMLTableHeaderCellElement {
fn new_inherited(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLTableHeaderCellElement {
HTMLTableHeaderCellElement {
htmltablecellelement:
HTMLTableCellElement::new_inherited(localName, prefix, document)<|fim▁hole|> #[allow(unrooted_must_root)]
pub fn new(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLTableHeaderCellElement> {
Node::reflect_node(box HTMLTableHeaderCellElement::new_inherited(localName, prefix, document),
document,
HTMLTableHeaderCellElementBinding::Wrap)
}
}<|fim▁end|> | }
}
|
<|file_name|>0003_auto__del_field_customuser_email_on_new_foller__add_field_customuser_e.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'CustomUser.email_on_new_foller'
db.delete_column('auth_user', 'email_on_new_foller')
# Adding field 'CustomUser.email_on_new_follower'
db.add_column('auth_user', 'email_on_new_follower',
self.gf('django.db.models.fields.BooleanField')(default=True),
keep_default=False)<|fim▁hole|> # Adding field 'CustomUser.email_on_new_foller'
db.add_column('auth_user', 'email_on_new_foller',
self.gf('django.db.models.fields.BooleanField')(default=True),
keep_default=False)
# Deleting field 'CustomUser.email_on_new_follower'
db.delete_column('auth_user', 'email_on_new_follower')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'main.customuser': {
'Meta': {'object_name': 'CustomUser', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_on_new_follower': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'main.embeditem': {
'Meta': {'object_name': 'EmbedItem'},
'border_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_radius': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'border_width': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'creator_window_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'embedly_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'original_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'x': ('django.db.models.fields.IntegerField', [], {}),
'y': ('django.db.models.fields.IntegerField', [], {})
},
u'main.follow': {
'Meta': {'unique_together': "[['user', 'target']]", 'object_name': 'Follow'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'target': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': u"orm['main.CustomUser']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'friends'", 'to': u"orm['main.CustomUser']"})
},
u'main.imageitem': {
'Meta': {'object_name': 'ImageItem'},
'border_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_radius': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'border_width': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'creator_window_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'link_to_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'src': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'x': ('django.db.models.fields.IntegerField', [], {}),
'y': ('django.db.models.fields.IntegerField', [], {})
},
u'main.membership': {
'Meta': {'unique_together': "[['page', 'user']]", 'object_name': 'Membership'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']"})
},
u'main.page': {
'Meta': {'object_name': 'Page'},
'admin_textitem_bg_color': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'admin_textitem_bg_texture': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'admin_textitem_color': ('django.db.models.fields.CharField', [], {'default': "'#000'", 'max_length': '32', 'blank': 'True'}),
'admin_textitem_font': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'admin_textitem_font_size': ('django.db.models.fields.PositiveIntegerField', [], {'default': '13', 'null': 'True', 'blank': 'True'}),
'bg_color': ('django.db.models.fields.CharField', [], {'default': "'#fafafa'", 'max_length': '32', 'blank': 'True'}),
'bg_fn': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bg_texture': ('django.db.models.fields.CharField', [], {'default': "'light_wool_midalpha.png'", 'max_length': '1024', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'default_textitem_bg_color': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '32', 'blank': 'True'}),
'default_textitem_bg_texture': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'default_textitem_color': ('django.db.models.fields.CharField', [], {'default': "'#000'", 'max_length': '32', 'blank': 'True'}),
'default_textitem_font': ('django.db.models.fields.CharField', [], {'default': "'Arial'", 'max_length': '32', 'blank': 'True'}),
'default_textitem_font_size': ('django.db.models.fields.PositiveIntegerField', [], {'default': '13', 'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'image_writability': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'published_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'short_url': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'text_writability': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'use_custom_admin_style': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'main.pageview': {
'Meta': {'object_name': 'PageView'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'sessionid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'})
},
u'main.textitem': {
'Meta': {'object_name': 'TextItem'},
'bg_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'bg_texture': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'border_radius': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'border_width': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'color': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.CustomUser']", 'null': 'True'}),
'creator_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'creator_session_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'creator_window_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'editable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'font': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
'font_size': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'height': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('uuidfield.fields.UUIDField', [], {'unique': 'True', 'max_length': '32', 'primary_key': 'True'}),
'link_to_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['main.Page']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'width': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'x': ('django.db.models.fields.IntegerField', [], {}),
'y': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['main']<|fim▁end|> |
def backwards(self, orm): |
<|file_name|>fn_to_numeric_cast_any.rs<|end_file_name|><|fim▁begin|>use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::source::snippet_with_applicability;
use rustc_errors::Applicability;
use rustc_hir::Expr;
use rustc_lint::LateContext;
use rustc_middle::ty::{self, Ty};
use super::FN_TO_NUMERIC_CAST_ANY;
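// Illustrative example of code this lint fires on (hypothetical snippet,
// not from the lint's test suite):
//
//     fn foo() -> u32 { 1 }
//     let _ = foo as i64;  // lint: casting function pointer `foo` to `i64`
//                          // suggestion: `foo() as i64`, i.e. call it first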
pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>, cast_expr: &Expr<'_>, cast_from: Ty<'_>, cast_to: Ty<'_>) {
// We allow casts from any function type to any function type.
match cast_to.kind() {
ty::FnDef(..) | ty::FnPtr(..) => return,
_ => { /* continue to checks */ },
}
match cast_from.kind() {<|fim▁hole|> span_lint_and_sugg(
cx,
FN_TO_NUMERIC_CAST_ANY,
expr.span,
&format!("casting function pointer `{}` to `{}`", from_snippet, cast_to),
"did you mean to invoke the function?",
format!("{}() as {}", from_snippet, cast_to),
applicability,
);
},
_ => {},
}
}<|fim▁end|> | ty::FnDef(..) | ty::FnPtr(_) => {
let mut applicability = Applicability::MaybeIncorrect;
let from_snippet = snippet_with_applicability(cx, cast_expr.span, "..", &mut applicability);
|
<|file_name|>quick_switcher.py<|end_file_name|><|fim▁begin|>#-*- coding:utf-8 -*-
"""
This file is part of OpenSesame.
OpenSesame is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenSesame is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with OpenSesame. If not, see <http://www.gnu.org/licenses/>.
"""
from libopensesame.py3compat import *
from libqtopensesame.extensions import base_extension
class quick_switcher(base_extension):
"""
desc:
The quick-switcher allows you to quickly navigate to items and
functions, and to quickly activate menu actions.
"""
# We need to update or fully refresh the dialog after several structural
# changes.
def event_startup(self):
self.d = None
def event_open_experiment(self, path):
self.d = None
def event_rename_item(self, from_name, to_name):
if self.d is not None:
self.d.rename_item(from_name, to_name)
def event_new_item(self, name, _type):
if self.d is not None:
self.d.add_item(name)
def event_delete_item(self, name):
if self.d is not None:
self.d.delete_item(name)
def event_purge_unused_items(self):
self.d = None
def event_regenerate(self):
self.d = None
def event_change_item(self, name):
if self.d is not None:
if self.experiment.items._type(name) == u'inline_script':
self.d.refresh_item(name)
def event_open_item(self, name):
if self.d is not None:
self.d.bump_item(name)
def init_dialog(self):
"""
desc:
Re-init the dialog.
"""
self.set_busy()<|fim▁hole|>
def activate(self):
"""
desc:
Pops up the quick-switcher dialog.
"""
if not hasattr(self, u'd') or self.d is None:
self.init_dialog()
self.d.items_list_widget.sortItems()
self.d.exec_()<|fim▁end|> | from quick_switcher_dialog.dialog import quick_switcher
self.d = quick_switcher(self.main_window)
self.set_busy(False) |
<|file_name|>Utils.ts<|end_file_name|><|fim▁begin|>import { gt } from 'ramda';
export function Update<T>(initialState:T, overrideObject:Partial<T>):T {
return Object.assign({}, initialState, overrideObject);
}
export type Element = 'input' | 'textarea' | 'select' | 'tab-selection' | 'toggle' | 'checkbox' | 'radio';
export const DefaultClassName = 'form-element';
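// Usage sketch (illustrative, based on appendToWrapperClass below):
//   appendToWrapperClass('has-error', 'select')
//     -> 'form-element form-element-select has-error'
//   appendToWrapperClass('')  // empty strings are filtered out
//     -> 'form-element form-element-input'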
export const appendToWrapperClass = (addClassName:string, element:Element = 'input') => {
const baseClassName = DefaultClassName,
elementClassName = `${baseClassName}-${element}`;
let classNames:string[] = [];<|fim▁hole|>
return classNames
.filter(_ => gt(_.length, 0))
.join(' ');
};<|fim▁end|> |
classNames.push(baseClassName, elementClassName, addClassName); |
<|file_name|>cssrulelist.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::CSSRuleListBinding;
use dom::bindings::codegen::Bindings::CSSRuleListBinding::CSSRuleListMethods;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::js::{JS, MutNullableJS, Root};
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::csskeyframerule::CSSKeyframeRule;
use dom::cssrule::CSSRule;
use dom::cssstylesheet::CSSStyleSheet;
use dom::window::Window;
use dom_struct::dom_struct;
use parking_lot::RwLock;
use std::sync::Arc;
use style::stylesheets::{CssRules, KeyframesRule, RulesMutateError};
#[allow(unsafe_code)]
unsafe_no_jsmanaged_fields!(RulesSource);
unsafe_no_jsmanaged_fields!(CssRules);
impl From<RulesMutateError> for Error {
fn from(other: RulesMutateError) -> Self {
match other {
RulesMutateError::Syntax => Error::Syntax,
RulesMutateError::IndexSize => Error::IndexSize,
RulesMutateError::HierarchyRequest => Error::HierarchyRequest,
RulesMutateError::InvalidState => Error::InvalidState,
}
}
}
#[dom_struct]
pub struct CSSRuleList {
reflector_: Reflector,
parent_stylesheet: JS<CSSStyleSheet>,
#[ignore_heap_size_of = "Arc"]
rules: RulesSource,
dom_rules: DOMRefCell<Vec<MutNullableJS<CSSRule>>>
}
pub enum RulesSource {
Rules(Arc<RwLock<CssRules>>),
Keyframes(Arc<RwLock<KeyframesRule>>),
}
impl CSSRuleList {
#[allow(unrooted_must_root)]
pub fn new_inherited(parent_stylesheet: &CSSStyleSheet, rules: RulesSource) -> CSSRuleList {
let dom_rules = match rules {
RulesSource::Rules(ref rules) => {
rules.read().0.iter().map(|_| MutNullableJS::new(None)).collect()
}
RulesSource::Keyframes(ref rules) => {
rules.read().keyframes.iter().map(|_| MutNullableJS::new(None)).collect()
}
};
CSSRuleList {
reflector_: Reflector::new(),
parent_stylesheet: JS::from_ref(parent_stylesheet),
rules: rules,
dom_rules: DOMRefCell::new(dom_rules),
}<|fim▁hole|> rules: RulesSource) -> Root<CSSRuleList> {
reflect_dom_object(box CSSRuleList::new_inherited(parent_stylesheet, rules),
window,
CSSRuleListBinding::Wrap)
}
/// Should only be called for CssRules-backed rules. Use append_lazy_rule
/// for keyframes-backed rules.
pub fn insert_rule(&self, rule: &str, idx: u32, nested: bool) -> Fallible<u32> {
let css_rules = if let RulesSource::Rules(ref rules) = self.rules {
rules
} else {
panic!("Called insert_rule on non-CssRule-backed CSSRuleList");
};
let global = self.global();
let window = global.as_window();
let index = idx as usize;
let parent_stylesheet = self.parent_stylesheet.style_stylesheet();
let new_rule = css_rules.write().insert_rule(rule, parent_stylesheet, index, nested)?;
let parent_stylesheet = &*self.parent_stylesheet;
let dom_rule = CSSRule::new_specific(&window, parent_stylesheet, new_rule);
self.dom_rules.borrow_mut().insert(index, MutNullableJS::new(Some(&*dom_rule)));
Ok((idx))
}
// In case of a keyframe rule, index must be valid.
pub fn remove_rule(&self, index: u32) -> ErrorResult {
let index = index as usize;
match self.rules {
RulesSource::Rules(ref css_rules) => {
css_rules.write().remove_rule(index)?;
let mut dom_rules = self.dom_rules.borrow_mut();
dom_rules[index].get().map(|r| r.detach());
dom_rules.remove(index);
Ok(())
}
RulesSource::Keyframes(ref kf) => {
// https://drafts.csswg.org/css-animations/#dom-csskeyframesrule-deleterule
let mut dom_rules = self.dom_rules.borrow_mut();
dom_rules[index].get().map(|r| r.detach());
dom_rules.remove(index);
kf.write().keyframes.remove(index);
Ok(())
}
}
}
// Remove parent stylesheets from all children
pub fn deparent_all(&self) {
for rule in self.dom_rules.borrow().iter() {
rule.get().map(|r| Root::upcast(r).deparent());
}
}
pub fn item(&self, idx: u32) -> Option<Root<CSSRule>> {
self.dom_rules.borrow().get(idx as usize).map(|rule| {
rule.or_init(|| {
let parent_stylesheet = &self.parent_stylesheet;
match self.rules {
RulesSource::Rules(ref rules) => {
CSSRule::new_specific(self.global().as_window(),
parent_stylesheet,
rules.read().0[idx as usize].clone())
}
RulesSource::Keyframes(ref rules) => {
Root::upcast(CSSKeyframeRule::new(self.global().as_window(),
parent_stylesheet,
rules.read()
.keyframes[idx as usize]
.clone()))
}
}
})
})
}
/// Add a rule to the list of DOM rules. This list is lazy,
/// so we just append a placeholder.
///
/// Should only be called for keyframes-backed rules, use insert_rule
/// for CssRules-backed rules
pub fn append_lazy_dom_rule(&self) {
if let RulesSource::Rules(..) = self.rules {
panic!("Can only call append_lazy_rule with keyframes-backed CSSRules");
}
self.dom_rules.borrow_mut().push(MutNullableJS::new(None));
}
}
impl CSSRuleListMethods for CSSRuleList {
// https://drafts.csswg.org/cssom/#ref-for-dom-cssrulelist-item-1
fn Item(&self, idx: u32) -> Option<Root<CSSRule>> {
self.item(idx)
}
// https://drafts.csswg.org/cssom/#dom-cssrulelist-length
fn Length(&self) -> u32 {
self.dom_rules.borrow().len() as u32
}
// check-tidy: no specs after this line
fn IndexedGetter(&self, index: u32) -> Option<Root<CSSRule>> {
self.Item(index)
}
}<|fim▁end|> | }
#[allow(unrooted_must_root)]
pub fn new(window: &Window, parent_stylesheet: &CSSStyleSheet, |
<|file_name|>homework_3_chipotle.py<|end_file_name|><|fim▁begin|>'''
Python Homework with Chipotle data
https://github.com/TheUpshot/chipotle
'''
'''
BASIC LEVEL
PART 1: Read in the file with csv.reader() and store it in an object called 'file_nested_list'.
Hint: This is a TSV file, and csv.reader() needs to be told how to handle it.
https://docs.python.org/2/library/csv.html<|fim▁hole|>#[your code here]
import csv
with open("chipotle.tsv", mode="rU") as f:
file_nested_list = [row for row in csv.reader(f, delimiter="\t")]
#WITHOUT csv.reader()
#with open("chipotle.tsv", mode="rU") as f:
# file_nested_list = [row.split("\t") for row in f]
'''
BASIC LEVEL
PART 2: Separate 'file_nested_list' into the 'header' and the 'data'.
'''
#[your code here]
header = file_nested_list[0]
data = file_nested_list[1:]
'''
INTERMEDIATE LEVEL
PART 3: Calculate the average price of an order.
Hint: Examine the data to see if the 'quantity' column is relevant to this calculation.
Hint: Think carefully about the simplest way to do this! Break the problem into steps
and then code each step
'''
ANSWER == 18.81
#slice the data list to include only the order_id column (sublist)
order_id = []
for row in data:
row[0]
order_id.append(row[0])
print order_id[0:5] #check to make sure the loop sliced the correct column
number_orders = len(set(order_id)) #count distinct of order numbers are store it in a variable
print number_orders #print out order number should be 1834
#create a list of item prices from the item_price column (list).
#First remove"$" character and then converting the string into a float
#need to convert to float because of decimals
#Can all be accomplished in a single for loop
price = []
for row in data:
row[-1][1:6]
price.append(float(row[-1][1:6]))
type(price) #confirm that this is a list
type(price[0]) #confirm that values in list are floats
print price
#Create a list of order quantities and convert the strings into integers
#quantity = []
#for row in data:
# row[1]
# quantity.append(int(row[1]))
#type(quantity) #confirm that this is a list
#type(quantity[0]) #confirm that values in list are integers
#Get total price by doing elementwise multiplication to our two lists: quantity and price
#total_price = [a*b for a,b in zip(price,quantity)]
#use sum function to create a single flaot value
#we can use the sum function without multiplying price by the quantit column
#because the price column/var already reflects the quantity multiplier
sum_total_price = sum(price)
print sum_total_price
avg_order_price = (sum_total_price/number_orders)
print avg_order_price
'''
INTERMEDIATE LEVEL
PART 4: Create a list (or set) of all unique sodas and soft drinks that they sell.
Note: Just look for 'Canned Soda' and 'Canned Soft Drink', and ignore other drinks like 'Izze'.
'''
soda_list = []
for row in data:
if (row[2] == "Canned Soda" or row[2] == "Canned Soft Drink"):
soda_list.append(row[3])
unique_sodas = set(soda_list)
print unique_sodas
'''
ADVANCED LEVEL
PART 5: Calculate the average number of toppings per burrito.
Note: Let's ignore the 'quantity' column to simplify this task.
Hint: Think carefully about the easiest way to count the number of toppings!
'''
ANSWER == 5.40
'''
NOTE: much more complicated code below, below is the condensed version
'''
http://stackoverflow.com/questions/823561/what-does-mean-in-python
burrito_orders = 0
toppings_number = 0
for row in data:
if "Burrito" in row[2]:
burrito_orders += 1
toppings_number += (row[3].count(',') + 1)
avg_toppings = (round(toppings_number/float(burrito_orders), 2))
print avg_toppings
##create a list that contains only burrito toppings
#toppings_list = []
#for row in data:
# if (row[2] == "Steak Burrito" or row[2] == "Chicken Burrito" or row[2] == "Veggie Burrito" or row[2] == "Carnitas Burrito" or row[2] == "Barbacoa Burrito" or row[2] == "Burrito"):
# toppings_list.append(row[3])
#print toppings_list #1172
#
##find the number of burritos
##check this using excel...bad I know....but I don't trust other ways of checking.
##plus it's probably more defensible to tell your stakeholder you checked this way rather
##than some complex other logic using code...
#number_burrito_orders = len(toppings_list)
#print number_burrito_orders
#
##find the total number of toppings using list comprehension but only works for lists with
##one level of nesting
#num_toppings = [item for sublist in toppings_list for item in sublist].count(",")
#print num_toppings #number of burrito toppings = 5151, this number is too low
##a visual inspection of the data suggests that there are closer to 7-10 toppings per order
##thus the order number should be somewhere around 9-10K
#
##create a function to flatten the list, pulled shamelessly from stack exchange
#def flatten(x):
# result = []
# for el in x:
# if hasattr(el, "__iter__") and not isinstance(el, basestring):
# result.extend(flatten(el))
# else:
# result.append(el)
# return result
#
##store flattened list in var
#flat_toppings_list = flatten(toppings_list)
#print flat_toppings_list
#
##loop through flattened list and count each comma and add 1
#number_toppings = []
#for item in flat_toppings_list:
# item.count(",")
# number_toppings.append(item.count(",") + 1)
#
##create a var with the sum of toppings
#sum_number_toppings = sum(number_toppings)
#print sum_number_toppings
#
#avg_toppings = (round(sum_number_toppings / float(number_burrito_orders), 2))
#print avg_toppings
'''
ADVANCED LEVEL
PART 6: Create a dictionary in which the keys represent chip orders and
the values represent the total number of orders.
Expected output: {'Chips and Roasted Chili-Corn Salsa': 18, ... }
Note: Please take the 'quantity' column into account!
Optional: Learn how to use 'defaultdict' to simplify your code.
'''
from collections import defaultdict
chips = defaultdict(int)
for row in data:
if "Chips" in row[2]:
chips[row[2]] += int(row[1])<|fim▁end|> | '''
|
<|file_name|>test_webserver_command.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import os
import subprocess
import tempfile
import time
import unittest
from unittest import mock
import psutil
import pytest
from airflow import settings
from airflow.cli import cli_parser
from airflow.cli.commands import webserver_command
from airflow.cli.commands.webserver_command import GunicornMonitor
from airflow.utils.cli import setup_locations
from tests.test_utils.config import conf_vars
class TestGunicornMonitor(unittest.TestCase):
def setUp(self) -> None:
self.monitor = GunicornMonitor(
gunicorn_master_pid=1,
num_workers_expected=4,
master_timeout=60,
worker_refresh_interval=60,
worker_refresh_batch_size=2,
reload_on_plugin_change=True,
)
mock.patch.object(self.monitor, '_generate_plugin_state', return_value={}).start()
mock.patch.object(self.monitor, '_get_num_ready_workers_running', return_value=4).start()
mock.patch.object(self.monitor, '_get_num_workers_running', return_value=4).start()
mock.patch.object(self.monitor, '_spawn_new_workers', return_value=None).start()
mock.patch.object(self.monitor, '_kill_old_workers', return_value=None).start()
mock.patch.object(self.monitor, '_reload_gunicorn', return_value=None).start()
@mock.patch('airflow.cli.commands.webserver_command.sleep')
def test_should_wait_for_workers_to_start(self, mock_sleep):
self.monitor._get_num_ready_workers_running.return_value = 0
self.monitor._get_num_workers_running.return_value = 4
self.monitor._check_workers()
self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member
self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member
self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member
@mock.patch('airflow.cli.commands.webserver_command.sleep')
def test_should_kill_excess_workers(self, mock_sleep):
self.monitor._get_num_ready_workers_running.return_value = 10
self.monitor._get_num_workers_running.return_value = 10
self.monitor._check_workers()
self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member
self.monitor._kill_old_workers.assert_called_once_with(2) # pylint: disable=no-member
self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member
@mock.patch('airflow.cli.commands.webserver_command.sleep')
def test_should_start_new_workers_when_missing(self, mock_sleep):
self.monitor._get_num_ready_workers_running.return_value = 2
self.monitor._get_num_workers_running.return_value = 2
self.monitor._check_workers()
self.monitor._spawn_new_workers.assert_called_once_with(2) # pylint: disable=no-member
self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member
self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member
@mock.patch('airflow.cli.commands.webserver_command.sleep')
def test_should_start_new_workers_when_refresh_interval_has_passed(self, mock_sleep):
self.monitor._last_refresh_time -= 200
self.monitor._check_workers()
self.monitor._spawn_new_workers.assert_called_once_with(2) # pylint: disable=no-member
self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member
self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member
self.assertAlmostEqual(self.monitor._last_refresh_time, time.monotonic(), delta=5)
@mock.patch('airflow.cli.commands.webserver_command.sleep')
def test_should_reload_when_plugin_has_been_changed(self, mock_sleep):
self.monitor._generate_plugin_state.return_value = {'AA': 12}
self.monitor._check_workers()
self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member
self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member
self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member
self.monitor._generate_plugin_state.return_value = {'AA': 32}
self.monitor._check_workers()
self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member
self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member
self.monitor._reload_gunicorn.assert_not_called() # pylint: disable=no-member
self.monitor._generate_plugin_state.return_value = {'AA': 32}
self.monitor._check_workers()
self.monitor._spawn_new_workers.assert_not_called() # pylint: disable=no-member
self.monitor._kill_old_workers.assert_not_called() # pylint: disable=no-member
self.monitor._reload_gunicorn.assert_called_once_with() # pylint: disable=no-member
self.assertAlmostEqual(self.monitor._last_refresh_time, time.monotonic(), delta=5)
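    # Taken together, the cases above sketch the monitor's decision table as
    # exercised by these tests (not a full spec): workers still booting ->
    # wait; too many running -> kill a refresh batch (2); too few -> spawn;
    # refresh interval elapsed -> spawn a batch; a plugin-state change is
    # acted on one cycle later, once the changed state is seen again, by
    # reloading gunicorn.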
class TestGunicornMonitorGeneratePluginState(unittest.TestCase):
@staticmethod
def _prepare_test_file(filepath: str, size: int):
os.makedirs(os.path.dirname(filepath), exist_ok=True)
with open(filepath, "w") as file:
file.write("A" * size)
file.flush()
def test_should_detect_changes_in_directory(self):
with tempfile.TemporaryDirectory() as tempdir, mock.patch(
"airflow.cli.commands.webserver_command.settings.PLUGINS_FOLDER", tempdir
):
self._prepare_test_file(f"{tempdir}/file1.txt", 100)
self._prepare_test_file(f"{tempdir}/nested/nested/nested/nested/file2.txt", 200)
self._prepare_test_file(f"{tempdir}/file3.txt", 300)
monitor = GunicornMonitor(
gunicorn_master_pid=1,
num_workers_expected=4,
master_timeout=60,
worker_refresh_interval=60,
worker_refresh_batch_size=2,
reload_on_plugin_change=True,
)
# When the files have not changed, the result should be constant
state_a = monitor._generate_plugin_state()
state_b = monitor._generate_plugin_state()
self.assertEqual(state_a, state_b)
self.assertEqual(3, len(state_a))
# Should detect new file
self._prepare_test_file(f"{tempdir}/file4.txt", 400)
state_c = monitor._generate_plugin_state()
self.assertNotEqual(state_b, state_c)
self.assertEqual(4, len(state_c))
# Should detect changes in files
self._prepare_test_file(f"{tempdir}/file4.txt", 450)
state_d = monitor._generate_plugin_state()
self.assertNotEqual(state_c, state_d)
self.assertEqual(4, len(state_d))
# Should support large files
self._prepare_test_file(f"{tempdir}/file4.txt", 4000000)
state_d = monitor._generate_plugin_state()
self.assertNotEqual(state_c, state_d)
self.assertEqual(4, len(state_d))
class TestCLIGetNumReadyWorkersRunning(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.parser = cli_parser.get_parser()
def setUp(self):
self.children = mock.MagicMock()
self.child = mock.MagicMock()
self.process = mock.MagicMock()
self.monitor = GunicornMonitor(
gunicorn_master_pid=1,
num_workers_expected=4,
master_timeout=60,
worker_refresh_interval=60,
worker_refresh_batch_size=2,
reload_on_plugin_change=True,
)
def test_ready_prefix_on_cmdline(self):
self.child.cmdline.return_value = [settings.GUNICORN_WORKER_READY_PREFIX]
self.process.children.return_value = [self.child]
with mock.patch('psutil.Process', return_value=self.process):
self.assertEqual(self.monitor._get_num_ready_workers_running(), 1)
def test_ready_prefix_on_cmdline_no_children(self):
self.process.children.return_value = []
with mock.patch('psutil.Process', return_value=self.process):
self.assertEqual(self.monitor._get_num_ready_workers_running(), 0)
def test_ready_prefix_on_cmdline_zombie(self):
self.child.cmdline.return_value = []
self.process.children.return_value = [self.child]
with mock.patch('psutil.Process', return_value=self.process):
self.assertEqual(self.monitor._get_num_ready_workers_running(), 0)
def test_ready_prefix_on_cmdline_dead_process(self):
self.child.cmdline.side_effect = psutil.NoSuchProcess(11347)
self.process.children.return_value = [self.child]
with mock.patch('psutil.Process', return_value=self.process):
self.assertEqual(self.monitor._get_num_ready_workers_running(), 0)
class TestCliWebServer(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.parser = cli_parser.get_parser()
def setUp(self) -> None:
self._check_processes()
self._clean_pidfiles()
def _check_processes(self, ignore_running=False):
# Confirm that webserver hasn't been launched.
# pgrep returns exit status 1 if no process matched.
exit_code_pgrep_webserver = subprocess.Popen(["pgrep", "-c", "-f", "airflow webserver"]).wait()
exit_code_pgrep_gunicorn = subprocess.Popen(["pgrep", "-c", "-f", "gunicorn"]).wait()
if exit_code_pgrep_webserver != 1 or exit_code_pgrep_gunicorn != 1:
subprocess.Popen(["ps", "-ax"]).wait()
if exit_code_pgrep_webserver != 1:
subprocess.Popen(["pkill", "-9", "-f", "airflow webserver"]).wait()
if exit_code_pgrep_gunicorn != 1:
subprocess.Popen(["pkill", "-9", "-f", "gunicorn"]).wait()
if not ignore_running:
raise AssertionError(
"Background processes are running that prevent the test from passing successfully."
)
def tearDown(self) -> None:
self._check_processes(ignore_running=True)
self._clean_pidfiles()
<|fim▁hole|> if os.path.exists(pidfile_webserver):
os.remove(pidfile_webserver)
if os.path.exists(pidfile_monitor):
os.remove(pidfile_monitor)
def _wait_pidfile(self, pidfile):
start_time = time.monotonic()
while True:
try:
with open(pidfile) as file:
return int(file.read())
except Exception: # pylint: disable=broad-except
if start_time - time.monotonic() > 60:
raise
time.sleep(1)
def test_cli_webserver_foreground(self):
with mock.patch.dict(
"os.environ",
AIRFLOW__CORE__DAGS_FOLDER="/dev/null",
AIRFLOW__CORE__LOAD_EXAMPLES="False",
AIRFLOW__WEBSERVER__WORKERS="1",
):
# Run webserver in foreground and terminate it.
proc = subprocess.Popen(["airflow", "webserver"])
self.assertEqual(None, proc.poll())
# Wait for process
time.sleep(10)
# Terminate webserver
proc.terminate()
# -15 - the server was stopped before it started
# 0 - the server terminated correctly
self.assertIn(proc.wait(60), (-15, 0))
def test_cli_webserver_foreground_with_pid(self):
with tempfile.TemporaryDirectory(prefix='tmp-pid') as tmpdir:
pidfile = f"{tmpdir}/pidfile"
with mock.patch.dict(
"os.environ",
AIRFLOW__CORE__DAGS_FOLDER="/dev/null",
AIRFLOW__CORE__LOAD_EXAMPLES="False",
AIRFLOW__WEBSERVER__WORKERS="1",
):
proc = subprocess.Popen(["airflow", "webserver", "--pid", pidfile])
self.assertEqual(None, proc.poll())
# Check the file specified by --pid option exists
self._wait_pidfile(pidfile)
# Terminate webserver
proc.terminate()
self.assertEqual(0, proc.wait(60))
@pytest.mark.quarantined
def test_cli_webserver_background(self):
with tempfile.TemporaryDirectory(prefix="gunicorn") as tmpdir, mock.patch.dict(
"os.environ",
AIRFLOW__CORE__DAGS_FOLDER="/dev/null",
AIRFLOW__CORE__LOAD_EXAMPLES="False",
AIRFLOW__WEBSERVER__WORKERS="1",
):
pidfile_webserver = f"{tmpdir}/pidflow-webserver.pid"
pidfile_monitor = f"{tmpdir}/pidflow-webserver-monitor.pid"
stdout = f"{tmpdir}/airflow-webserver.out"
stderr = f"{tmpdir}/airflow-webserver.err"
logfile = f"{tmpdir}/airflow-webserver.log"
try:
# Run webserver as daemon in background. Note that the wait method is not called.
proc = subprocess.Popen(
[
"airflow",
"webserver",
"--daemon",
"--pid",
pidfile_webserver,
"--stdout",
stdout,
"--stderr",
stderr,
"--log-file",
logfile,
]
)
self.assertEqual(None, proc.poll())
pid_monitor = self._wait_pidfile(pidfile_monitor)
self._wait_pidfile(pidfile_webserver)
# Assert that gunicorn and its monitor are launched.
self.assertEqual(
0, subprocess.Popen(["pgrep", "-f", "-c", "airflow webserver --daemon"]).wait()
)
self.assertEqual(0, subprocess.Popen(["pgrep", "-c", "-f", "gunicorn: master"]).wait())
# Terminate monitor process.
proc = psutil.Process(pid_monitor)
proc.terminate()
self.assertIn(proc.wait(120), (0, None))
self._check_processes()
except Exception:
# List all logs
subprocess.Popen(["ls", "-lah", tmpdir]).wait()
# Dump all logs
subprocess.Popen(["bash", "-c", f"ls {tmpdir}/* | xargs -n 1 -t cat"]).wait()
raise
# Patch for causing webserver timeout
@mock.patch(
"airflow.cli.commands.webserver_command.GunicornMonitor._get_num_workers_running", return_value=0
)
def test_cli_webserver_shutdown_when_gunicorn_master_is_killed(self, _):
# Shorten timeout so that this test doesn't take too long time
args = self.parser.parse_args(['webserver'])
with conf_vars({('webserver', 'web_server_master_timeout'): '10'}):
with self.assertRaises(SystemExit) as e:
webserver_command.webserver(args)
self.assertEqual(e.exception.code, 1)
def test_cli_webserver_debug(self):
env = os.environ.copy()
proc = psutil.Popen(["airflow", "webserver", "--debug"], env=env)
time.sleep(3) # wait for webserver to start
return_code = proc.poll()
self.assertEqual(
None, return_code, f"webserver terminated with return code {return_code} in debug mode"
)
proc.terminate()
self.assertEqual(-15, proc.wait(60))
def test_cli_webserver_access_log_format(self):
# json access log format
access_logformat = (
"{\"ts\":\"%(t)s\",\"remote_ip\":\"%(h)s\",\"request_id\":\"%({"
"X-Request-Id}i)s\",\"code\":\"%(s)s\",\"request_method\":\"%(m)s\","
"\"request_path\":\"%(U)s\",\"agent\":\"%(a)s\",\"response_time\":\"%(D)s\","
"\"response_length\":\"%(B)s\"} "
)
with tempfile.TemporaryDirectory() as tmpdir, mock.patch.dict(
"os.environ",
AIRFLOW__CORE__DAGS_FOLDER="/dev/null",
AIRFLOW__CORE__LOAD_EXAMPLES="False",
AIRFLOW__WEBSERVER__WORKERS="1",
):
access_logfile = f"{tmpdir}/access.log"
# Run webserver in foreground and terminate it.
proc = subprocess.Popen(
[
"airflow",
"webserver",
"--access-logfile",
access_logfile,
"--access-logformat",
access_logformat,
]
)
self.assertEqual(None, proc.poll())
# Wait for webserver process
time.sleep(10)
proc2 = subprocess.Popen(["curl", "http://localhost:8080"])
proc2.wait(10)
try:
file = open(access_logfile)
log = json.loads(file.read())
self.assertEqual('127.0.0.1', log.get('remote_ip'))
self.assertEqual(len(log), 9)
self.assertEqual('GET', log.get('request_method'))
except OSError:
print("access log file not found at " + access_logfile)
# Terminate webserver
proc.terminate()
# -15 - the server was stopped before it started
# 0 - the server terminated correctly
self.assertIn(proc.wait(60), (-15, 0))
self._check_processes()<|fim▁end|> | def _clean_pidfiles(self):
pidfile_webserver = setup_locations("webserver")[0]
pidfile_monitor = setup_locations("webserver-monitor")[0] |
<|file_name|>Things.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package clay;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
*
* @author MultiTool
 * Needs a spring force of repulsion and a spring force of attraction,
 * plus a binding radius and an un-binding (bond-breaking) radius.
*/
public class Things {
public static int NDims = 2;
/* **************************************************************************** */
public static class Point {
double[] V = new double[NDims];
/* **************************************************************************** */
public double Magnitude() {// pythagoras
double dif, sumsq = 0.0;
for (int dcnt = 0; dcnt < NDims; dcnt++) {
dif = this.V[dcnt];
sumsq += dif * dif;
}
return Math.sqrt(sumsq);
}
/* **************************************************************************** */
public double DeltaMag(Point other) {// pythagoras
double dif, sumsq = 0.0;
for (int dcnt = 0; dcnt < NDims; dcnt++) {
dif = this.V[dcnt] - other.V[dcnt];
sumsq += dif * dif;
}
return Math.sqrt(sumsq);
}
/* **************************************************************************** */
public void DeltaVec(Point origin, Point diff) {
for (int dcnt = 0; dcnt < NDims; dcnt++) {
diff.V[dcnt] = this.V[dcnt] - origin.V[dcnt];
}
}
/* **************************************************************************** */
public void AddVec(Point other) {
for (int dcnt = 0; dcnt < NDims; dcnt++) {
this.V[dcnt] += other.V[dcnt];
}
}
/* **************************************************************************** */
public void Unitize() {
double length = this.Magnitude();
for (int dcnt = 0; dcnt < NDims; dcnt++) {
this.V[dcnt] = this.V[dcnt] / length;
}
}
/* **************************************************************************** */
public void Multiply(double magnitude) {
for (int dcnt = 0; dcnt < NDims; dcnt++) {
this.V[dcnt] *= magnitude;
}
}
/* **************************************************************************** */
public void Clear() {
for (int dcnt = 0; dcnt < NDims; dcnt++) {
this.V[dcnt] = 0.0;
}
}
/* **************************************************************************** */
public void Copy(Point other) {
System.arraycopy(other.V, 0, this.V, 0, NDims);
}
}
/* **************************************************************************** */
public static class SpringVec extends Point {
public int GenStamp;
}
/* **************************************************************************** */
public static class Link {
public Atom mine;
public Link parallel;
public static double RestingRadius = 10;
public static double Radius = 5.0;
public static double BindingRadius = 5.0;
public static double BreakingRadius = BindingRadius + 1.0;
public SpringVec Spring;
// should we have two links between atoms or one?
// with a planet/grav model, two gravity wells would fit
// a spring is symmetrical.
public void InterLink(Link other) {
other.parallel = this;
this.parallel = other;
other.Spring = this.Spring = new SpringVec();// to do: rewrite this for single two-way links rather than twin links.
}
public Atom GetOtherAtom() {
return this.parallel.mine;
}
public void CalcForceVector(Point Delta) {
Point diffv;
double distortion, dif;
Atom me = this.mine;
Atom you = this.GetOtherAtom();
diffv = new Point();
me.Loc.DeltaVec(you.Loc, diffv);
dif = diffv.Magnitude();
distortion = dif - Link.RestingRadius;// distortion is displacement from resting length of spring
diffv.Unitize();
      diffv.Multiply(-distortion * 0.5);// halve the correction: it is applied once at each end of the link, so the full distortion would double the restoring displacement
      Delta.Copy(diffv);// return the computed spring force to the caller through the out-parameter
      // the shared Spring vector and its GenStamp let React() reuse this computation for both ends of the link pair.
// a = f/m
// the link pair COULD have a common storage object that keeps reusable info such as vector. baroque. <|fim▁hole|> // normalize my direction vector, multiply by magnitude of distortion.
}
}
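  /* Worked example (hypothetical numbers): two bound atoms sitting 12 units apart
     with RestingRadius = 10 give distortion = 2. The unit vector between the ends
     is scaled by -(2 * 0.5) = -1, so each end is nudged one unit back toward rest. */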
/* **************************************************************************** */
public static class Atom {
// public double Radius = 5.0;
// public double BindingRadius = 5.0;
// public double BreakingRadius = BindingRadius + 1.0;
public Point Loc = new Point();
public Point LocNext = new Point();
public Point Vel = new Point();
public Map<Atom, Link> Bindings;
public Atom() {
this.Bindings = new HashMap<>();
}
public void Bind(Atom other) {
Link OtherLnk = new Link();
OtherLnk.mine = other;
Link MeLnk = new Link();
MeLnk.mine = this;
MeLnk.InterLink(OtherLnk);
this.Bindings.put(other, MeLnk);
other.Bindings.put(this, OtherLnk);
}
public void Rollover() {
this.Loc.Copy(this.LocNext);
}
/* **************************************************************************** */
public void Seek_Bindings(Atom[] Atoms) {
Atom you;// ultimately replace this with 2d array-based collision detection.
double dif;
int NumAtoms = Atoms.length;
for (int acnt1 = 0; acnt1 < NumAtoms; acnt1++) {
you = Atoms[acnt1];
if (this != you) {
if (!this.Bindings.containsKey(you)) {// Find out if you are already connected to me.
dif = this.Loc.DeltaMag(you.Loc);
if (dif < Link.BindingRadius) {// if not bound, then bind
this.Bind(you);
}
}
}
}
}
/* **************************************************************************** */
public void Seek_Unbindings() {
Atom YouAtom;
Link MeLnk;
double dif;
      Iterator<Map.Entry<Atom, Link>> it = this.Bindings.entrySet().iterator();
      while (it.hasNext()) {
        Map.Entry<Atom, Link> pair = it.next();
        MeLnk = pair.getValue();
        YouAtom = pair.getKey();
dif = this.Loc.DeltaMag(YouAtom.Loc);
if (dif > Link.BreakingRadius) {// if bound, then break
// here we remove from my table via iterator, and from your table via remove(key)
YouAtom.Bindings.remove(this);
it.remove();
MeLnk.mine = null;
MeLnk.parallel.mine = null;
}
}
}
}
/* **************************************************************************** */
public int NumAtoms = 100;
public Atom[] Atoms;
public int GenCnt = 0;
/* **************************************************************************** */
public Things() {
Atoms = new Atom[NumAtoms];
}
/* **************************************************************************** */
public void React() {
GenCnt++;
Atom me;
Link MeLnk;
Point DiffV = new Point();
for (int acnt0 = 0; acnt0 < NumAtoms; acnt0++) {
me = this.Atoms[acnt0];
me.LocNext.Clear();
      // Bindings maps Atom -> Link, so an index-based get() would always return null; iterate the link values instead.
      for (Link lnk : me.Bindings.values()) {
        MeLnk = lnk;
        if (MeLnk.Spring.GenStamp < GenCnt) {
          MeLnk.CalcForceVector(DiffV);// f=ma but m is always 1 for now
          MeLnk.Spring.Copy(DiffV);
          MeLnk.Spring.GenStamp = GenCnt;
          me.LocNext.AddVec(MeLnk.Spring);// Accumulate this end's displacement into my next move.
        } else {
          // The parallel link already computed this spring this generation from the other
          // end's point of view, so apply the equal-and-opposite force to this end.
          Point reaction = new Point();
          reaction.Copy(MeLnk.Spring);
          reaction.Multiply(-1.0);
          me.LocNext.AddVec(reaction);
        }
//you = MeLnk.GetOtherAtom(); dif = me.Loc.DeltaMag(you.Loc);
// do physics here
// spring physics, then define new locations and speeds
// phys 0: go through all my neighbors and see which springs are bent. apply force to myself accordingly for each spring.
// phys 1: after all personal next locs are calculated, then rollover for everybody.
}
}
}
/* **************************************************************************** */
public void Rebind() {
Atom me;
for (int acnt0 = 0; acnt0 < NumAtoms; acnt0++) {
me = this.Atoms[acnt0];
/*
I can scan all nbrs for new bindings.
Though I only need to scan my own connections for UNbindings, so I'm already pointing to the link in question when/if I want to break it.
*/
me.Seek_Unbindings();
me.Seek_Bindings(this.Atoms);
}
}
}
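/* Driver sketch (hypothetical; the constructor only allocates the array slots,
   so a caller must populate Atoms before stepping the simulation):
     Things world = new Things();
     for (int i = 0; i < world.NumAtoms; i++) { world.Atoms[i] = new Things.Atom(); }
     for (int step = 0; step < 100; step++) {
       world.React();                                      // accumulate spring forces into LocNext
       for (Things.Atom a : world.Atoms) { a.Rollover(); } // commit the new positions
       world.Rebind();                                     // form and break bonds by distance
     }
*/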
/*
Set<String> keys = hm.keySet();
for(String key: keys){
System.out.println("Value of "+key+" is: "+hm.get(key));
}
Enumeration e = ht.elements();
while (e.hasMoreElements()){
System.out.println(e.nextElement());
}
public static void printMap(Map mp) {// http://stackoverflow.com/questions/1066589/iterate-through-a-hashmap
Iterator it = mp.entrySet().iterator();
while (it.hasNext()) {
Map.Entry pair = (Map.Entry)it.next();
System.out.println(pair.getKey() + " = " + pair.getValue());
it.remove(); // avoids a ConcurrentModificationException
}
}
for (Map.Entry<String, Object> entry : map.entrySet()) {
String key = entry.getKey();
Object value = entry.getValue();
// ...
}
for (Object value : map.values()) {
// ...
}
*/<|fim▁end|> |
// get locations of both of my ends
// get my distortion from my resting length |
<|file_name|>InvestmentAccountOwnershipInformation4.go<|end_file_name|><|fim▁begin|>package iso20022
<|fim▁hole|>// Characteristics of the ownership of an investment account.
type InvestmentAccountOwnershipInformation4 struct {
// Organised structure that is set up for a particular purpose, e.g. a business, government body, department, charity, or financial institution.
Organisation *Organisation3 `xml:"Org"`
// Human entity, as distinguished from a corporate entity (which is sometimes referred to as an 'artificial person').
IndividualPerson *IndividualPerson11 `xml:"IndvPrsn"`
// Status of an identity check to prevent money laundering. This includes the counter-terrorism check.
MoneyLaunderingCheck *MoneyLaunderingCheck1Code `xml:"MnyLndrgChck,omitempty"`
// Status of an identity check to prevent money laundering. This includes the counter-terrorism check.
ExtendedMoneyLaunderingCheck *Extended350Code `xml:"XtndedMnyLndrgChck,omitempty"`
// Percentage of ownership or beneficiary ownership of the shares/units in the account. All subsequent subscriptions and/or redemptions will be allocated using the same percentage.
OwnershipBeneficiaryRate *PercentageRate `xml:"OwnrshBnfcryRate,omitempty"`
// Unique identification, as assigned by an organisation, to unambiguously identify a party.
ClientIdentification *Max35Text `xml:"ClntId,omitempty"`
// Indicates whether an owner of an investment account may benefit from a fiscal exemption or amnesty for instance for declaring overseas investments.
FiscalExemption *YesNoIndicator `xml:"FsclXmptn,omitempty"`
// Indicates whether the account owner signature is required to authorise transactions on the account.
SignatoryRightIndicator *YesNoIndicator `xml:"SgntryRghtInd,omitempty"`
// Information related to the party profile to be inserted or deleted.
ModifiedInvestorProfileValidation []*ModificationScope11 `xml:"ModfdInvstrPrflVldtn,omitempty"`
}
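
// exampleOwnershipInformation is a usage sketch (hypothetical values only) showing
// how the generated builders below assemble the component: Add* methods allocate
// nested messages, while Set* methods coerce plain strings into the typed aliases.
func exampleOwnershipInformation() *InvestmentAccountOwnershipInformation4 {
	info := new(InvestmentAccountOwnershipInformation4)
	info.AddOrganisation()
	info.SetClientIdentification("CLIENT-001")
	info.SetOwnershipBeneficiaryRate("25.5")
	info.SetSignatoryRightIndicator("true")
	return info
}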
func (i *InvestmentAccountOwnershipInformation4) AddOrganisation() *Organisation3 {
i.Organisation = new(Organisation3)
return i.Organisation
}
func (i *InvestmentAccountOwnershipInformation4) AddIndividualPerson() *IndividualPerson11 {
i.IndividualPerson = new(IndividualPerson11)
return i.IndividualPerson
}
func (i *InvestmentAccountOwnershipInformation4) SetMoneyLaunderingCheck(value string) {
i.MoneyLaunderingCheck = (*MoneyLaunderingCheck1Code)(&value)
}
func (i *InvestmentAccountOwnershipInformation4) SetExtendedMoneyLaunderingCheck(value string) {
i.ExtendedMoneyLaunderingCheck = (*Extended350Code)(&value)
}
func (i *InvestmentAccountOwnershipInformation4) SetOwnershipBeneficiaryRate(value string) {
i.OwnershipBeneficiaryRate = (*PercentageRate)(&value)
}
func (i *InvestmentAccountOwnershipInformation4) SetClientIdentification(value string) {
i.ClientIdentification = (*Max35Text)(&value)
}
func (i *InvestmentAccountOwnershipInformation4) SetFiscalExemption(value string) {
i.FiscalExemption = (*YesNoIndicator)(&value)
}
func (i *InvestmentAccountOwnershipInformation4) SetSignatoryRightIndicator(value string) {
i.SignatoryRightIndicator = (*YesNoIndicator)(&value)
}
func (i *InvestmentAccountOwnershipInformation4) AddModifiedInvestorProfileValidation() *ModificationScope11 {
newValue := new(ModificationScope11)
i.ModifiedInvestorProfileValidation = append(i.ModifiedInvestorProfileValidation, newValue)
return newValue
}<|fim▁end|> | |
<|file_name|>exception_window.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import html
from typing import TYPE_CHECKING, Optional, Set
from PyQt5.QtCore import QObject
import PyQt5.QtCore as QtCore
from PyQt5.QtWidgets import (QWidget, QLabel, QPushButton, QTextEdit,
QMessageBox, QHBoxLayout, QVBoxLayout)
from electrum_grs.i18n import _
from electrum_grs.base_crash_reporter import BaseCrashReporter
from electrum_grs.logging import Logger
from electrum_grs import constants
from electrum_grs.network import Network
from .util import MessageBoxMixin, read_QIcon, WaitingDialog
if TYPE_CHECKING:
from electrum_grs.simple_config import SimpleConfig
from electrum_grs.wallet import Abstract_Wallet
class Exception_Window(BaseCrashReporter, QWidget, MessageBoxMixin, Logger):
_active_window = None
def __init__(self, config: 'SimpleConfig', exctype, value, tb):
BaseCrashReporter.__init__(self, exctype, value, tb)
self.network = Network.get_instance()
self.config = config<|fim▁hole|>
QWidget.__init__(self)
self.setWindowTitle('Electrum-GRS - ' + _('An Error Occurred'))
self.setMinimumSize(600, 300)
Logger.__init__(self)
main_box = QVBoxLayout()
heading = QLabel('<h2>' + BaseCrashReporter.CRASH_TITLE + '</h2>')
main_box.addWidget(heading)
main_box.addWidget(QLabel(BaseCrashReporter.CRASH_MESSAGE))
main_box.addWidget(QLabel(BaseCrashReporter.REQUEST_HELP_MESSAGE))
collapse_info = QPushButton(_("Show report contents"))
collapse_info.clicked.connect(
lambda: self.msg_box(QMessageBox.NoIcon,
self, _("Report contents"), self.get_report_string(),
rich_text=True))
main_box.addWidget(collapse_info)
main_box.addWidget(QLabel(BaseCrashReporter.DESCRIBE_ERROR_MESSAGE))
self.description_textfield = QTextEdit()
self.description_textfield.setFixedHeight(50)
self.description_textfield.setPlaceholderText(self.USER_COMMENT_PLACEHOLDER)
main_box.addWidget(self.description_textfield)
main_box.addWidget(QLabel(BaseCrashReporter.ASK_CONFIRM_SEND))
buttons = QHBoxLayout()
report_button = QPushButton(_('Send Bug Report'))
report_button.clicked.connect(self.send_report)
report_button.setIcon(read_QIcon("tab_send.png"))
buttons.addWidget(report_button)
never_button = QPushButton(_('Never'))
never_button.clicked.connect(self.show_never)
buttons.addWidget(never_button)
close_button = QPushButton(_('Not Now'))
close_button.clicked.connect(self.close)
buttons.addWidget(close_button)
main_box.addLayout(buttons)
self.setLayout(main_box)
self.show()
def send_report(self):
def on_success(response):
# note: 'response' coming from (remote) crash reporter server.
# It contains a URL to the GitHub issue, so we allow rich text.
self.show_message(parent=self,
title=_("Crash report"),
msg=response,
rich_text=True)
self.close()
def on_failure(exc_info):
e = exc_info[1]
self.logger.error('There was a problem with the automatic reporting', exc_info=exc_info)
self.show_critical(parent=self,
msg=(_('There was a problem with the automatic reporting:') + '<br/>' +
repr(e)[:120] + '<br/><br/>' +
_("Please report this issue manually") +
f' <a href="{constants.GIT_REPO_ISSUES_URL}">on GitHub</a>.'),
rich_text=True)
proxy = self.network.proxy
task = lambda: BaseCrashReporter.send_report(self, self.network.asyncio_loop, proxy)
msg = _('Sending crash report...')
WaitingDialog(self, msg, task, on_success, on_failure)
def on_close(self):
Exception_Window._active_window = None
self.close()
def show_never(self):
self.config.set_key(BaseCrashReporter.config_key, False)
self.close()
def closeEvent(self, event):
self.on_close()
event.accept()
def get_user_description(self):
return self.description_textfield.toPlainText()
def get_wallet_type(self):
wallet_types = Exception_Hook._INSTANCE.wallet_types_seen
return ",".join(wallet_types)
def _get_traceback_str_to_display(self) -> str:
# The msg_box that shows the report uses rich_text=True, so
# if traceback contains special HTML characters, e.g. '<',
# they need to be escaped to avoid formatting issues.
traceback_str = super()._get_traceback_str_to_display()
return html.escape(traceback_str)
def _show_window(*args):
if not Exception_Window._active_window:
Exception_Window._active_window = Exception_Window(*args)
class Exception_Hook(QObject, Logger):
_report_exception = QtCore.pyqtSignal(object, object, object, object)
_INSTANCE = None # type: Optional[Exception_Hook] # singleton
def __init__(self, *, config: 'SimpleConfig'):
QObject.__init__(self)
Logger.__init__(self)
assert self._INSTANCE is None, "Exception_Hook is supposed to be a singleton"
self.config = config
self.wallet_types_seen = set() # type: Set[str]
sys.excepthook = self.handler
self._report_exception.connect(_show_window)
@classmethod
def maybe_setup(cls, *, config: 'SimpleConfig', wallet: 'Abstract_Wallet' = None) -> None:
if not config.get(BaseCrashReporter.config_key, default=True):
return
if not cls._INSTANCE:
cls._INSTANCE = Exception_Hook(config=config)
if wallet:
cls._INSTANCE.wallet_types_seen.add(wallet.wallet_type)
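    # Usage sketch (hypothetical call site): install the hook once at GUI startup
    # so uncaught exceptions open the reporter window instead of killing the Qt
    # event loop:
    #   Exception_Hook.maybe_setup(config=config, wallet=wallet)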
def handler(self, *exc_info):
self.logger.error('exception caught by crash reporter', exc_info=exc_info)
self._report_exception.emit(self.config, *exc_info)<|fim▁end|> | |
<|file_name|>backend.go<|end_file_name|><|fim▁begin|>package mqttpubsub
import (
"encoding/json"
"fmt"
"sync"
"time"
log "github.com/Sirupsen/logrus"
"github.com/brocaar/loraserver/api/gw"
"github.com/brocaar/lorawan"
"github.com/eclipse/paho.mqtt.golang"
)
// Backend implements a MQTT pub-sub backend.
type Backend struct {
conn mqtt.Client
txPacketChan chan gw.TXPacketBytes
gateways map[lorawan.EUI64]struct{}
mutex sync.RWMutex
}
// NewBackend creates a new Backend.
func NewBackend(server, username, password string) (*Backend, error) {
b := Backend{
txPacketChan: make(chan gw.TXPacketBytes),
gateways: make(map[lorawan.EUI64]struct{}),
}
opts := mqtt.NewClientOptions()
opts.AddBroker(server)
opts.SetUsername(username)
opts.SetPassword(password)
opts.SetOnConnectHandler(b.onConnected)
opts.SetConnectionLostHandler(b.onConnectionLost)
log.WithField("server", server).Info("backend: connecting to mqtt broker")
b.conn = mqtt.NewClient(opts)
if token := b.conn.Connect(); token.Wait() && token.Error() != nil {
return nil, token.Error()
}
return &b, nil
}
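
// exampleUsage is a connection sketch (hypothetical broker address; error handling
// trimmed): subscribe for one gateway and drain its TX channel.
func exampleUsage(mac lorawan.EUI64) {
	backend, err := NewBackend("tcp://localhost:1883", "user", "pass")
	if err != nil {
		log.Fatal(err)
	}
	defer backend.Close()
	if err := backend.SubscribeGatewayTX(mac); err != nil {
		log.Fatal(err)
	}
	for txPacket := range backend.TXPacketChan() {
		log.Infof("tx packet to transmit: %+v", txPacket)
	}
}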
// Close closes the backend.
func (b *Backend) Close() {
	b.conn.Disconnect(250) // wait 250 milliseconds to complete pending actions
}
// TXPacketChan returns the TXPacketBytes channel.
func (b *Backend) TXPacketChan() chan gw.TXPacketBytes {
return b.txPacketChan
}
// SubscribeGatewayTX subscribes the backend to the gateway TXPacketBytes
// topic (packets the gateway needs to transmit).
func (b *Backend) SubscribeGatewayTX(mac lorawan.EUI64) error {
defer b.mutex.Unlock()
b.mutex.Lock()
topic := fmt.Sprintf("gateway/%s/tx", mac.String())
log.WithField("topic", topic).Info("backend: subscribing to topic")
if token := b.conn.Subscribe(topic, 0, b.txPacketHandler); token.Wait() && token.Error() != nil {
return token.Error()
}
b.gateways[mac] = struct{}{}
return nil
}
// UnSubscribeGatewayTX unsubscribes the backend from the gateway TXPacketBytes
// topic.
func (b *Backend) UnSubscribeGatewayTX(mac lorawan.EUI64) error {
defer b.mutex.Unlock()
b.mutex.Lock()
topic := fmt.Sprintf("gateway/%s/tx", mac.String())
log.WithField("topic", topic).Info("backend: unsubscribing from topic")
if token := b.conn.Unsubscribe(topic); token.Wait() && token.Error() != nil {
return token.Error()
}
delete(b.gateways, mac)<|fim▁hole|>
// PublishGatewayRX publishes a RX packet to the MQTT broker.
func (b *Backend) PublishGatewayRX(mac lorawan.EUI64, rxPacket gw.RXPacketBytes) error {
topic := fmt.Sprintf("gateway/%s/rx", mac.String())
return b.publish(topic, rxPacket)
}
// PublishGatewayStats publishes a GatewayStatsPacket to the MQTT broker.
func (b *Backend) PublishGatewayStats(mac lorawan.EUI64, stats gw.GatewayStatsPacket) error {
topic := fmt.Sprintf("gateway/%s/stats", mac.String())
return b.publish(topic, stats)
}
func (b *Backend) publish(topic string, v interface{}) error {
bytes, err := json.Marshal(v)
if err != nil {
return err
}
log.WithField("topic", topic).Info("backend: publishing packet")
if token := b.conn.Publish(topic, 0, false, bytes); token.Wait() && token.Error() != nil {
return token.Error()
}
return nil
}
func (b *Backend) txPacketHandler(c mqtt.Client, msg mqtt.Message) {
log.WithField("topic", msg.Topic()).Info("backend: packet received")
var txPacket gw.TXPacketBytes
if err := json.Unmarshal(msg.Payload(), &txPacket); err != nil {
log.Errorf("backend: decode tx packet error: %s", err)
return
}
b.txPacketChan <- txPacket
}
func (b *Backend) onConnected(c mqtt.Client) {
defer b.mutex.RUnlock()
b.mutex.RLock()
log.Info("backend: connected to mqtt broker")
if len(b.gateways) > 0 {
for {
log.WithField("topic_count", len(b.gateways)).Info("backend: re-registering to gateway topics")
topics := make(map[string]byte)
for k := range b.gateways {
topics[fmt.Sprintf("gateway/%s/tx", k)] = 0
}
if token := b.conn.SubscribeMultiple(topics, b.txPacketHandler); token.Wait() && token.Error() != nil {
log.WithField("topic_count", len(topics)).Errorf("backend: subscribe multiple failed: %s", token.Error())
time.Sleep(time.Second)
continue
}
return
}
}
}
func (b *Backend) onConnectionLost(c mqtt.Client, reason error) {
log.Errorf("backend: mqtt connection error: %s", reason)
}<|fim▁end|> | return nil
} |
<|file_name|>rdio.py<|end_file_name|><|fim▁begin|>import urllib.request, urllib.parse, urllib.error
from oauth2 import Request as OAuthRequest, SignatureMethod_HMAC_SHA1
try:
import json as simplejson
except ImportError:<|fim▁hole|> import simplejson
except ImportError:
from django.utils import simplejson
from social_auth.backends import ConsumerBasedOAuth, OAuthBackend, BaseOAuth2
from social_auth.utils import dsa_urlopen
class RdioBaseBackend(OAuthBackend):
def get_user_id(self, details, response):
return response['key']
def get_user_details(self, response):
return {
'username': response['username'],
'first_name': response['firstName'],
'last_name': response['lastName'],
'fullname': response['displayName'],
}
class RdioOAuth1Backend(RdioBaseBackend):
"""Rdio OAuth authentication backend"""
name = 'rdio-oauth1'
EXTRA_DATA = [
('key', 'rdio_id'),
('icon', 'rdio_icon_url'),
('url', 'rdio_profile_url'),
('username', 'rdio_username'),
('streamRegion', 'rdio_stream_region'),
]
@classmethod
def tokens(cls, instance):
token = super(RdioOAuth1Backend, cls).tokens(instance)
if token and 'access_token' in token:
token = dict(tok.split('=')
for tok in token['access_token'].split('&'))
return token
class RdioOAuth2Backend(RdioBaseBackend):
name = 'rdio-oauth2'
EXTRA_DATA = [
('key', 'rdio_id'),
('icon', 'rdio_icon_url'),
('url', 'rdio_profile_url'),
('username', 'rdio_username'),
('streamRegion', 'rdio_stream_region'),
('refresh_token', 'refresh_token', True),
('token_type', 'token_type', True),
]
class RdioOAuth1(ConsumerBasedOAuth):
AUTH_BACKEND = RdioOAuth1Backend
REQUEST_TOKEN_URL = 'http://api.rdio.com/oauth/request_token'
AUTHORIZATION_URL = 'https://www.rdio.com/oauth/authorize'
ACCESS_TOKEN_URL = 'http://api.rdio.com/oauth/access_token'
RDIO_API_BASE = 'http://api.rdio.com/1/'
SETTINGS_KEY_NAME = 'RDIO_OAUTH1_KEY'
SETTINGS_SECRET_NAME = 'RDIO_OAUTH1_SECRET'
def user_data(self, access_token, *args, **kwargs):
"""Return user data provided"""
params = {
'method': 'currentUser',
'extras': 'username,displayName,streamRegion',
}
request = self.oauth_post_request(access_token, self.RDIO_API_BASE,
params=params)
        response = dsa_urlopen(request.url, request.to_postdata().encode('utf-8'))
        json = response.read().decode('utf-8')
try:
return simplejson.loads(json)['result']
except ValueError:
return None
def oauth_post_request(self, token, url, params):
"""Generate OAuth request, setups callback url"""
if 'oauth_verifier' in self.data:
params['oauth_verifier'] = self.data['oauth_verifier']
request = OAuthRequest.from_consumer_and_token(self.consumer,
token=token,
http_url=url,
parameters=params,
http_method='POST')
request.sign_request(SignatureMethod_HMAC_SHA1(), self.consumer, token)
return request
class RdioOAuth2(BaseOAuth2):
AUTH_BACKEND = RdioOAuth2Backend
AUTHORIZATION_URL = 'https://www.rdio.com/oauth2/authorize'
ACCESS_TOKEN_URL = 'https://www.rdio.com/oauth2/token'
RDIO_API_BASE = 'https://www.rdio.com/api/1/'
SETTINGS_KEY_NAME = 'RDIO_OAUTH2_KEY'
SETTINGS_SECRET_NAME = 'RDIO_OAUTH2_SECRET'
SCOPE_VAR_NAME = 'RDIO2_PERMISSIONS'
EXTRA_PARAMS_VAR_NAME = 'RDIO2_EXTRA_PARAMS'
def user_data(self, access_token, *args, **kwargs):
params = {
'method': 'currentUser',
'extras': 'username,displayName,streamRegion',
'access_token': access_token,
}
        response = dsa_urlopen(self.RDIO_API_BASE, urllib.parse.urlencode(params).encode('utf-8'))
try:
return simplejson.load(response)['result']
except ValueError:
return None
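# Settings sketch (hypothetical Django settings module; the key names come from the
# SETTINGS_KEY_NAME / SETTINGS_SECRET_NAME attributes above):
#   RDIO_OAUTH1_KEY = '...'
#   RDIO_OAUTH1_SECRET = '...'
#   RDIO_OAUTH2_KEY = '...'
#   RDIO_OAUTH2_SECRET = '...'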
# Backend definition
BACKENDS = {
'rdio-oauth1': RdioOAuth1,
'rdio-oauth2': RdioOAuth2
}<|fim▁end|> | try: |
<|file_name|>language-container-directive.js<|end_file_name|><|fim▁begin|>// NOTE: nbApp is defined in app.js
nbApp.directive("languageContainerDirective", function() {
return {
restrict : 'E',
templateUrl : 'js/templates/language-container.html',
scope: {
color: "@",
language: "@",
reading: "@",
writing: "@",
listening: "@",
speaking: "@",
flag: "@",
},
link: function(scope, element, attrs) {
scope.color = attrs.color;
scope.language = attrs.language;
scope.reading = attrs.reading;
scope.writing = attrs.writing;
scope.listening = attrs.listening;
scope.speaking = attrs.speaking;
scope.flag = attrs.flag;
scope.$watch('language', function(nV, oV) {
if(nV){
RadarChart.defaultConfig.color = function() {};
RadarChart.defaultConfig.radius = 3;
RadarChart.defaultConfig.w = 250;
RadarChart.defaultConfig.h = 250;
/*
* 0 - No Practical Proficiency
* 1 - Elementary Proficiency
* 2 - Limited Working Proficiency
* 3 - Minimum Professional Proficiency
* 4 - Full Professional Proficiency
* 5 - Native or Bilingual Proficiency
Read: the ability to read and understand texts written in the language
Write: the ability to formulate written texts in the language
Listen: the ability to follow and understand speech in the language<|fim▁hole|> var data = [
{
className: attrs.language, // optional can be used for styling
axes: [
{axis: "Reading", value: attrs.reading},
{axis: "Writing", value: attrs.writing},
{axis: "Listening", value: attrs.listening},
{axis: "Speaking", value: attrs.speaking},
]
},
];
function mapData() {
return data.map(function(d) {
return {
className: d.className,
axes: d.axes.map(function(axis) {
return {axis: axis.axis, value: axis.value};
})
};
});
}
// chart.config.w;
// chart.config.h;
// chart.config.axisText = true;
// chart.config.levels = 5;
// chart.config.maxValue = 5;
// chart.config.circles = true;
// chart.config.actorLegend = 1;
var chart = RadarChart.chart();
var cfg = chart.config(); // retrieve default config
cfg = chart.config({axisText: true, levels: 5, maxValue: 5, circles: true}); // retrieve default config
var svg = d3.select('.' + attrs.language).append('svg')
.attr('width', 250)
.attr('height', 270);
svg.append('g').classed('single', 1).datum(mapData()).call(chart);
console.log('Rendering new language Radar Viz! --> ' + attrs.language);
}
})
}
};
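        // Example template usage (hypothetical attribute values; scores follow the
        // 0-5 scale documented above):
        //   <language-container-directive language="French" color="#336699" flag="fr.png"
        //       reading="4" writing="3" listening="4" speaking="3">
        //   </language-container-directive>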
});<|fim▁end|> | Speak: the ability to produce speech in the language and be understood by its speakers.
*/ |
<|file_name|>PlatformGameObstacleMove.cpp<|end_file_name|><|fim▁begin|>#include "PlatformGameObstacleMove.h"
#include "../Utils/StringUtility.h"
PlatformGameObstacleMove::PlatformGameObstacleMove(Node& parentNode, const Vec2& localPosition, string csbPath, string colliderSpritePath)
<|fim▁hole|> name = "letter";
correct = false;
}
PlatformGameObstacleMove::~PlatformGameObstacleMove() {
}<|fim▁end|> | : GameObject(parentNode, localPosition, csbPath, colliderSpritePath)
{
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>extern crate wireless;
mod json;
mod stdin;
use self::wireless::{Countries, Country, LTECell, LTECells, Network, Networks};
pub struct Data {
pub countries: Countries,
pub lte_cells: LTECells,
pub networks: Networks,
}
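/// Usage sketch (hypothetical session): `read` loads the JSON-backed tables, the
/// `add_*` methods prompt on stdin, and `write` persists the result.
#[allow(dead_code)]
fn example_session() {
    let mut data = Data::read();
    data.add_country();
    data.print_countries();
    data.write();
}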
impl Data {
pub fn add_country(&mut self) {
let mcc = match stdin::read_u16("MCC: ", None) {
Some(a) => a,
None => return,
};
let name = match stdin::read_string("Name: ", None) {
Some(a) => a,
None => return,
};
let country = Country { mcc: mcc, name: name };
self.countries.insert(country.mcc, country);
}
pub fn add_lte_cell(&mut self) {
let mcc = match stdin::read_u16("MCC: ", None) {
Some(a) => a,
None => return,
};
let mnc = match stdin::read_u16("MNC: ", None) {
Some(a) => a,
None => return,
};
let id = match stdin::read_u32("CID: ", None) {
Some(a) => a,
None => return,
};
let cell = LTECell { mcc: mcc, mnc: mnc, id: id };
if self.lte_cells.contains_key(&cell.global_id()) {
println!("Duplicate cell in database.");
}
self.lte_cells.insert(cell.global_id(), cell);
}
pub fn add_network(&mut self) {
let mcc = match stdin::read_u16("MCC: ", None) {
Some(a) => a,
None => return,
};
let mnc = match stdin::read_u16("MNC: ", None) {
Some(a) => a,
None => return,
};
let name = match stdin::read_string("Name: ", None) {
Some(a) => a,
None => return,
};
let network = Network { mcc: mcc, mnc: mnc, name: name };
self.networks.insert(network.id(), network);
}
pub fn print_countries(&self) {
for country in self.countries.values() {
println!("#{}: {}", country.mcc, country.name);
}
}
pub fn print_lte_cells(&self) {
for lte_cell in self.lte_cells.values() {
println!("#{}-{}-{}", lte_cell.mcc, lte_cell.mnc, lte_cell.id);
}
}
pub fn print_networks(&self) {
for network in self.networks.values() {
if network.mnc % 10 == 0 {
println!("#{}-{:02}: {}", network.mcc, network.mnc / 10, network.name);
} else {
println!("#{}-{:03}: {}", network.mcc, network.mnc, network.name);<|fim▁hole|> pub fn read() -> Data { json::read() }
pub fn write(&self) { json::write(self); }
}<|fim▁end|> | }
}
} |
<|file_name|>fileinput_locale_cz.js<|end_file_name|><|fim▁begin|>/*!
* FileInput Czech Translations
*
* This file must be loaded after 'fileinput.js'. Patterns in braces '{}', or
* any HTML markup tags in the messages must not be converted or translated.
*
* @see http://github.com/kartik-v/bootstrap-fileinput
*
* NOTE: this file must be saved in UTF-8 encoding.
*/
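// Usage sketch (assumes the plugin's `language` option selects a registered locale):
//   $('#file-input').fileinput({ language: 'cz' });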
(function ($) {
"use strict";
$.fn.fileinputLocales['cz'] = {
fileSingle: 'soubor',
filePlural: 'soubory',
browseLabel: 'Vybrat …',
removeLabel: 'Odstranit',
removeTitle: 'Vyčistit vybrané soubory',
cancelLabel: 'Storno',
cancelTitle: 'Přerušit nahrávání',
uploadLabel: 'Nahrát',
uploadTitle: 'Nahrát vybrané soubory',
msgNo: 'Ne',
msgCancelled: 'Zrušeno',
msgZoomTitle: 'zobrazit podrobnosti',
msgZoomModalHeading: 'Detailní náhled',
msgSizeTooLarge: 'Soubor "{name}" (<b>{size} KB</b>): překročení - maximální povolená velikost <b>{maxSize} KB</b>.',
msgFilesTooLess: 'Musíte vybrat nejméně <b>{n}</b> {files} pro nahrání.',
msgFilesTooMany: 'Počet vybraných souborů pro nahrání <b>({n})</b>: překročení - maximální povolený limit <b>{m}</b>.',
msgFileNotFound: 'Soubor "{name}" nebyl nalezen!',
msgFileSecured: 'Zabezpečení souboru znemožnilo číst soubor "{name}".',
msgFileNotReadable: 'Soubor "{name}" není čitelný.',
msgFilePreviewAborted: 'Náhled souboru byl přerušen pro "{name}".',
msgFilePreviewError: 'Nastala chyba při načtení souboru "{name}".',
msgInvalidFileType: 'Neplatný typ souboru "{name}". Pouze "{types}" souborů jsou podporovány.',
msgInvalidFileExtension: 'Neplatná extenze souboru "{name}". Pouze "{extensions}" souborů jsou podporovány.',
msgUploadAborted: 'Soubor nahrávání byl přerušen',
msgValidationError: 'Chyba ověření',
<|fim▁hole|> msgSelected: '{n} {files} vybrano',
msgFoldersNotAllowed: 'Táhni a pusť pouze soubory! Vynechané {n} pustěné složk(y).',
msgImageWidthSmall: 'Šířka image soubor "{name}", musí být alespoň {size} px.',
msgImageHeightSmall: 'Výška image soubor "{name}", musí být alespoň {size} px.',
msgImageWidthLarge: 'Šířka obrazového souboru "{name}" nelze překročit {size} px.',
msgImageHeightLarge: 'Výška obrazového souboru "{name}" nelze překročit {size} px.',
msgImageResizeError: 'Nelze získat rozměry obrázku změnit velikost.',
msgImageResizeException: 'Chyba při změně velikosti obrázku.<pre>{errors}</pre>',
dropZoneTitle: 'Táhni a pusť soubory sem …',
fileActionSettings: {
removeTitle: 'Odstranit soubor',
uploadTitle: 'nahrát soubor',
indicatorNewTitle: 'Ještě nenahrál',
indicatorSuccessTitle: 'Nahraný',
indicatorErrorTitle: 'Nahrát Chyba',
indicatorLoadingTitle: 'Nahrávání ...'
}
};
})(window.jQuery);<|fim▁end|> | msgLoading: 'Nahrávání souboru {index} z {files} …',
msgProgress: 'Nahrávání souboru {index} z {files} - {name} - {percent}% dokončeno.',
|
<|file_name|>figure_gamma.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import absolute_import, print_function
import copy
import matplotlib
from grid_cell_model.submitting import flagparse
import noisefigs
from noisefigs.env import NoiseEnvironment
import config
parser = flagparse.FlagParser()
parser.add_flag('--gammaSweep')
args = parser.parse_args()
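# Example invocation (hypothetical; flagparse is assumed to supply the usual
# --all catch-all flag alongside the flags registered above):
#   python figure_gamma.py --gammaSweep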
<|fim▁hole|>env = NoiseEnvironment(user_config=config.get_config())
if args.gammaSweep or args.all:
env.register_plotter(noisefigs.plotters.GammaSweepsPlotter)
env.plot()<|fim▁end|> | |
<|file_name|>ipfix.js<|end_file_name|><|fim▁begin|>//IP Flow Information Export (IPFIX) Entities
// Last Updated 2013-01-15
// http://www.iana.org/assignments/ipfix/ipfix.xml
var entities = {}; // keyed by category name, so a plain object fits better than an array
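// Example lookup against the table defined below (sketch):
//   entities['elements']['1'].name === 'octetDeltaCount'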
//ipfix-information-elements
entities['elements'] = {
"1":{"name":"octetDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"octets"},
"2":{"name":"packetDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"packets"},
"3":{"name":"deltaFlowCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter"},
"4":{"name":"protocolIdentifier","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"ipHeader"},
"5":{"name":"ipClassOfService","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"ipHeader"},
"6":{"name":"tcpControlBits","dataType":"unsigned8","dataTypeSemantics":"flags","group":"minMax"},
"7":{"name":"sourceTransportPort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"8":{"name":"sourceIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier","group":"ipHeader"},
"9":{"name":"sourceIPv4PrefixLength","dataType":"unsigned8","group":"ipHeader","units":"bits"},
"10":{"name":"ingressInterface","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"11":{"name":"destinationTransportPort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"12":{"name":"destinationIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier","group":"ipHeader"},
"13":{"name":"destinationIPv4PrefixLength","dataType":"unsigned8","group":"ipHeader","units":"bits"},
"14":{"name":"egressInterface","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"15":{"name":"ipNextHopIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier","group":"derived"},
"16":{"name":"bgpSourceAsNumber","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"derived"},
"17":{"name":"bgpDestinationAsNumber","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"derived"},
"18":{"name":"bgpNextHopIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier","group":"derived"},
"19":{"name":"postMCastPacketDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"packets"},
"20":{"name":"postMCastOctetDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"octets"},
"21":{"name":"flowEndSysUpTime","dataType":"unsigned32","group":"timestamp","units":"milliseconds"},
"22":{"name":"flowStartSysUpTime","dataType":"unsigned32","group":"timestamp","units":"milliseconds"},
"23":{"name":"postOctetDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"octets"},
"24":{"name":"postPacketDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"packets"},
"25":{"name":"minimumIpTotalLength","dataType":"unsigned64","group":"minMax","units":"octets"},
"26":{"name":"maximumIpTotalLength","dataType":"unsigned64","group":"minMax","units":"octets"},
"27":{"name":"sourceIPv6Address","dataType":"ipv6Address","dataTypeSemantics":"identifier","group":"ipHeader"},
"28":{"name":"destinationIPv6Address","dataType":"ipv6Address","dataTypeSemantics":"identifier","group":"ipHeader"},
"29":{"name":"sourceIPv6PrefixLength","dataType":"unsigned8","group":"ipHeader","units":"bits"},
"30":{"name":"destinationIPv6PrefixLength","dataType":"unsigned8","group":"ipHeader","units":"bits"},
"31":{"name":"flowLabelIPv6","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"ipHeader"},
"32":{"name":"icmpTypeCodeIPv4","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"33":{"name":"igmpType","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"transportHeader"},
"36":{"name":"flowActiveTimeout","dataType":"unsigned16","group":"misc","units":"seconds"},
"37":{"name":"flowIdleTimeout","dataType":"unsigned16","group":"misc","units":"seconds"},
"40":{"name":"exportedOctetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"octets"},
"41":{"name":"exportedMessageTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"messages"},
"42":{"name":"exportedFlowRecordTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"flows"},
"44":{"name":"sourceIPv4Prefix","dataType":"ipv4Address","group":"ipHeader"},
"45":{"name":"destinationIPv4Prefix","dataType":"ipv4Address","group":"ipHeader"},
"46":{"name":"mplsTopLabelType","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"derived"},
"47":{"name":"mplsTopLabelIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier","group":"derived"},
"52":{"name":"minimumTTL","dataType":"unsigned8","group":"minMax","units":"hops"},
"53":{"name":"maximumTTL","dataType":"unsigned8","group":"minMax","units":"hops"},
"54":{"name":"fragmentIdentification","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"ipHeader"},
"55":{"name":"postIpClassOfService","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"ipHeader"},
"56":{"name":"sourceMacAddress","dataType":"macAddress","dataTypeSemantics":"identifier","group":"subIpHeader"},
"57":{"name":"postDestinationMacAddress","dataType":"macAddress","dataTypeSemantics":"identifier","group":"subIpHeader"},
"58":{"name":"vlanId","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"subIpHeader"},
"59":{"name":"postVlanId","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"subIpHeader"},
"60":{"name":"ipVersion","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"ipHeader"},
"61":{"name":"flowDirection","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"misc"},
"62":{"name":"ipNextHopIPv6Address","dataType":"ipv6Address","dataTypeSemantics":"identifier","group":"derived"},
"63":{"name":"bgpNextHopIPv6Address","dataType":"ipv6Address","dataTypeSemantics":"identifier","group":"derived"},
"64":{"name":"ipv6ExtensionHeaders","dataType":"unsigned32","dataTypeSemantics":"flags","group":"minMax"},
"70":{"name":"mplsTopLabelStackSection","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"71":{"name":"mplsLabelStackSection2","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"72":{"name":"mplsLabelStackSection3","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"73":{"name":"mplsLabelStackSection4","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"74":{"name":"mplsLabelStackSection5","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"75":{"name":"mplsLabelStackSection6","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"76":{"name":"mplsLabelStackSection7","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"77":{"name":"mplsLabelStackSection8","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"78":{"name":"mplsLabelStackSection9","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"79":{"name":"mplsLabelStackSection10","dataType":"octetArray","dataTypeSemantics":"identifier","group":"subIpHeader"},
"80":{"name":"destinationMacAddress","dataType":"macAddress","dataTypeSemantics":"identifier","group":"subIpHeader"},
"81":{"name":"postSourceMacAddress","dataType":"macAddress","dataTypeSemantics":"identifier","group":"subIpHeader"},
"82":{"name":"interfaceName","dataType":"string"},"83":{"name":"interfaceDescription","dataType":"string"},
"85":{"name":"octetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"octets"},
"86":{"name":"packetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"88":{"name":"fragmentOffset","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"ipHeader"},
"90":{"name":"mplsVpnRouteDistinguisher","dataType":"octetArray","dataTypeSemantics":"identifier","group":"derived"},
"91":{"name":"mplsTopLabelPrefixLength","dataType":"unsigned8","dataTypeSemantics":"identifier","units":"bits"},
"94":{"name":"applicationDescription","dataType":"string"},
"95":{"name":"applicationId","dataType":"octetArray","dataTypeSemantics":"identifier"},
"96":{"name":"applicationName","dataType":"string"},
"98":{"name":"postIpDiffServCodePoint","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"99":{"name":"multicastReplicationFactor","dataType":"unsigned32","dataTypeSemantics":"quantity"},
"101":{"name":"classificationEngineId","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"128":{"name":"bgpNextAdjacentAsNumber","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"derived"},
"129":{"name":"bgpPrevAdjacentAsNumber","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"derived"},
"130":{"name":"exporterIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier","group":"config"},
"131":{"name":"exporterIPv6Address","dataType":"ipv6Address","dataTypeSemantics":"identifier","group":"config"},
"132":{"name":"droppedOctetDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"octets"},
"133":{"name":"droppedPacketDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","group":"flowCounter","units":"packets"},
"134":{"name":"droppedOctetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"octets"},
"135":{"name":"droppedPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"136":{"name":"flowEndReason","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"misc"},
"137":{"name":"commonPropertiesId","dataType":"unsigned64","dataTypeSemantics":"identifier","group":"scope"},
"138":{"name":"observationPointId","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"139":{"name":"icmpTypeCodeIPv6","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"140":{"name":"mplsTopLabelIPv6Address","dataType":"ipv6Address","dataTypeSemantics":"identifier","group":"derived"},
"141":{"name":"lineCardId","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"142":{"name":"portId","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"143":{"name":"meteringProcessId","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"144":{"name":"exportingProcessId","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"145":{"name":"templateId","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"scope"},
"146":{"name":"wlanChannelId","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"subIpHeader"},
"147":{"name":"wlanSSID","dataType":"string","group":"subIpHeader"},
"148":{"name":"flowId","dataType":"unsigned64","dataTypeSemantics":"identifier","group":"scope"},
"149":{"name":"observationDomainId","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"scope"},
"150":{"name":"flowStartSeconds","dataType":"dateTimeSeconds","group":"timestamp","units":"seconds"},
"151":{"name":"flowEndSeconds","dataType":"dateTimeSeconds","group":"timestamp","units":"seconds"},
"152":{"name":"flowStartMilliseconds","dataType":"dateTimeMilliseconds","group":"timestamp","units":"milliseconds"},
"153":{"name":"flowEndMilliseconds","dataType":"dateTimeMilliseconds","group":"timestamp","units":"milliseconds"},
"154":{"name":"flowStartMicroseconds","dataType":"dateTimeMicroseconds","group":"timestamp","units":"microseconds"},
"155":{"name":"flowEndMicroseconds","dataType":"dateTimeMicroseconds","group":"timestamp","units":"microseconds"},
"156":{"name":"flowStartNanoseconds","dataType":"dateTimeNanoseconds","group":"timestamp","units":"nanoseconds"},
"157":{"name":"flowEndNanoseconds","dataType":"dateTimeNanoseconds","group":"timestamp","units":"nanoseconds"},
"158":{"name":"flowStartDeltaMicroseconds","dataType":"unsigned32","group":"timestamp","units":"microseconds"},
"159":{"name":"flowEndDeltaMicroseconds","dataType":"unsigned32","group":"timestamp","units":"microseconds"},
"160":{"name":"systemInitTimeMilliseconds","dataType":"dateTimeMilliseconds","group":"timestamp","units":"milliseconds"},
"161":{"name":"flowDurationMilliseconds","dataType":"unsigned32","group":"misc","units":"milliseconds"},
"162":{"name":"flowDurationMicroseconds","dataType":"unsigned32","group":"misc","units":"microseconds"},
"163":{"name":"observedFlowTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"flows"},
"164":{"name":"ignoredPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"packets"},
"165":{"name":"ignoredOctetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"octets"},
"166":{"name":"notSentFlowTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"flows"},
"167":{"name":"notSentPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"packets"},
"168":{"name":"notSentOctetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"processCounter","units":"octets"},
"169":{"name":"destinationIPv6Prefix","dataType":"ipv6Address","group":"ipHeader"},
"170":{"name":"sourceIPv6Prefix","dataType":"ipv6Address","group":"ipHeader"},
"171":{"name":"postOctetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"octets"},
"172":{"name":"postPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"173":{"name":"flowKeyIndicator","dataType":"unsigned64","dataTypeSemantics":"flags","group":"config"},
"174":{"name":"postMCastPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"175":{"name":"postMCastOctetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"octets"},
"176":{"name":"icmpTypeIPv4","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"transportHeader"},
"177":{"name":"icmpCodeIPv4","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"transportHeader"},
"178":{"name":"icmpTypeIPv6","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"transportHeader"},
"179":{"name":"icmpCodeIPv6","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"transportHeader"},
"180":{"name":"udpSourcePort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"181":{"name":"udpDestinationPort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"182":{"name":"tcpSourcePort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"183":{"name":"tcpDestinationPort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"transportHeader"},
"184":{"name":"tcpSequenceNumber","dataType":"unsigned32","group":"transportHeader"},
"185":{"name":"tcpAcknowledgementNumber","dataType":"unsigned32","group":"transportHeader"},
"186":{"name":"tcpWindowSize","dataType":"unsigned16","group":"transportHeader"},
"187":{"name":"tcpUrgentPointer","dataType":"unsigned16","group":"transportHeader"},
"188":{"name":"tcpHeaderLength","dataType":"unsigned8","group":"transportHeader","units":"octets"},
"189":{"name":"ipHeaderLength","dataType":"unsigned8","group":"ipHeader","units":"octets"},
"190":{"name":"totalLengthIPv4","dataType":"unsigned16","group":"ipHeader","units":"octets"},
"191":{"name":"payloadLengthIPv6","dataType":"unsigned16","group":"ipHeader","units":"octets"},
"192":{"name":"ipTTL","dataType":"unsigned8","group":"ipHeader","units":"hops"},
"193":{"name":"nextHeaderIPv6","dataType":"unsigned8","group":"ipHeader"},
"194":{"name":"mplsPayloadLength","dataType":"unsigned32","group":"subIpHeader","units":"octets"},
"195":{"name":"ipDiffServCodePoint","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"ipHeader"},
"196":{"name":"ipPrecedence","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"ipHeader"},
"197":{"name":"fragmentFlags","dataType":"unsigned8","dataTypeSemantics":"flags","group":"ipHeader"},
"198":{"name":"octetDeltaSumOfSquares","dataType":"unsigned64","group":"flowCounter"},
"199":{"name":"octetTotalSumOfSquares","dataType":"unsigned64","group":"flowCounter","units":"octets"},
"200":{"name":"mplsTopLabelTTL","dataType":"unsigned8","group":"subIpHeader","units":"hops"},
"201":{"name":"mplsLabelStackLength","dataType":"unsigned32","group":"subIpHeader","units":"octets"},
"202":{"name":"mplsLabelStackDepth","dataType":"unsigned32","group":"subIpHeader","units":"label stack entries"},
"203":{"name":"mplsTopLabelExp","dataType":"unsigned8","dataTypeSemantics":"flags","group":"subIpHeader"},
"204":{"name":"ipPayloadLength","dataType":"unsigned32","group":"derived","units":"octets"},
"205":{"name":"udpMessageLength","dataType":"unsigned16","group":"transportHeader","units":"octets"},
"206":{"name":"isMulticast","dataType":"unsigned8","dataTypeSemantics":"flags","group":"ipHeader"},
"207":{"name":"ipv4IHL","dataType":"unsigned8","group":"ipHeader","units":"4 octets"},
"208":{"name":"ipv4Options","dataType":"unsigned32","dataTypeSemantics":"flags","group":"minMax"},
"209":{"name":"tcpOptions","dataType":"unsigned64","dataTypeSemantics":"flags","group":"minMax"},
"210":{"name":"paddingOctets","dataType":"octetArray","group":"padding"},
"211":{"name":"collectorIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier","group":"config"},
"212":{"name":"collectorIPv6Address","dataType":"ipv6Address","dataTypeSemantics":"identifier","group":"config"},
"213":{"name":"exportInterface","dataType":"unsigned32","dataTypeSemantics":"identifier","group":"config"},
"214":{"name":"exportProtocolVersion","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"config"},
"215":{"name":"exportTransportProtocol","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"config"},
"216":{"name":"collectorTransportPort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"config"},
"217":{"name":"exporterTransportPort","dataType":"unsigned16","dataTypeSemantics":"identifier","group":"config"},
"218":{"name":"tcpSynTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"219":{"name":"tcpFinTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"220":{"name":"tcpRstTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"221":{"name":"tcpPshTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"222":{"name":"tcpAckTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"223":{"name":"tcpUrgTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","group":"flowCounter","units":"packets"},
"224":{"name":"ipTotalLength","dataType":"unsigned64","group":"ipHeader","units":"octets"},
"225":{"name":"postNATSourceIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier"},
"226":{"name":"postNATDestinationIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier"},
"227":{"name":"postNAPTSourceTransportPort","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"228":{"name":"postNAPTDestinationTransportPort","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"229":{"name":"natOriginatingAddressRealm","dataType":"unsigned8","dataTypeSemantics":"flags"},
"230":{"name":"natEvent","dataType":"unsigned8"},
"231":{"name":"initiatorOctets","dataType":"unsigned64","units":"octets"},
"232":{"name":"responderOctets","dataType":"unsigned64","units":"octets"},
"233":{"name":"firewallEvent","dataType":"unsigned8"},
"234":{"name":"ingressVRFID","dataType":"unsigned32"},
"235":{"name":"egressVRFID","dataType":"unsigned32"},
"236":{"name":"VRFname","dataType":"string"},
"237":{"name":"postMplsTopLabelExp","dataType":"unsigned8","dataTypeSemantics":"flags","group":"subIpHeader"},
"238":{"name":"tcpWindowScale","dataType":"unsigned16","group":"transportHeader"},
"239":{"name":"biflowDirection","dataType":"unsigned8","dataTypeSemantics":"identifier","group":"misc"},
"240":{"name":"ethernetHeaderLength","dataType":"unsigned8","dataTypeSemantics":"identifier","units":"octets"},
"241":{"name":"ethernetPayloadLength","dataType":"unsigned16","dataTypeSemantics":"identifier","units":"octets"},
"242":{"name":"ethernetTotalLength","dataType":"unsigned16","dataTypeSemantics":"identifier","units":"octets"},
"243":{"name":"dot1qVlanId","dataType":"unsigned16","dataTypeSemantics":"identifier","units":"octets"},
"244":{"name":"dot1qPriority","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"245":{"name":"dot1qCustomerVlanId","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"246":{"name":"dot1qCustomerPriority","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"247":{"name":"metroEvcId","dataType":"string"},
"248":{"name":"metroEvcType","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"249":{"name":"pseudoWireId","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"250":{"name":"pseudoWireType","dataType":"unsigned16","dataTypeSemantics":"identifier"},<|fim▁hole|> "254":{"name":"postDot1qVlanId","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"255":{"name":"postDot1qCustomerVlanId","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"256":{"name":"ethernetType","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"257":{"name":"postIpPrecedence","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"258":{"name":"collectionTimeMilliseconds","dataType":"dateTimeMilliseconds"},
"259":{"name":"exportSctpStreamId","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"260":{"name":"maxExportSeconds","dataType":"dateTimeSeconds","units":"seconds"},
"261":{"name":"maxFlowEndSeconds","dataType":"dateTimeSeconds","units":"seconds"},
"262":{"name":"messageMD5Checksum","dataType":"octetArray"},
"263":{"name":"messageScope","dataType":"unsigned8"},
"264":{"name":"minExportSeconds","dataType":"dateTimeSeconds","units":"seconds"},
"265":{"name":"minFlowStartSeconds","dataType":"dateTimeSeconds","units":"seconds"},
"266":{"name":"opaqueOctets","dataType":"octetArray"},
"267":{"name":"sessionScope","dataType":"unsigned8"},
"268":{"name":"maxFlowEndMicroseconds","dataType":"dateTimeMicroseconds","units":"microseconds"},
"269":{"name":"maxFlowEndMilliseconds","dataType":"dateTimeMilliseconds","units":"milliseconds"},
"270":{"name":"maxFlowEndNanoseconds","dataType":"dateTimeNanoseconds","units":"nanoseconds"},
"271":{"name":"minFlowStartMicroseconds","dataType":"dateTimeMicroseconds","units":"microseconds"},
"272":{"name":"minFlowStartMilliseconds","dataType":"dateTimeMilliseconds","units":"milliseconds"},
"273":{"name":"minFlowStartNanoseconds","dataType":"dateTimeNanoseconds","units":"nanoseconds"},
"274":{"name":"collectorCertificate","dataType":"octetArray"},
"275":{"name":"exporterCertificate","dataType":"octetArray"},
"276":{"name":"dataRecordsReliability","dataType":"boolean","dataTypeSemantics":"identifier"},
"277":{"name":"observationPointType","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"278":{"name":"connectionCountNew","dataType":"unsigned32","dataTypeSemantics":"deltaCounter"},
"279":{"name":"connectionSumDuration","dataType":"unsigned64"},
"280":{"name":"connectionTransactionId","dataType":"unsigned64","dataTypeSemantics":"identifier"},
"281":{"name":"postNATSourceIPv6Address","dataType":"ipv6Address"},
"282":{"name":"postNATDestinationIPv6Address","dataType":"ipv6Address"},
"283":{"name":"natPoolId","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"284":{"name":"natPoolName","dataType":"string"},
"285":{"name":"anonymizationFlags","dataType":"unsigned16","dataTypeSemantics":"flags"},
"286":{"name":"anonymizationTechnique","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"287":{"name":"informationElementIndex","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"288":{"name":"p2pTechnology","dataType":"string"},
"289":{"name":"tunnelTechnology","dataType":"string"},
"290":{"name":"encryptedTechnology","dataType":"string"},
"291":{"name":"basicList","dataType":"basicList","dataTypeSemantics":"list"},
"292":{"name":"subTemplateList","dataType":"subTemplateList","dataTypeSemantics":"list"},
"293":{"name":"subTemplateMultiList","dataType":"subTemplateMultiList","dataTypeSemantics":"list"},
"294":{"name":"bgpValidityState","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"295":{"name":"IPSecSPI","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"296":{"name":"greKey","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"297":{"name":"natType","dataType":"unsigned8","dataTypeSemantics":"identifier"},
"298":{"name":"initiatorPackets","dataType":"unsigned64","dataTypeSemantics":"identifier","units":"packets"},
"299":{"name":"responderPackets","dataType":"unsigned64","dataTypeSemantics":"identifier","units":"packets"},
"300":{"name":"observationDomainName","dataType":"string"},
"301":{"name":"selectionSequenceId","dataType":"unsigned64","dataTypeSemantics":"identifier"},
"302":{"name":"selectorId","dataType":"unsigned64","dataTypeSemantics":"identifier"},
"303":{"name":"informationElementId","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"304":{"name":"selectorAlgorithm","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"305":{"name":"samplingPacketInterval","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"packets"},
"306":{"name":"samplingPacketSpace","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"packets"},
"307":{"name":"samplingTimeInterval","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"microseconds"},
"308":{"name":"samplingTimeSpace","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"microseconds"},
"309":{"name":"samplingSize","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"packets"},
"310":{"name":"samplingPopulation","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"packets"},
"311":{"name":"samplingProbability","dataType":"float64","dataTypeSemantics":"quantity"},
"312":{"name":"dataLinkFrameSize","dataType":"unsigned16"},
"313":{"name":"ipHeaderPacketSection","dataType":"octetArray"},
"314":{"name":"ipPayloadPacketSection","dataType":"octetArray"},
"315":{"name":"dataLinkFrameSection","dataType":"octetArray"},
"316":{"name":"mplsLabelStackSection","dataType":"octetArray"},
"317":{"name":"mplsPayloadPacketSection","dataType":"octetArray"},
"318":{"name":"selectorIdTotalPktsObserved","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"packets"},
"319":{"name":"selectorIdTotalPktsSelected","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"packets"},
"320":{"name":"absoluteError","dataType":"float64","dataTypeSemantics":"quantity","units":"The units of the Information Element for which the error is specified."},
"321":{"name":"relativeError","dataType":"float64","dataTypeSemantics":"quantity"},
"322":{"name":"observationTimeSeconds","dataType":"dateTimeSeconds","dataTypeSemantics":"quantity","units":"seconds"},
"323":{"name":"observationTimeMilliseconds","dataType":"dateTimeMilliseconds","dataTypeSemantics":"quantity","units":"milliseconds"},
"324":{"name":"observationTimeMicroseconds","dataType":"dateTimeMicroseconds","dataTypeSemantics":"quantity","units":"microseconds"},
"325":{"name":"observationTimeNanoseconds","dataType":"dateTimeNanoseconds","dataTypeSemantics":"quantity","units":"nanoseconds"},
"326":{"name":"digestHashValue","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"327":{"name":"hashIPPayloadOffset","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"328":{"name":"hashIPPayloadSize","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"329":{"name":"hashOutputRangeMin","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"330":{"name":"hashOutputRangeMax","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"331":{"name":"hashSelectedRangeMin","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"332":{"name":"hashSelectedRangeMax","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"333":{"name":"hashDigestOutput","dataType":"boolean","dataTypeSemantics":"quantity"},
"334":{"name":"hashInitialiserValue","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"335":{"name":"selectorName","dataType":"string"},
"336":{"name":"upperCILimit","dataType":"float64","dataTypeSemantics":"quantity"},
"337":{"name":"lowerCILimit","dataType":"float64","dataTypeSemantics":"quantity"},
"338":{"name":"confidenceLevel","dataType":"float64","dataTypeSemantics":"quantity"},
"339":{"name":"informationElementDataType","dataType":"unsigned8"},
"340":{"name":"informationElementDescription","dataType":"string"},
"341":{"name":"informationElementName","dataType":"string"},
"342":{"name":"informationElementRangeBegin","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"343":{"name":"informationElementRangeEnd","dataType":"unsigned64","dataTypeSemantics":"quantity"},
"344":{"name":"informationElementSemantics","dataType":"unsigned8"},
"345":{"name":"informationElementUnits","dataType":"unsigned16"},
"346":{"name":"privateEnterpriseNumber","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"347":{"name":"virtualStationInterfaceId","dataType":"octetArray","dataTypeSemantics":"identifier"},
"348":{"name":"virtualStationInterfaceName","dataType":"string"},
"349":{"name":"virtualStationUUID","dataType":"octetArray","dataTypeSemantics":"identifier"},
"350":{"name":"virtualStationName","dataType":"string"},
"351":{"name":"layer2SegmentId","dataType":"unsigned64","dataTypeSemantics":"identifier"},
"352":{"name":"layer2OctetDeltaCount","dataType":"unsigned64","dataTypeSemantics":"deltaCounter","units":"octets"},
"353":{"name":"layer2OctetTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"octets"},
"354":{"name":"ingressUnicastPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"packets"},
"355":{"name":"ingressMulticastPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"packets"},
"356":{"name":"ingressBroadcastPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"packets"},
"357":{"name":"egressUnicastPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"packets"},
"358":{"name":"egressBroadcastPacketTotalCount","dataType":"unsigned64","dataTypeSemantics":"totalCounter","units":"packets"},
"359":{"name":"monitoringIntervalStartMilliSeconds","dataType":"dateTimeMilliseconds","units":"milliseconds"},
"360":{"name":"monitoringIntervalEndMilliSeconds","dataType":"dateTimeMilliseconds","units":"milliseconds"},
"361":{"name":"portRangeStart","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"362":{"name":"portRangeEnd","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"363":{"name":"portRangeStepSize","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"364":{"name":"portRangeNumPorts","dataType":"unsigned16","dataTypeSemantics":"identifier"},
"365":{"name":"staMacAddress","dataType":"macAddress","dataTypeSemantics":"identifier"},
"366":{"name":"staIPv4Address","dataType":"ipv4Address","dataTypeSemantics":"identifier"},
"367":{"name":"wtpMacAddress","dataType":"macAddress","dataTypeSemantics":"identifier"},
"368":{"name":"ingressInterfaceType","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"369":{"name":"egressInterfaceType","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"370":{"name":"rtpSequenceNumber","dataType":"unsigned16"},
"371":{"name":"userName","dataType":"string"},
"372":{"name":"applicationCategoryName","dataType":"string"},
"373":{"name":"applicationSubCategoryName","dataType":"string"},
"374":{"name":"applicationGroupName","dataType":"string"},
"375":{"name":"originalFlowsPresent","dataType":"unsigned64","dataTypeSemantics":"deltaCounter"},
"376":{"name":"originalFlowsInitiated","dataType":"unsigned64","dataTypeSemantics":"deltaCounter"},
"377":{"name":"originalFlowsCompleted","dataType":"unsigned64","dataTypeSemantics":"deltaCounter"},
"378":{"name":"distinctCountOfSourceIPAddress","dataType":"unsigned64","dataTypeSemantics":"totalCounter"},
"379":{"name":"distinctCountOfDestinationIPAddress","dataType":"unsigned64","dataTypeSemantics":"totalCounter"},
"380":{"name":"distinctCountOfSourceIPv4Address","dataType":"unsigned32","dataTypeSemantics":"totalCounter"},
"381":{"name":"distinctCountOfDestinationIPv4Address","dataType":"unsigned32","dataTypeSemantics":"totalCounter"},
"382":{"name":"distinctCountOfSourceIPv6Address","dataType":"unsigned64","dataTypeSemantics":"totalCounter"},
"383":{"name":"distinctCountOfDestinationIPv6Address","dataType":"unsigned64","dataTypeSemantics":"totalCounter"},
"384":{"name":"valueDistributionMethod","dataType":"unsigned8"},
"385":{"name":"rfc3550JitterMilliseconds","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"milliseconds"},
"386":{"name":"rfc3550JitterMicroseconds","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"microseconds"},
"387":{"name":"rfc3550JitterNanoseconds","dataType":"unsigned32","dataTypeSemantics":"quantity","units":"nanoseconds"}
}
//ipfix-mpls-label-type
entities['mpls'] = {
"1":{"description":"TE-MIDPT: Any TE tunnel mid-point or tail label"},
"2":{"description":"Pseudowire: Any PWE3 or Cisco AToM based label"},
"3":{"description":"VPN: Any label associated with VPN"},
"4":{"description":"BGP: Any label associated with BGP or BGP routing"},
"5":{"description":"LDP: Any label associated with dynamically assigned labels using LDP"}
}
//classification-engine-ids
entities['engineIds'] = {
"1":{"description":"IANA-L3", "length":"1"},
"2":{"description":"PANA-L3", "length":"1"},
"3":{"description":"IANA-L4", "length":"2"},
"4":{"description":"PANA-L4", "length":"2"},
"6":{"description":"USER-Defined", "length":"3"},
"12":{"description":"PANA-L2", "length":"5"},
"13":{"description":"PANA-L7", "length":"3"},
"18":{"description":"ETHERTYPE", "length":"2"},
"19":{"description":"LLC", "length":"1"},
"20":{"description":"PANA-L7-PEN", "length":"3"},
}
//ipfix-version-numbers
entities['version'] = {
"9":{"version":"Cisco Systems NetFlow Version 9"},
"10":{"version":"IPFIX as documented in RFC5101"}
}
//ipfix-set-ids
entities['setIds'] = {
"2":{"setId":"Template Set"},
"3":{"setId":"Option Template Set"}
}
//ipfix-information-element-data-types
entities['dataTypes'] = {
"octetArray":{},
"unsigned8":{},
"unsigned16":{},
"unsigned32":{},
"unsigned64":{},
"signed8":{},
"signed16":{},
"signed32":{},
"signed64":{},
"float32":{},
"float64":{},
"boolean":{},
"macAddress":{ "key":"%0-%1-%2-%3-%4-%5"},
"string":{},
"dateTimeSeconds":{},
"dateTimeMilliseconds":{},
"dateTimeMicroseconds":{},
"dateTimeNanoseconds":{},
"ipv4Address":{"key":"%0.%1.%2.%3"},
"ipv6Address":{"key":"%0:%1:%2:%3:%4:%5:%6:%7"},
"basicList":{},
"subTemplateList":{},
"subTemplateMultiList":{}
}
//ipfix-information-element-semantics
entities['ieSemantics'] = {
"0":{"description":"default"},
"1":{"description":"quantity"},
"2":{"description":"totalCounter"},
"3":{"description":"deltaCounter"},
"4":{"description":"identifier"},
"5":{"description":"flags"},
"6":{"description":"list"}
}
//ipfix-information-element-units
entities['units'] = {
"0":{"name":"none"},
"1":{"name":"bits"},
"2":{"name":"octets"},
"3":{"name":"packets"},
"4":{"name":"flows"},
"5":{"name":"seconds"},
"6":{"name":"milliseconds"},
"7":{"name":"microseconds"},
"8":{"name":"nanoseconds"},
"9":{"name":"4-octet words"},
"10":{"name":"messages"},
"11":{"name":"hops"},
"12":{"name":"entries"}
}
//ipfix-structured-data-types-semantics
entities['sdSemantics'] = {
"0x00":{"name":"noneOf"},
"0x01":{"name":"exactlyOneOf"},
"0x02":{"name":"oneOrMoreOf"},
"0x03":{"name":"allOf"},
"0x04":{"name":"ordered"},
"0xFF":{"name":"undefined"},
}
exports.entities = entities;<|fim▁end|> | "251":{"name":"pseudoWireControlWord","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"252":{"name":"ingressPhysicalInterface","dataType":"unsigned32","dataTypeSemantics":"identifier"},
"253":{"name":"egressPhysicalInterface","dataType":"unsigned32","dataTypeSemantics":"identifier"}, |
<|file_name|>postCouchPotato.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import os
import logging
from extensions import valid_tagging_extensions
from readSettings import ReadSettings
from mkvtomp4 import MkvtoMp4
from tmdb_mp4 import tmdb_mp4
from autoprocess import plex
from post_processor import PostProcessor
from logging.config import fileConfig
logpath = '/var/log/sickbeard_mp4_automator'
if os.name == 'nt':
logpath = os.path.dirname(sys.argv[0])
elif not os.path.isdir(logpath):
try:
os.mkdir(logpath)<|fim▁hole|>configPath = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), 'logging.ini')).replace("\\", "\\\\")
logPath = os.path.abspath(os.path.join(logpath, 'index.log')).replace("\\", "\\\\")
fileConfig(configPath, defaults={'logfilename': logPath})
log = logging.getLogger("CouchPotatoPostConversion")
log.info('MP4 Automator - Post processing script initialized')
settings = ReadSettings(os.path.dirname(sys.argv[0]), "autoProcess.ini")
converter = MkvtoMp4(settings)
imdbid = sys.argv[1]
inputfile = sys.argv[2]
original = sys.argv[3]
log.debug("IMDBID: %s" % imdbid)
log.debug("Input file path: %s" % inputfile)
log.debug("Original file name: %s" % original)
try:
log.info('Processing file: %s', inputfile)
if MkvtoMp4(settings).validSource(inputfile):
log.info('File is valid')
output = converter.process(inputfile, original=original)
if output:
# Tag with metadata
if settings.tagfile and output['output_extension'] in valid_tagging_extensions:
log.info('Tagging file with IMDB ID %s', imdbid)
try:
tagmp4 = tmdb_mp4(imdbid, original=original, language=settings.taglanguage)
tagmp4.setHD(output['x'], output['y'])
tagmp4.writeTags(output['output'], settings.artwork)
except:
log.error("Unable to tag file")
# Copy to additional locations
output_files = converter.replicate(output['output'])
# Run any post process scripts
if settings.postprocess:
post_processor = PostProcessor(output_files, log)
post_processor.setMovie(imdbid)
post_processor.run_scripts()
plex.refreshPlex(settings, 'movie', log)
else:
log.info('File %s is invalid, ignoring' % inputfile)
except:
log.exception('File processing failed: %s' % inputfile)<|fim▁end|> | except:
logpath = os.path.dirname(sys.argv[0]) |
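The script above is driven purely by positional arguments read from sys.argv. A sketch of an equivalent invocation from Python; the IMDB id and file names are hypothetical, and the argument order follows the sys.argv reads above:

import subprocess

subprocess.call([
    "python", "postCouchPotato.py",
    "tt0468569",                  # imdbid    (sys.argv[1])
    "/downloads/The Movie.mkv",   # inputfile (sys.argv[2])
    "The.Movie.mkv",              # original  (sys.argv[3])
])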
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>/* Aurélien DESBRIÈRES
aurelien(at)hackers(dot)camp
License GNU GPL latest */
// Rust experimentations
// HOF - Higher Order Functions in Rust
fn is_odd(n: u32) -> bool {
n % 2 == 1
}
fn main() {
println!("Find the sum of all the squared odd numbers under 1000");
let upper = 1000;
// Imperative approach
    // Declare accumulator variable

let mut acc = 0;
// Iterate: 0, 1, 2, ... to infinity
for n in 0.. {
// Square the number
let n_squared = n * n;
if n_squared >= upper {
// Break loop if exceeded the upper limit
break;
} else if is_odd(n_squared) {
// Accumulate value, if it's odd
acc += n_squared;
}
}
println!("imperative style: {}", acc);
// Functional approach
let sum_of_squared_odd_numbers: u32 =
(0..).map(|n| n * n) // All natural numbers squared
.take_while(|&n| n < upper) // Below upper limit
.filter(|&n| is_odd(n)) // That are odd
.fold(0, |sum, i| sum + i); // Sum them
println!("functional style: {}", sum_of_squared_odd_numbers);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>migrated_0010.run.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | esprima.tokenize(null); |
<|file_name|>vrootgraphes.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const React = require("react");
const vgraph_1 = require("./vgraph");
const vrootgraph_list_1 = require("../../components/graph/vrootgraph.list");
const cabeiri_lang_1 = require("cabeiri-lang");
class RootGraphesState {
constructor() {
this.selectedRootGraph = cabeiri_lang_1.CID.CID_NONE;
}
}
exports.RootGraphesState = RootGraphesState;
class VRootGraphes extends React.Component {
constructor(props) {
super(props);
this.onRootGraphSelected = (selectedRootGraph) => {
var graphState = new RootGraphesState();
graphState.selectedRootGraph = selectedRootGraph;
this.setState(graphState);
};
this.state = new RootGraphesState();
}
componentDidMount() {
}<|fim▁hole|> render() {
return React.createElement("div", { className: "container-fluid" },
React.createElement("div", { className: "row" },
React.createElement("div", { className: "col-xs-4 col-md-3" },
React.createElement(vrootgraph_list_1.VRootGraphList, { onItemSelected: this.onRootGraphSelected, searchLabel: "Graphes" })),
React.createElement("div", { className: "col-xs-10 col-md-9" },
React.createElement(vgraph_1.VGraph, { rootGraphCID: this.state.selectedRootGraph }))));
}
;
}
exports.VRootGraphes = VRootGraphes;
;
React.createFactory(VRootGraphes);
//# sourceMappingURL=vrootgraphes.js.map<|fim▁end|> | |
<|file_name|>keywords.py<|end_file_name|><|fim▁begin|>import Tkinter as tk
def foo(*args):
print "foo!", args
import sys; sys.stdout.flush()
def __extend__(app):
extension = KeywordExtension(app)
app.bind_class("all", "<F5>", extension.make_keyword)
# this needs to add something to the tools menu...
class KeywordExtension(object):
def __init__(self, app):
        self.app = app
def make_keyword(self, event=None):
# N.B. this is the editor_page object
editor = self.app.get_current_editor()
rows = editor.get_selected_rows()
print rows
import sys; sys.stdout.flush()
# now I want to do something like:
'''
editor.delete_selected_rows()
editor.new_keyword(rows)
        => prompts user for a name (with a 'place'd dialog rather than a popup?)
then creates the keyword with that name, and replaces the selected
text with a reference to that keyword<|fim▁hole|>
'''
self.app.status_message("an extension says hello; you have selected %s rows" % len(rows))<|fim▁end|> | |
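A sketch of the flow described in the docstring above; every editor call here (prompt_for_name, delete_selected_rows, new_keyword, insert_keyword_reference) is a hypothetical API used for illustration, not something the real application is known to expose:

def make_keyword_sketch(editor, prompt_for_name):
    rows = editor.get_selected_rows()
    name = prompt_for_name("Keyword name:")   # 'place'd dialog in the real UI
    if not name:
        return                                # user cancelled the prompt
    editor.delete_selected_rows()             # drop the original selection
    editor.new_keyword(name, rows)            # create the keyword body
    editor.insert_keyword_reference(name)     # reference it where the text was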
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>peg_file! ply_rustpeg("ply.rustpeg");
pub fn parse(s: &str) -> Result<PLY, String> {
let (f, v, mut counted_elems, data) = try!(ply_rustpeg::parse(s));
let mut counter = 0us;
for &mut (count, ref mut elem) in counted_elems.iter_mut() {
//let (count, ref mut elem) = counted_elems.get_mut(i).unwrap();
if data.len() < count + counter {
return Err(format!("Data section too short."));
}
elem.data.push_all(&data[counter .. counter + count]);
counter += count;
}
Ok(PLY {format: f, version: v, elements: counted_elems.into_iter().map(|(_,e)|e).collect()})
}
#[derive(Debug, Copy)]
pub enum Format { Ascii }
#[derive(Debug, Copy)]
pub struct Version (u32, u32);
#[derive(Debug)]
pub struct ElementSpec {
pub name: String,
pub props: Vec<PropertySpec>,
pub data: Vec<Vec<String>>, // individual lines of the data
}
impl ElementSpec {
pub fn get_prop(&self, name: String) -> Option<&PropertySpec> {
self.props.iter().filter(|&e| e.name == name).next()
}
}<|fim▁hole|>
#[derive(Debug)]
pub struct PropertySpec {
pub name: String,
pub type_: Type,
}
#[derive(Debug,PartialEq)]
pub enum Type {
Char, UChar, Short, UShort, Int, UInt, Float, Double,
List (Box<Type>),
}
#[derive(Debug)]
pub struct PLY {
pub format: Format,
pub version: Version,
pub elements: Vec<ElementSpec>,
}
impl PLY {
pub fn get_elem(&self, name: String) -> Option<&ElementSpec> {
self.elements.iter().filter(|&e| e.name == name).next()
}
}<|fim▁end|> | |
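The loop in parse() above is plain bookkeeping: it slices the flat data section into per-element chunks according to each element's declared count. The same logic as a Python sketch (names are illustrative):

def split_data(counted_elems, data):
    counter = 0
    chunks = []
    for count, elem in counted_elems:
        if len(data) < counter + count:
            raise ValueError("Data section too short.")
        chunks.append((elem, data[counter:counter + count]))
        counter += count
    return chunks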
<|file_name|>itSystemService.ts<|end_file_name|><|fim▁begin|>module Kitos.Services {
"use strict";
interface ISystemRoleModel {
Id: number;
HasReadAccess: boolean;
HasWriteAccess: boolean;
Name: string;
IsActive: boolean;
Description?: any;
ObjectOwnerId: number;
LastChanged: Date;
LastChangedByUserId: number;
}
interface ISystemRightsModel {
Id: number;
UserId: number;
RoleId: number;
ObjectId: number;
ObjectOwnerId: number;
LastChanged: Date;
LastChangedByUserId: number;
}
export class ItSystemService {
public static $inject: string[] = ["$http"];
constructor(private $http: IHttpServiceWithCustomConfig) {
}
GetSystemById = (id: number) => {
return this.$http.get<Models.ItSystem.IItSystem>(`odata/ItSystems(${id})`);
}
GetAllSystems = () => {
return this.$http.get<Models.ItSystem.IItSystem>(`odata/ItSystems`);
}
GetSystemRoleById = (roleId: number) => {
return this.$http.get<ISystemRoleModel>(`odata/ItSystemRoles(${roleId})`);
}
GetAllSystemRoles = () => {
return this.$http.get<ISystemRoleModel>(`odata/ItSystemRoles`);
}
GetSystemRightsById = (id: number) => {
return this.$http.get<ISystemRightsModel>(`odata/ItSystemRights?$filter=UserId eq (${id})`);
}
GetSystemDataById = (id: number) => {
return this.$http.get(`odata/ItSystemRights?$expand=role,object&$filter=UserId eq (${id})`);
}
<|fim▁hole|> return this.$http
.get(`odata/ItSystemRights?$expand=role($select=Name),object($select=Id;$expand=ItSystem($select=Id,Name))&$filter=Object/OrganizationId eq (${orgId}) AND UserId eq (${id})&$select=Id`);
}
}
app.service("ItSystemService", ItSystemService);
}<|fim▁end|> | GetSystemDataByIdFiltered = (id: number, orgId: number) => { |
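The filtered lookup above is assembled from OData $expand/$filter clauses. A sketch of building the same query string in Python (URL encoding omitted for brevity; the helper name is an assumption):

def system_rights_query(org_id, user_id):
    expand = "role($select=Name),object($select=Id;$expand=ItSystem($select=Id,Name))"
    filt = "Object/OrganizationId eq ({0}) AND UserId eq ({1})".format(org_id, user_id)
    return "odata/ItSystemRights?$expand={0}&$filter={1}&$select=Id".format(expand, filt)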
<|file_name|>TeX.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:558005fd55405d3069b06849812a921274543d712676f42ad4a8c122034c02e4<|fim▁hole|><|fim▁end|> | size 819 |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from apps.employees import ajax
from . import views
urlpatterns = (
url(r'^home/$', login_required(views.home), name="employee_home_redirect"),
url(r'^(?P<pk>[\d]+)/$', login_required(views.EmployeeDetail.as_view()), name="employee_detail"),
url(r'^schedule/add$', login_required(views.ScheduleAdd.as_view()), name="employee_schedule_add"),<|fim▁hole|> url(r'^ajax/take_slip/$', login_required(ajax.SubSlipAjax.as_view()), name="take_slip"),
)<|fim▁end|> | url(r'^schedule/$', login_required(views.schedule), name="employee_schedule"),
url(r'^admin/$', login_required(views.EmployeeAdminPanel.as_view()), name="employee_admin"),
url(r'^sub-board/$', login_required(views.SubBoard.as_view()), name="sub_board"), |
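Because every route above is named, views and templates can resolve them with reverse(); a sketch (only valid inside a configured Django project of this vintage that includes this URLconf at the root):

from django.core.urlresolvers import reverse

reverse("employee_schedule_add")               # -> '/schedule/add'
reverse("employee_detail", kwargs={"pk": 42})  # -> '/42/'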
<|file_name|>test_sym_char_class.py<|end_file_name|><|fim▁begin|>###############################################################################
# test_sym_char_class.py: Test module for PATTERN MATCH - symbol char class
# class
# Copyright (C) 2011 Brno University of Technology, ANT @ FIT
# Author(s): Jaroslav Suchodol <[email protected]>
###############################################################################
#
# LICENSE TERMS
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. All advertising materials mentioning features or use of this software
# or firmware must display the following acknowledgement:
#
# This product includes software developed by the University of
# Technology, Faculty of Information Technology, Brno and its
# contributors.
#
# 4. Neither the name of the Company nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# This software or firmware is provided ``as is'', and any express or implied
# warranties, including, but not limited to, the implied warranties of
# merchantability and fitness for a particular purpose are disclaimed.
# In no event shall the company or contributors be liable for any
# direct, indirect, incidental, special, exemplary, or consequential
# damages (including, but not limited to, procurement of substitute
# goods or services; loss of use, data, or profits; or business
# interruption) however caused and on any theory of liability, whether
# in contract, strict liability, or tort (including negligence or
# otherwise) arising in any way out of the use of this software, even
# if advised of the possibility of such damage.
#
# $Id$
from netbench.pattern_match.b_symbol import io_mapper
from netbench.pattern_match.sym_char import b_Sym_char
from netbench.pattern_match.sym_kchar import b_Sym_kchar
from netbench.pattern_match.sym_string import b_Sym_string
from netbench.pattern_match.sym_char_class import b_Sym_char_class
from netbench.pattern_match.pattern_exceptions import \
symbol_string_to_short, \
symbol_accept_exception, \
symbol_import_exception
import unittest
class test_b_Sym_char_class(unittest.TestCase):
"""A base test class to represent a char class symbol."""
def test_accept(self):
"""accept()"""
        # method accept(text):
        # If len(text) == 0, the symbol_string_to_short exception
        # should be raised.
ab = b_Sym_char_class("ab", set(['a', 'b']), 0)
try:
ab.accept("")
self.assertTrue(False)
except symbol_string_to_short:
self.assertTrue(True)
        # If text[0] is in self.charClass, the value text[1:] is returned.
ab = b_Sym_char_class("ab", set(['a', 'b']), 0)
self.assertTrue(ab.accept("adam") == "dam")
        # If text[0] is not in self.charClass, the
        # symbol_accept_exception exception should be raised.
ab = b_Sym_char_class("ab", set(['a', 'b']), 0)
try:
ab.accept("eva")
self.assertTrue(False)
except symbol_accept_exception:
self.assertTrue(True)
def test_collision(self):
"""collision()"""
        # method collision(set_of_symbols):
        # Try with suitable objects of the classes sym_char, sym_char_class
        # and sym_string, and check the output (collision / no collision).
a = b_Sym_char('a', 'a', 0)
cd = b_Sym_char_class("set(['c', 'd'])", set(['c', 'd']), 1)
ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 2)
adam = b_Sym_string("baba", "baba", 3)
set_of_symbols = set([a, cd, adam])
self.assertTrue(ef.collision(set_of_symbols) == False)
fg = b_Sym_char_class("set(['f', 'g'])", set(['f', 'g']), 4)
set_of_symbols = set([a, fg, adam])
self.assertTrue(ef.collision(set_of_symbols) == True)
def test_export_symbol(self):
"""export_symbol()"""
        # Check that the correct representation of the symbol is returned.
cd = b_Sym_char_class("set(['c', 'd'])", set(['c', 'd']), 0)
self.assertTrue(cd.export_symbol() == "16364")
def test_import_symbol(self):
"""import_symbol()"""
        # method import_symbol(text_repr, tid):
        # Check that the correct object is created and returned from
        # text_repr, that self._id is set to tid and that all parameters
        # are set correctly.
cd = b_Sym_char_class("set(['c', 'd'])", set(['c', 'd']), 0)
cd.import_symbol("16566", 15)
self.assertTrue(cd.charClass == set(['e', 'f']))
self.assertTrue(cd._text == "[ef]")
self.assertTrue(cd._id == 15)
        # If text_repr encodes a different symbol type, the
        # symbol_import_exception exception should be raised.
try:
cd.import_symbol("061", 17)
self.assertTrue(False)
except symbol_import_exception:
self.assertTrue(True)
def test___str__(self):
"""__str__()"""
        # Check that __str__() returns str(self.charClass).
cd = b_Sym_char_class("set(['c', 'd'])", set(['c', 'd']), 0)
self.assertTrue(cd.__str__() == str(cd.charClass))
def test_compute_equal(self):
"""compute_equal()"""
        # method compute_equal(other):
        # If other is an object of type sym_char_class, return True when
        # the arguments are the same, otherwise return False.
cd = b_Sym_char_class("set(['c', 'd'])", set(['c', 'd']), 0)
ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 1)
self.assertTrue(cd.compute_equal(ef) == False)
ef = b_Sym_char_class("set(['c', 'd'])", set(['d', 'c']), 1)
self.assertTrue(cd.compute_equal(ef) == True)
a = b_Sym_char('a', 'a', 0)
self.assertTrue(cd.compute_equal(a) == False)
def test___hash__(self):
"""__hash__()"""
        # Check that __hash__() returns hash(frozenset(self.charClass)).
ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 1)
self.assertTrue(ef.__hash__() == hash(frozenset(ef.charClass)))
def test___repr__(self):
"""__repr__()"""
        # Check that __repr__() returns repr(self.charClass).
ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 1)
self.assertTrue(ef.__repr__() == repr(ef.charClass))
def test_get_support_type(self):
"""get_support_type()"""
        # Check that [io_mapper["b_Sym_char_class"]] is returned.
ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 1)
self.assertTrue(ef.get_support_type() ==
[io_mapper["b_Sym_char_class"]])
def test_compute_collision(self):
"""compute_collision()"""
        # Check that the collision is computed correctly for objects of
        # type sym_char_class.
cd = b_Sym_char_class("set(['c', 'd'])", set(['c', 'd']), 0)
ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 1)
self.assertTrue(cd.compute_collision(ef) == (set([cd]), set(), set([ef])))
ef = b_Sym_char_class("set(['e', 'f'])", set(['c', 'f']), 1)
result = cd.compute_collision(ef)
newSymbol = result[0].pop()
self.assertTrue(newSymbol.charClass == set(['d']))
newSymbol = result[2].pop()
self.assertTrue(newSymbol.charClass == set(['f']))
newSymbol = result[1].pop()
self.assertTrue(newSymbol.charClass == set(['c']))
def test_get_text(self):
"""get_text()"""<|fim▁hole|> ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 1)
self.assertTrue(ef.get_text() == "[ef]")
chars = set()
for i in range(0, 256):
chars.add(chr(i))
chars.remove('2')
chars.remove('3')
chars.remove('4')
chars.remove('7')
chars.remove('8')
chars.remove('9')
big_set = b_Sym_char_class("big_set", chars, 2)
self.assertTrue(big_set.get_text() == "^[234789]")
def test_is_empty(self):
"""is_empty()"""
        # is_empty() should return True if len(self.charClass) == 0 and
        # self._id != -1, and False otherwise.
ef = b_Sym_char_class("set(['e', 'f'])", set(['e', 'f']), 1)
self.assertTrue(ef.is_empty() == False)
near_empty = b_Sym_char_class("near_empty", set(), -1)
self.assertTrue(near_empty.is_empty() == False)
empty = b_Sym_char_class("empty", set(), 15)
self.assertTrue(empty.is_empty() == True)
def test_compute_double_stride(self):
"""compute_double_stride()"""
        # method compute_double_stride(compSymbol, reverse, last, local_chars):
        # Test with compSymbol of type sym_char and of type sym_char_class.
        # If reverse is True, the order of self and compSymbol is swapped.
# compSymbol type sym_char ; reverse = False
ac = b_Sym_char_class('ac', set(['a', 'c']), 0)
b = b_Sym_char('b', 'b', 1)
local_chars = list()
chars = set()
for i in range(0,256):
chars.add(chr(i))
local_chars.append(chars)
new_kchar = ac.compute_double_stride(b, False, 2, local_chars)[0]
new_local_chars = ac.compute_double_stride(b, False, 2, local_chars)[1]
reference_kchar = b_Sym_kchar("[ac]b", (set(['a', 'c']),'b'), 2)
reference_kchar_2 = \
b_Sym_kchar("[ac]b", (frozenset(['a', 'c']),frozenset(['b'])), 2)
reference_kchar.last = 2
reference_kchar_2.last = 2
reference_local_chars = local_chars[0] - set([b.char])
self.assertTrue(new_kchar == reference_kchar
or new_kchar == reference_kchar_2)
self.assertTrue(new_local_chars[0] == reference_local_chars)
self.assertTrue(new_kchar.last == 2)
# compSymbol type sym_char_class ; reverse = False
ac = b_Sym_char_class('ac', set(['a', 'c']), 0)
bc = b_Sym_char_class("set(['b', 'c'])", set(['b', 'c']), 1)
local_chars = list()
chars = set()
for i in range(0,256):
chars.add(chr(i))
local_chars.append(chars)
new_kchar = ac.compute_double_stride(bc, False, 3, local_chars)[0]
new_local_chars = ac.compute_double_stride(bc, False, 3, local_chars)[1]
reference_kchar = b_Sym_kchar("[ac][bc]",
(set(['a', 'c']), set(['b', 'c'])), 2)
reference_kchar_2 = \
b_Sym_kchar("[ac][bc]",
(frozenset(['a', 'c']),frozenset(['b','c'])), 2)
reference_kchar.last = 3
reference_kchar_2.last = 3
reference_local_chars = local_chars[0] - bc.charClass
self.assertTrue(new_kchar == reference_kchar
or new_kchar == reference_kchar_2)
self.assertTrue(new_local_chars[0] == reference_local_chars)
self.assertTrue(new_kchar.last == 3)
# compSymbol type sym_char ; reverse = True
ac = b_Sym_char_class('ac', set(['a', 'c']), 0)
b = b_Sym_char('b', 'b', 1)
local_chars = list()
chars = set()
for i in range(0,256):
chars.add(chr(i))
local_chars.append(chars)
new_kchar = ac.compute_double_stride(b, True, 2, local_chars)[0]
new_local_chars = ac.compute_double_stride(b, True, 2, local_chars)[1]
reference_kchar = b_Sym_kchar("b[ac]", ('b', set(['a', 'c'])), 2)
reference_kchar_2 = \
b_Sym_kchar("b[ac]", (frozenset(['b']),frozenset(['a', 'c'])), 2)
reference_kchar.last = 2
reference_kchar_2.last = 2
reference_local_chars = local_chars[0] - ac.charClass
self.assertTrue(new_kchar == reference_kchar
or new_kchar == reference_kchar_2)
self.assertTrue(new_local_chars[0] == reference_local_chars)
self.assertTrue(new_kchar.last == 2)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(test_b_Sym_char_class)
unittest.TextTestRunner(verbosity=2).run(suite)<|fim▁end|> | # Check return correct representation. |
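The compute_collision() expectations in the tests above amount to a three-way set split: characters only in self, the overlap, and characters only in the other operand. A plain-Python sketch of that partition over bare character sets:

def partition(left, right):
    return left - right, left & right, right - left

print(partition(set('cd'), set('cf')))  # ({'d'}, {'c'}, {'f'}) -- as the test expects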
<|file_name|>runbook.go<|end_file_name|><|fim▁begin|>package automation
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"net/http"
)
// RunbookClient is the automation Client
type RunbookClient struct {
BaseClient
}
// NewRunbookClient creates an instance of the RunbookClient client.
func NewRunbookClient(subscriptionID string, resourceGroupName string, clientRequestID string, automationAccountName string) RunbookClient {
return NewRunbookClientWithBaseURI(DefaultBaseURI, subscriptionID, resourceGroupName, clientRequestID, automationAccountName)
}
// NewRunbookClientWithBaseURI creates an instance of the RunbookClient client.
func NewRunbookClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, clientRequestID string, automationAccountName string) RunbookClient {
return RunbookClient{NewWithBaseURI(baseURI, subscriptionID, resourceGroupName, clientRequestID, automationAccountName)}
}
// CreateOrUpdate create the runbook identified by runbook name.
//
// automationAccountName is the automation account name. runbookName is the runbook name. parameters is the create
// or update parameters for runbook. Provide either content link for a published runbook or draft, not both.
func (client RunbookClient) CreateOrUpdate(ctx context.Context, automationAccountName string, runbookName string, parameters RunbookCreateOrUpdateParameters) (result autorest.Response, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: client.ResourceGroupName,
Constraints: []validation.Constraint{{Target: "client.ResourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}},
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.RunbookCreateOrUpdateProperties", Name: validation.Null, Rule: true,
Chain: []validation.Constraint{{Target: "parameters.RunbookCreateOrUpdateProperties.Draft", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.RunbookCreateOrUpdateProperties.Draft.DraftContentLink", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.RunbookCreateOrUpdateProperties.Draft.DraftContentLink.ContentHash", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.RunbookCreateOrUpdateProperties.Draft.DraftContentLink.ContentHash.Algorithm", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.RunbookCreateOrUpdateProperties.Draft.DraftContentLink.ContentHash.Value", Name: validation.Null, Rule: true, Chain: nil},
}},
}},
}},
{Target: "parameters.RunbookCreateOrUpdateProperties.PublishContentLink", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.RunbookCreateOrUpdateProperties.PublishContentLink.ContentHash", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.RunbookCreateOrUpdateProperties.PublishContentLink.ContentHash.Algorithm", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.RunbookCreateOrUpdateProperties.PublishContentLink.ContentHash.Value", Name: validation.Null, Rule: true, Chain: nil},
}},
}},
}}}}}); err != nil {
return result, validation.NewError("automation.RunbookClient", "CreateOrUpdate", err.Error())
}
req, err := client.CreateOrUpdatePreparer(ctx, automationAccountName, runbookName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
resp, err := client.CreateOrUpdateSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "CreateOrUpdate", resp, "Failure sending request")
return
}
result, err = client.CreateOrUpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "CreateOrUpdate", resp, "Failure responding to request")
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client RunbookClient) CreateOrUpdatePreparer(ctx context.Context, automationAccountName string, runbookName string, parameters RunbookCreateOrUpdateParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"resourceGroupName": autorest.Encode("path", client.ResourceGroupName),
"runbookName": autorest.Encode("path", runbookName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsJSON(),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks/{runbookName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client RunbookClient) CreateOrUpdateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client RunbookClient) CreateOrUpdateResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated, http.StatusBadRequest),
autorest.ByClosing())
result.Response = resp
return
}
// Delete delete the runbook by name.
//
// automationAccountName is the automation account name. runbookName is the runbook name.
func (client RunbookClient) Delete(ctx context.Context, automationAccountName string, runbookName string) (result autorest.Response, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: client.ResourceGroupName,
Constraints: []validation.Constraint{{Target: "client.ResourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("automation.RunbookClient", "Delete", err.Error())
}
req, err := client.DeletePreparer(ctx, automationAccountName, runbookName)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Delete", nil, "Failure preparing request")
return
}
resp, err := client.DeleteSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Delete", resp, "Failure sending request")
return
}
result, err = client.DeleteResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Delete", resp, "Failure responding to request")
}
return
}
<|fim▁hole|> pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"resourceGroupName": autorest.Encode("path", client.ResourceGroupName),
"runbookName": autorest.Encode("path", runbookName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks/{runbookName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client RunbookClient) DeleteSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client RunbookClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}
// Get retrieve the runbook identified by runbook name.
//
// automationAccountName is the automation account name. runbookName is the runbook name.
func (client RunbookClient) Get(ctx context.Context, automationAccountName string, runbookName string) (result Runbook, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: client.ResourceGroupName,
Constraints: []validation.Constraint{{Target: "client.ResourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("automation.RunbookClient", "Get", err.Error())
}
req, err := client.GetPreparer(ctx, automationAccountName, runbookName)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client RunbookClient) GetPreparer(ctx context.Context, automationAccountName string, runbookName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"resourceGroupName": autorest.Encode("path", client.ResourceGroupName),
"runbookName": autorest.Encode("path", runbookName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks/{runbookName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client RunbookClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client RunbookClient) GetResponder(resp *http.Response) (result Runbook, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// GetContent retrieve the content of runbook identified by runbook name.
//
// automationAccountName is the automation account name. runbookName is the runbook name.
func (client RunbookClient) GetContent(ctx context.Context, automationAccountName string, runbookName string) (result ReadCloser, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: client.ResourceGroupName,
Constraints: []validation.Constraint{{Target: "client.ResourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("automation.RunbookClient", "GetContent", err.Error())
}
req, err := client.GetContentPreparer(ctx, automationAccountName, runbookName)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "GetContent", nil, "Failure preparing request")
return
}
resp, err := client.GetContentSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "GetContent", resp, "Failure sending request")
return
}
result, err = client.GetContentResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "GetContent", resp, "Failure responding to request")
}
return
}
// GetContentPreparer prepares the GetContent request.
func (client RunbookClient) GetContentPreparer(ctx context.Context, automationAccountName string, runbookName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"resourceGroupName": autorest.Encode("path", client.ResourceGroupName),
"runbookName": autorest.Encode("path", runbookName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks/{runbookName}/content", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetContentSender sends the GetContent request. The method will close the
// http.Response Body if it receives an error.
func (client RunbookClient) GetContentSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetContentResponder handles the response to the GetContent request. The method always
// closes the http.Response Body.
func (client RunbookClient) GetContentResponder(resp *http.Response) (result ReadCloser, err error) {
result.Value = &resp.Body
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK))
result.Response = autorest.Response{Response: resp}
return
}
// ListByAutomationAccount retrieve a list of runbooks.
//
// automationAccountName is the automation account name.
func (client RunbookClient) ListByAutomationAccount(ctx context.Context, automationAccountName string) (result RunbookListResultPage, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: client.ResourceGroupName,
Constraints: []validation.Constraint{{Target: "client.ResourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("automation.RunbookClient", "ListByAutomationAccount", err.Error())
}
result.fn = client.listByAutomationAccountNextResults
req, err := client.ListByAutomationAccountPreparer(ctx, automationAccountName)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "ListByAutomationAccount", nil, "Failure preparing request")
return
}
resp, err := client.ListByAutomationAccountSender(req)
if err != nil {
result.rlr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "ListByAutomationAccount", resp, "Failure sending request")
return
}
result.rlr, err = client.ListByAutomationAccountResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "ListByAutomationAccount", resp, "Failure responding to request")
}
return
}
// ListByAutomationAccountPreparer prepares the ListByAutomationAccount request.
func (client RunbookClient) ListByAutomationAccountPreparer(ctx context.Context, automationAccountName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"resourceGroupName": autorest.Encode("path", client.ResourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByAutomationAccountSender sends the ListByAutomationAccount request. The method will close the
// http.Response Body if it receives an error.
func (client RunbookClient) ListByAutomationAccountSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListByAutomationAccountResponder handles the response to the ListByAutomationAccount request. The method always
// closes the http.Response Body.
func (client RunbookClient) ListByAutomationAccountResponder(resp *http.Response) (result RunbookListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listByAutomationAccountNextResults retrieves the next set of results, if any.
func (client RunbookClient) listByAutomationAccountNextResults(lastResults RunbookListResult) (result RunbookListResult, err error) {
req, err := lastResults.runbookListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "automation.RunbookClient", "listByAutomationAccountNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListByAutomationAccountSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "automation.RunbookClient", "listByAutomationAccountNextResults", resp, "Failure sending next results request")
}
result, err = client.ListByAutomationAccountResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "listByAutomationAccountNextResults", resp, "Failure responding to next results request")
}
return
}
// ListByAutomationAccountComplete enumerates all values, automatically crossing page boundaries as required.
func (client RunbookClient) ListByAutomationAccountComplete(ctx context.Context, automationAccountName string) (result RunbookListResultIterator, err error) {
result.page, err = client.ListByAutomationAccount(ctx, automationAccountName)
return
}
// Update update the runbook identified by runbook name.
//
// automationAccountName is the automation account name. runbookName is the runbook name. parameters is the update
// parameters for runbook.
func (client RunbookClient) Update(ctx context.Context, automationAccountName string, runbookName string, parameters RunbookUpdateParameters) (result Runbook, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: client.ResourceGroupName,
Constraints: []validation.Constraint{{Target: "client.ResourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("automation.RunbookClient", "Update", err.Error())
}
req, err := client.UpdatePreparer(ctx, automationAccountName, runbookName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Update", nil, "Failure preparing request")
return
}
resp, err := client.UpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Update", resp, "Failure sending request")
return
}
result, err = client.UpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "automation.RunbookClient", "Update", resp, "Failure responding to request")
}
return
}
// UpdatePreparer prepares the Update request.
func (client RunbookClient) UpdatePreparer(ctx context.Context, automationAccountName string, runbookName string, parameters RunbookUpdateParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"automationAccountName": autorest.Encode("path", automationAccountName),
"resourceGroupName": autorest.Encode("path", client.ResourceGroupName),
"runbookName": autorest.Encode("path", runbookName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2015-10-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsJSON(),
autorest.AsPatch(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Automation/automationAccounts/{automationAccountName}/runbooks/{runbookName}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client RunbookClient) UpdateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// UpdateResponder handles the response to the Update request. The method always
// closes the http.Response Body.
func (client RunbookClient) UpdateResponder(resp *http.Response) (result Runbook, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}<|fim▁end|> | // DeletePreparer prepares the Delete request.
func (client RunbookClient) DeletePreparer(ctx context.Context, automationAccountName string, runbookName string) (*http.Request, error) { |
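Every operation in the generated client above follows the same three-stage shape: a Preparer builds the request, a Sender executes it, and a Responder validates and unmarshals the result. A language-neutral sketch of that pipeline in Python (the function names are illustrative, not an actual SDK API):

def invoke(prepare, send, respond, *args):
    req = prepare(*args)   # build path/query parameters and the body
    resp = send(req)       # execute with the retry/registration policy
    return respond(resp)   # check the status code and decode the JSON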
<|file_name|>factories.py<|end_file_name|><|fim▁begin|>from django.contrib.contenttypes.models import ContentType
import factory
from glitter.models import Version
from glitter.pages.models import Page
class PageFactory(factory.DjangoModelFactory):
url = factory.Sequence(lambda n: '/page-{}/'.format(n))
title = factory.Sequence(lambda n: 'Page {}'.format(n))
class Meta:
model = Page
class VersionFactory(factory.DjangoModelFactory):
object_id = factory.SelfAttribute('content_object.id')
content_type = factory.LazyAttribute(
lambda o: ContentType.objects.get_for_model(o.content_object),
)
class Meta:
exclude = ('content_object',)
abstract = True
class PageVersionFactory(VersionFactory):
content_object = factory.SubFactory(PageFactory)
class Meta:
model = Version
@factory.post_generation
def set_version(self, create, extracted, **kwargs):
if extracted:
page = self.content_object
page.current_version = self
if create:<|fim▁hole|><|fim▁end|> | page.save() |
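A usage sketch for the factories above, as one might write it in a test; the set_version flag is routed to the post_generation hook of the same name:

page_version = PageVersionFactory(set_version=True)
page = page_version.content_object  # a Page whose current_version is now set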
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import os
from flask import Flask, Response, request, url_for
import psycopg2
import urlparse
import plivo
import plivoxml
<|fim▁hole|>MY_URL = 'http://morning-ocean-4669.herokuapp.com/report/'
app = Flask(__name__)
@app.route('/response/speak/', methods=['GET'])
def speak():
# Enter the message you want to play
text = "Congratulations! You just made a text to speech app on Plivo cloud!"
parameters = {'loop': 1, 'language': "en-US", 'voice': "WOMAN"}
response = plivoxml.Response()
response.addSpeak(text, **parameters)
return Response(str(response), mimetype='text/xml')
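# For reference, the handler above should render XML along these lines (an
# assumption about plivoxml's serialization; attribute order may differ):
#   <Response><Speak language="en-US" loop="1" voice="WOMAN">Congratulations! ...</Speak></Response>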
@app.route('/send', methods=['GET', 'POST'])
def send():
# Enter the message you want to send
auth_id = os.environ.get("AUTH_ID", AUTH_ID)
auth_token = os.environ.get("AUTH_TOKEN", AUTH_TOKEN)
caller_id = os.environ.get("CALLER_ID", CALLER_ID)
box_id = os.environ.get("BOX_ID", BOX_ID)
my_url = os.environ.get("MY_URL", MY_URL)
params = {
'src': caller_id, # Sender's phone number with country code
'dst' : box_id, # Receiver's phone Number with country code
'text' : u"Hello, how are you?", # Your SMS Text Message - English
'url' : my_url, # The URL to which with the status of the message is sent
'method' : 'POST' # The method used to call the url
}
if request.method == 'GET':
response = plivoxml.Response()
#response.addSpeak(auth_id + auth_token + caller_id + box_id + my_url)
elif request.method == 'POST':
p = plivo.RestAPI(auth_id, auth_token)
response = p.send_message(params)
return Response(str(response), mimetype='text/xml')
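# Quick manual test, using the default port configured in __main__ below: a
# GET only returns the empty plivoxml Response, while a POST sends the SMS.
#   curl -X POST http://localhost:5000/send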
@app.route('/call', methods=['GET', 'POST'])
def call():
    # Set up the outbound call you want to place
auth_id = os.environ.get("AUTH_ID", AUTH_ID)
auth_token = os.environ.get("AUTH_TOKEN", AUTH_TOKEN)
caller_id = os.environ.get("CALLER_ID", CALLER_ID)
box_id = os.environ.get("BOX_ID", BOX_ID)
my_url = os.environ.get("MY_URL", MY_URL)
client = request.values.get('client')
params = {
'from': caller_id, # Caller Id
'to' : box_id, # User Number to Call
'answer_url' : my_url+"call",
'time_limit': 80
}
if request.method == 'GET':
response = plivoxml.Response()
response.addSpeak("hello "+client)
#response.addSpeak(auth_id + auth_token + caller_id + box_id + my_url)
#p = plivo.RestAPI(auth_id, auth_token)
#response = p.make_call(params)
elif request.method == 'POST':
response = plivoxml.Response()
response.addSpeak("hello "+client)
#p = plivo.RestAPI(auth_id, auth_token)
#response = p.make_call(params)
return Response(str(response), mimetype='text/xml')
@app.route("/initdb", methods=['GET', 'POST'])
def initdb():
response = plivoxml.Response()
client = request.values.get('client')
    if client is None:
return Response(str(response), mimetype='text/xml')
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
cur = conn.cursor()
try:
cur.execute("CREATE TABLE IF NOT EXISTS test (id serial PRIMARY KEY, num integer, data varchar);")
cur.execute("INSERT INTO test (num, data) VALUES (%s, %s)", (100, "abc'def"))
cur.execute("SELECT * FROM test;")
response.addSpeak(cur.fetchone())
except Exception, e:
response.addSpeak(e)
cur.close()
conn.commit()
conn.close()
return Response(str(response), mimetype='text/xml')
@app.route("/writedb", methods=['GET', 'POST'])
def writedb():
response = plivoxml.Response()
client = request.values.get('client')
text = request.values.get('text')
    if client is None:
return Response(str(response), mimetype='text/xml')
    if text is None:
return Response(str(response), mimetype='text/xml')
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
cur = conn.cursor()
#cur.execute("UPDATE test SET data = 'abcd' WHERE num = 100;")
try:
SQL = "UPDATE test SET data = %s WHERE num = 100;"
data = (""+text+"",)
cur.execute(SQL, data)
cur.execute("SELECT * FROM test;")
response.addSpeak(cur.fetchone())
except Exception, e:
response.addSpeak(e)
cur.close()
conn.commit()
conn.close()
return Response(str(response), mimetype='text/xml')
@app.route("/readdb", methods=['GET', 'POST'])
def readdb():
response = plivoxml.Response()
client = request.values.get('client')
    if client is None:
return Response(str(response), mimetype='text/xml')
urlparse.uses_netloc.append("postgres")
url = urlparse.urlparse(os.environ["DATABASE_URL"])
conn = psycopg2.connect(
database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port
)
cur = conn.cursor()
try:
cur.execute("SELECT * FROM test;")
response.addSpeak(cur.fetchone())
except Exception, e:
response.addSpeak(e)
cur.close()
conn.close()
return Response(str(response), mimetype='text/xml')
@app.route("/writefile", methods=['GET', 'POST'])
def writefile():
response = plivoxml.Response()
client = request.values.get('client')
try:
file = open("/tmp/foo.txt", "w")
file.write('this is a line of text')
file.close()
read_file = open("/tmp/foo.txt", 'r')
text = read_file.read()
read_file.close()
response.addSpeak(text)
except Exception, e:
response.addSpeak(e)
return Response(str(response), mimetype='text/xml')
@app.route("/readfile", methods=['GET', 'POST'])
def readfile():
response = plivoxml.Response()
client = request.values.get('client')
try:
read_file = open("/tmp/foo.txt", 'r')
text = read_file.read()
read_file.close()
response.addSpeak(text)
except Exception, e:
response.addSpeak(e)
return Response(str(response), mimetype='text/xml')
@app.route("/hello", methods=['GET', 'POST'])
def hello():
response = plivoxml.Response()
client = request.values.get('client')
response.addSpeak("hello "+client)
return Response(str(response), mimetype='text/xml')
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)<|fim▁end|> | AUTH_ID = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
AUTH_TOKEN = 'YYYYYYYYYYYYYYYYYYYYYYYYYYYYYYYY'
CALLER_ID = '+12345678901'
BOX_ID = '+12345678901' |
<|file_name|>net.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
use std::io::{self, Write};
use std::mem;
use std::net::Ipv4Addr;
use std::os::raw::c_uint;
use std::result;
use std::sync::Arc;
use std::thread;
use base::Error as SysError;
use base::{error, warn, AsRawDescriptor, Event, EventType, PollToken, RawDescriptor, WaitContext};
use data_model::{DataInit, Le16, Le64};
use net_util::{Error as TapError, MacAddress, TapT};
use remain::sorted;
use thiserror::Error as ThisError;
use virtio_sys::virtio_net;
use virtio_sys::virtio_net::{
virtio_net_hdr_v1, VIRTIO_NET_CTRL_GUEST_OFFLOADS, VIRTIO_NET_CTRL_GUEST_OFFLOADS_SET,
VIRTIO_NET_CTRL_MQ, VIRTIO_NET_CTRL_MQ_VQ_PAIRS_SET, VIRTIO_NET_ERR, VIRTIO_NET_OK,
};
use vm_memory::GuestMemory;
use super::{
copy_config, DescriptorError, Interrupt, Queue, Reader, SignalableInterrupt, VirtioDevice,
Writer, TYPE_NET,
};
const QUEUE_SIZE: u16 = 256;
#[sorted]
#[derive(ThisError, Debug)]
pub enum NetError {
/// Cloning kill event failed.
#[error("failed to clone kill event: {0}")]
CloneKillEvent(SysError),
/// Creating kill event failed.
#[error("failed to create kill event: {0}")]
CreateKillEvent(SysError),
/// Creating WaitContext failed.
#[error("failed to create wait context: {0}")]
CreateWaitContext(SysError),
/// Descriptor chain was invalid.
#[error("failed to valildate descriptor chain: {0}")]
DescriptorChain(DescriptorError),
/// Error reading data from control queue.
#[error("failed to read control message data: {0}")]
ReadCtrlData(io::Error),
/// Error reading header from control queue.
#[error("failed to read control message header: {0}")]
ReadCtrlHeader(io::Error),
/// There are no more available descriptors to receive into.
#[error("no rx descriptors available")]
RxDescriptorsExhausted,
/// Enabling tap interface failed.
#[error("failed to enable tap interface: {0}")]
TapEnable(TapError),
/// Couldn't get the MTU from the tap device.
#[error("failed to get tap interface MTU: {0}")]
TapGetMtu(TapError),
/// Open tap device failed.
#[error("failed to open tap device: {0}")]
TapOpen(TapError),
/// Setting tap IP failed.
#[error("failed to set tap IP: {0}")]
TapSetIp(TapError),
/// Setting tap mac address failed.
#[error("failed to set tap mac address: {0}")]
TapSetMacAddress(TapError),
/// Setting tap netmask failed.
#[error("failed to set tap netmask: {0}")]
TapSetNetmask(TapError),
/// Setting vnet header size failed.
#[error("failed to set vnet header size: {0}")]
TapSetVnetHdrSize(TapError),
/// Validating tap interface failed.
#[error("failed to validate tap interface: {0}")]
TapValidate(String),
/// Removing read event from the tap fd events failed.
#[error("failed to disable EPOLLIN on tap fd: {0}")]
WaitContextDisableTap(SysError),
/// Adding read event to the tap fd events failed.
#[error("failed to enable EPOLLIN on tap fd: {0}")]
WaitContextEnableTap(SysError),
/// Error while waiting for events.
#[error("error while waiting for events: {0}")]
WaitError(SysError),
/// Failed writing an ack in response to a control message.
#[error("failed to write control message ack: {0}")]
WriteAck(io::Error),
/// Writing to a buffer in the guest failed.
#[error("failed to write to guest buffer: {0}")]
WriteBuffer(io::Error),
}
#[repr(C, packed)]
#[derive(Debug, Clone, Copy)]
pub struct virtio_net_ctrl_hdr {
pub class: u8,
pub cmd: u8,
}
// Safe because it only has data and has no implicit padding.
unsafe impl DataInit for virtio_net_ctrl_hdr {}
/// Converts virtio-net feature bits to tap's offload bits.
pub fn virtio_features_to_tap_offload(features: u64) -> c_uint {
let mut tap_offloads: c_uint = 0;
if features & (1 << virtio_net::VIRTIO_NET_F_GUEST_CSUM) != 0 {
tap_offloads |= net_sys::TUN_F_CSUM;
}
if features & (1 << virtio_net::VIRTIO_NET_F_GUEST_TSO4) != 0 {
tap_offloads |= net_sys::TUN_F_TSO4;
}
if features & (1 << virtio_net::VIRTIO_NET_F_GUEST_TSO6) != 0 {
tap_offloads |= net_sys::TUN_F_TSO6;
}
if features & (1 << virtio_net::VIRTIO_NET_F_GUEST_ECN) != 0 {
tap_offloads |= net_sys::TUN_F_TSO_ECN;
}
if features & (1 << virtio_net::VIRTIO_NET_F_GUEST_UFO) != 0 {
tap_offloads |= net_sys::TUN_F_UFO;
}
tap_offloads
}
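// A quick illustrative check of the mapping above: acking guest checksum plus
// TSO4 yields exactly the OR of the two matching TUN offload bits.
//
//     let feats = 1 << virtio_net::VIRTIO_NET_F_GUEST_CSUM
//         | 1 << virtio_net::VIRTIO_NET_F_GUEST_TSO4;
//     assert_eq!(
//         virtio_features_to_tap_offload(feats),
//         net_sys::TUN_F_CSUM | net_sys::TUN_F_TSO4
//     );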
#[derive(Debug, Clone, Copy, Default)]
#[repr(C)]
pub struct VirtioNetConfig {
mac: [u8; 6],
status: Le16,
max_vq_pairs: Le16,
mtu: Le16,
}
// Safe because it only has data and has no implicit padding.
unsafe impl DataInit for VirtioNetConfig {}
pub fn process_rx<I: SignalableInterrupt, T: TapT>(
interrupt: &I,
rx_queue: &mut Queue,
mem: &GuestMemory,
mut tap: &mut T,
) -> result::Result<(), NetError> {
let mut needs_interrupt = false;
let mut exhausted_queue = false;
// Read as many frames as possible.
loop {
let desc_chain = match rx_queue.peek(mem) {
Some(desc) => desc,
None => {
exhausted_queue = true;
break;
}
};
let index = desc_chain.index;
let bytes_written = match Writer::new(mem.clone(), desc_chain) {
Ok(mut writer) => {
match writer.write_from(&mut tap, writer.available_bytes()) {
Ok(_) => {}
Err(ref e) if e.kind() == io::ErrorKind::WriteZero => {
warn!("net: rx: buffer is too small to hold frame");
break;
}
Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => {
// No more to read from the tap.
break;
}
Err(e) => {
warn!("net: rx: failed to write slice: {}", e);
return Err(NetError::WriteBuffer(e));
}
};
writer.bytes_written() as u32
}
Err(e) => {
error!("net: failed to create Writer: {}", e);
0
}
};
if bytes_written > 0 {
rx_queue.pop_peeked(mem);
rx_queue.add_used(mem, index, bytes_written);
needs_interrupt = true;
}
}
if needs_interrupt {
rx_queue.trigger_interrupt(mem, interrupt);
}
if exhausted_queue {
Err(NetError::RxDescriptorsExhausted)
} else {
Ok(())
}
}
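// Note that RxDescriptorsExhausted is not fatal: Worker::run below treats it
// as a signal to stop polling the tap (Token::RxTap) until the guest posts
// fresh rx descriptors (Token::RxQueue), at which point tap polling resumes.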
pub fn process_tx<I: SignalableInterrupt, T: TapT>(
interrupt: &I,
tx_queue: &mut Queue,
mem: &GuestMemory,
mut tap: &mut T,
) {
while let Some(desc_chain) = tx_queue.pop(mem) {
let index = desc_chain.index;
match Reader::new(mem.clone(), desc_chain) {
Ok(mut reader) => {
let expected_count = reader.available_bytes();
match reader.read_to(&mut tap, expected_count) {
Ok(count) => {
// Tap writes must be done in one call. If the entire frame was not
// written, it's an error.
if count != expected_count {
error!(
"net: tx: wrote only {} bytes of {} byte frame",
count, expected_count
);
}
}
Err(e) => error!("net: tx: failed to write frame to tap: {}", e),
}
}
Err(e) => error!("net: failed to create Reader: {}", e),
}
tx_queue.add_used(mem, index, 0);
}
tx_queue.trigger_interrupt(mem, interrupt);
}
pub fn process_ctrl<I: SignalableInterrupt, T: TapT>(
interrupt: &I,
ctrl_queue: &mut Queue,
mem: &GuestMemory,
tap: &mut T,
acked_features: u64,
vq_pairs: u16,
) -> Result<(), NetError> {
while let Some(desc_chain) = ctrl_queue.pop(mem) {
let index = desc_chain.index;
let mut reader =
Reader::new(mem.clone(), desc_chain.clone()).map_err(NetError::DescriptorChain)?;
let mut writer = Writer::new(mem.clone(), desc_chain).map_err(NetError::DescriptorChain)?;
let ctrl_hdr: virtio_net_ctrl_hdr = reader.read_obj().map_err(NetError::ReadCtrlHeader)?;
<|fim▁hole|> writer
.write_all(&[VIRTIO_NET_ERR as u8])
.map_err(NetError::WriteAck)?;
ctrl_queue.add_used(mem, index, writer.bytes_written() as u32);
Ok(())
};
match ctrl_hdr.class as c_uint {
VIRTIO_NET_CTRL_GUEST_OFFLOADS => {
if ctrl_hdr.cmd != VIRTIO_NET_CTRL_GUEST_OFFLOADS_SET as u8 {
error!(
"invalid cmd for VIRTIO_NET_CTRL_GUEST_OFFLOADS: {}",
ctrl_hdr.cmd
);
write_error()?;
continue;
}
let offloads: Le64 = reader.read_obj().map_err(NetError::ReadCtrlData)?;
let tap_offloads = virtio_features_to_tap_offload(offloads.into());
if let Err(e) = tap.set_offload(tap_offloads) {
error!("Failed to set tap itnerface offload flags: {}", e);
write_error()?;
continue;
}
let ack = VIRTIO_NET_OK as u8;
writer.write_all(&[ack]).map_err(NetError::WriteAck)?;
}
VIRTIO_NET_CTRL_MQ => {
if ctrl_hdr.cmd == VIRTIO_NET_CTRL_MQ_VQ_PAIRS_SET as u8 {
let pairs: Le16 = reader.read_obj().map_err(NetError::ReadCtrlData)?;
                    // Handle it simply for now.
if acked_features & 1 << virtio_net::VIRTIO_NET_F_MQ == 0
|| pairs.to_native() != vq_pairs
{
error!("Invalid VQ_PAIRS_SET cmd, driver request pairs: {}, device vq pairs: {}",
pairs.to_native(), vq_pairs);
write_error()?;
continue;
}
let ack = VIRTIO_NET_OK as u8;
writer.write_all(&[ack]).map_err(NetError::WriteAck)?;
}
}
_ => warn!(
"unimplemented class for VIRTIO_NET_CTRL_GUEST_OFFLOADS: {}",
ctrl_hdr.class
),
}
ctrl_queue.add_used(mem, index, writer.bytes_written() as u32);
}
ctrl_queue.trigger_interrupt(mem, interrupt);
Ok(())
}
#[derive(PollToken, Debug, Clone)]
pub enum Token {
// A frame is available for reading from the tap device to receive in the guest.
RxTap,
// The guest has made a buffer available to receive a frame into.
RxQueue,
// The transmit queue has a frame that is ready to send from the guest.
TxQueue,
// The control queue has a message.
CtrlQueue,
// Check if any interrupts need to be re-asserted.
InterruptResample,
// crosvm has requested the device to shut down.
Kill,
}
struct Worker<T: TapT> {
interrupt: Arc<Interrupt>,
mem: GuestMemory,
rx_queue: Queue,
tx_queue: Queue,
ctrl_queue: Option<Queue>,
tap: T,
acked_features: u64,
vq_pairs: u16,
kill_evt: Event,
}
impl<T> Worker<T>
where
T: TapT,
{
fn process_rx(&mut self) -> result::Result<(), NetError> {
process_rx(
self.interrupt.as_ref(),
&mut self.rx_queue,
&self.mem,
&mut self.tap,
)
}
fn process_tx(&mut self) {
process_tx(
self.interrupt.as_ref(),
&mut self.tx_queue,
&self.mem,
&mut self.tap,
)
}
fn process_ctrl(&mut self) -> Result<(), NetError> {
let ctrl_queue = match self.ctrl_queue.as_mut() {
Some(queue) => queue,
None => return Ok(()),
};
process_ctrl(
self.interrupt.as_ref(),
ctrl_queue,
&self.mem,
&mut self.tap,
self.acked_features,
self.vq_pairs,
)
}
fn run(
&mut self,
rx_queue_evt: Event,
tx_queue_evt: Event,
ctrl_queue_evt: Option<Event>,
) -> Result<(), NetError> {
let wait_ctx: WaitContext<Token> = WaitContext::build_with(&[
(&self.tap, Token::RxTap),
(&rx_queue_evt, Token::RxQueue),
(&tx_queue_evt, Token::TxQueue),
(&self.kill_evt, Token::Kill),
])
.map_err(NetError::CreateWaitContext)?;
if let Some(ctrl_evt) = &ctrl_queue_evt {
wait_ctx
.add(ctrl_evt, Token::CtrlQueue)
.map_err(NetError::CreateWaitContext)?;
// Let CtrlQueue's thread handle InterruptResample also.
if let Some(resample_evt) = self.interrupt.get_resample_evt() {
wait_ctx
.add(resample_evt, Token::InterruptResample)
.map_err(NetError::CreateWaitContext)?;
}
}
let mut tap_polling_enabled = true;
'wait: loop {
let events = wait_ctx.wait().map_err(NetError::WaitError)?;
for event in events.iter().filter(|e| e.is_readable) {
match event.token {
Token::RxTap => match self.process_rx() {
Ok(()) => {}
Err(NetError::RxDescriptorsExhausted) => {
wait_ctx
.modify(&self.tap, EventType::None, Token::RxTap)
.map_err(NetError::WaitContextDisableTap)?;
tap_polling_enabled = false;
}
Err(e) => return Err(e),
},
Token::RxQueue => {
if let Err(e) = rx_queue_evt.read() {
error!("net: error reading rx queue Event: {}", e);
break 'wait;
}
if !tap_polling_enabled {
wait_ctx
.modify(&self.tap, EventType::Read, Token::RxTap)
.map_err(NetError::WaitContextEnableTap)?;
tap_polling_enabled = true;
}
}
Token::TxQueue => {
if let Err(e) = tx_queue_evt.read() {
error!("net: error reading tx queue Event: {}", e);
break 'wait;
}
self.process_tx();
}
Token::CtrlQueue => {
if let Some(ctrl_evt) = &ctrl_queue_evt {
if let Err(e) = ctrl_evt.read() {
error!("net: error reading ctrl queue Event: {}", e);
break 'wait;
}
} else {
break 'wait;
}
if let Err(e) = self.process_ctrl() {
error!("net: failed to process control message: {}", e);
break 'wait;
}
}
Token::InterruptResample => {
// We can unwrap safely because interrupt must have the event.
let _ = self.interrupt.get_resample_evt().unwrap().read();
self.interrupt.do_interrupt_resample();
}
Token::Kill => {
let _ = self.kill_evt.read();
break 'wait;
}
}
}
}
Ok(())
}
}
pub fn build_config(vq_pairs: u16, mtu: u16) -> VirtioNetConfig {
VirtioNetConfig {
max_vq_pairs: Le16::from(vq_pairs),
mtu: Le16::from(mtu),
        // The other fields only carry meaningful values when their
        // corresponding features are enabled; none of those features are
        // supported yet, so leave them at their defaults.
..Default::default()
}
}
pub struct Net<T: TapT> {
queue_sizes: Box<[u16]>,
workers_kill_evt: Vec<Event>,
kill_evts: Vec<Event>,
worker_threads: Vec<thread::JoinHandle<Worker<T>>>,
taps: Vec<T>,
avail_features: u64,
acked_features: u64,
mtu: u16,
}
impl<T> Net<T>
where
T: TapT,
{
/// Create a new virtio network device with the given IP address and
/// netmask.
pub fn new(
base_features: u64,
ip_addr: Ipv4Addr,
netmask: Ipv4Addr,
mac_addr: MacAddress,
vq_pairs: u16,
) -> Result<Net<T>, NetError> {
let multi_queue = vq_pairs > 1;
let tap: T = T::new(true, multi_queue).map_err(NetError::TapOpen)?;
tap.set_ip_addr(ip_addr).map_err(NetError::TapSetIp)?;
tap.set_netmask(netmask).map_err(NetError::TapSetNetmask)?;
tap.set_mac_address(mac_addr)
.map_err(NetError::TapSetMacAddress)?;
tap.enable().map_err(NetError::TapEnable)?;
Net::from(base_features, tap, vq_pairs)
}
/// Try to open the already-configured TAP interface `name` and to create a network device from
/// it.
pub fn new_from_name(
base_features: u64,
name: &[u8],
vq_pairs: u16,
) -> Result<Net<T>, NetError> {
let multi_queue = vq_pairs > 1;
let tap: T = T::new_with_name(name, true, multi_queue).map_err(NetError::TapOpen)?;
Net::from(base_features, tap, vq_pairs)
}
/// Creates a new virtio network device from a tap device that has already been
/// configured.
pub fn from(base_features: u64, tap: T, vq_pairs: u16) -> Result<Net<T>, NetError> {
let taps = tap.into_mq_taps(vq_pairs).map_err(NetError::TapOpen)?;
let mut mtu = u16::MAX;
// This would also validate a tap created by Self::new(), but that's a good thing as it
// would ensure that any changes in the creation procedure are matched in the validation.
// Plus we still need to set the offload and vnet_hdr_size values.
for tap in &taps {
validate_and_configure_tap(tap, vq_pairs)?;
mtu = std::cmp::min(mtu, tap.mtu().map_err(NetError::TapGetMtu)?);
}
let mut avail_features = base_features
| 1 << virtio_net::VIRTIO_NET_F_GUEST_CSUM
| 1 << virtio_net::VIRTIO_NET_F_CSUM
| 1 << virtio_net::VIRTIO_NET_F_CTRL_VQ
| 1 << virtio_net::VIRTIO_NET_F_CTRL_GUEST_OFFLOADS
| 1 << virtio_net::VIRTIO_NET_F_GUEST_TSO4
| 1 << virtio_net::VIRTIO_NET_F_GUEST_UFO
| 1 << virtio_net::VIRTIO_NET_F_HOST_TSO4
| 1 << virtio_net::VIRTIO_NET_F_HOST_UFO
| 1 << virtio_net::VIRTIO_NET_F_MTU;
if vq_pairs > 1 {
avail_features |= 1 << virtio_net::VIRTIO_NET_F_MQ;
}
let mut kill_evts: Vec<Event> = Vec::new();
let mut workers_kill_evt: Vec<Event> = Vec::new();
for _ in 0..taps.len() {
let kill_evt = Event::new().map_err(NetError::CreateKillEvent)?;
let worker_kill_evt = kill_evt.try_clone().map_err(NetError::CloneKillEvent)?;
kill_evts.push(kill_evt);
workers_kill_evt.push(worker_kill_evt);
}
Ok(Net {
queue_sizes: vec![QUEUE_SIZE; (vq_pairs * 2 + 1) as usize].into_boxed_slice(),
workers_kill_evt,
kill_evts,
worker_threads: Vec::new(),
taps,
avail_features,
acked_features: 0u64,
mtu,
})
}
}
// Ensure that the tap interface has the correct flags and sets the offload and VNET header size
// to the appropriate values.
pub fn validate_and_configure_tap<T: TapT>(tap: &T, vq_pairs: u16) -> Result<(), NetError> {
let flags = tap.if_flags();
let mut required_flags = vec![
(net_sys::IFF_TAP, "IFF_TAP"),
(net_sys::IFF_NO_PI, "IFF_NO_PI"),
(net_sys::IFF_VNET_HDR, "IFF_VNET_HDR"),
];
if vq_pairs > 1 {
required_flags.push((net_sys::IFF_MULTI_QUEUE, "IFF_MULTI_QUEUE"));
}
let missing_flags = required_flags
.iter()
.filter_map(
|(value, name)| {
if value & flags == 0 {
Some(name)
} else {
None
}
},
)
.collect::<Vec<_>>();
if !missing_flags.is_empty() {
return Err(NetError::TapValidate(format!(
"Missing flags: {:?}",
missing_flags
)));
}
let vnet_hdr_size = mem::size_of::<virtio_net_hdr_v1>() as i32;
tap.set_vnet_hdr_size(vnet_hdr_size)
.map_err(NetError::TapSetVnetHdrSize)?;
Ok(())
}
impl<T> Drop for Net<T>
where
T: TapT,
{
fn drop(&mut self) {
let len = self.kill_evts.len();
for i in 0..len {
// Only kill the child if it claimed its event.
if self.workers_kill_evt.get(i).is_none() {
if let Some(kill_evt) = self.kill_evts.get(i) {
// Ignore the result because there is nothing we can do about it.
let _ = kill_evt.write(1);
}
}
}
let len = self.worker_threads.len();
for _ in 0..len {
let _ = self.worker_threads.remove(0).join();
}
}
}
impl<T> VirtioDevice for Net<T>
where
T: 'static + TapT,
{
fn keep_rds(&self) -> Vec<RawDescriptor> {
let mut keep_rds = Vec::new();
for tap in &self.taps {
keep_rds.push(tap.as_raw_descriptor());
}
for worker_kill_evt in &self.workers_kill_evt {
keep_rds.push(worker_kill_evt.as_raw_descriptor());
}
for kill_evt in &self.kill_evts {
keep_rds.push(kill_evt.as_raw_descriptor());
}
keep_rds
}
fn device_type(&self) -> u32 {
TYPE_NET
}
fn queue_max_sizes(&self) -> &[u16] {
&self.queue_sizes
}
fn features(&self) -> u64 {
self.avail_features
}
fn ack_features(&mut self, value: u64) {
let mut v = value;
// Check if the guest is ACK'ing a feature that we didn't claim to have.
let unrequested_features = v & !self.avail_features;
if unrequested_features != 0 {
warn!("net: virtio net got unknown feature ack: {:x}", v);
// Don't count these features as acked.
v &= !unrequested_features;
}
self.acked_features |= v;
// Set offload flags to match acked virtio features.
if let Some(tap) = self.taps.first() {
if let Err(e) = tap.set_offload(virtio_features_to_tap_offload(self.acked_features)) {
warn!(
"net: failed to set tap offload to match acked features: {}",
e
);
}
}
}
fn read_config(&self, offset: u64, data: &mut [u8]) {
let vq_pairs = self.queue_sizes.len() / 2;
let config_space = build_config(vq_pairs as u16, self.mtu);
copy_config(data, 0, config_space.as_slice(), offset);
}
fn activate(
&mut self,
mem: GuestMemory,
interrupt: Interrupt,
mut queues: Vec<Queue>,
mut queue_evts: Vec<Event>,
) {
if queues.len() != self.queue_sizes.len() || queue_evts.len() != self.queue_sizes.len() {
error!(
"net: expected {} queues, got {}",
self.queue_sizes.len(),
queues.len()
);
return;
}
let vq_pairs = self.queue_sizes.len() / 2;
if self.taps.len() != vq_pairs {
error!("net: expected {} taps, got {}", vq_pairs, self.taps.len());
return;
}
if self.workers_kill_evt.len() != vq_pairs {
error!(
"net: expected {} worker_kill_evt, got {}",
vq_pairs,
self.workers_kill_evt.len()
);
return;
}
let interrupt_arc = Arc::new(interrupt);
for i in 0..vq_pairs {
let tap = self.taps.remove(0);
let acked_features = self.acked_features;
let interrupt = interrupt_arc.clone();
let memory = mem.clone();
let kill_evt = self.workers_kill_evt.remove(0);
// Queues alternate between rx0, tx0, rx1, tx1, ..., rxN, txN, ctrl.
let rx_queue = queues.remove(0);
let tx_queue = queues.remove(0);
let ctrl_queue = if i == 0 {
Some(queues.remove(queues.len() - 1))
} else {
None
};
let pairs = vq_pairs as u16;
let rx_queue_evt = queue_evts.remove(0);
let tx_queue_evt = queue_evts.remove(0);
let ctrl_queue_evt = if i == 0 {
Some(queue_evts.remove(queue_evts.len() - 1))
} else {
None
};
let worker_result = thread::Builder::new()
.name(format!("virtio_net worker {}", i))
.spawn(move || {
let mut worker = Worker {
interrupt,
mem: memory,
rx_queue,
tx_queue,
ctrl_queue,
tap,
acked_features,
vq_pairs: pairs,
kill_evt,
};
let result = worker.run(rx_queue_evt, tx_queue_evt, ctrl_queue_evt);
if let Err(e) = result {
error!("net worker thread exited with error: {}", e);
}
worker
});
match worker_result {
Err(e) => {
error!("failed to spawn virtio_net worker: {}", e);
return;
}
Ok(join_handle) => self.worker_threads.push(join_handle),
}
}
}
fn reset(&mut self) -> bool {
let len = self.kill_evts.len();
for i in 0..len {
// Only kill the child if it claimed its event.
if self.workers_kill_evt.get(i).is_none() {
if let Some(kill_evt) = self.kill_evts.get(i) {
if kill_evt.write(1).is_err() {
error!("{}: failed to notify the kill event", self.debug_label());
return false;
}
}
}
}
let len = self.worker_threads.len();
for _ in 0..len {
match self.worker_threads.remove(0).join() {
Err(_) => {
error!("{}: failed to get back resources", self.debug_label());
return false;
}
Ok(worker) => {
self.taps.push(worker.tap);
self.workers_kill_evt.push(worker.kill_evt);
}
}
}
true
}
}<|fim▁end|> | let mut write_error = || { |
<|file_name|>CustomGraphicsUtil.java<|end_file_name|><|fim▁begin|>package org.cytoscape.cg.model;
import java.awt.Image;
public class CustomGraphicsUtil {
public static Image getResizedImage(Image original, Integer w, Integer h, boolean keepAspectRatio) {
if (original == null)
throw new IllegalArgumentException("Original image cannot be null.");
if (w == null && h == null)<|fim▁hole|> return original;
int currentW = original.getWidth(null);
int currentH = original.getHeight(null);
float ratio;
int converted;
		if (!keepAspectRatio) {
return original.getScaledInstance(w, h, Image.SCALE_AREA_AVERAGING);
} else if (h == null) {
ratio = ((float) currentH) / ((float) currentW);
converted = (int) (w * ratio);
return original.getScaledInstance(w, converted, Image.SCALE_AREA_AVERAGING);
} else {
ratio = ((float) currentW) / ((float) currentH);
converted = (int) (h * ratio);
return original.getScaledInstance(converted, h, Image.SCALE_AREA_AVERAGING);
}
}
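	// Illustrative usage: scale to a 100 px width and let the height follow
	// the source aspect ratio.
	//
	//     Image thumb = CustomGraphicsUtil.getResizedImage(original, 100, null, true);
	//
	// Caveat: when keepAspectRatio is false, both w and h must be non-null, or
	// the unboxing in getScaledInstance(w, h, ...) throws a NullPointerException.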
}<|fim▁end|> | |
<|file_name|>dailymotion.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#------------------------------------------------------------
# beta.1 Dailymotion
# Version 0.1 (10.12.2014)
#------------------------------------------------------------
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
# Thanks to the plugintools library by Jesús (www.mimediacenter.info)
import os
import sys
import urllib
import urllib2
import re
import shutil
import zipfile
import time
import xbmc
import xbmcgui
import xbmcaddon
import xbmcplugin
import plugintools
import json
import math
home = xbmc.translatePath(os.path.join('special://home/addons/plugin.video.arena+/', ''))
tools = xbmc.translatePath(os.path.join('special://home/addons/plugin.video.arena+/resources/tools', ''))
addons = xbmc.translatePath(os.path.join('special://home/addons/', ''))
resources = xbmc.translatePath(os.path.join('special://home/addons/plugin.video.arena+/resources', ''))
art = xbmc.translatePath(os.path.join('special://home/addons/plugin.video.arena+/art', ''))
tmp = xbmc.translatePath(os.path.join('special://home/addons/plugin.video.arena+/tmp', ''))
playlists = xbmc.translatePath(os.path.join('special://home/addons/playlists', ''))
icon = art + 'icon.png'
fanart = 'fanart.jpg'<|fim▁hole|>
def dailym_getplaylist(url):
plugintools.log("beta.1.dailymotion_playlists "+url)
# Fetch video list from Dailymotion playlist user
data = plugintools.read(url)
#plugintools.log("data= "+data)
# Extract items from feed
pattern = ""
matches = plugintools.find_multiple_matches(data,'{"(.*?)}')
pattern = '{"(.*?)},{'
for entry in matches:
plugintools.log("entry="+entry)
title = plugintools.find_single_match(entry,'name":"(.*?)"')
title = title.replace("\u00e9" , "é")
title = title.replace("\u00e8" , "è")
title = title.replace("\u00ea" , "ê")
title = title.replace("\u00e0" , "à")
plugintools.log("title= "+title)
id_playlist = plugintools.find_single_match(entry,'id":"(.*?)",')
if id_playlist:
plugintools.log("id_playlist= "+id_playlist)
return id_playlist
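# Note: the loop above logs every match, but only the id from the last
# iteration is returned, and if there are no matches at all id_playlist is
# never bound, so the return raises a NameError. The same pattern applies to
# dailym_getvideo below.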
def dailym_getvideo(url):
plugintools.log("beta.1.dailymotion_videos "+url)
# Fetch video list from Dailymotion feed
data = plugintools.read(url)
#plugintools.log("data= "+data)
# Extract items from feed
pattern = ""
matches = plugintools.find_multiple_matches(data,'{"(.*?)}')
pattern = '{"(.*?)},{'
for entry in matches:
plugintools.log("entry= "+entry)
# Not the better way to parse XML, but clean and easy
title = plugintools.find_single_match(entry,'title":"(.*?)"')
title = title.replace("\u00e9" , "é")
title = title.replace("\u00e8" , "è")
title = title.replace("\u00ea" , "ê")
title = title.replace("\u00e0" , "à")
video_id = plugintools.find_single_match(entry,'id":"(.*?)",')
if video_id:
plugintools.log("video_id= "+video_id)
return video_id
def dailym_pl(params):
plugintools.log("dailym_pl "+repr(params))
pl = params.get("url")
data = plugintools.read(pl)
plugintools.log("playlist= "+data)
dailym_vid = plugintools.find_multiple_matches(data, '{(.*?)}')
for entry in dailym_vid:
plugintools.log("entry= "+entry)
title = plugintools.find_single_match(entry, '"title":"(.*?)",')
title = title.replace('"', "")
title = title.replace('\*', "")
video_id = plugintools.find_single_match(entry, '"id":"(.*?)",')
thumbnail = "https://api.dailymotion.com/thumbnail/video/"+video_id+""
if thumbnail == "":
thumbnail = 'http://image-parcours.copainsdavant.com/image/750/1925508253/4094834.jpg'
url = "plugin://plugin.video.dailymotion_com/?url="+video_id+"&mode=playVideo"
print 'url',url
plugintools.add_item(action="play", title=title, url=url, folder = False, fanart='http://image-parcours.copainsdavant.com/image/750/1925508253/4094834.jpg',thumbnail=thumbnail,isPlayable = True)<|fim▁end|> | |
<|file_name|>networking.py<|end_file_name|><|fim▁begin|># Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
__author__ = (
"Michael Cohen <[email protected]>",
"Adam Sindelar <[email protected]>")
from rekall import obj
from rekall import plugin
from rekall_lib import registry
from rekall.plugins.darwin import common
class DarwinUnpListCollector(common.AbstractDarwinProducer):
"""Walks the global list of sockets in uipc_usrreq."""
name = "unp_sockets"
type_name = "socket"
def collect(self):
for head_const in ["_unp_dhead", "_unp_shead"]:
lhead = self.session.profile.get_constant_object(
head_const,
target="unp_head")
for unp in lhead.lh_first.walk_list("unp_link.le_next"):
yield [unp.unp_socket]
class DarwinSocketsFromHandles(common.AbstractDarwinProducer):
"""Looks up handles that point to a socket and collects the socket."""
name = "open_sockets"
type_name = "socket"
def collect(self):
for fileproc in self.session.plugins.collect("fileproc"):
if fileproc.fg_type == "DTYPE_SOCKET":
yield [fileproc.autocast_fg_data()]
class DarwinNetstat(common.AbstractDarwinCommand):
"""Prints all open sockets we know about, from any source.
Netstat will display even connections that lsof doesn't know about, because
they were either recovered from an allocation zone, or found through a
secondary mechanism (like system call handler cache).
On the other hand, netstat doesn't know the file descriptor or, really, the
process that owns the connection (although it does know the PID of the last
process to access the socket.)
Netstat will also tell you, in the style of psxview, if a socket was only
found using some of the methods available.
"""
name = "netstat"
@classmethod
def methods(cls):
"""Return the names of available socket enumeration methods."""
# Find all the producers that collect procs and inherit from
# AbstractDarwinCachedProducer.
methods = []
for subclass in common.AbstractDarwinProducer.classes.itervalues():
# We look for a plugin which is a producer and a darwin command.
if (issubclass(subclass, common.AbstractDarwinCommand) and
issubclass(subclass, plugin.Producer) and
subclass.type_name == "socket"):
methods.append(subclass.name)
methods.sort()
return methods
@registry.classproperty
@registry.memoize
def table_header(cls): # pylint: disable=no-self-argument
header = [dict(name="socket", type="socket", width=60)]
for method in cls.methods():
header.append(dict(name=method, width=12))
return plugin.PluginHeader(*header)
def collect(self):
methods = self.methods()
for socket in sorted(self.session.plugins.collect("socket"),
key=lambda socket: socket.last_pid):
row = [socket]
for method in methods:
row.append(method in socket.obj_producers)
yield row
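# Illustrative invocation from the interactive Rekall shell, where plugins are
# addressed by their `name` attribute:
#
#   session.plugins.netstat()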
class DarwinGetArpListHead(common.AbstractDarwinParameterHook):
"""
One version of arp_init looks like this:
void
arp_init(void)
{
VERIFY(!arpinit_done);
LIST_INIT(&llinfo_arp); // <-- This is the global we want.
llinfo_arp_zone = zinit(sizeof (struct llinfo_arp),
LLINFO_ARP_ZONE_MAX * sizeof (struct llinfo_arp), 0,
LLINFO_ARP_ZONE_NAME);
if (llinfo_arp_zone == NULL)
panic("%s: failed allocating llinfo_arp_zone", __func__);
zone_change(llinfo_arp_zone, Z_EXPAND, TRUE);
zone_change(llinfo_arp_zone, Z_CALLERACCT, FALSE);
arpinit_done = 1;
}
Disassembled, the first few instructions look like this:
0x0 55 PUSH RBP
0x1 4889e5 MOV RBP, RSP
0x4 803d65e9400001 CMP BYTE [RIP+0x40e965], 0x1
0xb 7518 JNZ 0xff80090a7f95
0xd 488d3dee802900 LEA RDI, [RIP+0x2980ee]
0x14 488d35f5802900 LEA RSI, [RIP+0x2980f5]
0x1b baf3000000 MOV EDX, 0xf3
# This is a call to kernel!panic (later kernel!assfail):
0x20 e80b6c1400 CALL 0xff80091eeba0
# This is where it starts initializing the linked list:
0x25 48c70548e94000000000 MOV QWORD [RIP+0x40e948], 0x0
00
0x30 488d0d0e812900 LEA RCX, [RIP+0x29810e]
"""
name = "disassembled_llinfo_arp"
PANIC_FUNCTIONS = (u"__kernel__!_panic", u"__kernel__!_assfail")
def calculate(self):
resolver = self.session.address_resolver
arp_init = resolver.get_constant_object("__kernel__!_arp_init",
target="Function")
instructions = iter(arp_init.Decompose(20))
# Walk down to the CALL mnemonic and use the address resolver to
# see if it calls one of the panic functions.
for instruction in instructions:
# Keep spinning until we get to the first CALL.
if instruction.mnemonic != "CALL":
continue
# This is absolute:
target = instruction.operands[0].value
_, names = resolver.get_nearest_constant_by_address(target)
if not names:
return obj.NoneObject("Could not find CALL in arp_init.")
if names[0] not in self.PANIC_FUNCTIONS:
return obj.NoneObject(
"CALL was to %r, which is not on the PANIC list."
% names)
# We verified it's the right CALL. MOV should be right after it,
# so let's just grab it.
mov_instruction = next(instructions)
if mov_instruction.mnemonic != "MOV":
return obj.NoneObject("arp_init code changed.")
offset = (mov_instruction.operands[0].disp
+ mov_instruction.address
+ mov_instruction.size)
address = self.session.profile.Object(type_name="address",
offset=offset)
llinfo_arp = self.session.profile.Object(
type_name="llinfo_arp",
offset=address.v())
if llinfo_arp.isvalid:
return llinfo_arp.obj_offset
return obj.NoneObject("llinfo_arp didn't validate.")
class DarwinArp(common.AbstractDarwinProducer):
"""Show information about arp tables."""
name = "arp"
type_name = "rtentry"
def collect(self):
llinfo_arp = self.session.address_resolver.get_constant_object(
"__kernel__!_llinfo_arp",
target="Pointer",
target_args=dict(target="llinfo_arp"))
if not llinfo_arp:
# Must not have it in the profile. Try asking the session hook
# for the address.
offset = self.session.GetParameter("disassembled_llinfo_arp")
if not offset:
self.session.logging.error(
"Could not find the address of llinfo_arp.")
return
llinfo_arp = self.session.profile.Object(
type_name="llinfo_arp", offset=offset)
for arp_hit in llinfo_arp.walk_list("la_le.le_next"):
yield [arp_hit.la_rt]
class DarwinRoute(common.AbstractDarwinCommand):
"""Show routing table."""
__name = "route"
RNF_ROOT = 2
def rn_walk_tree(self, h):
"""Walks the radix tree starting from the header h.
This function is taken from
xnu-2422.1.72/bsd/net/radix.c: rn_walk_tree()
Which is why it does not conform to the style guide.
Note too that the darwin source code abuses C macros:
#define rn_dupedkey rn_u.rn_leaf.rn_Dupedkey
#define rn_key rn_u.rn_leaf.rn_Key
#define rn_mask rn_u.rn_leaf.rn_Mask
#define rn_offset rn_u.rn_node.rn_Off
#define rn_left rn_u.rn_node.rn_L
#define rn_right rn_u.rn_node.rn_R
And then the original code does:
rn = rn.rn_left
<|fim▁hole|> seen = set()
# First time through node, go left */
while rn.rn_bit >= 0:
rn = rn.rn_u.rn_node.rn_L
while rn and rn not in seen:
base = rn
seen.add(rn)
# If at right child go back up, otherwise, go right
while (rn.rn_parent.rn_u.rn_node.rn_R == rn and
not rn.rn_flags & self.RNF_ROOT):
rn = rn.rn_parent
# Find the next *leaf* to start from
rn = rn.rn_parent.rn_u.rn_node.rn_R
while rn.rn_bit >= 0:
rn = rn.rn_u.rn_node.rn_L
next = rn
# Process leaves
while True:
rn = base
if not rn:
break
base = rn.rn_u.rn_leaf.rn_Dupedkey
if not rn.rn_flags & self.RNF_ROOT:
yield rn
rn = next
if rn.rn_flags & self.RNF_ROOT:
return
def render(self, renderer):
renderer.table_header(
[("Source IP", "source", "20"),
("Dest IP", "dest", "20"),
("Interface", "interface", "9"),
("Sent", "sent", "8"),
("Recv", "recv", "8"),
("Time", "timestamp", "24"),
("Expires", "expires", "8"),
("Delta", "delta", "8")])
route_tables = self.profile.get_constant_object(
"_rt_tables",
target="Array",
target_args=dict(
count=32,
target="Pointer",
target_args=dict(
target="radix_node_head")))
for node in self.rn_walk_tree(route_tables[2]):
rentry = node.dereference_as("rtentry")
renderer.table_row(
rentry.source_ip,
rentry.dest_ip,
rentry.name,
rentry.sent, rentry.rx,
rentry.base_calendartime,
rentry.rt_expire,
rentry.delta)
class DarwinIfnetHook(common.AbstractDarwinParameterHook):
"""Walks the global list of interfaces.
The head of the list of network interfaces is a kernel global [1].
The struct we use [2] is just the public part of the data [3]. Addresses
are related to an interface in a N:1 relationship [4]. AF-specific data
is a normal sockaddr struct.
References:
1:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/dlil.c#L254
2:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/if_var.h#L528
3:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/dlil.c#L188
4:
https://github.com/opensource-apple/xnu/blob/10.9/bsd/net/if_var.h#L816
"""
name = "ifconfig"
# ifnet_head is the actual extern holding ifnets and seems to be an
# improvement over dlil_ifnet_head, which is a static and used only in the
# dlil (stands for data link interface, I think?) module.
IFNET_HEAD_NAME = ("_ifnet_head", "_dlil_ifnet_head")
def calculate(self):
ifnet_head = obj.NoneObject("No ifnet global names given.")
for name in self.IFNET_HEAD_NAME:
ifnet_head = self.session.profile.get_constant_object(
name,
target="Pointer",
target_args=dict(
target="ifnet"))
if ifnet_head:
break
return [x.obj_offset for x in ifnet_head.walk_list("if_link.tqe_next")]
class DarwinIfnetCollector(common.AbstractDarwinCachedProducer):
name = "ifconfig"
type_name = "ifnet"
class DarwinIPFilters(common.AbstractDarwinCommand):
"""Check IP Filters for hooks."""
__name = "ip_filters"
def render(self, renderer):
renderer.table_header([
("Context", "context", "10"),
("Filter", "filter", "16"),
("Handler", "handler", "[addrpad]"),
("Symbol", "symbol", "20")])
resolver = self.session.address_resolver
for list_name in ["_ipv4_filters", "_ipv6_filters"]:
filter_list = self.profile.get_constant_object(
list_name, target="ipfilter_list")
for item in filter_list.tqh_first.walk_list("ipf_link.tqe_next"):
filter = item.ipf_filter
name = filter.name.deref()
handler = filter.ipf_input.deref()
renderer.table_row("INPUT", name, handler,
resolver.format_address(handler))
handler = filter.ipf_output.deref()
renderer.table_row("OUTPUT", name, handler,
resolver.format_address(handler))
handler = filter.ipf_detach.deref()
renderer.table_row("DETACH", name, handler,
resolver.format_address(handler))<|fim▁end|> | So we replace these below.
"""
rn = h.rnh_treetop
|
<|file_name|>test_fork1.py<|end_file_name|><|fim▁begin|>"""This test checks for correct fork() behavior.
"""
import _imp as imp
import os
import signal
import sys
import threading
import time
import unittest
from test.fork_wait import ForkWait
from test.support import reap_children, get_attribute, verbose
# Skip test if fork does not exist.
get_attribute(os, 'fork')
class ForkTest(ForkWait):
def wait_impl(self, cpid):
deadline = time.monotonic() + 10.0
while time.monotonic() <= deadline:
# waitpid() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status = os.waitpid(cpid, os.WNOHANG)
if spid == cpid:
break
time.sleep(0.1)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
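        # The raw waitpid() status word packs the terminating signal into the
        # low byte and the exit code into the high byte, so both reported
        # values are expected to be zero for a clean exit.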
def test_threaded_import_lock_fork(self):
"""Check fork() in main thread works while a subthread is doing an import"""
import_started = threading.Event()
fake_module_name = "fake test module"
partial_module = "partial"
complete_module = "complete"
def importer():
imp.acquire_lock()<|fim▁hole|> imp.release_lock()
t = threading.Thread(target=importer)
t.start()
import_started.wait()
pid = os.fork()
try:
# PyOS_BeforeFork should have waited for the import to complete
# before forking, so the child can recreate the import lock
# correctly, but also won't see a partially initialised module
if not pid:
m = __import__(fake_module_name)
if m == complete_module:
os._exit(0)
else:
if verbose > 1:
print("Child encountered partial module")
os._exit(1)
else:
t.join()
# Exitcode 1 means the child got a partial module (bad.) No
# exitcode (but a hang, which manifests as 'got pid 0')
# means the child deadlocked (also bad.)
self.wait_impl(pid)
finally:
try:
os.kill(pid, signal.SIGKILL)
except OSError:
pass
def test_nested_import_lock_fork(self):
"""Check fork() in main thread works while the main thread is doing an import"""
# Issue 9573: this used to trigger RuntimeError in the child process
def fork_with_import_lock(level):
release = 0
in_child = False
try:
try:
for i in range(level):
imp.acquire_lock()
release += 1
pid = os.fork()
in_child = not pid
finally:
for i in range(release):
imp.release_lock()
except RuntimeError:
if in_child:
if verbose > 1:
print("RuntimeError in child")
os._exit(1)
raise
if in_child:
os._exit(0)
self.wait_impl(pid)
# Check this works with various levels of nested
# import in the main thread
for level in range(5):
fork_with_import_lock(level)
def tearDownModule():
reap_children()
if __name__ == "__main__":
unittest.main()<|fim▁end|> | sys.modules[fake_module_name] = partial_module
import_started.set()
time.sleep(0.01) # Give the other thread time to try and acquire.
sys.modules[fake_module_name] = complete_module |
<|file_name|>SIOCC-TS.d.ts<|end_file_name|><|fim▁begin|>declare module ioc {
/**
* A base class for applications using an IOC Container
*/
abstract class ApplicationContext implements IApplicationContext {
/**
* A base class for applications using an IOC Container
* @param appName The name of your application
*/
constructor(appName: string);
/**
* A handle to access the ApplicationContext from anywhere in the application
*/
static applicationContext: IApplicationContext;
/**
* A method to override where you register your intances into the IOC Container
* @param container The IOC container created for this ApplicationContext
* @returns {}
*/
register(container: Container): void;
}
}
declare module ioc {
/**
* The IOC Container
*/
class Container {
private static container;
private registeredInstances;
private registeredScripts;
private appName;
/**
* The IOC Container
* @param appName The name of your application
* @param baseNamespace
*/
constructor(appName: string);
/**
* Get the currently assigned IOC Container
*/
static getCurrent(): Container;
/**
* Get the name of the ApplicationContext this IOC container is made from
*/
getAppName(): string;
/**
* Register an instance type
* @param type The full namespace of the type you want to instantiate
*/
register<T>(type: Function): InstanceRegistry<T>;
/**
* Resolve the registered Instance
* @param type The full namespace of the type you want to resolve
*/
resolve<T>(type: Function): T;
}
}
declare module ioc {
/**
* A helper class for aquiring animation methods
*/
class AnimationHelper {
/**
* Get the animationframe
* @param callback Function to call on AnimationFrame
*/
static getAnimationFrame(callback: FrameRequestCallback): number;
/**
* Cancel an animationFrameEvent
* @param requestId The handle of the event you want to cancel
*/
static cancelAnimationFrame(requestId: number): void;
}
}
declare module ioc {
interface IApplicationContext {
/**
* A method to override where you register your intances into the IOC Container
* @param container The IOC container created for this ApplicationContext
* @returns {}
*/
register(container: Container): void;
}
}
declare module ioc {
/**
* A base class for libraries using an IOC Container
* This is used to provide an easy way to register all the libraries components
*/
abstract class LibraryContext {
/**
* A method to override where you register your intances into the IOC Container
* @param container The IOC container created for the ApplicationContext of the using app
* @returns {}
*/
static register(container: Container): void;
}
}
declare module ioc {
interface IRegistryBase<T> {
/**
* Set the type of this Registry
* @param type The full type of the Instance you want to register
* @returns {}
*/
setType(type: Function): IRegistryBase<T>;
/**
* Return the Instance
* @returns {}
*/
getInstance(): T;
/**
* Get the type of this Registry
* @returns {}
*/
getType(): Function;
/**
         * Set a function to modify Instance that will be called directly after instantiating
* @param resolve The function to call when resolving
* @returns {}
*/
setResolveFunc(resolve: (instance: T) => T): IRegistryBase<T>;
/**
* Set a function to resolve the object in a different way than a parameterless constructor
* @param instantiate The function used to Instantiate the object<|fim▁hole|> setInstantiateFunc(instantiate: () => T): IRegistryBase<T>;
/**
* Apply a lifetimescope to this Registry
* @param lifetime The lifetimescope to apply to
*/
setLifetimeScope(lifetime: LifetimeScope): IRegistryBase<T>;
}
}
declare module ioc {
/**
* Registry for standard Instances
*/
class InstanceRegistry<T> extends RegistryBase<T> {
protected lifeTimeScope: LifetimeScope;
protected callers: {
[key: string]: any;
};
/**
* Return the Instance
* @returns {}
*/
getInstance(): T;
/**
* Instantiate the object
*/
protected instantiate(): void;
/**
* Apply a lifetimescope to this Registry
* @param lifetime The lifetimescope to apply to
*/
setLifetimeScope(lifetime: LifetimeScope): IRegistryBase<T>;
}
}
declare module ioc {
/**
* The available lifetime scopes
*/
enum LifetimeScope {
/**
         * Resolve every time Resolve is called
*/
PerResolveCall = 0,
/**
* Allow only one Instance of this type
*/
SingleInstance = 1,
/**
* Return only one Instance for every dependency
*/
PerDependency = 2,
}
}
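// Illustrative usage of the declarations above, assuming a `Logger` class
// defined by the consuming application:
//
//     class AppContext extends ioc.ApplicationContext {
//         register(container: ioc.Container): void {
//             container.register<Logger>(Logger)
//                 .setLifetimeScope(ioc.LifetimeScope.SingleInstance);
//         }
//     }
//     const log = ioc.Container.getCurrent().resolve<Logger>(Logger);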
declare module ioc {
/**
* A base class to provide basic functionality for al Registries
*/
class RegistryBase<T> implements IRegistryBase<T> {
protected type: Function;
protected object: any;
protected initiated: boolean;
protected loaded: boolean;
protected resolveFunc: (instance: T) => any;
protected instantiateFunc: () => T;
/**
* Return the Instance
* @returns {}
*/
getInstance(): T;
/**
* Get the type of this Registry
* @returns {}
*/
getType(): Function;
/**
* Set the type of this Registry
* @param type The full type of the Instance you want to register
* @returns {}
*/
setType(type: Function | T): IRegistryBase<T>;
/**
* Method to override that Instantiates the object
*/
protected instantiate(): void;
/**
         * Set a function to modify Instance that will be called directly after instantiating
* @param resolve The function to call when resolving
* @returns {}
*/
setResolveFunc(resolve: (instance: T) => T): IRegistryBase<T>;
/**
* Set a function to resolve the object in a different way than a parameterless constructor
* @param instantiate The function used to Instantiate the object
* @returns {}
*/
setInstantiateFunc(instantiate: () => T): IRegistryBase<T>;
/**
* Apply a lifetimescope to this Registry
* @param lifetime The lifetimescope to apply to
*/
setLifetimeScope(lifetime: LifetimeScope): IRegistryBase<T>;
}
}
//# sourceMappingURL=SIOCC-TS.d.ts.map<|fim▁end|> | * @returns {}
*/ |
<|file_name|>query.go<|end_file_name|><|fim▁begin|>package query
import (
"Yearning-go/src/handler/commom"
"Yearning-go/src/lib"
"Yearning-go/src/model"
"github.com/cookieY/yee"
"net/http"
"time"
)
func FetchQueryRecord(c yee.Context) (err error) {
u := new(commom.PageInfo)
if err = c.Bind(u); err != nil {
c.Logger().Error(err.Error())
return
}
order := u.GetSQLQueryList(
commom.AccordingToQueryPer(),
commom.AccordingToWorkId(u.Find.Text),
commom.AccordingToDate(u.Find.Picker),
)
return c.JSON(http.StatusOK, commom.SuccessPayload(order))
}
func FetchQueryOrder(c yee.Context) (err error) {
<|fim▁hole|> if err = c.Bind(u); err != nil {
c.Logger().Error(err.Error())
return
}
user, _ := lib.JwtParse(c)
order := u.GetSQLQueryList(
commom.AccordingToUsername(u.Find.Text),
commom.AccordingToAssigned(user),
commom.AccordingToDate(u.Find.Picker),
commom.AccordingToAllQueryOrderState(u.Find.Status),
)
return c.JSON(http.StatusOK, commom.SuccessPayload(order))
}
func FetchQueryRecordProfile(c yee.Context) (err error) {
u := new(commom.ExecuteStr)
if err = c.Bind(u); err != nil {
c.Logger().Error(err.Error())
return
}
start, end := lib.Paging(u.Page, 20)
var detail []model.CoreQueryRecord
var count int
model.DB().Model(&model.CoreQueryRecord{}).Where("work_id =?", u.WorkId).Count(&count).Offset(start).Limit(end).Find(&detail)
return c.JSON(http.StatusOK, commom.SuccessPayload(commom.CommonList{Data: detail, Page: count}))
}
func QueryDeleteEmptyRecord(c yee.Context) (err error) {
var j []model.CoreQueryOrder
model.DB().Select("work_id").Where(`query_per =?`, 3).Find(&j)
for _, i := range j {
var k model.CoreQueryRecord
if model.DB().Where("work_id =?", i.WorkId).First(&k).RecordNotFound() {
model.DB().Where("work_id =?", i.WorkId).Delete(&model.CoreQueryOrder{})
}
}
return c.JSON(http.StatusOK, commom.SuccessPayLoadToMessage(commom.ORDER_IS_CLEAR))
}
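// Note: the loop above issues one First() probe per order before each delete;
// a single NOT EXISTS (or LEFT JOIN) based delete would avoid the N+1 round
// trips on large order tables.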
func QueryHandlerSets(c yee.Context) (err error) {
u := new(commom.QueryOrder)
var s model.CoreQueryOrder
if err = c.Bind(u); err != nil {
c.Logger().Error(err.Error())
return c.JSON(http.StatusOK, err.Error())
}
found := !model.DB().Where("work_id=? AND query_per=?", u.WorkId, 2).First(&s).RecordNotFound()
switch u.Tp {
case "agreed":
if found {
model.DB().Model(model.CoreQueryOrder{}).Where("work_id =?", u.WorkId).Update(map[string]interface{}{"query_per": 1, "ex_date": time.Now().Format("2006-01-02 15:04")})
lib.MessagePush(u.WorkId, 8, "")
}
return c.JSON(http.StatusOK, commom.SuccessPayLoadToMessage(commom.ORDER_IS_AGREE))
case "reject":
if found {
model.DB().Model(model.CoreQueryOrder{}).Where("work_id =?", u.WorkId).Update(map[string]interface{}{"query_per": 0})
lib.MessagePush(u.WorkId, 9, "")
}
return c.JSON(http.StatusOK, commom.SuccessPayLoadToMessage(commom.ORDER_IS_REJECT))
case "stop":
model.DB().Model(model.CoreQueryOrder{}).Where("work_id =?", u.WorkId).Update(map[string]interface{}{"query_per": 3})
return c.JSON(http.StatusOK, commom.SuccessPayLoadToMessage(commom.ORDER_IS_ALL_END))
case "cancel":
model.DB().Model(model.CoreQueryOrder{}).Updates(&model.CoreQueryOrder{QueryPer: 3})
return c.JSON(http.StatusOK, commom.SuccessPayLoadToMessage(commom.ORDER_IS_ALL_CANCEL))
default:
return
}
}
func AuditOrRecordQueryOrderFetchApis(c yee.Context) (err error) {
switch c.Params("tp") {
case "list":
return FetchQueryOrder(c)
case "record":
return FetchQueryRecord(c)
case "profile":
return FetchQueryRecordProfile(c)
default:
return c.JSON(http.StatusOK, commom.ERR_REQ_FAKE)
}
}<|fim▁end|> | u := new(commom.PageInfo) |
<|file_name|>parser.copy.rs<|end_file_name|><|fim▁begin|>use scaly::containers::{Array, HashSet, Ref, String, Vector};
use scaly::io::Stream;
use scaly::memory::Region;
use scaly::Page;
use scalyc::errors::ParserError;
use scalyc::lexer::Lexer;
use scalyc::lexer::Position;
pub struct Parser {
lexer: Ref<Lexer>,
file_name: String,
_keywords: Ref<HashSet<String>>,
}
impl Parser {
pub fn new(_pr: &Region, _rp: *mut Page, file_name: String, stream: *mut Stream) -> Parser {
let _r = Region::create(_pr);
let keywords = HashSet::from_vector(
&_r,
_rp,
Ref::new(
_rp,
Vector::from_raw_array(
_rp,
&[
String::from_string_slice(_rp, "using"),
String::from_string_slice(_rp, "namespace"),
String::from_string_slice(_rp, "typedef"),
String::from_string_slice(_rp, "let"),
String::from_string_slice(_rp, "mutable"),
String::from_string_slice(_rp, "threadlocal"),
String::from_string_slice(_rp, "var"),
String::from_string_slice(_rp, "set"),
String::from_string_slice(_rp, "class"),
String::from_string_slice(_rp, "extends"),
String::from_string_slice(_rp, "initializer"),
String::from_string_slice(_rp, "allocator"),
String::from_string_slice(_rp, "method"),
String::from_string_slice(_rp, "function"),
String::from_string_slice(_rp, "operator"),
String::from_string_slice(_rp, "this"),
String::from_string_slice(_rp, "new"),
String::from_string_slice(_rp, "sizeof"),
String::from_string_slice(_rp, "catch"),
String::from_string_slice(_rp, "throws"),
String::from_string_slice(_rp, "as"),
String::from_string_slice(_rp, "is"),
String::from_string_slice(_rp, "if"),
String::from_string_slice(_rp, "else"),
String::from_string_slice(_rp, "switch"),
String::from_string_slice(_rp, "case"),
String::from_string_slice(_rp, "default"),
String::from_string_slice(_rp, "for"),
String::from_string_slice(_rp, "in"),
String::from_string_slice(_rp, "while"),
String::from_string_slice(_rp, "do"),
String::from_string_slice(_rp, "loop"),
String::from_string_slice(_rp, "break"),
String::from_string_slice(_rp, "continue"),
String::from_string_slice(_rp, "return"),
String::from_string_slice(_rp, "throw"),
String::from_string_slice(_rp, "intrinsic"),
String::from_string_slice(_rp, "define"),
],
),
),
);
Parser {
lexer: Lexer::new(&_r,_rp, stream),
file_name: file_name,
_keywords: keywords,
}
}
pub fn parse_file(
&mut self,
_pr: &Region,
_rp: *mut Page,
_ep: *mut Page,
) -> Result<Ref<FileSyntax>, Ref<ParserError>> {
let _r = Region::create(_pr);
let start: Position = self.lexer.get_previous_position();
// IntrinsicSyntax[] intrinsics = parseIntrinsicList();
// UsingSyntax[] usings = parseUsingList();
// DefineSyntax[] defines = parseDefineList();
// DeclarationSyntax[] declarations = parseDeclarationList();
let statements = self.parse_statement_list(&_r, _rp);
match statements {
Some(_) => {
if !self.is_at_end() {
let error_pos = self.lexer.get_previous_position();
return Result::Err(Ref::new(
_ep,
ParserError {
file_name: self.file_name,
line: error_pos.line,
column: error_pos.column,
},
));
}
}
None => (),
}
let end: Position = self.lexer.get_position();
let ret: Ref<FileSyntax> = Ref::new(
_rp,
FileSyntax {
start: start,
end: end,
},
);
// if (intrinsics != null)
// {
// foreach (IntrinsicSyntax item in intrinsics)
// item.parent = ret;
// }
// if (usings != null)
// {
// foreach (UsingSyntax item in usings)
// item.parent = ret;
// }
// if (defines != null)
// {
// foreach (DefineSyntax item in defines)
// item.parent = ret;
// }
// if (declarations != null)
// {
// foreach (DeclarationSyntax item in declarations)
// item.parent = ret;
// }
// if (statements != null)
// {
// foreach (StatementSyntax item in statements)
// item.parent = ret;
// }
Ok(ret)
}
pub fn parse_statement_list(
&mut self,
_pr: &Region,
_rp: *mut Page,
) -> Option<Ref<Vector<Ref<StatementSyntax>>>> {
let _r = Region::create(_pr);
let mut ret: Option<Ref<Array<Ref<StatementSyntax>>>> = Option::None;
loop {<|fim▁hole|> match ret {
None => ret = Some(Ref::new(_rp, Array::new())),
Some(_) => (),
};
ret.unwrap().add(node);
}
}
}
match ret {
Some(ret) => Some(Ref::new(_rp, Vector::from_array(_rp, ret))),
None => None,
}
}
pub fn parse_statement(
&mut self,
_pr: &Region,
_rp: *mut Page,
) -> Option<Ref<StatementSyntax>> {
let _r = Region::create(_pr);
let start: Position = self.lexer.get_previous_position();
let end: Position = self.lexer.get_position();
let ret: Ref<StatementSyntax> = Ref::new(
_rp,
StatementSyntax {
start: start,
end: end,
},
);
Some(ret)
}
fn is_at_end(&self) -> bool {
self.lexer.is_at_end()
}
fn _is_identifier(&self, id: String) -> bool {
if self._keywords.contains(id) {
false
} else {
true
}
}
}
#[derive(Copy, Clone)]
pub struct FileSyntax {
pub start: Position,
pub end: Position,
}
#[derive(Copy, Clone)]
pub struct StatementSyntax {
pub start: Position,
pub end: Position,
}<|fim▁end|> | let node = self.parse_statement(&_r, _rp);
match node {
None => break,
Some(node) => { |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/**
* Module dependencies.
*/
var express = require('express')
var MemoryStore = express.session.MemoryStore
var mongoStore = require('connect-mongo')(express)
var path = require('path')
var fs = require('fs')
var _ = require('underscore')
var mongoose = require('mongoose')
var passport = require('passport')
var http = require('http')
var socketio = require('socket.io')
var passportSocketIo = require('passport.socketio')
// Load configurations
var env = process.env.NODE_ENV || 'development'
var config = require('./config/config')[env]
// Bootstrap db connection
mongoose.connect(config.db)
// Bootstrap models
var modelsDir = path.join(__dirname, '/app/models')
fs.readdirSync(modelsDir).forEach(function (file) {
if (~file.indexOf('.js')) require(modelsDir + '/' + file)
})
// Bootstrap passport config
require('./config/passport')(passport, config)
<|fim▁hole|>// express settings
require('./config/express')(app, config, passport, store)
// Bootstrap routes
require('./config/routes')(app, passport)
var server = http.createServer(app)
var sio = socketio.listen(server)
var clients = {}
var socketsOfClients = {}
sio.configure(function () {
sio.set('authorization', passportSocketIo.authorize({
cookieParser: express.cookieParser, //or connect.cookieParser
key: 'express.sid', //the cookie where express (or connect) stores the session id.
secret: 'dirty', //the session secret to parse the cookie
store: store, //the session store that express uses
fail: function (data, accept) { // *optional* callbacks on success or fail
accept(null, false) // second parameter takes boolean on whether or not to allow handshake
},
success: function (data, accept) {
accept(null, true)
}
}))
})
// upon connection, start a periodic task that emits (every 1s) the current timestamp
sio.sockets.on('connection', function (socket) {
var username = socket.handshake.user.username;
clients[username] = socket.id
socketsOfClients[socket.id] = username
userNameAvailable(socket.id, username)
userJoined(username)
socket.on('data', function (data) {
socket.broadcast.emit('data', { 'drawing' : data })
})
socket.on('message', function (data) {
var now = new Date();
sio.sockets.emit('message', {
'source': socketsOfClients[socket.id],
'time': now.getHours() + ':' + now.getMinutes() + ':' + now.getSeconds(),
'message': data.message
})
})
socket.on('disconnect', function() {
var username = socketsOfClients[socket.id]
delete socketsOfClients[socket.id]
delete clients[username];
// relay this message to all the clients
userLeft(username)
})
})
// Start the application by listening on port <>
var port = process.env.PORT || 3000
server.listen(port, function(){
console.log('Express server listening on port ' + port)
});
function userNameAvailable(socketId, username) {
setTimeout(function(){
sio.sockets.sockets[socketId].emit('user welcome', {
"currentUsers": JSON.stringify(Object.keys(clients))
});
}, 500);
}
function userJoined(username) {
Object.keys(socketsOfClients).forEach(function(socketId) {
sio.sockets.sockets[socketId].emit('user joined', {
"username": username
});
});
}
function userLeft(username) {
sio.sockets.emit('user left', {
"username": username
});
}
// expose app
exports = module.exports = app;<|fim▁end|> | var app = express()
var store = new mongoStore({ url : config.db, collection : 'sessions' });
//var store = new MemoryStore() |
<|file_name|>storage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright(C) 2011-2016 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import plyvel
import ast
import hashlib
import os
import sys
import threading
from processor import print_log, logger
from utils import bc_address_to_hash_160, hash_160_to_pubkey_address, Hash, \
bytes8_to_int, bytes4_to_int, int_to_bytes8, \
int_to_hex8, int_to_bytes4, int_to_hex4
"""
Patricia tree for hashing unspents
"""
# increase this when database needs to be updated
global GENESIS_HASH
GENESIS_HASH = '000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
DB_VERSION = 3
KEYLENGTH = 56 # 20 + 32 + 4
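# Illustrative breakdown (inferred from add_to_history/listunspent below):
# a full leaf key is hash160(address) (20 bytes) + txid (32 bytes) +
# output index (4 bytes), e.g. for output n of txid_hex at addr:
#   bc_address_to_hash_160(addr) + (txid_hex + int_to_hex4(n)).decode('hex')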
class Node(object):
def __init__(self, s):
self.k = int(s[0:32].encode('hex'), 16)
self.s = s[32:]
if self.k==0 and self.s:
print "init error", len(self.s), "0x%0.64X" % self.k
raise BaseException("z")
def serialized(self):
k = "0x%0.64X" % self.k
k = k[2:].decode('hex')
assert len(k) == 32
return k + self.s
def has(self, c):
return (self.k & (1<<(ord(c)))) != 0
def is_singleton(self, key):
assert self.s != ''
return len(self.s) == 40
def get_singleton(self):
for i in xrange(256):
if self.k == (1<<i):
return chr(i)
raise BaseException("get_singleton")
def indexof(self, c):
assert self.k != 0 or self.s == ''
x = 0
for i in xrange(ord(c)):
if (self.k & (1<<i)) != 0:
x += 40
return x
def get(self, c):
x = self.indexof(c)
ss = self.s[x:x+40]
_hash = ss[0:32]
value = bytes8_to_int(ss[32:40])
return _hash, value
def set(self, c, h, value):
if h is None:
h = chr(0)*32
vv = int_to_bytes8(value)
item = h + vv
assert len(item) == 40
if self.has(c):
self.remove(c)
x = self.indexof(c)
self.s = self.s[0:x] + item + self.s[x:]
self.k |= (1<<ord(c))
assert self.k != 0
def remove(self, c):
x = self.indexof(c)
self.k &= ~(1<<ord(c))
self.s = self.s[0:x] + self.s[x+40:]
def get_hash(self, x, parent):
if x:
assert self.k != 0
skip_string = x[len(parent)+1:] if x != '' else ''
x = 0
v = 0
hh = ''
for i in xrange(256):
if (self.k&(1<<i)) != 0:
ss = self.s[x:x+40]
hh += ss[0:32]
v += bytes8_to_int(ss[32:40])
x += 40
try:
_hash = Hash(skip_string + hh)
except:
_hash = None
if x:
assert self.k != 0
return _hash, v
@classmethod
def from_dict(klass, d):
k = 0
s = ''
for i in xrange(256):
if chr(i) in d:
k += 1<<i
h, value = d[chr(i)]
if h is None: h = chr(0)*32
vv = int_to_bytes8(value)
item = h + vv
assert len(item) == 40
s += item
k = "0x%0.64X" % k # 32 bytes
k = k[2:].decode('hex')
assert len(k) == 32
out = k + s
return Node(out)
class DB(object):
def __init__(self, path, name, cache_size):
self.db = plyvel.DB(os.path.join(path, name), create_if_missing=True, compression=None, lru_cache_size=cache_size)
self.batch = self.db.write_batch()
self.cache = {}
self.lock = threading.Lock()
def put(self, key, s):
self.batch.put(key, s)
self.cache[key] = s
def get(self, key):
s = self.cache.get(key)
if s == 'deleted':
return None
if s is None:
with self.lock:
s = self.db.get(key)
return s
def delete(self, key):
self.batch.delete(key)
self.cache[key] = 'deleted'
def close(self):
self.db.close()
def write(self):
with self.lock:
self.batch.write()
self.batch.clear()
self.cache.clear()
def get_next(self, key):
with self.lock:
i = self.db.iterator(start=key)
k, _ = i.next()
return k
class Storage(object):
def __init__(self, config, shared, test_reorgs):
self.shared = shared
self.hash_list = {}
self.parents = {}
self.skip_batch = {}
self.test_reorgs = test_reorgs
# init path
self.dbpath = config.get('leveldb', 'path')
if not os.path.exists(self.dbpath):
os.mkdir(self.dbpath)
try:
self.db_utxo = DB(self.dbpath, 'utxo', config.getint('leveldb', 'utxo_cache'))
self.db_hist = DB(self.dbpath, 'hist', config.getint('leveldb', 'hist_cache'))
self.db_addr = DB(self.dbpath, 'addr', config.getint('leveldb', 'addr_cache'))
self.db_undo = DB(self.dbpath, 'undo', None)
except:
logger.error('db init', exc_info=True)
self.shared.stop()
try:
self.last_hash, self.height, db_version = ast.literal_eval(self.db_undo.get('height'))
except:
print_log('Initializing database')
self.height = 0
self.last_hash = GENESIS_HASH
self.pruning_limit = config.getint('leveldb', 'pruning_limit')
db_version = DB_VERSION
self.put_node('', Node.from_dict({}))
# check version
if db_version != DB_VERSION:
print_log("Your database '%s' is deprecated. Please create a new database"%self.dbpath)
self.shared.stop()
return
# pruning limit
try:
self.pruning_limit = ast.literal_eval(self.db_undo.get('limit'))
except:
self.pruning_limit = config.getint('leveldb', 'pruning_limit')
            self.db_undo.put('limit', repr(self.pruning_limit))
# compute root hash
root_node = self.get_node('')
self.root_hash, coins = root_node.get_hash('', None)
# print stuff
print_log("Database version %d."%db_version)
print_log("Pruning limit for spent outputs is %d."%self.pruning_limit)
print_log("Blockchain height", self.height)
print_log("UTXO tree root hash:", self.root_hash.encode('hex'))
print_log("Coins in database:", coins)
# convert between bitcoin addresses and 20 bytes keys used for storage.
@staticmethod
def address_to_key(addr):
return bc_address_to_hash_160(addr)
def get_skip(self, key):
o = self.skip_batch.get(key)
if o is not None:
return o
k = self.db_utxo.get_next(key)
assert k.startswith(key)
return k[len(key):]
def set_skip(self, key, skip):
self.skip_batch[key] = skip
def get_proof(self, addr):
key = self.address_to_key(addr)<|fim▁hole|> p.append(k)
out = []
for item in p:
v = self.db_utxo.get(item)
out.append((item.encode('hex'), v.encode('hex')))
return out
def get_balance(self, addr):
key = self.address_to_key(addr)
k = self.db_utxo.get_next(key)
if not k.startswith(key):
return 0
p = self.get_parent(k)
d = self.get_node(p)
letter = k[len(p)]
return d.get(letter)[1]
def listunspent(self, addr):
key = self.address_to_key(addr)
if key is None:
raise BaseException('Invalid Bitcoin address', addr)
out = []
with self.db_utxo.lock:
for k, v in self.db_utxo.db.iterator(start=key):
if not k.startswith(key):
break
if len(k) == KEYLENGTH:
txid = k[20:52].encode('hex')
txpos = bytes4_to_int(k[52:56])
h = bytes4_to_int(v[8:12])
v = bytes8_to_int(v[0:8])
out.append({'tx_hash': txid, 'tx_pos':txpos, 'height': h, 'value':v})
if len(out) == 1000:
print_log('max utxo reached', addr)
break
out.sort(key=lambda x:x['height'])
return out
def get_history(self, addr):
out = []
o = self.listunspent(addr)
for item in o:
out.append((item['height'], item['tx_hash']))
h = self.db_hist.get(addr)
while h:
item = h[0:80]
h = h[80:]
txi = item[0:32].encode('hex')
hi = bytes4_to_int(item[36:40])
txo = item[40:72].encode('hex')
ho = bytes4_to_int(item[76:80])
out.append((hi, txi))
out.append((ho, txo))
# uniqueness
out = set(out)
# sort by height then tx_hash
out = sorted(out)
return map(lambda x: {'height':x[0], 'tx_hash':x[1]}, out)
def get_address(self, txi):
return self.db_addr.get(txi)
def get_undo_info(self, height):
s = self.db_undo.get("undo_info_%d" % (height % 100))
if s is None:
print_log("no undo info for ", height)
return eval(s)
def write_undo_info(self, height, bitcoind_height, undo_info):
if height > bitcoind_height - 100 or self.test_reorgs:
self.db_undo.put("undo_info_%d" % (height % 100), repr(undo_info))
@staticmethod
def common_prefix(word1, word2):
max_len = min(len(word1),len(word2))
for i in xrange(max_len):
if word2[i] != word1[i]:
index = i
break
else:
index = max_len
return word1[0:index]
def put_node(self, key, node):
self.db_utxo.put(key, node.serialized())
def get_node(self, key):
s = self.db_utxo.get(key)
if s is None:
return
return Node(s)
def add_key(self, target, value, height):
assert len(target) == KEYLENGTH
path = self.get_path(target, new=True)
if path is True:
return
#print "add key: target", target.encode('hex'), "path", map(lambda x: x.encode('hex'), path)
parent = path[-1]
parent_node = self.get_node(parent)
n = len(parent)
c = target[n]
if parent_node.has(c):
h, v = parent_node.get(c)
skip = self.get_skip(parent + c)
child = parent + c + skip
assert not target.startswith(child)
prefix = self.common_prefix(child, target)
index = len(prefix)
if len(child) == KEYLENGTH:
# if it's a leaf, get hash and value of new_key from parent
d = Node.from_dict({
target[index]: (None, 0),
child[index]: (h, v)
})
else:
# if it is not a leaf, update its hash because skip_string changed
child_node = self.get_node(child)
h, v = child_node.get_hash(child, prefix)
d = Node.from_dict({
target[index]: (None, 0),
child[index]: (h, v)
})
self.set_skip(prefix + target[index], target[index+1:])
self.set_skip(prefix + child[index], child[index+1:])
self.put_node(prefix, d)
path.append(prefix)
self.parents[child] = prefix
# update parent skip
new_skip = prefix[n+1:]
self.set_skip(parent+c, new_skip)
parent_node.set(c, None, 0)
self.put_node(parent, parent_node)
else:
# add new letter to parent
skip = target[n+1:]
self.set_skip(parent+c, skip)
parent_node.set(c, None, 0)
self.put_node(parent, parent_node)
# write the new leaf
s = (int_to_hex8(value) + int_to_hex4(height)).decode('hex')
self.db_utxo.put(target, s)
# the hash of a leaf is the txid
_hash = target[20:52]
self.update_node_hash(target, path, _hash, value)
def update_node_hash(self, node, path, _hash, value):
c = node
for x in path[::-1]:
self.parents[c] = x
c = x
self.hash_list[node] = (_hash, value)
def update_hashes(self):
nodes = {} # nodes to write
for i in xrange(KEYLENGTH, -1, -1):
for node in self.hash_list.keys():
if len(node) != i:
continue
node_hash, node_value = self.hash_list.pop(node)
parent = self.parents[node] if node!='' else ''
if i != KEYLENGTH and node_hash is None:
n = self.get_node(node)
node_hash, node_value = n.get_hash(node, parent)
assert node_hash is not None
if node == '':
self.root_hash = node_hash
self.root_value = node_value
assert self.root_hash is not None
break
# read parent
d = nodes.get(parent)
if d is None:
d = self.get_node(parent)
assert d is not None
# write value into parent
letter = node[len(parent)]
d.set(letter, node_hash, node_value)
nodes[parent] = d
# iterate
grandparent = self.parents[parent] if parent != '' else None
parent_hash, parent_value = d.get_hash(parent, grandparent)
if parent_hash is not None:
self.hash_list[parent] = (parent_hash, parent_value)
for k, v in nodes.iteritems():
self.put_node(k, v)
# cleanup
assert self.hash_list == {}
self.parents = {}
self.skip_batch = {}
def get_path(self, target, new=False):
x = self.db_utxo.get(target)
if not new and x is None:
raise BaseException('key not in tree', target.encode('hex'))
if new and x is not None:
# raise BaseException('key already in tree', target.encode('hex'))
# occurs at block 91880 (duplicate txid)
print_log('key already in tree', target.encode('hex'))
return True
remaining = target
key = ''
path = []
while key != target:
node = self.get_node(key)
if node is None:
break
#raise # should never happen
path.append(key)
c = remaining[0]
if not node.has(c):
break
skip = self.get_skip(key + c)
key = key + c + skip
if not target.startswith(key):
break
remaining = target[len(key):]
return path
def delete_key(self, leaf):
path = self.get_path(leaf)
#print "delete key", leaf.encode('hex'), map(lambda x: x.encode('hex'), path)
s = self.db_utxo.get(leaf)
self.db_utxo.delete(leaf)
if leaf in self.hash_list:
del self.hash_list[leaf]
parent = path[-1]
letter = leaf[len(parent)]
parent_node = self.get_node(parent)
parent_node.remove(letter)
# remove key if it has a single child
if parent_node.is_singleton(parent):
#print "deleting parent", parent.encode('hex')
self.db_utxo.delete(parent)
if parent in self.hash_list:
del self.hash_list[parent]
l = parent_node.get_singleton()
_hash, value = parent_node.get(l)
skip = self.get_skip(parent + l)
otherleaf = parent + l + skip
# update skip value in grand-parent
gp = path[-2]
gp_items = self.get_node(gp)
letter = otherleaf[len(gp)]
new_skip = otherleaf[len(gp)+1:]
gp_items.set(letter, None, 0)
self.set_skip(gp+ letter, new_skip)
#print "gp new_skip", gp.encode('hex'), new_skip.encode('hex')
self.put_node(gp, gp_items)
# note: k is not necessarily a leaf
if len(otherleaf) == KEYLENGTH:
ss = self.db_utxo.get(otherleaf)
_hash, value = otherleaf[20:52], bytes8_to_int(ss[0:8])
else:
_hash, value = None, None
self.update_node_hash(otherleaf, path[:-1], _hash, value)
else:
self.put_node(parent, parent_node)
_hash, value = None, None
self.update_node_hash(parent, path[:-1], _hash, value)
return s
def get_parent(self, x):
p = self.get_path(x)
return p[-1]
def get_root_hash(self):
return self.root_hash if self.root_hash else ''
def batch_write(self):
for db in [self.db_utxo, self.db_addr, self.db_hist, self.db_undo]:
db.write()
def close(self):
for db in [self.db_utxo, self.db_addr, self.db_hist, self.db_undo]:
db.close()
def save_height(self, block_hash, block_height):
self.db_undo.put('height', repr((block_hash, block_height, DB_VERSION)))
def add_to_history(self, addr, tx_hash, tx_pos, value, tx_height):
key = self.address_to_key(addr)
txo = (tx_hash + int_to_hex4(tx_pos)).decode('hex')
# write the new history
self.add_key(key + txo, value, tx_height)
# backlink
self.db_addr.put(txo, addr)
def revert_add_to_history(self, addr, tx_hash, tx_pos, value, tx_height):
key = self.address_to_key(addr)
txo = (tx_hash + int_to_hex4(tx_pos)).decode('hex')
# delete
self.delete_key(key + txo)
# backlink
self.db_addr.delete(txo)
def get_utxo_value(self, addr, txi):
key = self.address_to_key(addr)
leaf = key + txi
s = self.db_utxo.get(leaf)
value = bytes8_to_int(s[0:8])
return value
def set_spent(self, addr, txi, txid, index, height, undo):
key = self.address_to_key(addr)
leaf = key + txi
s = self.delete_key(leaf)
value = bytes8_to_int(s[0:8])
in_height = bytes4_to_int(s[8:12])
undo[leaf] = value, in_height
# delete backlink txi-> addr
self.db_addr.delete(txi)
# add to history
s = self.db_hist.get(addr)
if s is None: s = ''
txo = (txid + int_to_hex4(index) + int_to_hex4(height)).decode('hex')
s += txi + int_to_bytes4(in_height) + txo
s = s[ -80*self.pruning_limit:]
self.db_hist.put(addr, s)
def revert_set_spent(self, addr, txi, undo):
key = self.address_to_key(addr)
leaf = key + txi
# restore backlink
self.db_addr.put(txi, addr)
v, height = undo.pop(leaf)
self.add_key(leaf, v, height)
# revert add to history
s = self.db_hist.get(addr)
# s might be empty if pruning limit was reached
if not s:
return
assert s[-80:-44] == txi
s = s[:-80]
self.db_hist.put(addr, s)
def import_transaction(self, txid, tx, block_height, touched_addr):
undo = { 'prev_addr':[] } # contains the list of pruned items for each address in the tx; also, 'prev_addr' is a list of prev addresses
prev_addr = []
for i, x in enumerate(tx.get('inputs')):
txi = (x.get('prevout_hash') + int_to_hex4(x.get('prevout_n'))).decode('hex')
addr = self.get_address(txi)
if addr is not None:
self.set_spent(addr, txi, txid, i, block_height, undo)
touched_addr.add(addr)
prev_addr.append(addr)
undo['prev_addr'] = prev_addr
# here I add only the outputs to history; maybe I want to add inputs too (that's in the other loop)
for x in tx.get('outputs'):
addr = x.get('address')
if addr is None: continue
self.add_to_history(addr, txid, x.get('index'), x.get('value'), block_height)
touched_addr.add(addr)
return undo
def revert_transaction(self, txid, tx, block_height, touched_addr, undo):
#print_log("revert tx", txid)
for x in reversed(tx.get('outputs')):
addr = x.get('address')
if addr is None: continue
self.revert_add_to_history(addr, txid, x.get('index'), x.get('value'), block_height)
touched_addr.add(addr)
prev_addr = undo.pop('prev_addr')
for i, x in reversed(list(enumerate(tx.get('inputs')))):
addr = prev_addr[i]
if addr is not None:
txi = (x.get('prevout_hash') + int_to_hex4(x.get('prevout_n'))).decode('hex')
self.revert_set_spent(addr, txi, undo)
touched_addr.add(addr)
assert undo == {}<|fim▁end|> | k = self.db_utxo.get_next(key)
p = self.get_path(k) |
<|file_name|>serve_js.js<|end_file_name|><|fim▁begin|>/**
* @fileoverview client模式,serve时js文件处理
* @author liweitao
*/
'use strict';
module.exports = function ($, appConf, moduleConf, args) {
return function (mod, modulePath, appPath) {
return new Promise(function (resolve, reject) {
var vfs = require('vinyl-fs');
var path = require('path');
var athenaMate = require('../athena_mate');
var useBabel = moduleConf.support.useBabel || { enable: false };
var enableBabel = useBabel.enable;
var jsxPragma = useBabel.jsxPragma || 'Nerv.createElement'<|fim▁hole|> cwd: appPath,
pageFiles: args.pageFiles,
module: moduleConf.module,
map: path.join('dist', 'map.json'),
dest: 'dist',
end: function () {
vfs.src(path.join(modulePath, 'dist', '_static', 'js', '**', '*.js'))
.pipe($.if(enableBabel, athenaMate.babel({
config: {
presets: [
require('babel-preset-es2015'),
require('babel-preset-stage-0')
],
plugins: [
require('babel-plugin-transform-es3-member-expression-literals'),
require('babel-plugin-transform-es3-property-literals'),
[require('babel-plugin-transform-react-jsx'), {
pragma: jsxPragma
}]
]
},
fileTest: useBabel.test || /\.js/,
exclude: useBabel.exclude || []
})))
.pipe(athenaMate.replace({
cwd: appPath,
module: moduleConf.module,
serve: true
}))
.pipe(vfs.dest(path.join(appPath, '.temp', appConf.app, moduleConf.module, 'js')))
.on('end', function () {
resolve();
})
.on('error', function (err) {
reject(err);
});
}
});
});
};
};<|fim▁end|> | athenaMate.concat({ |
<|file_name|>0356_add_webautn_auth_type.py<|end_file_name|><|fim▁begin|>"""
<|fim▁hole|>"""
from alembic import op
revision = '0356_add_webautn_auth_type'
down_revision = '0355_add_webauthn_table'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("INSERT INTO auth_type VALUES ('webauthn_auth')")
op.drop_constraint('ck_users_mobile_or_email_auth', 'users', type_=None, schema=None)
op.execute("""
ALTER TABLE users ADD CONSTRAINT "ck_user_has_mobile_or_other_auth"
CHECK (auth_type in ('email_auth', 'webauthn_auth') or mobile_number is not null)
NOT VALID
""")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("UPDATE users SET auth_type = 'sms_auth' WHERE auth_type = 'webauthn_auth'")
op.execute("UPDATE invited_users SET auth_type = 'sms_auth' WHERE auth_type = 'webauthn_auth'")
op.drop_constraint('ck_user_has_mobile_or_other_auth', 'users', type_=None, schema=None)
op.execute("""
ALTER TABLE users ADD CONSTRAINT "ck_users_mobile_or_email_auth"
CHECK (auth_type = 'email_auth' or mobile_number is not null)
NOT VALID
""")
op.execute("DELETE FROM auth_type WHERE name = 'webauthn_auth'")
# ### end Alembic commands ###<|fim▁end|> | Revision ID: 0356_add_webautn_auth_type
Revises: 0355_add_webauthn_table
Create Date: 2021-05-13 12:42:45.190269
|
<|file_name|>atari5200.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os, struct, array
from fcntl import ioctl
SDL_JOY_0_SELECT = 8
SDL_JOY_0_START = 9
SDL_JOY_0_TRIGGER1 = 0
SDL_JOY_0_TRIGGER2 = 1
SDL_JOY_0_ASTERISK = 2
SDL_JOY_0_HASH = 3
SDL_JOY_0_SECOND_AXIS = 2
# Iterate over the joystick devices.
# print('Available devices:')
devices = sorted(os.listdir('/dev/input'))
joysticks = []
for fn in devices:
if fn.startswith('js'):
# print(' /dev/input/%s' % fn)
joysticks.append("/dev/input/%s" % fn)
joysticks = sorted(joysticks)
print "First joystick is %s" % joysticks[0]
# Open the joystick device.
fn = joysticks[0]
# print('Opening %s...' % fn)
jsdev = open(fn, 'rb')
buf = array.array('c', ['\0'] * 64)
ioctl(jsdev, 0x80006a13 + (0x10000 * len(buf)), buf) # JSIOCGNAME(len)
js_name = ("%s" % buf.tostring()).partition(b'\0')[0]
# print('Device name: %s' % js_name)
jsdev.close()
js_cfg = "/opt/retropie/configs/all/retroarch-joypads/%s.cfg" % js_name.replace(" ", "")
print "Getting Retroarch configuration for %s" % js_cfg
# print(js_cfg)
f = open("%s" % js_cfg, "r")
content = f.read()
lines = content.split("\n")
for line in lines:
if line:
p = line.replace(" ", "").split("=")
# print "Processing %s" % p[0]
if p[0] == "input_select_btn":
SDL_JOY_0_SELECT = p[1].replace('"', '')
elif p[0] == "input_start_btn":
SDL_JOY_0_START = p[1].replace('"', '')
elif p[0] == "input_a_btn":
SDL_JOY_0_TRIGGER1 = p[1].replace('"', '')
elif p[0] == "input_b_btn":
SDL_JOY_0_TRIGGER2 = p[1].replace('"', '')
elif p[0] == "input_x_btn":
SDL_JOY_0_ASTERISK = p[1].replace('"', '')
elif p[0] == "input_y_btn":
SDL_JOY_0_HASH = p[1].replace('"', '')
elif p[0] == "input_r_x_minus_axis":
SDL_JOY_0_SECOND_AXIS = p[1].replace('"', '').replace("-", "")
f.close()
atari800_cfg = "/home/pi/.atari800.cfg"
print "Updating configuration in %s with" % atari800_cfg
print "SDL_JOY_0_SELECT=%s" % SDL_JOY_0_SELECT
print "SDL_JOY_0_START=%s" % SDL_JOY_0_START
print "SDL_JOY_0_TRIGGER1=%s" % SDL_JOY_0_TRIGGER1
print "SDL_JOY_0_TRIGGER2=%s" % SDL_JOY_0_TRIGGER2
print "SDL_JOY_0_ASTERISK=%s" % SDL_JOY_0_ASTERISK
print "SDL_JOY_0_HASH=%s" % SDL_JOY_0_HASH
print "SDL_JOY_0_SECOND_AXIS=%s" % SDL_JOY_0_SECOND_AXIS
f = open("%s" % atari800_cfg, "r")
content = f.read()
f.close()
new_data = ""
lines = content.split("\n")
for line in lines:
if line.startswith("SDL_JOY_0_SELECT"):
line = "SDL_JOY_0_SELECT=%s" % SDL_JOY_0_SELECT
elif line.startswith("SDL_JOY_0_START"):
line = "SDL_JOY_0_START=%s" % SDL_JOY_0_START
elif line.startswith("SDL_JOY_0_TRIGGER1"):
line = "SDL_JOY_0_TRIGGER1=%s" % SDL_JOY_0_TRIGGER1
elif line.startswith("SDL_JOY_0_TRIGGER2"):
line = "SDL_JOY_0_TRIGGER2=%s" % SDL_JOY_0_TRIGGER2
elif line.startswith("SDL_JOY_0_ASTERISK"):
line = "SDL_JOY_0_ASTERISK=%s" % SDL_JOY_0_ASTERISK<|fim▁hole|> line = "SDL_JOY_0_SECOND_AXIS=%s" % SDL_JOY_0_SECOND_AXIS
new_data += line + "\n"
# print new_data
f = open("%s" % atari800_cfg, 'w')
f.write(new_data)
f.close()<|fim▁end|> | elif line.startswith("SDL_JOY_0_HASH"):
line = "SDL_JOY_0_HASH=%s" % SDL_JOY_0_HASH
elif line.startswith("SDL_JOY_0_SECOND_AXIS"): |
<|file_name|>ixbx.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
lantz.drivers.legacy.olympus.ixbx
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When talking about the z-axis of a microscope, use "near" and "far" instead of "up" and "down." "Nearer" always means the objective ends closer to the sample; "farther" means the objective ends farther away. On an inverted microscope, "near" is up and "far" is down; on an upright microscope it is exactly the reverse. Better to use "near" and "far" to avoid confusion.
You can always get the current state of the system by sending the command you would use to change that state followed by ?. For example, to get the current objective position, send 1OB?. The microscope returns 1OB 3, say, if the current objective is position 3 on the nosepiece.
The microscope only understands positive integers, no negative numbers, no floating point. All distances are sent as positive integers measured in hundredths of a micron. All voltages are sent as tenths of a volt. Where negative numbers are needed, such as to specify relative motion, an extra argument is used to tell the microscope the sign of the number.
Sources::
- Olympus IX-81 Chassis Commands `link <http://madhadron.com/?p=89>`_
- Labview IX BX Series Driver `link <http://sine.ni.com/apps/utf8/niid_web_display.download_page?p_id_guid=0472CB8CEE4473B8E0440003BA7CCD71>`_
- Lantz reverse engineering
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from lantz import Feat, Action, Q_
from lantz.errors import InstrumentError
from lantz.drivers.legacy.serial import SerialDriver
# Physical units used by the IX/BX microscopes
DECIVOLT = Q_(0.1, 'V')
ZSTEP = Q_(0.01, 'micrometer')
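# Worked example (editorial, not part of the original driver): since the
# protocol only speaks positive integers, a 2.5 um focus move travels as the
# integer 250 (hundredths of a micron):
#
#   Q_(2.5, 'micrometer') / ZSTEP    # -> 250, dimensionless
#
# and a 5.0 V lamp setting would be transmitted as 50 (tenths of a volt).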
# Boolean mappings used by the IX/BX microscopes
ON_OFF = {True: 'ON', False: 'OFF'}
IN_OUT = {True: 'IN', False: 'OUT'}
CLOSED_OPEN = {True: 'IN', False: 'OUT'}
ONE_ZERO = {True: '1', False: '0'}
ONE_TWO = {True: '1', False: '2'}
FH_FRM = {True: 'FH', False: 'FRM'}
EPI_DIA = {True: 'EPI', False: 'DIA'}
INTSTR = (int, str)
def ofeat(command, doc, **kwargs):
"""Build Feat<|fim▁hole|>
:param command: command root (without ?)
:param doc: docstring to be applied to the feature
"""
def _get(self):
response = self.query(command + '?')
return response
def _set(self, value):
self.query('{} {}'.format(command, value))
return Feat(_get, _set, doc=doc, **kwargs)
class IXBX(SerialDriver):
""" IX or BX Olympus microscope body.
"""
RECV_TERMINATION = '\r\n'
SEND_TERMINATION = '\r\n'
def __init__(self, port=1, baudrate=19200, bytesize=8, parity='Even',
stopbits=1, flow=0, timeout=None, write_timeout=None):
super().__init__(port, timeout=timeout, write_timeout=write_timeout,
baudrate=baudrate, bytesize=bytesize, parity=parity,
stopbits=stopbits, flow=flow)
self.send('1LOG IN\n')
self.send('2LOG IN')
def query(self, command, *, send_args=(None, None), recv_args=(None, None)):
"""Query the instrument and parse the response.
:raises: InstrumentError
"""
        response = super().query(command, send_args=send_args, recv_args=recv_args)
command = command.strip()[0]
if response in ('1x', '2x'):
raise InstrumentError("Unknown command: '{}'".format(command))
if not response.startswith(command):
raise InstrumentError("Unknown response: '{}'".format(response))
        if response in ('X', 'x'):
            raise InstrumentError('Unable to set')
        elif response != '+':
raise InstrumentError("Unknown response: '{}'".format(response))
return response
@Feat(read_once=True)
def idn(self):
"""Microscope identification
"""
return parse_response(self.query('1UNIT?'))
fluo_shutter = ofeat('1LED',
'External shutter for the fluorescent light source',
values=ONE_ZERO)
lamp_epi_enabled = ofeat('1LMPSEL',
'Illumination source lamp.',
values=EPI_DIA)
lamp_enabled = ofeat('1LMPSW',
'Turn the currently selected lamp onf and off',
values=ON_OFF)
lamp_intensity = ofeat('1LMP',
'Transmitted light intensity',
procs=(INTSTR, ))
def lamp_status(self):
#LMPSTS OK, X
pass
objective = ofeat('1OB',
'Objective nosepiece position',
procs=(INTSTR, ))
body_locked = ofeat('1LOG',
'Turn the currently selected lamp on and off',
values=ON_OFF)
focus_locked = ofeat('2LOG',
'Turn the currently selected lamp on and off',
values=ON_OFF)
@Feat(units=(ZSTEP, ZSTEP))
def soft_limits(self):
near = self.query('2NEARLMT?')
far = self.query('2FARLMT?')
return near, far
@soft_limits.setter
def soft_limits(self, near, far):
self.query('2NEARLMT {:d}'.format(near))
self.query('2FARLMT {:d}'.format(far))
move_to_start_enabled = ofeat('INITRET',
'Sets / cancels returning operation to the start '
'position after initializing the origin.',
values=ON_OFF)
jog_enabled = ofeat('JOG', 'Jog enabled', values=ON_OFF)
jog_sensitivity = ofeat('JOGSNS',' Jog sensitivity', procs=(INTSTR, ))
jog_dial = ofeat('JOGSEL', 'Jog selection (Handle/BLA) ???', values=FH_FRM)
jog_limit_enabled = ofeat('joglmt', 'Jog limit enabled', values=ON_OFF)
@Feat()
def movement_status(self):
return self.query('ZDRV?')
@Action(units=ZSTEP)
def move_relative(self, distance):
if distance == 0:
return
elif distance < 0:
distance = -distance
direction = 'N'
else:
direction = 'F'
        self.query('2MOV {:s} {:d}'.format(direction, distance))
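    # Example (illustrative, assumes @Action converts to ZSTEP units):
    # move_relative(Q_(-1, 'micrometer')) becomes 100 steps with an 'N'
    # (near) flag, so the command sent is "2MOV N 100" -- the protocol has
    # no negative numbers, hence the separate sign argument.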
@Feat(units=ZSTEP)
def z(self):
"""Position of the objective.
"""
# OPTIMAL?? start accel, speed tenth of microns/s, end accel
return int(self.query('2POS'))
@z.setter
def z(self, value):
# OPTIMAL?? start accel, speed tenth of microns/s, end accel
self.query('2MOV D {:d}'.format(value))
def stop(self):
"""Stop any currently executing motion
"""
# Stop any currently executing motion. Always responds with 2STOP +.
# If there is a 2MOV command in progress,
# it also aborts and returns an error condition with 2MOV !,E02133.
self.query('2STOP')
def init_origin(self):
"""Init origin
"""
#INITORG
pass
class IX2(IXBX):
""" Olympus IX2 Body
"""
bottom_port_closed = ofeat('1BPORT', 'Bottom port', values=CLOSED_OPEN)
shutter1_closed = ofeat('SHUT1', 'Shutter', values=IN_OUT)
shutter2_closed = ofeat('SHUT2', 'Shutter', values=IN_OUT)
filter_wheel = ofeat('FW', 'Filter wheel position', procs=(INTSTR, ))
condensor = ofeat('CD', 'Condensor position', procs=(INTSTR, ))
mirror_unit = ofeat('MU', 'Mirror unit position', procs=(INTSTR, ))
camera_port_enabled= ofeat('PRISM', 'Prism position', values=ONE_TWO)
class BX2A(IXBX):
""" Olympus BX2A Body
"""
shutter_closed = ofeat('SHUTTER', 'Shutter RFAA', values=IN_OUT)
aperture_stop_diameter = ofeat('EAS', 'Aperture stop diameter (EPI AS RLAA)', procs=(INTSTR, ))
aperture_stop_diameter = ofeat('DAS', 'Aperture stop diameter (DIA AS UCD)', procs=(INTSTR, ))
condenser_top_lens_enabled = ofeat('CDTOP', 'Condenser top lens (UCD)', values=IN_OUT)
turret = ofeat('TURRET', 'Turret position (UCD)', procs=(INTSTR, ))
cube = ofeat('CUBE', 'Cube position (RFAA/RLAA)', procs=(INTSTR, ))
configure_filterwheel = ofeat('FW', 'Configure filterwheel', procs=(INTSTR, ))<|fim▁end|> | |
<|file_name|>login.rs<|end_file_name|><|fim▁begin|>use command_prelude::*;
use std::io::{self, BufRead};
use cargo::core::{Source, SourceId};
use cargo::sources::RegistrySource;
use cargo::util::{CargoError, CargoResultExt};
use cargo::ops;
pub fn cli() -> App {
subcommand("login")
.about(
"Save an api token from the registry locally. \
If token is not specified, it will be read from stdin.",
)
.arg(Arg::with_name("token"))
.arg(opt("host", "Host to set the token for").value_name("HOST"))
.arg(opt("registry", "Registry to use").value_name("REGISTRY"))
}
pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult {<|fim▁hole|> let token = match args.value_of("token") {
Some(token) => token.to_string(),
None => {
let host = match registry {
Some(ref _registry) => {
return Err(format_err!(
"token must be provided when \
--registry is provided."
).into());
}
None => {
let src = SourceId::crates_io(config)?;
let mut src = RegistrySource::remote(&src, config);
src.update()?;
let config = src.config()?.unwrap();
args.value_of("host")
.map(|s| s.to_string())
.unwrap_or(config.api.unwrap())
}
};
println!("please visit {}me and paste the API Token below", host);
let mut line = String::new();
let input = io::stdin();
input
.lock()
.read_line(&mut line)
.chain_err(|| "failed to read stdin")
.map_err(CargoError::from)?;
line.trim().to_string()
}
};
ops::registry_login(config, token, registry)?;
Ok(())
}<|fim▁end|> | let registry = args.registry(config)?;
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
const signup = require('./signup');
const handler = require('feathers-errors/handler');
const notFound = require('./not-found-handler');
const logger = require('./logger');
module.exports = function() {
// Add your custom middleware here. Remember, that
// just like Express the order matters, so error
// handling middleware should go last.
const app = this;
app.post('/signup', signup(app));
app.use(notFound());
app.use(logger(app));<|fim▁hole|>};<|fim▁end|> | app.use(handler()); |
<|file_name|>derive_getable.rs<|end_file_name|><|fim▁begin|>extern crate env_logger;
#[macro_use]
extern crate gluon_codegen;
extern crate gluon;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate gluon_vm;
mod init;
use gluon::{
import,
vm::{
self,
api::{
self,
generic::{self, L, R},
OpaqueValue,
},
ExternModule,
},
RootedThread, Thread, ThreadExt,
};
use init::new_vm;
#[derive(Getable, VmType, Debug, Serialize, Deserialize)]
#[gluon(vm_type = "types.TupleEnum")]
enum TupleEnum {
Variant,
OtherVariant,
One(u32),
LotsOfTupleThings(i32, String, f64),
}
fn load_tuple_enum_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
tuple_enum_to_str => primitive!(1, tuple_enum_to_str),
};
ExternModule::new(vm, module)
}
fn tuple_enum_to_str(val: TupleEnum) -> String {
format!("{:?}", val)
}
#[test]
fn enum_tuple_variants() {
let vm = new_vm();
let src = api::typ::make_source::<TupleEnum>(&vm).unwrap();
vm.load_script("types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_tuple_enum_mod);
let script = r#"
let { TupleEnum } = import! types
let { tuple_enum_to_str } = import! functions
let { assert } = import! std.test
let _ = assert (tuple_enum_to_str Variant == "Variant")
let _ = assert (tuple_enum_to_str OtherVariant == "OtherVariant")
let _ = assert (tuple_enum_to_str (One 1) == "One(1)")
assert (tuple_enum_to_str (LotsOfTupleThings 42 "Text" 0.0) == "LotsOfTupleThings(42, \"Text\", 0.0)")
"#;
if let Err(why) = vm.run_expr::<()>("test", script) {
panic!("{}", why);
}
}
#[derive(Getable, VmType, Debug, Serialize, Deserialize)]
#[gluon(vm_type = "types.StructEnum")]
enum StructEnum {
OneField { field: i32 },
TwoFields { name: String, val: f64 },
}
fn load_struct_enum_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
struct_enum_to_str => primitive!(1, struct_enum_to_str),
};
ExternModule::new(vm, module)
}
fn struct_enum_to_str(val: StructEnum) -> String {
format!("{:?}", val)
}
#[test]
fn enum_struct_variants() {
let vm = new_vm();
let src = api::typ::make_source::<StructEnum>(&vm).unwrap();
println!("Types:\n{}", src);
vm.load_script("types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_struct_enum_mod);
let script = r#"
let { StructEnum } = import! types
let { struct_enum_to_str } = import! functions
let { assert } = import! std.test
let _ = assert (struct_enum_to_str (OneField { field = 1337 }) == "OneField { field: 1337 }")
assert (struct_enum_to_str (TwoFields { name = "Pi", val = 3.14 }) == "TwoFields { name: \"Pi\", val: 3.14 }")
"#;
if let Err(why) = vm.run_expr::<()>("test", script) {
panic!("{}", why);
}
}
#[derive(Getable, VmType)]
enum Either<L, R> {
Left(L),
Right(R),
}
fn load_either_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
type Either l r => Either<L, R>,
left => primitive!(1, left),
extract_str => primitive!(1, extract_str),
};
ExternModule::new(vm, module)
}
type GenericL = OpaqueValue<RootedThread, generic::L>;
type GenericR = OpaqueValue<RootedThread, generic::R>;
fn left(either: Either<GenericL, GenericR>) -> Option<GenericL> {
match either {
Either::Left(left) => Some(left),
_ => None,
}
}<|fim▁hole|> Either::Left(string) => string,
Either::Right(string) => string,
}
}
#[test]
fn enum_generic_variants() {
let _ = env_logger::try_init();
let vm = new_vm();
import::add_extern_module(&vm, "functions", load_either_mod);
let script = r#"
let { Either, left, extract_str } = import! functions
let { assert } = import! std.test
let l: Either Int Float = Left 42
let _ = assert (left l == Some 42)
let r: Either Int Float = Right 0.0
let _ = assert (left r == None)
let _ = assert (extract_str (Left "left") == "left")
assert (extract_str (Right "right") == "right")
"#;
if let Err(why) = vm.run_expr::<()>("test", script) {
panic!("{}", why);
}
}
#[derive(Getable, Pushable, VmType)]
enum Enum {
TestVariant,
TestVariant2(i32),
}
#[test]
fn derive_generates_same_type_as_gluon_define() {
let _ = env_logger::try_init();
let vm = new_vm();
vm.get_database_mut().implicit_prelude(false);
import::add_extern_module(&vm, "test", |vm| {
ExternModule::new(vm, primitive!(1, "test", |_: Enum| ()))
});
let script = r#"
let test = import! test
type Enum = | TestVariant | TestVariant2 Int
let _ = test TestVariant
test (TestVariant2 123)
"#;
if let Err(why) = vm.run_expr::<()>("test", script) {
panic!("{}", why);
}
}
#[derive(Getable)]
struct LifetimeStruct<'a> {
_str: &'a str,
}
// TODO: impl tests for lifetimes, this requires
// a safe interface for Getable::from_value()
#[derive(Getable, VmType, Debug, Serialize, Deserialize)]
#[gluon(vm_type = "types.Struct")]
struct Struct {
string: String,
int: i32,
tuple: (f64, f64),
}
fn load_struct_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
struct_to_str => primitive!(1, struct_to_str),
};
ExternModule::new(vm, module)
}
fn struct_to_str(val: Struct) -> String {
format!("{:?}", val)
}
#[test]
fn struct_derive() {
let vm = new_vm();
let src = api::typ::make_source::<Struct>(&vm).unwrap();
vm.load_script("types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_struct_mod);
let script = r#"
let { Struct } = import! types
let { struct_to_str } = import! functions
let { assert } = import! std.test
assert (struct_to_str { string = "test", int = 55, tuple = (0.0, 1.0) } == "Struct { string: \"test\", int: 55, tuple: (0.0, 1.0) }")
"#;
if let Err(why) = vm.run_expr::<()>("test", script) {
panic!("{}", why);
}
}
#[derive(Serialize, Deserialize, Debug, VmType, Getable)]
#[gluon(vm_type = "types.TupleStruct")]
struct TupleStruct(i32, i32);
fn load_tuple_struct_mod(vm: &Thread) -> vm::Result<ExternModule> {
let module = record! {
tuple_struct_to_str => primitive!(1, tuple_struct_to_str),
};
ExternModule::new(vm, module)
}
fn tuple_struct_to_str(val: TupleStruct) -> String {
format!("{:?}", val)
}
#[test]
fn tuple_struct_derive() {
let vm = new_vm();
let src = r#"
type TupleStruct = (Int, Int)
{ TupleStruct }
"#;
vm.load_script("types", &src).unwrap();
import::add_extern_module(&vm, "functions", load_tuple_struct_mod);
let script = r#"
let { TupleStruct } = import! types
let { tuple_struct_to_str } = import! functions
let { assert } = import! std.test
assert (tuple_struct_to_str (1, 2) == "TupleStruct(1, 2)")
"#;
if let Err(why) = vm.run_expr::<()>("test", script) {
panic!("{}", why);
}
}<|fim▁end|> |
fn extract_str(either: Either<String, String>) -> String {
match either { |
<|file_name|>disk.rs<|end_file_name|><|fim▁begin|>use peripheral_card::PeripheralCard;
use std::io::Read;
/* Disk has 35 concentric tracks.
* Outer = $00, inner = $22
*
* 16 sectors per track.
* $0 to $F
*
* 256 bytes can be stored in each sector.
 * $00 to $FF
*/
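/* Capacity check (editorial, not from the original source):
 * 35 tracks * 16 sectors * 256 bytes = 143,360 bytes -- the familiar
 * ~140 KB per side of an Apple II 5.25" floppy.
 */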
/* The rom for the Disk2.
* It will be "copied" into
* the Apple II's memory.
* Taken from Apple Win.
*/
static DISK2_ROM: [u8; 0x100] =
[0xA2, 0x20, 0xA0, 0x00, 0xA2, 0x03, 0x86, 0x3C, 0x8A, 0x0A, 0x24, 0x3C, 0xF0, 0x10, 0x05,
0x3C, 0x49, 0xFF, 0x29, 0x7E, 0xB0, 0x08, 0x4A, 0xD0, 0xFB, 0x98, 0x9D, 0x56, 0x03, 0xC8,
0xE8, 0x10, 0xE5, 0x20, 0x58, 0xFF, 0xBA, 0xBD, 0x00, 0x01, 0x0A, 0x0A, 0x0A, 0x0A, 0x85,
0x2B, 0xAA, 0xBD, 0x8E, 0xC0, 0xBD, 0x8C, 0xC0, 0xBD, 0x8A, 0xC0, 0xBD, 0x89, 0xC0, 0xA0,
0x50, 0xBD, 0x80, 0xC0, 0x98, 0x29, 0x03, 0x0A, 0x05, 0x2B, 0xAA, 0xBD, 0x81, 0xC0, 0xA9,
0x56, 0x20, 0xA8, 0xFC, 0x88, 0x10, 0xEB, 0x85, 0x26, 0x85, 0x3D, 0x85, 0x41, 0xA9, 0x08,
0x85, 0x27, 0x18, 0x08, 0xBD, 0x8C, 0xC0, 0x10, 0xFB, 0x49, 0xD5, 0xD0, 0xF7, 0xBD, 0x8C,
0xC0, 0x10, 0xFB, 0xC9, 0xAA, 0xD0, 0xF3, 0xEA, 0xBD, 0x8C, 0xC0, 0x10, 0xFB, 0xC9, 0x96,
0xF0, 0x09, 0x28, 0x90, 0xDF, 0x49, 0xAD, 0xF0, 0x25, 0xD0, 0xD9, 0xA0, 0x03, 0x85, 0x40,
0xBD, 0x8C, 0xC0, 0x10, 0xFB, 0x2A, 0x85, 0x3C, 0xBD, 0x8C, 0xC0, 0x10, 0xFB, 0x25, 0x3C,
0x88, 0xD0, 0xEC, 0x28, 0xC5, 0x3D, 0xD0, 0xBE, 0xA5, 0x40, 0xC5, 0x41, 0xD0, 0xB8, 0xB0,
0xB7, 0xA0, 0x56, 0x84, 0x3C, 0xBC, 0x8C, 0xC0, 0x10, 0xFB, 0x59, 0xD6, 0x02, 0xA4, 0x3C,
0x88, 0x99, 0x00, 0x03, 0xD0, 0xEE, 0x84, 0x3C, 0xBC, 0x8C, 0xC0, 0x10, 0xFB, 0x59, 0xD6,
0x02, 0xA4, 0x3C, 0x91, 0x26, 0xC8, 0xD0, 0xEF, 0xBC, 0x8C, 0xC0, 0x10, 0xFB, 0x59, 0xD6,
0x02, 0xD0, 0x87, 0xA0, 0x00, 0xA2, 0x56, 0xCA, 0x30, 0xFB, 0xB1, 0x26, 0x5E, 0x00, 0x03,
0x2A, 0x5E, 0x00, 0x03, 0x2A, 0x91, 0x26, 0xC8, 0xD0, 0xEE, 0xE6, 0x27, 0xE6, 0x3D, 0xA5,
0x3D, 0xCD, 0x00, 0x08, 0xA6, 0x2B, 0x90, 0xDB, 0x4C, 0x01, 0x08, 0x00, 0x00, 0x00, 0x00,
0x00];
/* Helps with the bit fiddling necessary to extract the bottom
* two bits during the 256 - 342 byte nibblize.
*/
static TAB1: [u8; 64] = [0x00, 0x08, 0x04, 0x0C, 0x20, 0x28, 0x24, 0x2C, 0x10, 0x18, 0x14, 0x1C,
0x30, 0x38, 0x34, 0x3C, 0x80, 0x88, 0x84, 0x8C, 0xA0, 0xA8, 0xA4, 0xAC,
0x90, 0x98, 0x94, 0x9C, 0xB0, 0xB8, 0xB4, 0xBC, 0x40, 0x48, 0x44, 0x4C,
0x60, 0x68, 0x64, 0x6C, 0x50, 0x58, 0x54, 0x5C, 0x70, 0x78, 0x74, 0x7C,
0xC0, 0xC8, 0xC4, 0xCC, 0xE0, 0xE8, 0xE4, 0xEC, 0xD0, 0xD8, 0xD4, 0xDC,
0xF0, 0xF8, 0xF4, 0xFC];
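/* Sketch of where 342 comes from (editorial note, not in the original):
 * the "6-and-2" scheme stores only 6 data bits per disk byte, so
 * 256 * 8 = 2048 bits need ceil(2048 / 6) = 342 disk bytes --
 * 0x56 (86) bytes of packed low bits plus 0x100 (256) data bytes.
 */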
/* Translates to "disk bytes"
*/
static TAB2: [u8; 64] = [0x96, 0x97, 0x9A, 0x9B, 0x9D, 0x9E, 0x9F, 0xA6, 0xA7, 0xAB, 0xAC, 0xAD,
0xAE, 0xAF, 0xB2, 0xB3, 0xB4, 0xB5, 0xB6, 0xB7, 0xB9, 0xBA, 0xBB, 0xBC,
0xBD, 0xBE, 0xBF, 0xCB, 0xCD, 0xCE, 0xCF, 0xD3, 0xD6, 0xD7, 0xD9, 0xDA,
0xDB, 0xDC, 0xDD, 0xDE, 0xDF, 0xE5, 0xE6, 0xE7, 0xE9, 0xEA, 0xEB, 0xEC,
0xED, 0xEE, 0xEF, 0xF2, 0xF3, 0xF4, 0xF5, 0xF6, 0xF7, 0xF9, 0xFA, 0xFB,
0xFC, 0xFD, 0xFE, 0xFF];
/* Dos 3.3 to physical sector conversion
*/
static PHYS: [u8; 16] = [0x00, 0x0D, 0x0B, 0x09, 0x07, 0x05, 0x03, 0x01, 0x0E, 0x0C, 0x0A, 0x08,
0x06, 0x04, 0x02, 0x0F];
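/* Usage sketch (assumption): DOS 3.3 logical sector s is written to
 * physical slot PHYS[s], the standard 2:1 interleave -- e.g. logical
 * sector 1 lands at physical 0x0D.
 */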
pub struct Drive {
sectors: Option<Box<[[[u8; 0x200]; 16]; 70]>>,
track: usize,
sector: usize,
idx: usize,
/* holds bitmap of magnets enabled */
magnets: u32,
/* holds current magnet phase */
phase: u32,
}
impl Drive {
pub fn new() -> Drive
{
Drive {
sectors: None,
track: 0,
sector: 15,
idx: 0,
magnets: 0,
phase: 0,
}
}
pub fn add_disk<R>(&mut self, mut disk: R)
where R: Read
{
let mut data = [[[0; 0x200]; 16]; 70];
for (track_num, track) in data.iter_mut().enumerate() {<|fim▁hole|> for (sector_num, sector) in track.iter_mut().enumerate() {
let mut idx = 0;
let phys_sector = PHYS[sector_num];
for _ in 0..16 {
sector[idx] = 0xFF;
idx += 1;
}
/* address header */
sector[idx] = 0xD5;
idx += 1;
sector[idx] = 0xAA;
idx += 1;
sector[idx] = 0x96;
idx += 1;
/* disk volume = 254 */
sector[idx] = 0xFF;
idx += 1;
sector[idx] = 0xFE;
idx += 1;
sector[idx] = Drive::nib_odd(track_num as u8);
idx += 1;
sector[idx] = Drive::nib_even(track_num as u8);
idx += 1;
sector[idx] = Drive::nib_odd(phys_sector);
idx += 1;
sector[idx] = Drive::nib_even(phys_sector);
idx += 1;
let checksum = 254 ^ track_num ^ phys_sector as usize;
sector[idx] = Drive::nib_odd(checksum as u8);
idx += 1;
sector[idx] = Drive::nib_even(checksum as u8);
idx += 1;
/* address trailer */
sector[idx] = 0xDE;
idx += 1;
sector[idx] = 0xAA;
idx += 1;
sector[idx] = 0xEB;
idx += 1;
for _ in 0..8 {
sector[idx] = 0xFF;
idx += 1;
}
/* data header */
sector[idx] = 0xD5;
idx += 1;
sector[idx] = 0xAA;
idx += 1;
sector[idx] = 0xAD;
idx += 1;
/* encode data */
let mut buf = [0u8; 344];
/* ignore if it doesn't read the entire length */
disk.read(&mut buf[0x56..0x56 + 0x100]).unwrap();
for off in 0..0x56 {
let i = (buf[off + 0x56] & 3) | (buf[off + 0x56 + 0x56] & 3) << 2 |
(buf[off + 0x56 + 0x56 + 0x56] & 3) << 4;
buf[off] = TAB1[i as usize];
}
sector[idx] = buf[0];
for off in 1..343 {
sector[idx + off] = buf[off - 1] ^ buf[off];
}
for off in 0..343 {
sector[idx + off] = TAB2[(sector[idx + off] >> 2) as usize];
}
idx += 343;
/* data trailer */
sector[idx] = 0xDE;
idx += 1;
sector[idx] = 0xAA;
idx += 1;
sector[idx] = 0xEB;
}
}
self.sectors = Some(Box::new(data));
}
fn step_motor(&mut self, magnet: u16, enable: bool) {
/* magnet is range 0-3 inclusive */
if enable {
self.magnets |= 1 << magnet as u32;
} else {
self.magnets &= !(1 << magnet as u32);
}
if self.magnets & (1 << ((self.phase + 1) % 4)) != 0 && self.phase < 140 {
self.phase += 1;
}
if self.magnets & (1 << ((self.phase + 3) % 4)) != 0 && self.phase > 0 {
self.phase -= 1;
}
if self.track != ((self.phase + 1) / 2) as usize {
info!("track {}", (self.phase + 1) / 2);
}
self.track = ((self.phase + 1) / 2) as usize;
}
fn read(&mut self) -> u8 {
match self.sectors {
Some(ref data) => {
let mut ret = data[self.track][self.sector][self.idx];
if ret == 0 {
self.sector += 15;
self.sector %= 16;
info!("sector {}", self.sector);
self.idx = 0;
ret = data[self.track][self.sector][self.idx];
}
self.idx += 1;
ret
}
None => 0xFF,
}
}
fn read_without_mm(&mut self) -> u8 {
match self.sectors {
Some(ref data) => {
let mut ret = data[self.track][self.sector][self.idx];
if ret == 0 {
self.sector += 15;
self.sector %= 16;
info!("sector {}", self.sector);
self.idx = 0;
ret = data[self.track][self.sector][self.idx];
}
ret
}
None => 0xFF,
}
}
fn nib_odd(byte: u8) -> u8 {
(byte >> 1) | 0xAA
}
fn nib_even(byte: u8) -> u8 {
byte | 0xAA
}
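    // Editorial sketch: nib_odd/nib_even implement the "4-and-4" address
    // field encoding -- byte b is stored as the pair ((b >> 1) | 0xAA,
    // b | 0xAA) and can be recovered as ((odd << 1) | 1) & even.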
}
enum Mode {
Read,
Write,
}
pub struct DiskII {
drives: [Drive; 2],
// write_reg: u8,
drive_num: usize,
mode: Mode,
write_protect: bool,
}
impl DiskII {
pub fn new() -> DiskII
{
DiskII {
drives: [Drive::new(), Drive::new()],
// write_reg: 0,
drive_num: 0,
mode: Mode::Read,
write_protect: false,
}
}
pub fn set_first_disk<R>(&mut self, disk: R)
where R: Read
{
self.drives[0].add_disk(disk);
}
pub fn set_second_disk<R>(&mut self, disk: R)
where R: Read
{
self.drives[1].add_disk(disk);
}
fn current_drive(&mut self) -> &mut Drive {
&mut self.drives[self.drive_num]
}
}
impl PeripheralCard for DiskII {
fn read_switch(&mut self, switch: u16) -> u8 {
match switch {
/* phase switches */
0x00...0x07 => {
info!("Phase switch {}, enable {}", switch >> 1, (switch & 1) != 0);
self.current_drive()
.step_motor(switch >> 1, (switch & 1) != 0);
0
}
/* ignore motor stuff */
0x08...0x09 => {
info!("Motor {}", switch & 1 != 0);
0
}
0x0A => {
info!("drive 0");
self.drive_num = 0;
0
}
0x0B => {
info!("drive 1");
self.drive_num = 1;
0
}
0x0C => {
match self.mode {
Mode::Read => self.current_drive().read(),
Mode::Write => 0,
}
}
0x0D => {
info!("Writing to write reg");
0x00
}
0x0E => {
info!("Setting read mode");
self.mode = Mode::Read;
if self.write_protect { 0xFF } else { 0x00 }
}
0x0F => {
info!("Setting write mode");
self.mode = Mode::Write;
0x00
}
_ => 0,
}
}
fn read_switch_without_mm(&mut self, switch: u16) -> u8 {
match switch {
/* phase switches */
0x00...0x07 => 0,
/* ignore motor stuff */
0x08...0x09 => 0,
0x0A => 0,
0x0B => 0,
0x0C => {
match self.mode {
Mode::Read => self.current_drive().read_without_mm(),
Mode::Write => 0,
}
}
0x0D => 0x00,
0x0E => if self.write_protect { 0xFF } else { 0x00 },
0x0F => 0x00,
_ => 0,
}
}
fn read_rom(&mut self, addr: u16) -> u8 {
let rom_addr = (addr & 0xFF) as usize;
match rom_addr {
0x4C => 0xA9,
0x4D => 0x00,
0x4E => 0xEA,
_ => DISK2_ROM[rom_addr],
}
}
fn read_expansion_rom(&mut self, _addr: u16) -> u8 {
0
}
}<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>import django
import time
from uuid import uuid1
from datetime import timedelta
from threading import Thread
from django.template import Template
from django.test import TestCase, TransactionTestCase
from django.contrib.auth.models import User, Group
from django.utils import timezone
from django.core import management, mail
from django.core.mail import send_mail
from django.conf import settings
from django.db.models.signals import post_save
from alert.utils import BaseAlert, ALERT_TYPES, BaseAlertBackend, ALERT_BACKENDS,\
super_accepter, unsubscribe_user
from alert.exceptions import AlertIDAlreadyInUse, AlertBackendIDAlreadyInUse, CouldNotSendError
from alert.models import Alert, AlertPreference, AdminAlert
from alert.forms import AlertPreferenceForm, UnsubscribeForm
from alert.admin import AdminAlertAdmin
class SubclassTestingAlert(BaseAlert):
"""
This will never send any alerts - it's just a check to make sure that
subclassing alerts doesn't explode
"""
title = 'Welcome new users'
description = 'When a new user signs up, send them a welcome email'
signal = post_save
sender = User
default = True
def before(self, **kwargs):
return False
def get_applicable_users(self, instance, **kwargs):
return [instance]
class WelcomeAlert(SubclassTestingAlert):
"""
everything is inherited from SubclassTestingAlert
only change is that alerts will actually be sent
"""
def before(self, created, **kwargs):
return created
class DummyBackend(BaseAlertBackend):
title = "Dummy"
def send(self, alert):
pass
class EpicFailBackend(BaseAlertBackend):
"""
Backend that fails to send on the first try for every alert
"""
id = "EpicFail"
title = "Epic Fail"
def send(self, alert):
if not alert.failed:
raise CouldNotSendError
class SlowBackend(BaseAlertBackend):
"""
Backend that takes a full second to send an alert
"""
title = "Slow backend"
def send(self, alert):
time.sleep(1)
send_mail("asdf", 'woot', '[email protected]', ['[email protected]'])
#################################################
### Tests ###
#################################################
class AlertTests(TestCase):
def setUp(self):
pass
def test_alert_creation(self):
username = str(uuid1().hex)[:16]
email = "%[email protected]" % username
user = User.objects.create(username=username, email=email)
alerts = Alert.objects.filter(user=user)
self.assertEqual(len(alerts), len(ALERT_BACKENDS))
for alert in alerts:
self.assertEqual(alert.alert_type, "WelcomeAlert")
if alert.backend == 'EmailBackend':
self.assertEqual(alert.title, "email subject")
self.assertEqual(alert.body, "email body")
else:
self.assertEqual(alert.title, "default title")
self.assertEqual(alert.body, "default body")
def test_alert_registration_only_happens_once(self):
self.assertTrue(isinstance(ALERT_TYPES["WelcomeAlert"], WelcomeAlert))
self.assertEquals(len(ALERT_TYPES), 3)
def define_again():
class WelcomeAlert(BaseAlert):
title = 'Welcome new users'
signal = post_save
self.assertRaises(AlertIDAlreadyInUse, define_again)
def test_alert_id_is_key_in_ALERT_TYPES(self):
for key, alert in ALERT_TYPES.items():
self.assertEqual(key, alert.id)
class AlertBackendTests(TestCase):
def setUp(self):
username = str(uuid1().hex)[:16]
email = "%[email protected]" % username
self.user = User.objects.create(username=username, email=email)
def test_backend_creation(self):
self.assertTrue(isinstance(ALERT_BACKENDS["DummyBackend"], DummyBackend))
def test_backends_use_supplied_id(self):
self.assertTrue(isinstance(ALERT_BACKENDS["EpicFail"], EpicFailBackend))
def test_pending_manager(self):
self.assertEqual(Alert.pending.all().count(), len(ALERT_BACKENDS))
management.call_command("send_alerts")
self.assertEqual(Alert.pending.all().count(), 1)
def test_backend_registration_only_happens_once(self):
self.assertEquals(len(ALERT_BACKENDS), 4)
def define_again():
class DummyBackend(BaseAlertBackend):
title = 'dummy'
self.assertRaises(AlertBackendIDAlreadyInUse, define_again)
def test_backend_fails_to_send(self):
alert_that_should_fail = Alert.objects.filter(backend='EpicFail')[0]
before_send = timezone.now()
alert_that_should_fail.send()
after_send = timezone.now()
<|fim▁hole|> self.assertTrue(alert_that_should_fail.last_attempt is not None)
self.assertTrue(alert_that_should_fail.last_attempt > before_send)
self.assertTrue(alert_that_should_fail.last_attempt < after_send)
# and now retry
before_send = timezone.now()
alert_that_should_fail.send()
after_send = timezone.now()
self.assertFalse(alert_that_should_fail.failed)
self.assertTrue(alert_that_should_fail.is_sent)
self.assertTrue(alert_that_should_fail.last_attempt is not None)
self.assertTrue(alert_that_should_fail.last_attempt > before_send)
self.assertTrue(alert_that_should_fail.last_attempt < after_send)
class ConcurrencyTests(TransactionTestCase):
def setUp(self):
username = str(uuid1().hex)[:16]
email = "%[email protected]" % username
self.user = User.objects.create(username=username, email=email)
def testMultipleSimultaneousSendScripts(self):
# Sqlite uses an in-memory database, which does not work with the concurrency tests.
if "sqlite" in settings.DATABASES['default']['ENGINE']:
# Note that the alert django app will work fine with Sqlite. It's only the
            # concurrency *tests* that do not work with sqlite.
return
self.assertEqual(len(mail.outbox), 0)
threads = [Thread(target=management.call_command, args=('send_alerts',)) for i in range(100)]
for t in threads:
t.start()
# space them out a little tiny bit
time.sleep(0.001)
[t.join() for t in threads]
self.assertEqual(len(mail.outbox), 2)
class EmailBackendTests(TestCase):
def setUp(self):
pass
class FormTests(TestCase):
def setUp(self):
self.user = User.objects.create(username='wootz', email='[email protected]')
def testNoArgs(self):
        self.assertRaises(TypeError, AlertPreferenceForm)
        self.assertRaises(TypeError, UnsubscribeForm)
def testSimpleCase(self):
pref_form = AlertPreferenceForm(user=self.user)
unsubscribe_form = UnsubscribeForm(user=self.user)
self.assertEqual(len(pref_form.fields), len(ALERT_TYPES) * len(ALERT_BACKENDS))
self.assertEqual(len(unsubscribe_form.fields), len(ALERT_TYPES) * len(ALERT_BACKENDS))
def testUnsubscribeFormHasNoVisibleFields(self):
from django.forms import HiddenInput
unsubscribe_form = UnsubscribeForm(user=self.user)
for field in unsubscribe_form.fields.values():
self.assertTrue(isinstance(field.widget, HiddenInput))
def testSuperAccepterNone(self):
types = super_accepter(None, ALERT_TYPES)
backends = super_accepter(None, ALERT_BACKENDS)
self.assertEqual(len(types), len(ALERT_TYPES))
self.assertEqual(len(backends), len(ALERT_BACKENDS))
def testSuperAccepterSingle(self):
backends_by_class = super_accepter(EpicFailBackend, ALERT_BACKENDS)
backends_by_id = super_accepter("EpicFail", ALERT_BACKENDS)
self.assertEqual(len(backends_by_class), 1)
self.assertEqual(len(backends_by_id), 1)
self.assertEqual(backends_by_class, backends_by_id)
def testSuperAccepterList(self):
backends_by_class = super_accepter([EpicFailBackend, DummyBackend], ALERT_BACKENDS)
backends_by_id = super_accepter(["EpicFail", "DummyBackend"], ALERT_BACKENDS)
backends_by_mixed = super_accepter(["EpicFail", DummyBackend], ALERT_BACKENDS)
self.assertEqual(len(backends_by_class), 2)
self.assertEqual(len(backends_by_id), 2)
self.assertEqual(len(backends_by_mixed), 2)
self.assertEqual(backends_by_class, backends_by_id)
self.assertEqual(backends_by_class, backends_by_mixed)
self.assertEqual(backends_by_mixed, backends_by_id)
def testSuperAccepterDuplicates(self):
backends = super_accepter([EpicFailBackend, DummyBackend, "EpicFail"], ALERT_BACKENDS)
self.assertEqual(len(backends), 2)
def testUnsubscribe(self):
details = {
"alert_type": WelcomeAlert.id,
"backend": EpicFailBackend.id,
"user": self.user,
}
AlertPreference.objects.create(preference=True, **details)
self.assertEqual(AlertPreference.objects.get(**details).preference, True)
unsubscribe_user(self.user, alerts=WelcomeAlert, backends=EpicFailBackend)
self.assertEqual(AlertPreference.objects.get(**details).preference, False)
class AdminAlertTests(TestCase):
def setUp(self):
group = Group.objects.create(name='test_group')
self.admin_alert = AdminAlert(
title="Hello users!",
body="woooord!",
recipients=group
)
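    # Saving through the admin (AdminAlertAdmin.save_model) is what fans the
    # alert out to recipients, so the tests drive it directly.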
def send_it(self):
AdminAlertAdmin.save_model(AdminAlertAdmin(AdminAlert, None), None, self.admin_alert, None, None)
def testDraftMode(self):
self.admin_alert.draft = True
self.send_it()
self.assertEqual(Alert.objects.count(), 0)
self.send_it()
self.assertEqual(Alert.objects.count(), User.objects.count())
def testScheduling(self):
send_at = timezone.now() + timedelta(days=1)
self.admin_alert.send_at = send_at
self.send_it()
for alert in Alert.objects.all():
self.assertEqual(alert.when, send_at)
def testOnlySendOnce(self):
self.assertFalse(self.admin_alert.sent)
self.send_it()
self.assertTrue(self.admin_alert.sent)
alert_count = Alert.objects.count()
self.send_it()
self.assertEqual(alert_count, Alert.objects.count())
# Email Templates aren't supported before django 1.8
if django.VERSION[:2] >= (1, 8):
from django.template import engines
from alert.utils import render_email_to_string
def get_template_contents(tmpl):
fs_loader = engines['django'].engine.template_loaders[0]
source, origin = fs_loader.load_template_source(tmpl)
return source
class EmailTemplateTests(TestCase):
def check_template(self, name, cx):
template_file = "{0}.email".format(name)
expected_txt = get_template_contents("{0}.expected.txt".format(name))
expected_html = get_template_contents("{0}.expected.html".format(name))
rendered_default = render_email_to_string(template_file, cx)
rendered_txt = render_email_to_string(template_file, cx, alert_type="txt")
rendered_html = render_email_to_string(template_file, cx, alert_type="html")
# Default shard ext is "txt"
self.assertEqual(rendered_default, rendered_txt)
self.assertEqual(rendered_txt, expected_txt)
self.assertEqual(rendered_html, expected_html)
def test_basic_use(self):
self.check_template("basic", {
"username": "Alex"
})<|fim▁end|> | self.assertTrue(alert_that_should_fail.failed)
self.assertFalse(alert_that_should_fail.is_sent) |
<|file_name|>gt.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
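    // The commented-out block below preserves the old libcore `array_impls!`
    // macro (FixedSizeArray, PartialOrd, etc. for fixed-size arrays) for
    // reference; the tests that follow exercise the `gt` behaviour it provided.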
// pub trait FixedSizeArray<T> {
// /// Converts the array to immutable slice
// fn as_slice(&self) -> &[T];
// /// Converts the array to mutable slice
// fn as_mut_slice(&mut self) -> &mut [T];
// }
// macro_rules! array_impls {
// ($($N:expr)+) => {
// $(
// #[unstable(feature = "core")]
// impl<T> FixedSizeArray<T> for [T; $N] {
// #[inline]
// fn as_slice(&self) -> &[T] {
// &self[..]
// }
// #[inline]
// fn as_mut_slice(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsRef<[T]> for [T; $N] {
// #[inline]
// fn as_ref(&self) -> &[T] {
// &self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsMut<[T]> for [T; $N] {
// #[inline]
// fn as_mut(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Copy> Clone for [T; $N] {
// fn clone(&self) -> [T; $N] {
// *self
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: Hash> Hash for [T; $N] {
// fn hash<H: hash::Hasher>(&self, state: &mut H) {
// Hash::hash(&self[..], state)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: fmt::Debug> fmt::Debug for [T; $N] {
// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// fmt::Debug::fmt(&&self[..], f)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a mut [T; $N] {
// type Item = &'a mut T;
// type IntoIter = IterMut<'a, T>;
//
// fn into_iter(self) -> IterMut<'a, T> {
// self.iter_mut()
// }
// }
//
// // NOTE: some less important impls are omitted to reduce code bloat
// __impl_slice_eq1! { [A; $N], [B; $N] }
// __impl_slice_eq2! { [A; $N], [B] }
// __impl_slice_eq2! { [A; $N], &'b [B] }
// __impl_slice_eq2! { [A; $N], &'b mut [B] }
// // __impl_slice_eq2! { [A; $N], &'b [B; $N] }
// // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Eq> Eq for [T; $N] { }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:PartialOrd> PartialOrd for [T; $N] {
// #[inline]
// fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
// PartialOrd::partial_cmp(&&self[..], &&other[..])
// }
// #[inline]
// fn lt(&self, other: &[T; $N]) -> bool {
// PartialOrd::lt(&&self[..], &&other[..])
// }
// #[inline]
// fn le(&self, other: &[T; $N]) -> bool {
// PartialOrd::le(&&self[..], &&other[..])
// }
// #[inline]
// fn ge(&self, other: &[T; $N]) -> bool {
// PartialOrd::ge(&&self[..], &&other[..])
// }
// #[inline]
// fn gt(&self, other: &[T; $N]) -> bool {
// PartialOrd::gt(&&self[..], &&other[..])
// }<|fim▁hole|> //
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Ord> Ord for [T; $N] {
// #[inline]
// fn cmp(&self, other: &[T; $N]) -> Ordering {
// Ord::cmp(&&self[..], &&other[..])
// }
// }
// )+
// }
// }
// array_impls! {
// 0 1 2 3 4 5 6 7 8 9
// 10 11 12 13 14 15 16 17 18 19
// 20 21 22 23 24 25 26 27 28 29
// 30 31 32
// }
type T = i32;
type A = T;
type B = T;
#[test]
fn gt_test1() {
let array_a: [A; 3] = [
0, 1, 2
];
let array_b: [B; 3] = [
1, 2, 3
];
        assert_eq!(array_a.gt(&array_b), false);
assert_eq!(array_a > array_b, false);
}
#[test]
fn gt_test2() {
let array_a: [A; 3] = [
0, 1, 2
];
let array_b: [B; 3] = [
0, 1, 2
];
assert_eq!(array_a.gt(&array_b), false);
assert_eq!(array_a > array_b, false);
}
#[test]
fn gt_test3() {
let array_a: [A; 3] = [
1, 2, 3
];
let array_b: [B; 3] = [
0, 1, 2
];
assert_eq!(array_a.gt(&array_b), true);
assert_eq!(array_a > array_b, true);
}
}<|fim▁end|> | // } |
<|file_name|>performance.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::PerformanceBinding;
use dom::bindings::codegen::Bindings::PerformanceBinding::PerformanceMethods;
use dom::bindings::js::{JS, Root};
use dom::bindings::num::Finite;
use dom::bindings::reflector::{Reflector, reflect_dom_object};
use dom::performancetiming::PerformanceTiming;
use dom::window::Window;
use time;
pub type DOMHighResTimeStamp = Finite<f64>;
#[dom_struct]
pub struct Performance {
reflector_: Reflector,
timing: JS<PerformanceTiming>,
}
impl Performance {
fn new_inherited(window: &Window,
navigation_start: u64,
navigation_start_precise: f64) -> Performance {
Performance {<|fim▁hole|> navigation_start_precise)),
}
}
pub fn new(window: &Window,
navigation_start: u64,
navigation_start_precise: f64) -> Root<Performance> {
reflect_dom_object(box Performance::new_inherited(window,
navigation_start,
navigation_start_precise),
window,
PerformanceBinding::Wrap)
}
}
impl PerformanceMethods for Performance {
// https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/NavigationTiming/Overview.html#performance-timing-attribute
fn Timing(&self) -> Root<PerformanceTiming> {
Root::from_ref(&*self.timing)
}
// https://dvcs.w3.org/hg/webperf/raw-file/tip/specs/HighResolutionTime/Overview.html#dom-performance-now
fn Now(&self) -> DOMHighResTimeStamp {
let nav_start = self.timing.navigation_start_precise();
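        // Nanoseconds elapsed since navigation start, converted to milliseconds.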
        let now = (time::precise_time_ns() as f64 - nav_start) / 1_000_000.0;
Finite::wrap(now)
}
}<|fim▁end|> | reflector_: Reflector::new(),
timing: JS::from_ref(&*PerformanceTiming::new(window,
navigation_start, |
<|file_name|>AbstractCDOIDByteArray.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eike Stepper - initial API and implementation
*/
package org.eclipse.emf.cdo.spi.common.id;
import java.io.IOException;
import java.util.Arrays;
import org.eclipse.emf.cdo.common.id.CDOIDUtil;
import org.eclipse.net4j.util.CheckUtil;
import org.eclipse.net4j.util.io.ExtendedDataInput;
import org.eclipse.net4j.util.io.ExtendedDataOutput;
<|fim▁hole|> * @since 4.1
* @noextend This interface is not intended to be extended by clients.
*/
public abstract class AbstractCDOIDByteArray extends AbstractCDOID
{
public static final String NULL_VALUE = null;
private static final long serialVersionUID = 1L;
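  // 16-byte UUID payload backing this id (the constructor enforces the length).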
private byte[] value;
public AbstractCDOIDByteArray()
{
}
public AbstractCDOIDByteArray(byte[] value)
{
CheckUtil.checkArg(value != null && value.length == 16, "Illegal UUID value");
this.value = value;
}
public byte[] getByteArrayValue()
{
return value;
}
public String toURIFragment()
{
return CDOIDUtil.encodeUUID(value);
}
@Override
public void read(String fragmentPart)
{
value = CDOIDUtil.decodeUUID(fragmentPart);
}
@Override
public void read(ExtendedDataInput in) throws IOException
{
value = in.readByteArray();
}
@Override
public void write(ExtendedDataOutput out) throws IOException
{
out.writeByteArray(value);
}
@Override
public boolean equals(Object obj)
{
if (obj == this)
{
return true;
}
if (obj != null && obj.getClass() == getClass())
{
AbstractCDOIDByteArray that = (AbstractCDOIDByteArray)obj;
return Arrays.equals(value, that.value);
}
return false;
}
@Override
public int hashCode()
{
return getClass().hashCode() ^ Arrays.hashCode(value);
}
}<|fim▁end|> | /**
* @author Eike Stepper |
<|file_name|>component_len.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::unsigned_rational_sequence_gen;
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_demo!(runner, demo_rational_sequence_component_len);
register_bench!(runner, benchmark_rational_sequence_component_len);
}
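// Demo: print component_len() for a sample of generated rational sequences.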
fn demo_rational_sequence_component_len(gm: GenMode, config: GenConfig, limit: usize) {
for xs in unsigned_rational_sequence_gen::<u8>()
.get(gm, &config)
.take(limit)
{
println!("component_len({}) = {}", xs, xs.component_len());
}
}
fn benchmark_rational_sequence_component_len(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
"RationalSequence.component_len()",
BenchmarkType::Single,
unsigned_rational_sequence_gen::<u8>().get(gm, &config),
gm.name(),
limit,
file_name,
&rational_sequence_len_bucketer("xs"),
&mut [("Malachite", &mut |xs| no_out!(xs.component_len()))],
);
}<|fim▁end|> | use malachite_base_test_util::bench::bucketers::rational_sequence_len_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType}; |
<|file_name|>Socket.js<|end_file_name|><|fim▁begin|>/**
* Websocket Client
*/
(function() {
window.Socket = window.Socket || {}
var PacketTypes = {
Command : 0,
Status : 1
}
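    // Usage sketch: `new Socket('', game)` connects to ws(s)://<host><path>ws
    // and relays msgpack-encoded command/status packets to and from the game.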
class Socket {
constructor(uri, game) {
uri = uri || ""
if(uri.length <= 0) {
let loc = window.location
uri = 'ws:'
if(loc.protocol === 'https:') {
uri = 'wss:'
}
uri += '//' + loc.host
uri += loc.pathname + 'ws'
}
this.game = game
this.ready = false
this.socket = new WebSocket(uri)
this.socket.binaryType = "arraybuffer"
this.socket.onopen = () => { this.onOpen() }
this.socket.onmessage = (ev) => { this.onMessage(ev) }
this.socket.onclose = () => { this.onClose() }
}
get closed() {
            return this.socket.readyState == WebSocket.CLOSED
}
close() {
this.socket.close()
}
onOpen() {
this.ready = true
}
onMessage(ev) {
let data = new Uint8Array(ev.data)
let packetObject = msgpack.unpack(data)
            // Drop malformed packets (failed unpack or missing Type field)
            if(packetObject == null || typeof packetObject.Type === "undefined") {
return
}
switch(packetObject.Type) {
case PacketTypes.Command:
this.pushCommand(packetObject.Data)
break
case PacketTypes.Status:
this.handleStatus(packetObject.Data)
break
}
}
send(type, data) {
// Ignore send data if socket not open
if(this.socket.readyState != WebSocket.OPEN) {
return
}
let rawPacket = {
Type: type,
Data: data
}
// Should package as binary
let packet = new Uint8Array(msgpack.pack(rawPacket))
this.socket.send(packet)
}
execCommand(name, team, params) {
params = params || {}
this.send(
PacketTypes.Command,
{
Name: name,
Team: team,
Params: params
}
)
}
pushCommand(data) {
let CommandClass = Command[data.Name]
if(!CommandClass) { // Invalid command
return
}
let CommandInstance = new CommandClass(this.game, data.Team)
CommandInstance.deserialize(data.Params)
// Send command
Command.Resolver.push(CommandInstance)
}
handleStatus(stat) {
            // TODO: this status handling could be made more robust
switch(stat.Name){
case "Register":
if(stat.Value == 1) {
Game.Status = GameStatus.Registered
this.updateStatus("Match", 0)
}
break
case "Match":
if(stat.Value == 1) {
Game.Status = GameStatus.Start
}
break
case "Exit":
Game.Status = GameStatus.End
break
}
}
updateStatus(name, value) {
this.send(
PacketTypes.Status,
{
Name: name,
Value: value
}
)
}
onClose() {
}<|fim▁hole|>
window.Socket = Socket
window.PacketType = PacketTypes
}())<|fim▁end|> | } |
<|file_name|>TestSea6Task1.java<|end_file_name|><|fim▁begin|>public class TestSea6Task1 {
public static void main(String[] args) {
String text = "Sun is shining. Today is a good day for test. Sun is shining. The students are happy. The birds are blue.";
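        // Treat each capital letter as the start of a sentence; track the longest
        // gap between consecutive starts to find the longest sentence.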
int indexSent = -1;
int lengthSen = 0;<|fim▁hole|> for (int i = 0; i < text.length(); i++) {
if (text.charAt(i) >= 'A' && text.charAt(i) <= 'Z') {
counterSen++;
lengthSen = i - indexLast;
indexLast = i;
}
if (i == text.length() - 1) {
lengthSen = text.length() - 1 - indexLast;
}
if (maxLengthSen < lengthSen) {
maxLengthSen = lengthSen;
indexSent = indexLast - maxLengthSen;
}
}
String sentence = text.substring(indexSent, indexSent + maxLengthSen);
System.out.println(sentence);
System.out.println(counterSen);
}
}<|fim▁end|> | int counterSen = 0;
int indexLast = 0;
int maxLengthSen = 0;
|
<|file_name|>manager.rs<|end_file_name|><|fim▁begin|>use super::*;
use crate::app::{ddns, settings, user, vfs};
#[derive(Clone)]
pub struct Manager {
settings_manager: settings::Manager,
user_manager: user::Manager,
vfs_manager: vfs::Manager,
ddns_manager: ddns::Manager,
}
impl Manager {
pub fn new(
settings_manager: settings::Manager,
user_manager: user::Manager,
vfs_manager: vfs::Manager,
ddns_manager: ddns::Manager,
) -> Self {
Self {
settings_manager,
user_manager,
vfs_manager,
ddns_manager,
}
}
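	/// Applies a configuration snapshot in order: settings, mount points, DDNS,
	/// then reconciles the user list (delete removed, create new, update existing).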
pub fn apply(&self, config: &Config) -> Result<(), Error> {
if let Some(new_settings) = &config.settings {
self.settings_manager
.amend(new_settings)
.map_err(|_| Error::Unspecified)?;<|fim▁hole|> }
if let Some(mount_dirs) = &config.mount_dirs {
self.vfs_manager
.set_mount_dirs(&mount_dirs)
.map_err(|_| Error::Unspecified)?;
}
if let Some(ddns_config) = &config.ydns {
self.ddns_manager
.set_config(&ddns_config)
.map_err(|_| Error::Unspecified)?;
}
if let Some(ref users) = config.users {
let old_users: Vec<user::User> =
self.user_manager.list().map_err(|_| Error::Unspecified)?;
// Delete users that are not in new list
for old_user in old_users
.iter()
.filter(|old_user| !users.iter().any(|u| u.name == old_user.name))
{
self.user_manager
.delete(&old_user.name)
.map_err(|_| Error::Unspecified)?;
}
// Insert new users
for new_user in users
.iter()
.filter(|u| !old_users.iter().any(|old_user| old_user.name == u.name))
{
self.user_manager
.create(new_user)
.map_err(|_| Error::Unspecified)?;
}
// Update users
for user in users {
self.user_manager
.set_password(&user.name, &user.password)
.map_err(|_| Error::Unspecified)?;
self.user_manager
.set_is_admin(&user.name, user.admin)
.map_err(|_| Error::Unspecified)?;
}
}
Ok(())
}
}<|fim▁end|> | |
<|file_name|>survey_data_define.go<|end_file_name|><|fim▁begin|>package models
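// SurveyResult bundles one respondent's answers with the submitting device's info.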
type SurveyResult struct {
SurveyId int
Answers []Answer
DeviceInfo DeviceInfomation
}
type Question struct {
QuestionId int
    Type       int // 0 = single choice, 1 = multiple choice, 2 = fill-in-the-blank
Title string
Content []string
// CreatedTime int64
}<|fim▁hole|>type Survey struct {
SurveyId int
Title string
Description string
// CreatedTime int64
}
type Answer struct {
QuestionId int
AnswerId int
QuestionType int
Options []int
Fill string
CreatedTime int64
}
type DeviceInfomation struct {
DeviceId int
DeviceName string
SystemVersion string
AppVersion string
}<|fim▁end|> | |
<|file_name|>error_mutex.rs<|end_file_name|><|fim▁begin|>use std::sync::Mutex;
lazy_static! {<|fim▁hole|><|fim▁end|> | pub static ref ERROR_MUTEX: Mutex<()> = Mutex::new(());
} |
<|file_name|>no-implied-eval.js<|end_file_name|><|fim▁begin|>/**
* @fileoverview Rule to flag use of implied eval via setTimeout and setInterval
* @author James Allardice
*/
"use strict";
//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------
const astUtils = require("./utils/ast-utils");
const { getStaticValue } = require("eslint-utils");
//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------
module.exports = {
meta: {
type: "suggestion",
docs: {
description: "disallow the use of `eval()`-like methods",
category: "Best Practices",
recommended: false,
url: "https://eslint.org/docs/rules/no-implied-eval"
},
schema: [],
messages: {
impliedEval: "Implied eval. Consider passing a function instead of a string."
}
},
create(context) {
const EVAL_LIKE_FUNCS = Object.freeze(["setTimeout", "execScript", "setInterval"]);
const GLOBAL_CANDIDATES = Object.freeze(["global", "window", "globalThis"]);
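        // Flags e.g. setTimeout("doWork()", 100), where the string argument would
        // be implicitly eval'd at run time.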
/**
* Checks whether a node is evaluated as a string or not.
* @param {ASTNode} node A node to check.
* @returns {boolean} True if the node is evaluated as a string.
*/
function isEvaluatedString(node) {
if (
(node.type === "Literal" && typeof node.value === "string") ||
node.type === "TemplateLiteral"
) {
return true;
}
if (node.type === "BinaryExpression" && node.operator === "+") {
return isEvaluatedString(node.left) || isEvaluatedString(node.right);
}
return false;
}
/**
* Checks whether a node is an Identifier node named one of the specified names.
* @param {ASTNode} node A node to check.
* @param {string[]} specifiers Array of specified name.
* @returns {boolean} True if the node is a Identifier node which has specified name.
*/
function isSpecifiedIdentifier(node, specifiers) {
return node.type === "Identifier" && specifiers.includes(node.name);
}
/**
* Checks a given node is a MemberExpression node which has the specified name's
* property.
* @param {ASTNode} node A node to check.
* @param {string[]} specifiers Array of specified name.
* @returns {boolean} `true` if the node is a MemberExpression node which has
* the specified name's property
*/
function isSpecifiedMember(node, specifiers) {
return node.type === "MemberExpression" && specifiers.includes(astUtils.getStaticPropertyName(node));
}
/**
* Reports if the `CallExpression` node has evaluated argument.
* @param {ASTNode} node A CallExpression to check.
* @returns {void}
*/
function reportImpliedEvalCallExpression(node) {
const [firstArgument] = node.arguments;
if (firstArgument) {
const staticValue = getStaticValue(firstArgument, context.getScope());
const isStaticString = staticValue && typeof staticValue.value === "string";
const isString = isStaticString || isEvaluatedString(firstArgument);
if (isString) {<|fim▁hole|> context.report({
node,
messageId: "impliedEval"
});
}
}
}
/**
* Reports calls of `implied eval` via the global references.
* @param {Variable} globalVar A global variable to check.
* @returns {void}
*/
function reportImpliedEvalViaGlobal(globalVar) {
const { references, name } = globalVar;
references.forEach(ref => {
const identifier = ref.identifier;
let node = identifier.parent;
while (isSpecifiedMember(node, [name])) {
node = node.parent;
}
if (isSpecifiedMember(node, EVAL_LIKE_FUNCS)) {
const parent = node.parent;
if (parent.type === "CallExpression" && parent.callee === node) {
reportImpliedEvalCallExpression(parent);
}
}
});
}
//--------------------------------------------------------------------------
// Public
//--------------------------------------------------------------------------
return {
CallExpression(node) {
if (isSpecifiedIdentifier(node.callee, EVAL_LIKE_FUNCS)) {
reportImpliedEvalCallExpression(node);
}
},
"Program:exit"() {
const globalScope = context.getScope();
GLOBAL_CANDIDATES
.map(candidate => astUtils.getVariableByName(globalScope, candidate))
.filter(globalVar => !!globalVar && globalVar.defs.length === 0)
.forEach(reportImpliedEvalViaGlobal);
}
};
}
};<|fim▁end|> | |
<|file_name|>jodirectsend.component.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core'
import {JoDirectService, JoDirectResponse, JoDirectCode} from '../../services/jodirect.service'
@Component({
selector: `jodirect-send-page`,
template: `
<div id="parent">
<span [style.display]="itemDisplay">You can only send one message to a token, so make sure you don't make a mistake.</span><br>
<md-input-container [style.display]="itemDisplay">
<input mdInput placeholder="Token" [(ngModel)]="token">
</md-input-container>
<md-input-container [style.display]="itemDisplay">
<textarea mdInput cols="55" rows="15" placeholder="Message" [(ngModel)]="message"></textarea>
</md-input-container>
<span [style.display]="errorDisplay" style="color:red;font-style: italic;font-size: 14px">{{error}}</span>
<br>
<button md-raised-button color="accent" class="button" (click)="onSend()" [style.display]="itemDisplay">
Send
</button>
<span [style.display]="successDisplay">Successfully sent message!</span>
</div>
`,
styles: [`
#parent {
position: absolute;
transform: translateX(-50%) translateY(-50%);
left: 50%;
top: 50%;
text-align: center;
}
.button {<|fim▁hole|> margin-bottom: 10px;
}
`]
})
export class JoDirectSendComponent {
token = ''
message = ''
error = ''
errorDisplay = 'none'
itemDisplay = 'block'
successDisplay = 'none'
constructor(private joDirectService: JoDirectService) {
}
onSend() {
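        // Validate the form fields, then map service status codes to user-facing messages.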
this.error = ''
this.errorDisplay = 'none'
if (this.token === '') {
this.error = 'Token is empty!'
} else if (this.message === '') {
this.error = 'Message is empty!'
}
if (this.error !== '') {
this.errorDisplay = 'block'
} else {
this.joDirectService.send(this.token, this.message).forEach((response: JoDirectResponse) => {
if (response.status() === JoDirectCode.NO_ERROR) {
this.itemDisplay = 'none'
this.errorDisplay = 'none'
this.successDisplay = 'block'
} else if (response.status() === JoDirectCode.INVALID_TOKEN) {
this.error = this.token + ' does not exists!'
} else if (response.status() === JoDirectCode.MESSAGE_ALREADY_SENT) {
this.error = 'You already sent a message to this token!'
}
if (this.error !== '') {
this.errorDisplay = 'block'
}
})
}
}
}<|fim▁end|> | margin-top: 10px; |
<|file_name|>SharedBuffer.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2006, 2008 Apple Inc. All rights reserved.
* Copyright (C) Research In Motion Limited 2009-2010. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "flutter/sky/engine/platform/SharedBuffer.h"
#include "flutter/common/threads.h"
#include "flutter/sky/engine/public/platform/Platform.h"
#include "flutter/sky/engine/wtf/unicode/Unicode.h"
#include "flutter/sky/engine/wtf/unicode/UTF8.h"
#undef SHARED_BUFFER_STATS<|fim▁hole|>
#ifdef SHARED_BUFFER_STATS
#include "flutter/sky/engine/wtf/DataLog.h"
#endif
namespace blink {
static const unsigned segmentSize = 0x1000;
static const unsigned segmentPositionMask = 0x0FFF;
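// Bytes beyond the initial contiguous buffer are stored in 4 KiB segments,
// which are merged back into one contiguous buffer only when data() is called.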
static inline unsigned segmentIndex(unsigned position) {
return position / segmentSize;
}
static inline unsigned offsetInSegment(unsigned position) {
return position & segmentPositionMask;
}
static inline char* allocateSegment() {
return static_cast<char*>(fastMalloc(segmentSize));
}
static inline void freeSegment(char* p) {
fastFree(p);
}
#ifdef SHARED_BUFFER_STATS
static Mutex& statsMutex() {
DEFINE_STATIC_LOCAL(Mutex, mutex, ());
return mutex;
}
static HashSet<SharedBuffer*>& liveBuffers() {
DEFINE_STATIC_LOCAL(HashSet<SharedBuffer*>, buffers, ());
return buffers;
}
static bool sizeComparator(SharedBuffer* a, SharedBuffer* b) {
return a->size() > b->size();
}
static CString snippetForBuffer(SharedBuffer* sharedBuffer) {
const unsigned kMaxSnippetLength = 64;
char* snippet = 0;
unsigned snippetLength = std::min(sharedBuffer->size(), kMaxSnippetLength);
CString result = CString::newUninitialized(snippetLength, snippet);
const char* segment;
unsigned offset = 0;
while (unsigned segmentLength = sharedBuffer->getSomeData(segment, offset)) {
unsigned length = std::min(segmentLength, snippetLength - offset);
memcpy(snippet + offset, segment, length);
offset += segmentLength;
if (offset >= snippetLength)
break;
}
for (unsigned i = 0; i < snippetLength; ++i) {
if (!isASCIIPrintable(snippet[i]))
snippet[i] = '?';
}
return result;
}
static void printStats() {
MutexLocker locker(statsMutex());
Vector<SharedBuffer*> buffers;
for (HashSet<SharedBuffer*>::const_iterator iter = liveBuffers().begin();
iter != liveBuffers().end(); ++iter)
buffers.append(*iter);
std::sort(buffers.begin(), buffers.end(), sizeComparator);
dataLogF("---- Shared Buffer Stats ----\n");
for (size_t i = 0; i < buffers.size() && i < 64; ++i) {
CString snippet = snippetForBuffer(buffers[i]);
dataLogF("Buffer size=%8u %s\n", buffers[i]->size(), snippet.data());
}
}
static void didCreateSharedBuffer(SharedBuffer* buffer) {
MutexLocker locker(statsMutex());
liveBuffers().add(buffer);
Threads::UI()->PostTask(printStats);
}
static void willDestroySharedBuffer(SharedBuffer* buffer) {
MutexLocker locker(statsMutex());
liveBuffers().remove(buffer);
}
#endif
SharedBuffer::SharedBuffer()
: m_size(0), m_buffer(PurgeableVector::NotPurgeable) {
#ifdef SHARED_BUFFER_STATS
didCreateSharedBuffer(this);
#endif
}
SharedBuffer::SharedBuffer(size_t size)
: m_size(size), m_buffer(PurgeableVector::NotPurgeable) {
m_buffer.reserveCapacity(size);
m_buffer.grow(size);
#ifdef SHARED_BUFFER_STATS
didCreateSharedBuffer(this);
#endif
}
SharedBuffer::SharedBuffer(const char* data, int size)
: m_size(0), m_buffer(PurgeableVector::NotPurgeable) {
// FIXME: Use unsigned consistently, and check for invalid casts when calling
// into SharedBuffer from other code.
if (size < 0)
CRASH();
append(data, size);
#ifdef SHARED_BUFFER_STATS
didCreateSharedBuffer(this);
#endif
}
SharedBuffer::SharedBuffer(const char* data,
int size,
PurgeableVector::PurgeableOption purgeable)
: m_size(0), m_buffer(purgeable) {
// FIXME: Use unsigned consistently, and check for invalid casts when calling
// into SharedBuffer from other code.
if (size < 0)
CRASH();
append(data, size);
#ifdef SHARED_BUFFER_STATS
didCreateSharedBuffer(this);
#endif
}
SharedBuffer::SharedBuffer(const unsigned char* data, int size)
: m_size(0), m_buffer(PurgeableVector::NotPurgeable) {
// FIXME: Use unsigned consistently, and check for invalid casts when calling
// into SharedBuffer from other code.
if (size < 0)
CRASH();
append(reinterpret_cast<const char*>(data), size);
#ifdef SHARED_BUFFER_STATS
didCreateSharedBuffer(this);
#endif
}
SharedBuffer::~SharedBuffer() {
clear();
#ifdef SHARED_BUFFER_STATS
willDestroySharedBuffer(this);
#endif
}
PassRefPtr<SharedBuffer> SharedBuffer::adoptVector(Vector<char>& vector) {
RefPtr<SharedBuffer> buffer = create();
buffer->m_buffer.adopt(vector);
buffer->m_size = buffer->m_buffer.size();
return buffer.release();
}
unsigned SharedBuffer::size() const {
return m_size;
}
const char* SharedBuffer::data() const {
mergeSegmentsIntoBuffer();
return m_buffer.data();
}
void SharedBuffer::append(PassRefPtr<SharedBuffer> data) {
const char* segment;
size_t position = 0;
while (size_t length = data->getSomeData(segment, position)) {
append(segment, length);
position += length;
}
}
void SharedBuffer::append(const char* data, unsigned length) {
ASSERT(isLocked());
if (!length)
return;
ASSERT(m_size >= m_buffer.size());
unsigned positionInSegment = offsetInSegment(m_size - m_buffer.size());
m_size += length;
if (m_size <= segmentSize) {
// No need to use segments for small resource data.
m_buffer.append(data, length);
return;
}
char* segment;
if (!positionInSegment) {
segment = allocateSegment();
m_segments.append(segment);
} else
segment = m_segments.last() + positionInSegment;
unsigned segmentFreeSpace = segmentSize - positionInSegment;
unsigned bytesToCopy = std::min(length, segmentFreeSpace);
for (;;) {
memcpy(segment, data, bytesToCopy);
if (static_cast<unsigned>(length) == bytesToCopy)
break;
length -= bytesToCopy;
data += bytesToCopy;
segment = allocateSegment();
m_segments.append(segment);
bytesToCopy = std::min(length, segmentSize);
}
}
void SharedBuffer::append(const Vector<char>& data) {
append(data.data(), data.size());
}
void SharedBuffer::clear() {
for (unsigned i = 0; i < m_segments.size(); ++i)
freeSegment(m_segments[i]);
m_segments.clear();
m_size = 0;
m_buffer.clear();
}
PassRefPtr<SharedBuffer> SharedBuffer::copy() const {
RefPtr<SharedBuffer> clone(adoptRef(new SharedBuffer));
clone->m_size = m_size;
clone->m_buffer.reserveCapacity(m_size);
clone->m_buffer.append(m_buffer.data(), m_buffer.size());
if (!m_segments.isEmpty()) {
const char* segment = 0;
unsigned position = m_buffer.size();
while (unsigned segmentSize = getSomeData(segment, position)) {
clone->m_buffer.append(segment, segmentSize);
position += segmentSize;
}
ASSERT(position == clone->size());
}
return clone.release();
}
void SharedBuffer::mergeSegmentsIntoBuffer() const {
unsigned bufferSize = m_buffer.size();
if (m_size > bufferSize) {
m_buffer.reserveCapacity(m_size);
unsigned bytesLeft = m_size - bufferSize;
for (unsigned i = 0; i < m_segments.size(); ++i) {
unsigned bytesToCopy = std::min(bytesLeft, segmentSize);
m_buffer.append(m_segments[i], bytesToCopy);
bytesLeft -= bytesToCopy;
freeSegment(m_segments[i]);
}
m_segments.clear();
}
}
unsigned SharedBuffer::getSomeData(const char*& someData,
unsigned position) const {
ASSERT(isLocked());
unsigned totalSize = size();
if (position >= totalSize) {
someData = 0;
return 0;
}
ASSERT_WITH_SECURITY_IMPLICATION(position < m_size);
unsigned consecutiveSize = m_buffer.size();
if (position < consecutiveSize) {
someData = m_buffer.data() + position;
return consecutiveSize - position;
}
position -= consecutiveSize;
unsigned segments = m_segments.size();
unsigned maxSegmentedSize = segments * segmentSize;
unsigned segment = segmentIndex(position);
if (segment < segments) {
unsigned bytesLeft = totalSize - consecutiveSize;
unsigned segmentedSize = std::min(maxSegmentedSize, bytesLeft);
unsigned positionInSegment = offsetInSegment(position);
someData = m_segments[segment] + positionInSegment;
return segment == segments - 1 ? segmentedSize - position
: segmentSize - positionInSegment;
}
ASSERT_NOT_REACHED();
return 0;
}
sk_sp<SkData> SharedBuffer::getAsSkData() const {
unsigned bufferLength = size();
char* buffer = static_cast<char*>(sk_malloc_throw(bufferLength));
const char* segment = 0;
unsigned position = 0;
while (unsigned segmentSize = getSomeData(segment, position)) {
memcpy(buffer + position, segment, segmentSize);
position += segmentSize;
}
if (position != bufferLength) {
ASSERT_NOT_REACHED();
// Don't return the incomplete SkData.
return nullptr;
}
return SkData::MakeFromMalloc(buffer, bufferLength);
}
bool SharedBuffer::lock() {
return m_buffer.lock();
}
void SharedBuffer::unlock() {
mergeSegmentsIntoBuffer();
m_buffer.unlock();
}
bool SharedBuffer::isLocked() const {
return m_buffer.isLocked();
}
} // namespace blink<|fim▁end|> | |
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::sync::Arc;
use std::collections::{HashMap, HashSet};
use bigint::prelude::U256;
use bigint::hash::H256;
use util::Address;
use bytes::Bytes;
use {
CallType, Schedule, EnvInfo,
ReturnData, Ext, ContractCreateResult, MessageCallResult,
CreateContractAddress, Result, GasLeft,
};
pub struct FakeLogEntry {
pub topics: Vec<H256>,
pub data: Bytes
}
#[derive(PartialEq, Eq, Hash, Debug)]
pub enum FakeCallType {
Call, Create
}
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct FakeCall {
pub call_type: FakeCallType,
pub gas: U256,
pub sender_address: Option<Address>,
pub receive_address: Option<Address>,
pub value: Option<U256>,
pub data: Bytes,
pub code_address: Option<Address>,
}
/// Fake externalities test structure.
///
/// Can't do recursive calls.
#[derive(Default)]
pub struct FakeExt {
pub store: HashMap<H256, H256>,
pub suicides: HashSet<Address>,
pub calls: HashSet<FakeCall>,
pub sstore_clears: usize,<|fim▁hole|> pub depth: usize,
pub blockhashes: HashMap<U256, H256>,
pub codes: HashMap<Address, Arc<Bytes>>,
pub logs: Vec<FakeLogEntry>,
pub info: EnvInfo,
pub schedule: Schedule,
pub balances: HashMap<Address, U256>,
pub tracing: bool,
pub is_static: bool,
}
// similar to the normal `finalize` function, but ignoring NeedsReturn.
pub fn test_finalize(res: Result<GasLeft>) -> Result<U256> {
match res {
Ok(GasLeft::Known(gas)) => Ok(gas),
Ok(GasLeft::NeedsReturn{..}) => unimplemented!(), // since ret is unimplemented.
Err(e) => Err(e),
}
}
impl FakeExt {
pub fn new() -> Self {
FakeExt::default()
}
pub fn new_byzantium() -> Self {
let mut ext = FakeExt::default();
ext.schedule = Schedule::new_byzantium();
ext
}
}
impl Ext for FakeExt {
fn storage_at(&self, key: &H256) -> Result<H256> {
Ok(self.store.get(key).unwrap_or(&H256::new()).clone())
}
fn set_storage(&mut self, key: H256, value: H256) -> Result<()> {
self.store.insert(key, value);
Ok(())
}
fn exists(&self, address: &Address) -> Result<bool> {
Ok(self.balances.contains_key(address))
}
fn exists_and_not_null(&self, address: &Address) -> Result<bool> {
Ok(self.balances.get(address).map_or(false, |b| !b.is_zero()))
}
fn origin_balance(&self) -> Result<U256> {
unimplemented!()
}
fn balance(&self, address: &Address) -> Result<U256> {
Ok(self.balances[address])
}
fn blockhash(&mut self, number: &U256) -> H256 {
self.blockhashes.get(number).unwrap_or(&H256::new()).clone()
}
fn create(&mut self, gas: &U256, value: &U256, code: &[u8], _address: CreateContractAddress) -> ContractCreateResult {
self.calls.insert(FakeCall {
call_type: FakeCallType::Create,
gas: *gas,
sender_address: None,
receive_address: None,
value: Some(*value),
data: code.to_vec(),
code_address: None
});
ContractCreateResult::Failed
}
fn call(&mut self,
gas: &U256,
sender_address: &Address,
receive_address: &Address,
value: Option<U256>,
data: &[u8],
code_address: &Address,
_output: &mut [u8],
_call_type: CallType
) -> MessageCallResult {
self.calls.insert(FakeCall {
call_type: FakeCallType::Call,
gas: *gas,
sender_address: Some(sender_address.clone()),
receive_address: Some(receive_address.clone()),
value: value,
data: data.to_vec(),
code_address: Some(code_address.clone())
});
MessageCallResult::Success(*gas, ReturnData::empty())
}
fn extcode(&self, address: &Address) -> Result<Arc<Bytes>> {
Ok(self.codes.get(address).unwrap_or(&Arc::new(Bytes::new())).clone())
}
fn extcodesize(&self, address: &Address) -> Result<usize> {
Ok(self.codes.get(address).map_or(0, |c| c.len()))
}
fn log(&mut self, topics: Vec<H256>, data: &[u8]) -> Result<()> {
self.logs.push(FakeLogEntry {
topics: topics,
data: data.to_vec()
});
Ok(())
}
fn ret(self, _gas: &U256, _data: &ReturnData, _apply_state: bool) -> Result<U256> {
unimplemented!();
}
fn suicide(&mut self, refund_address: &Address) -> Result<()> {
self.suicides.insert(refund_address.clone());
Ok(())
}
fn schedule(&self) -> &Schedule {
&self.schedule
}
fn env_info(&self) -> &EnvInfo {
&self.info
}
fn depth(&self) -> usize {
self.depth
}
fn is_static(&self) -> bool {
self.is_static
}
fn inc_sstore_clears(&mut self) {
self.sstore_clears += 1;
}
fn trace_next_instruction(&mut self, _pc: usize, _instruction: u8) -> bool {
self.tracing
}
}<|fim▁end|> | |
<|file_name|>$VirtualScrollExample.ts<|end_file_name|><|fim▁begin|>import { Behavior } from '@aelea/core'
import { $text, component, style } from '@aelea/dom'
import { $card, $column, $row, $seperator, $TextField, $VirtualScroll, layoutSheet, ScrollRequest, ScrollResponse } from '@aelea/ui-components'
import { pallete } from '@aelea/ui-components-theme'
import { at, debounce, empty, join, map, merge, now, snapshot, startWith, switchLatest } from '@most/core'
import { Stream } from '@most/types'
function filterArrayByText(array: string[], filter: string) {
const filterLowercase = filter.toLocaleLowerCase()
return array.filter(id =>
id.indexOf(filterLowercase) > -1
)
}
const $label = (label: string, value: Stream<string> | string) => $row(layoutSheet.spacingSmall)(
$text(style({ color: pallete.foreground }))(label),
$text(value)
)
export const $VirtualScrollExample = component((
[scrollRequest, scrollRequestTether]: Behavior<ScrollRequest, ScrollRequest>,
[delayResponse, delayResponseTether]: Behavior<string, number>,
[filter, filterTether]: Behavior<string, string>,
) => {
const PAGE_SIZE = 25
const TOTAL_ITEMS = 1000
const formatNumber = Intl.NumberFormat().format
const initialDelayResponse = now(1600)
const delayWithInitial = merge(initialDelayResponse, delayResponse)
let i = 0
const $item = $text(style({ padding: '3px 10px' }))
const stubbedData = Array(TOTAL_ITEMS).fill(null).map(() =>
`item: ${Math.random().toString(36).substring(7)} ${formatNumber(++i)}`
)
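    // For each scroll request: filter the stubbed items, slice out the requested
    // page, and delay the response to emulate a REST round-trip.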
const dataSourceFilter = (filter: string) => join(
snapshot((delay, requestNumber): Stream<ScrollResponse> => {
const pageStart = requestNumber * PAGE_SIZE
const pageEnd = pageStart + PAGE_SIZE
const filteredItems = filterArrayByText(stubbedData, filter)
const $items = filteredItems.slice(pageStart, pageEnd).map(id => {
return $item(id)
})
return at(delay, { $items: $items, offset: 0, pageSize: PAGE_SIZE })
}, delayWithInitial, scrollRequest)
)
const filterText = startWith('', filter)
const debouncedFilterText = debounce(300, filterText)
return [
$column(layoutSheet.spacingBig)(
            $text(`High performance dynamically loaded list based on the Intersection Observer Web API. This example shows a very common pagination pattern, asynchronously fetching more pages REST-style`),
$row(layoutSheet.spacingBig)(
$label('Page: ', map(l => String(l), scrollRequest)),
$label(`Page Size:`, String(PAGE_SIZE)),
$label(`Total Items:`, String(TOTAL_ITEMS)),
),
$row(layoutSheet.spacingBig)(
$TextField({
label: 'Filter',
value: empty(),
                hint: 'Remove any items that do not match the filter and debounce changes by 300ms to prevent spamming',
containerOp: layoutSheet.flex
})({
change: filterTether()
}),
$TextField({
label: 'Delay Response(ms)',
value: initialDelayResponse,
hint: 'Emulate the duration of a datasource response, show a stubbed $node instead',
containerOp: layoutSheet.flex
})({
change: delayResponseTether(
map(Number)
)
}),
),
$seperator,
$card(style({ padding: 0 }))(
switchLatest(
map(searchText =>
$VirtualScroll({
dataSource: dataSourceFilter(searchText),<|fim▁hole|> , debouncedFilterText)
)
)
)
]
})<|fim▁end|> | containerOps: style({ padding: '8px', maxHeight: '400px' })
})({
scrollIndex: scrollRequestTether(),
}) |
<|file_name|>htmllinkelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use crate::dom::attr::Attr;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListBinding::DOMTokenListMethods;
use crate::dom::bindings::codegen::Bindings::HTMLLinkElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLLinkElementBinding::HTMLLinkElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::{DomRoot, MutNullableDom, RootedReference};
use crate::dom::bindings::str::DOMString;
use crate::dom::cssstylesheet::CSSStyleSheet;
use crate::dom::document::Document;
use crate::dom::domtokenlist::DOMTokenList;
use crate::dom::element::{
cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::element::{AttributeMutation, Element, ElementCreator};
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{document_from_node, window_from_node, Node, UnbindContext};
use crate::dom::stylesheet::StyleSheet as DOMStyleSheet;
use crate::dom::virtualmethods::VirtualMethods;
use crate::stylesheet_loader::{StylesheetContextSource, StylesheetLoader, StylesheetOwner};
use cssparser::{Parser as CssParser, ParserInput};
use dom_struct::dom_struct;
use embedder_traits::EmbedderMsg;
use html5ever::{LocalName, Prefix};
use net_traits::ReferrerPolicy;
use servo_arc::Arc;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::default::Default;
use style::attr::AttrValue;
use style::media_queries::MediaList;
use style::parser::ParserContext as CssParserContext;
use style::str::HTML_SPACE_CHARACTERS;
use style::stylesheets::{CssRuleType, Stylesheet};
use style_traits::ParsingMode;
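/// Generation counter used to discard stylesheet loads that were superseded by
/// a newer `href` mutation on the same <link> element.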
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
pub struct RequestGenerationId(u32);
impl RequestGenerationId {
fn increment(self) -> RequestGenerationId {
RequestGenerationId(self.0 + 1)
}
}
#[dom_struct]
pub struct HTMLLinkElement {
htmlelement: HTMLElement,
rel_list: MutNullableDom<DOMTokenList>,
#[ignore_malloc_size_of = "Arc"]
stylesheet: DomRefCell<Option<Arc<Stylesheet>>>,
cssom_stylesheet: MutNullableDom<CSSStyleSheet>,
/// <https://html.spec.whatwg.org/multipage/#a-style-sheet-that-is-blocking-scripts>
parser_inserted: Cell<bool>,
/// The number of loads that this link element has triggered (could be more
/// than one because of imports) and have not yet finished.
pending_loads: Cell<u32>,
/// Whether any of the loads have failed.
any_failed_load: Cell<bool>,
/// A monotonically increasing counter that keeps track of which stylesheet to apply.
request_generation_id: Cell<RequestGenerationId>,
}
impl HTMLLinkElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
creator: ElementCreator,
) -> HTMLLinkElement {
HTMLLinkElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
rel_list: Default::default(),
parser_inserted: Cell::new(creator.is_parser_created()),
stylesheet: DomRefCell::new(None),
cssom_stylesheet: MutNullableDom::new(None),
pending_loads: Cell::new(0),
any_failed_load: Cell::new(false),
request_generation_id: Cell::new(RequestGenerationId(0)),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
creator: ElementCreator,
) -> DomRoot<HTMLLinkElement> {
Node::reflect_node(
Box::new(HTMLLinkElement::new_inherited(
local_name, prefix, document, creator,
)),
document,
HTMLLinkElementBinding::Wrap,
)
}
pub fn get_request_generation_id(&self) -> RequestGenerationId {
self.request_generation_id.get()
}
// FIXME(emilio): These methods are duplicated with
// HTMLStyleElement::set_stylesheet.
pub fn set_stylesheet(&self, s: Arc<Stylesheet>) {
let doc = document_from_node(self);
if let Some(ref s) = *self.stylesheet.borrow() {
doc.remove_stylesheet(self.upcast(), s)
}
*self.stylesheet.borrow_mut() = Some(s.clone());
self.cssom_stylesheet.set(None);
doc.add_stylesheet(self.upcast(), s);
}
pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> {
self.stylesheet.borrow().clone()
}
pub fn get_cssom_stylesheet(&self) -> Option<DomRoot<CSSStyleSheet>> {
self.get_stylesheet().map(|sheet| {
self.cssom_stylesheet.or_init(|| {
CSSStyleSheet::new(
&window_from_node(self),
self.upcast::<Element>(),
"text/css".into(),
None, // todo handle location
None, // todo handle title
sheet,
)
})
})
}
pub fn is_alternate(&self) -> bool {
let rel = get_attr(self.upcast(), &local_name!("rel"));
match rel {
Some(ref value) => value
.split(HTML_SPACE_CHARACTERS)
.any(|s| s.eq_ignore_ascii_case("alternate")),
None => false,
}
}
}
fn get_attr(element: &Element, local_name: &LocalName) -> Option<String> {
let elem = element.get_attribute(&ns!(), local_name);
elem.map(|e| {
let value = e.value();
(**value).to_owned()
})
}
fn string_is_stylesheet(value: &Option<String>) -> bool {
match *value {
Some(ref value) => value
.split(HTML_SPACE_CHARACTERS)
.any(|s| s.eq_ignore_ascii_case("stylesheet")),
None => false,
}
}
/// Favicon spec usage in accordance with CEF implementation:
/// only url of icon is required/used
/// <https://html.spec.whatwg.org/multipage/#rel-icon>
fn is_favicon(value: &Option<String>) -> bool {
match *value {
Some(ref value) => value
.split(HTML_SPACE_CHARACTERS)
.any(|s| s.eq_ignore_ascii_case("icon") || s.eq_ignore_ascii_case("apple-touch-icon")),
None => false,
}
}
impl VirtualMethods for HTMLLinkElement {
fn super_type(&self) -> Option<&dyn VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if !self.upcast::<Node>().is_in_doc() || mutation.is_removal() {
return;
}
let rel = get_attr(self.upcast(), &local_name!("rel"));
match attr.local_name() {
&local_name!("href") => {
if string_is_stylesheet(&rel) {
self.handle_stylesheet_url(&attr.value());
} else if is_favicon(&rel) {
let sizes = get_attr(self.upcast(), &local_name!("sizes"));
self.handle_favicon_url(rel.as_ref().unwrap(), &attr.value(), &sizes);
}
},
&local_name!("sizes") => {
if is_favicon(&rel) {
if let Some(ref href) = get_attr(self.upcast(), &local_name!("href")) {
self.handle_favicon_url(
rel.as_ref().unwrap(),
href,
&Some(attr.value().to_string()),
);
}
}
},
_ => {},
}
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match name {
&local_name!("rel") => AttrValue::from_serialized_tokenlist(value.into()),
_ => self
.super_type()
.unwrap()
.parse_plain_attribute(name, value),
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc {
let element = self.upcast();
let rel = get_attr(element, &local_name!("rel"));
let href = get_attr(element, &local_name!("href"));
let sizes = get_attr(self.upcast(), &local_name!("sizes"));
match href {
Some(ref href) if string_is_stylesheet(&rel) => {
self.handle_stylesheet_url(href);
},
Some(ref href) if is_favicon(&rel) => {
self.handle_favicon_url(rel.as_ref().unwrap(), href, &sizes);
},
_ => {},
}
}
}
fn unbind_from_tree(&self, context: &UnbindContext) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(context);
}
if let Some(s) = self.stylesheet.borrow_mut().take() {
document_from_node(self).remove_stylesheet(self.upcast(), &s);
}
}
}
impl HTMLLinkElement {
/// <https://html.spec.whatwg.org/multipage/#concept-link-obtain>
fn handle_stylesheet_url(&self, href: &str) {
let document = document_from_node(self);
if document.browsing_context().is_none() {
return;
}
// Step 1.
if href.is_empty() {
return;
}
// Step 2.
let link_url = match document.base_url().join(href) {
Ok(url) => url,
Err(e) => {
debug!("Parsing url {} failed: {}", href, e);
return;
},
};
let element = self.upcast::<Element>();
// Step 3
let cors_setting = cors_setting_for_element(element);
let mq_attribute = element.get_attribute(&ns!(), &local_name!("media"));
let value = mq_attribute.r().map(|a| a.value());
let mq_str = match value {
Some(ref value) => &***value,
None => "",
};
let mut input = ParserInput::new(&mq_str);
let mut css_parser = CssParser::new(&mut input);
let doc_url = document.url();
let window = document.window();
// FIXME(emilio): This looks somewhat fishy, since we use the context
// only to parse the media query list, CssRuleType::Media doesn't make
// much sense.
let context = CssParserContext::new_for_cssom(
&doc_url,
Some(CssRuleType::Media),
ParsingMode::DEFAULT,
document.quirks_mode(),
window.css_error_reporter(),
None,
);
let media = MediaList::parse(&context, &mut css_parser);
let im_attribute = element.get_attribute(&ns!(), &local_name!("integrity"));
let integrity_val = im_attribute.r().map(|a| a.value());
let integrity_metadata = match integrity_val {
Some(ref value) => &***value,
None => "",
};
self.request_generation_id
.set(self.request_generation_id.get().increment());
// TODO: #8085 - Don't load external stylesheets if the node's mq
// doesn't match.
let loader = StylesheetLoader::for_element(self.upcast());
loader.load(
StylesheetContextSource::LinkElement { media: Some(media) },
link_url,
cors_setting,
integrity_metadata.to_owned(),
);
}
fn handle_favicon_url(&self, _rel: &str, href: &str, _sizes: &Option<String>) {
let document = document_from_node(self);
match document.base_url().join(href) {
Ok(url) => {
let window = document.window();
if window.is_top_level() {
let msg = EmbedderMsg::NewFavicon(url.clone());
window.send_to_embedder(msg);
}
},
Err(e) => debug!("Parsing url {} failed: {}", href, e),
}
}
}
impl StylesheetOwner for HTMLLinkElement {
fn increment_pending_loads_count(&self) {
self.pending_loads.set(self.pending_loads.get() + 1)
}
fn load_finished(&self, succeeded: bool) -> Option<bool> {
assert!(self.pending_loads.get() > 0, "What finished?");
if !succeeded {
self.any_failed_load.set(true);
}
self.pending_loads.set(self.pending_loads.get() - 1);
if self.pending_loads.get() != 0 {
return None;
}
let any_failed = self.any_failed_load.get();
self.any_failed_load.set(false);
Some(any_failed)
}
fn parser_inserted(&self) -> bool {
self.parser_inserted.get()
}
fn referrer_policy(&self) -> Option<ReferrerPolicy> {
if self.RelList().Contains("noreferrer".into()) {
return Some(ReferrerPolicy::NoReferrer);
}
None
}
fn set_origin_clean(&self, origin_clean: bool) {
if let Some(stylesheet) = self.get_cssom_stylesheet() {
stylesheet.set_origin_clean(origin_clean);
}
}
}
impl HTMLLinkElementMethods for HTMLLinkElement {
// https://html.spec.whatwg.org/multipage/#dom-link-href
make_url_getter!(Href, "href");
// https://html.spec.whatwg.org/multipage/#dom-link-href
make_setter!(SetHref, "href");
// https://html.spec.whatwg.org/multipage/#dom-link-rel
make_getter!(Rel, "rel");
// https://html.spec.whatwg.org/multipage/#dom-link-rel
fn SetRel(&self, rel: DOMString) {
self.upcast::<Element>()
.set_tokenlist_attribute(&local_name!("rel"), rel);
}
// https://html.spec.whatwg.org/multipage/#dom-link-media
make_getter!(Media, "media");
// https://html.spec.whatwg.org/multipage/#dom-link-media
make_setter!(SetMedia, "media");
// https://html.spec.whatwg.org/multipage/#dom-link-integrity
make_getter!(Integrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-link-integrity
make_setter!(SetIntegrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-link-hreflang
make_getter!(Hreflang, "hreflang");
// https://html.spec.whatwg.org/multipage/#dom-link-hreflang
make_setter!(SetHreflang, "hreflang");
// https://html.spec.whatwg.org/multipage/#dom-link-type
make_getter!(Type, "type");
// https://html.spec.whatwg.org/multipage/#dom-link-type
make_setter!(SetType, "type");
// https://html.spec.whatwg.org/multipage/#dom-link-rellist
fn RelList(&self) -> DomRoot<DOMTokenList> {
self.rel_list
.or_init(|| DOMTokenList::new(self.upcast(), &local_name!("rel")))
}
// https://html.spec.whatwg.org/multipage/#dom-link-charset
make_getter!(Charset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-link-charset
make_setter!(SetCharset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-link-rev
make_getter!(Rev, "rev");<|fim▁hole|> // https://html.spec.whatwg.org/multipage/#dom-link-rev
make_setter!(SetRev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-link-target
make_getter!(Target, "target");
// https://html.spec.whatwg.org/multipage/#dom-link-target
make_setter!(SetTarget, "target");
// https://html.spec.whatwg.org/multipage/#dom-link-crossorigin
fn GetCrossOrigin(&self) -> Option<DOMString> {
reflect_cross_origin_attribute(self.upcast::<Element>())
}
// https://html.spec.whatwg.org/multipage/#dom-link-crossorigin
fn SetCrossOrigin(&self, value: Option<DOMString>) {
set_cross_origin_attribute(self.upcast::<Element>(), value);
}
// https://drafts.csswg.org/cssom/#dom-linkstyle-sheet
fn GetSheet(&self) -> Option<DomRoot<DOMStyleSheet>> {
self.get_cssom_stylesheet().map(DomRoot::upcast)
}
}<|fim▁end|> | |
<|file_name|>landing-page.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, HostListener } from '@angular/core';
@Component({<|fim▁hole|>export class LandingPageComponent implements OnInit {
constructor() { }
@HostListener('window:scroll', ['$event'])
onScroll(event) {
const wScroll = window.scrollY;
if (wScroll <= document.getElementById('parallax-header').clientHeight) {
const parallaxLayer1 = <HTMLElement>document.getElementById('parallax-layer-1');
parallaxLayer1.style.transform = 'translate(0px, ' + wScroll / 5 + '%)';
const parallaxLayer2 = <HTMLElement>document.getElementById('parallax-layer-2');
parallaxLayer2.style.transform = 'translate(0px, ' + wScroll / 4 + '%)';
const parallaxLayer3 = <HTMLElement>document.getElementById('parallax-layer-3');
parallaxLayer3.style.transform = 'translate(0px, ' + wScroll / 8 + '%)';
}
}
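  // Note: the divisors (5, 4, 8) set each layer's scroll speed relative to
  // the page scroll; a larger divisor moves the layer more slowly, which
  // reads as being farther away.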
ngOnInit() {
}
}<|fim▁end|> | selector: 'app-landing-page',
templateUrl: './landing-page.component.html',
styleUrls: ['./landing-page.component.scss']
}) |
<|file_name|>PathStageN.py<|end_file_name|><|fim▁begin|>'''author@esilgard'''
#
# Copyright (c) 2014-2016 Fred Hutchinson Cancer Research Center
#
# Licensed under the Apache License, Version 2.0: http://www.apache.org/licenses/LICENSE-2.0
#
from OneFieldPerReport import OneFieldPerReport
import global_strings as gb
class PathStageN(OneFieldPerReport):
''' extract the explicit node staging '''
__version__ = 'PathStageN1.0'
def __init__(self):
super(PathStageN, self).__init__()
self.field_name = 'PathStageN'
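        # The regex below is meant to catch pathologic N-stage mentions such
        # as "pN2a", "ypN0", or "Pathological N1" (examples are illustrative).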
self.regex = r'(([PpYy]+|[pP]athological)[ ]*N[0123Xx][abc]?)'
self.confidence = .92
self.match_style = 'all'<|fim▁hole|><|fim▁end|> | self.table = gb.PATHOLOGY_TABLE
self.value_type = 'match' |
<|file_name|>test_frame.py<|end_file_name|><|fim▁begin|>"""
Test basic DataFrame functionality.
"""
import pandas as pd
import pytest
import weld.grizzly as gr
def get_frames(cls, strings):
"""
Returns two DataFrames for testing binary operators.
The DataFrames have columns of overlapping/different names, types, etc.
"""
df1 = pd.DataFrame({
'name': ['Bob', 'Sally', 'Kunal', 'Deepak', 'James', 'Pratiksha'],
'lastName': ['Kahn', 'Lopez', 'Smith', 'Narayanan', 'Thomas', 'Thaker'],
'age': [20, 30, 35, 20, 50, 35],
'score': [20.0, 30.0, 35.0, 50.0, 35.0, 25.0]
})
df2 = pd.DataFrame({
'firstName': ['Bob', 'Sally', 'Kunal', 'Deepak', 'James', 'Pratiksha'],
'lastName': ['Kahn', 'Lopez', 'smith', 'narayanan', 'Thomas', 'thaker'],
'age': [25, 30, 45, 20, 60, 35],
'scores': [20.0, 30.0, 35.0, 50.0, 35.0, 25.0]
})
if not strings:
df1 = df1.drop(['name', 'lastName'], axis=1)
df2 = df2.drop(['firstName', 'lastName'], axis=1)
return (cls(df1), cls(df2))
def _test_binop(pd_op, gr_op, strings=True):
"""
Test a binary operator.
Binary operators align on column name. For columns that don't exist in both
    DataFrames, the column is filled with NaN (for non-comparison operations)
    or False (for comparison operations).
If the RHS is a Series, the Series should be added to all columns.
"""
df1, df2 = get_frames(pd.DataFrame, strings)
gdf1, gdf2 = get_frames(gr.GrizzlyDataFrame, strings)<|fim▁hole|> assert expect.equals(result)
def test_evaluation():
    # Test to make sure that evaluating a DataFrame once caches the result
    # and doesn't cause another evaluation.
df1 = gr.GrizzlyDataFrame({
'age': [20, 30, 35, 20, 50, 35],
'score': [20.0, 30.0, 35.0, 50.0, 35.0, 25.0]
})
df2 = gr.GrizzlyDataFrame({
'age': [20, 30, 35, 20, 50, 35],
'scores': [20.0, 30.0, 35.0, 50.0, 35.0, 25.0]
})
df3 = (df1 + df2) * df2 + df1 / df2
assert not df3.is_value
df3.evaluate()
assert df3.is_value
weld_value = df3.weld_value
df3.evaluate()
# The same weld_value should be returned.
assert weld_value is df3.weld_value
def test_add():
_test_binop(pd.DataFrame.add, gr.GrizzlyDataFrame.add, strings=False)
def test_sub():
_test_binop(pd.DataFrame.sub, gr.GrizzlyDataFrame.sub, strings=False)
def test_mul():
_test_binop(pd.DataFrame.mul, gr.GrizzlyDataFrame.mul, strings=False)
def test_div():
_test_binop(pd.DataFrame.div, gr.GrizzlyDataFrame.div, strings=False)
def test_eq():
_test_binop(pd.DataFrame.eq, gr.GrizzlyDataFrame.eq, strings=True)
def test_ne():
_test_binop(pd.DataFrame.ne, gr.GrizzlyDataFrame.ne, strings=True)
def test_le():
_test_binop(pd.DataFrame.le, gr.GrizzlyDataFrame.le, strings=False)
def test_lt():
_test_binop(pd.DataFrame.lt, gr.GrizzlyDataFrame.lt, strings=False)
def test_ge():
_test_binop(pd.DataFrame.ge, gr.GrizzlyDataFrame.ge, strings=False)
def test_gt():
_test_binop(pd.DataFrame.gt, gr.GrizzlyDataFrame.gt, strings=False)<|fim▁end|> |
expect = pd_op(df1, df2)
result = gr_op(gdf1, gdf2).to_pandas() |
<|file_name|>RoleRepository.java<|end_file_name|><|fim▁begin|>package com.doctorAppointmentBookingSystem.repository;
import com.doctorAppointmentBookingSystem.entity.Role;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
/**<|fim▁hole|> */
@Repository
public interface RoleRepository extends JpaRepository<Role, Long> {
Role findOneByAuthority(String authority);
}<|fim▁end|> | * Created by Edi on 16-Apr-17. |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import Vue from 'vue'
import Router from 'vue-router'
import Resource from 'vue-resource'
import { sync } from 'vuex-router-sync'
Vue.use(Router)
Vue.use(Resource)
// components
import App from './components/App.vue'
import Login from './components/Login/Login.vue'
import Dashboard from './components/Dashboard/Dashboard.vue'
import Counter from './components/Counter/Counter.vue'
// model
import store from './vuex/store.js'
// routing
var router = new Router()
router.map({
'/login': {
component: Login
},
'/dashboard': {
component: Dashboard
},
'/counter': {
component: Counter
}
})
router.beforeEach(function() {
window.scrollTo(0, 0)
})
router.redirect({
'*': '/login'
})
sync(store, router)
router.start(App, 'body')
Vue.config.debug = true
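// The interceptor below attaches the stored JWT to every outgoing request
// and bounces the user back to /login whenever the server answers 401.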
Vue.http.interceptors.push({
request: function(request) {
        Vue.http.headers.common['Authorization'] = 'JWT ' + sessionStorage.getItem('token')<|fim▁hole|>
},
response: function(response) {
if (response.status === 401) {
router.go('/login')
}
return response
}
});<|fim▁end|> | |
<|file_name|>label.cpp<|end_file_name|><|fim▁begin|>/* _______ __ __ __ ______ __ __ _______ __ __
* / _____/\ / /\ / /\ / /\ / ____/\ / /\ / /\ / ___ /\ / |\/ /\
* / /\____\// / // / // / // /\___\// /_// / // /\_/ / // , |/ / /
* / / /__ / / // / // / // / / / ___ / // ___ / // /| ' / /
* / /_// /\ / /_// / // / // /_/_ / / // / // /\_/ / // / | / /
* /______/ //______/ //_/ //_____/\ /_/ //_/ //_/ //_/ //_/ /|_/ /
* \______\/ \______\/ \_\/ \_____\/ \_\/ \_\/ \_\/ \_\/ \_\/ \_\/
*
* Copyright (c) 2004, 2005 darkbits Js_./
* Per Larsson a.k.a finalman _RqZ{a<^_aa
* Olof Naessén a.k.a jansem/yakslem _asww7!uY`> )\a//
* _Qhm`] _f "'c 1!5m
* Visit: http://guichan.darkbits.org )Qk<P ` _: :+' .' "{[
* .)j(] .d_/ '-( P . S
* License: (BSD) <Td/Z <fP"5(\"??"\a. .L
* Redistribution and use in source and _dV>ws?a-?' ._/L #'
* binary forms, with or without )4d[#7r, . ' )d`)[
* modification, are permitted provided _Q-5'5W..j/?' -?!\)cam'
* that the following conditions are met: j<<WP+k/);. _W=j f
* 1. Redistributions of source code must .$%w\/]Q . ."' . mj$
* retain the above copyright notice, ]E.pYY(Q]>. a J@\
* this list of conditions and the j(]1u<sE"L,. . ./^ ]{a
* following disclaimer. 4'_uomm\. )L);-4 (3=
* 2. Redistributions in binary form must )_]X{Z('a_"a7'<a"a, ]"[
* reproduce the above copyright notice, #}<]m7`Za??4,P-"'7. ).m
* this list of conditions and the ]d2e)Q(<Q( ?94 b- LQ/
* following disclaimer in the <B!</]C)d_, '(<' .f. =C+m
* documentation and/or other materials .Z!=J ]e []('-4f _ ) -.)m]'
* provided with the distribution. .w[5]' _[ /.)_-"+? _/ <W"
* 3. Neither the name of Guichan nor the :$we` _! + _/ . j?
* names of its contributors may be used =3)= _f (_yQmWW$#( "
* to endorse or promote products derived - W, sQQQQmZQ#Wwa]..
* from this software without specific (js, \[QQW$QWW#?!V"".
* prior written permission. ]y:.<\.. .
* -]n w/ ' [.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT )/ )/ !
* HOLDERS AND CONTRIBUTORS "AS IS" AND ANY < (; sac , '
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, ]^ .- %
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF c < r
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR aga< <La
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE 5% )P'-3L
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR _bQf` y`..)a
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ,J?4P'.P"_(\?d'.,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES _Pa,)!f/<[]/ ?"
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT _2-..:. .r+_,.. .
* OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, ?a.<%"' " -'.a_ _,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ^
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/*
* For comments regarding functions please see the header file.
*/
#include "guichan/widgets/label.h"
#include "guichan/exception.h"
namespace gcn
{
Label::Label()
{
mAlignment = Graphics::LEFT;
}
Label::Label(const std::string& caption)
{
mCaption = caption;
mAlignment = Graphics::LEFT;
setWidth(getFont()->getWidth(caption));
setHeight(getFont()->getHeight());
}
const std::string &Label::getCaption() const
{
return mCaption;
}
void Label::setCaption(const std::string& caption)
{
mCaption = caption;
}
void Label::setAlignment(unsigned int alignment)
{
mAlignment = alignment;
}
unsigned int Label::getAlignment()
{
return mAlignment;
}
void Label::draw(Graphics* graphics)
{
int textX;
int textY = getHeight() / 2 - getFont()->getHeight() / 2;
<|fim▁hole|>
switch (getAlignment())
{
case Graphics::LEFT:
textX = 0;
break;
case Graphics::CENTER:
textX = getWidth() / 2;
break;
case Graphics::RIGHT:
textX = getWidth();
break;
default:
throw GCN_EXCEPTION("Unknown alignment.");
}
graphics->setFont(getFont());
graphics->setColor(getForegroundColor());
graphics->drawText(getCaption(), textX, textY, getAlignment());
}
void Label::drawBorder(Graphics* graphics)
{
Color faceColor = getBaseColor();
Color highlightColor, shadowColor;
int alpha = getBaseColor().a;
int width = getWidth() + getBorderSize() * 2 - 1;
int height = getHeight() + getBorderSize() * 2 - 1;
highlightColor = faceColor + 0x303030;
highlightColor.a = alpha;
shadowColor = faceColor - 0x303030;
shadowColor.a = alpha;
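        // Shifting the base colour by 0x303030 both ways produces the bevel:
        // the loop below draws the darker shade on the top/left edges and the
        // lighter shade on the bottom/right edges.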
unsigned int i;
for (i = 0; i < getBorderSize(); ++i)
{
graphics->setColor(shadowColor);
graphics->drawLine(i,i, width - i, i);
graphics->drawLine(i,i + 1, i, height - i - 1);
graphics->setColor(highlightColor);
graphics->drawLine(width - i,i + 1, width - i, height - i);
graphics->drawLine(i,height - i, width - i - 1, height - i);
}
}
void Label::adjustSize()
{
setWidth(getFont()->getWidth(getCaption()));
setHeight(getFont()->getHeight());
}
}<|fim▁end|> | |
<|file_name|>options.js<|end_file_name|><|fim▁begin|>// ------------------------------------
// #POSTCSS - LOAD OPTIONS - OPTIONS
// ------------------------------------
'use strict'
/**
*
* @method options
*
* @param {Object} options PostCSS Config
*
* @return {Object} options PostCSS Options
*/
module.exports = function options (options) {
if (options.parser) {
options.parser = require(options.parser)<|fim▁hole|> }
if (options.stringifier) {
options.stringifier = require(options.stringifier)
}
if (options.plugins) {
delete options.plugins
}
return options
}<|fim▁end|> | }
if (options.syntax) {
options.syntax = require(options.syntax) |
<|file_name|>htmltableelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::attr::Attr;
use crate::dom::bindings::codegen::Bindings::HTMLCollectionBinding::HTMLCollectionMethods;
use crate::dom::bindings::codegen::Bindings::HTMLTableElementBinding;
use crate::dom::bindings::codegen::Bindings::HTMLTableElementBinding::HTMLTableElementMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::root::{Dom, DomRoot, LayoutDom, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::{AttributeMutation, Element, RawLayoutElementHelpers};
use crate::dom::htmlcollection::{CollectionFilter, HTMLCollection};
use crate::dom::htmlelement::HTMLElement;
use crate::dom::htmltablecaptionelement::HTMLTableCaptionElement;
use crate::dom::htmltablecolelement::HTMLTableColElement;
use crate::dom::htmltablerowelement::HTMLTableRowElement;
use crate::dom::htmltablesectionelement::HTMLTableSectionElement;
use crate::dom::node::{document_from_node, window_from_node, Node};
use crate::dom::virtualmethods::VirtualMethods;
use cssparser::RGBA;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
use std::cell::Cell;
use style::attr::{parse_unsigned_integer, AttrValue, LengthOrPercentageOrAuto};
#[dom_struct]
pub struct HTMLTableElement {
htmlelement: HTMLElement,
border: Cell<Option<u32>>,
cellspacing: Cell<Option<u32>>,
tbodies: MutNullableDom<HTMLCollection>,
}
#[allow(unrooted_must_root)]
#[derive(JSTraceable, MallocSizeOf)]
struct TableRowFilter {
sections: Vec<Dom<Node>>,
}
impl CollectionFilter for TableRowFilter {
fn filter(&self, elem: &Element, root: &Node) -> bool {
elem.is::<HTMLTableRowElement>() &&
(root.is_parent_of(elem.upcast()) ||
self.sections
.iter()
.any(|ref section| section.is_parent_of(elem.upcast())))
}
}
impl HTMLTableElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLTableElement {
HTMLTableElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
border: Cell::new(None),
cellspacing: Cell::new(None),
tbodies: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLTableElement> {
Node::reflect_node(
Box::new(HTMLTableElement::new_inherited(
local_name, prefix, document,
)),
document,
HTMLTableElementBinding::Wrap,
)
}
pub fn get_border(&self) -> Option<u32> {
self.border.get()
}
// https://html.spec.whatwg.org/multipage/#dom-table-thead
// https://html.spec.whatwg.org/multipage/#dom-table-tfoot
fn get_first_section_of_type(
&self,
atom: &LocalName,
) -> Option<DomRoot<HTMLTableSectionElement>> {
self.upcast::<Node>()
.child_elements()
.find(|n| n.is::<HTMLTableSectionElement>() && n.local_name() == atom)
.and_then(|n| n.downcast().map(DomRoot::from_ref))
}
// https://html.spec.whatwg.org/multipage/#dom-table-thead
// https://html.spec.whatwg.org/multipage/#dom-table-tfoot
fn set_first_section_of_type<P>(
&self,
atom: &LocalName,
section: Option<&HTMLTableSectionElement>,
reference_predicate: P,
) -> ErrorResult
where
P: FnMut(&DomRoot<Element>) -> bool,
{
if let Some(e) = section {
if e.upcast::<Element>().local_name() != atom {
return Err(Error::HierarchyRequest);
}
}
self.delete_first_section_of_type(atom);
let node = self.upcast::<Node>();
if let Some(section) = section {
let reference_element = node.child_elements().find(reference_predicate);
let reference_node = reference_element.as_ref().map(|e| e.upcast());
node.InsertBefore(section.upcast(), reference_node)?;
}
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-table-createthead
// https://html.spec.whatwg.org/multipage/#dom-table-createtfoot
fn create_section_of_type(&self, atom: &LocalName) -> DomRoot<HTMLTableSectionElement> {
if let Some(section) = self.get_first_section_of_type(atom) {
return section;
}
let section = HTMLTableSectionElement::new(atom.clone(), None, &document_from_node(self));
match atom {
&local_name!("thead") => self.SetTHead(Some(§ion)),
&local_name!("tfoot") => self.SetTFoot(Some(§ion)),
_ => unreachable!("unexpected section type"),
}
.expect("unexpected section type");
section
}
// https://html.spec.whatwg.org/multipage/#dom-table-deletethead
// https://html.spec.whatwg.org/multipage/#dom-table-deletetfoot
fn delete_first_section_of_type(&self, atom: &LocalName) {
if let Some(thead) = self.get_first_section_of_type(atom) {
thead.upcast::<Node>().remove_self();
}
}
fn get_rows(&self) -> TableRowFilter {
TableRowFilter {
sections: self
.upcast::<Node>()
.children()
.filter_map(|ref node| {
node.downcast::<HTMLTableSectionElement>()
.map(|_| Dom::from_ref(&**node))
})
.collect(),
}
}
}
impl HTMLTableElementMethods for HTMLTableElement {
// https://html.spec.whatwg.org/multipage/#dom-table-rows
fn Rows(&self) -> DomRoot<HTMLCollection> {
let filter = self.get_rows();
HTMLCollection::new(&window_from_node(self), self.upcast(), Box::new(filter))
}
// https://html.spec.whatwg.org/multipage/#dom-table-caption
fn GetCaption(&self) -> Option<DomRoot<HTMLTableCaptionElement>> {
self.upcast::<Node>()
.children()
.filter_map(DomRoot::downcast)
.next()
}
// https://html.spec.whatwg.org/multipage/#dom-table-caption
fn SetCaption(&self, new_caption: Option<&HTMLTableCaptionElement>) {
if let Some(ref caption) = self.GetCaption() {
caption.upcast::<Node>().remove_self();
}
<|fim▁hole|> node.InsertBefore(caption.upcast(), node.GetFirstChild().as_deref())
.expect("Insertion failed");
}
}
// https://html.spec.whatwg.org/multipage/#dom-table-createcaption
fn CreateCaption(&self) -> DomRoot<HTMLTableCaptionElement> {
match self.GetCaption() {
Some(caption) => caption,
None => {
let caption = HTMLTableCaptionElement::new(
local_name!("caption"),
None,
&document_from_node(self),
);
self.SetCaption(Some(&caption));
caption
},
}
}
// https://html.spec.whatwg.org/multipage/#dom-table-deletecaption
fn DeleteCaption(&self) {
if let Some(caption) = self.GetCaption() {
caption.upcast::<Node>().remove_self();
}
}
// https://html.spec.whatwg.org/multipage/#dom-table-thead
fn GetTHead(&self) -> Option<DomRoot<HTMLTableSectionElement>> {
self.get_first_section_of_type(&local_name!("thead"))
}
// https://html.spec.whatwg.org/multipage/#dom-table-thead
fn SetTHead(&self, thead: Option<&HTMLTableSectionElement>) -> ErrorResult {
self.set_first_section_of_type(&local_name!("thead"), thead, |n| {
!n.is::<HTMLTableCaptionElement>() && !n.is::<HTMLTableColElement>()
})
}
// https://html.spec.whatwg.org/multipage/#dom-table-createthead
fn CreateTHead(&self) -> DomRoot<HTMLTableSectionElement> {
self.create_section_of_type(&local_name!("thead"))
}
// https://html.spec.whatwg.org/multipage/#dom-table-deletethead
fn DeleteTHead(&self) {
self.delete_first_section_of_type(&local_name!("thead"))
}
// https://html.spec.whatwg.org/multipage/#dom-table-tfoot
fn GetTFoot(&self) -> Option<DomRoot<HTMLTableSectionElement>> {
self.get_first_section_of_type(&local_name!("tfoot"))
}
// https://html.spec.whatwg.org/multipage/#dom-table-tfoot
fn SetTFoot(&self, tfoot: Option<&HTMLTableSectionElement>) -> ErrorResult {
self.set_first_section_of_type(&local_name!("tfoot"), tfoot, |n| {
if n.is::<HTMLTableCaptionElement>() || n.is::<HTMLTableColElement>() {
return false;
}
if n.is::<HTMLTableSectionElement>() {
let name = n.local_name();
if name == &local_name!("thead") || name == &local_name!("tbody") {
return false;
}
}
true
})
}
// https://html.spec.whatwg.org/multipage/#dom-table-createtfoot
fn CreateTFoot(&self) -> DomRoot<HTMLTableSectionElement> {
self.create_section_of_type(&local_name!("tfoot"))
}
// https://html.spec.whatwg.org/multipage/#dom-table-deletetfoot
fn DeleteTFoot(&self) {
self.delete_first_section_of_type(&local_name!("tfoot"))
}
// https://html.spec.whatwg.org/multipage/#dom-table-tbodies
fn TBodies(&self) -> DomRoot<HTMLCollection> {
#[derive(JSTraceable)]
struct TBodiesFilter;
impl CollectionFilter for TBodiesFilter {
fn filter(&self, elem: &Element, root: &Node) -> bool {
elem.is::<HTMLTableSectionElement>() &&
elem.local_name() == &local_name!("tbody") &&
elem.upcast::<Node>().GetParentNode().as_deref() == Some(root)
}
}
self.tbodies.or_init(|| {
let window = window_from_node(self);
let filter = Box::new(TBodiesFilter);
HTMLCollection::create(&window, self.upcast(), filter)
})
}
// https://html.spec.whatwg.org/multipage/#dom-table-createtbody
fn CreateTBody(&self) -> DomRoot<HTMLTableSectionElement> {
let tbody =
HTMLTableSectionElement::new(local_name!("tbody"), None, &document_from_node(self));
let node = self.upcast::<Node>();
let last_tbody = node
.rev_children()
.filter_map(DomRoot::downcast::<Element>)
.find(|n| n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody"));
let reference_element = last_tbody.and_then(|t| t.upcast::<Node>().GetNextSibling());
node.InsertBefore(tbody.upcast(), reference_element.as_deref())
.expect("Insertion failed");
tbody
}
// https://html.spec.whatwg.org/multipage/#dom-table-insertrow
fn InsertRow(&self, index: i32) -> Fallible<DomRoot<HTMLTableRowElement>> {
let rows = self.Rows();
let number_of_row_elements = rows.Length();
if index < -1 || index > number_of_row_elements as i32 {
return Err(Error::IndexSize);
}
let new_row = HTMLTableRowElement::new(local_name!("tr"), None, &document_from_node(self));
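        // Three cases follow, mirroring the spec: an empty table appends the
        // row to the last tbody (creating one if needed); index == -1 or
        // index == row count appends after the last row; otherwise the row is
        // inserted before the index-th row under that row's own parent.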
let node = self.upcast::<Node>();
if number_of_row_elements == 0 {
// append new row to last or new tbody in table
if let Some(last_tbody) = node
.rev_children()
.filter_map(DomRoot::downcast::<Element>)
.find(|n| {
n.is::<HTMLTableSectionElement>() && n.local_name() == &local_name!("tbody")
})
{
last_tbody
.upcast::<Node>()
.AppendChild(new_row.upcast::<Node>())
.expect("InsertRow failed to append first row.");
} else {
let tbody = self.CreateTBody();
node.AppendChild(tbody.upcast())
.expect("InsertRow failed to append new tbody.");
tbody
.upcast::<Node>()
.AppendChild(new_row.upcast::<Node>())
.expect("InsertRow failed to append first row.");
}
} else if index == number_of_row_elements as i32 || index == -1 {
// append new row to parent of last row in table
let last_row = rows
.Item(number_of_row_elements - 1)
.expect("InsertRow failed to find last row in table.");
let last_row_parent = last_row
.upcast::<Node>()
.GetParentNode()
.expect("InsertRow failed to find parent of last row in table.");
last_row_parent
.upcast::<Node>()
.AppendChild(new_row.upcast::<Node>())
.expect("InsertRow failed to append last row.");
} else {
// insert new row before the index-th row in rows using the same parent
let ith_row = rows
.Item(index as u32)
.expect("InsertRow failed to find a row in table.");
let ith_row_parent = ith_row
.upcast::<Node>()
.GetParentNode()
.expect("InsertRow failed to find parent of a row in table.");
ith_row_parent
.upcast::<Node>()
.InsertBefore(new_row.upcast::<Node>(), Some(ith_row.upcast::<Node>()))
.expect("InsertRow failed to append row");
}
Ok(new_row)
}
// https://html.spec.whatwg.org/multipage/#dom-table-deleterow
fn DeleteRow(&self, mut index: i32) -> Fallible<()> {
let rows = self.Rows();
// Step 1.
if index == -1 {
index = rows.Length() as i32 - 1;
}
// Step 2.
if index < 0 || index as u32 >= rows.Length() {
return Err(Error::IndexSize);
}
// Step 3.
DomRoot::upcast::<Node>(rows.Item(index as u32).unwrap()).remove_self();
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-table-bgcolor
make_getter!(BgColor, "bgcolor");
// https://html.spec.whatwg.org/multipage/#dom-table-bgcolor
make_legacy_color_setter!(SetBgColor, "bgcolor");
// https://html.spec.whatwg.org/multipage/#dom-table-width
make_getter!(Width, "width");
// https://html.spec.whatwg.org/multipage/#dom-table-width
make_nonzero_dimension_setter!(SetWidth, "width");
}
pub trait HTMLTableElementLayoutHelpers {
fn get_background_color(&self) -> Option<RGBA>;
fn get_border(&self) -> Option<u32>;
fn get_cellspacing(&self) -> Option<u32>;
fn get_width(&self) -> LengthOrPercentageOrAuto;
}
impl HTMLTableElementLayoutHelpers for LayoutDom<HTMLTableElement> {
#[allow(unsafe_code)]
fn get_background_color(&self) -> Option<RGBA> {
unsafe {
(*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("bgcolor"))
.and_then(AttrValue::as_color)
.cloned()
}
}
#[allow(unsafe_code)]
fn get_border(&self) -> Option<u32> {
unsafe { (*self.unsafe_get()).border.get() }
}
#[allow(unsafe_code)]
fn get_cellspacing(&self) -> Option<u32> {
unsafe { (*self.unsafe_get()).cellspacing.get() }
}
#[allow(unsafe_code)]
fn get_width(&self) -> LengthOrPercentageOrAuto {
unsafe {
(*self.upcast::<Element>().unsafe_get())
.get_attr_for_layout(&ns!(), &local_name!("width"))
.map(AttrValue::as_dimension)
.cloned()
.unwrap_or(LengthOrPercentageOrAuto::Auto)
}
}
}
impl VirtualMethods for HTMLTableElement {
fn super_type(&self) -> Option<&dyn VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match *attr.local_name() {
local_name!("border") => {
// According to HTML5 § 14.3.9, invalid values map to 1px.
self.border.set(
mutation
.new_value(attr)
.map(|value| parse_unsigned_integer(value.chars()).unwrap_or(1)),
);
},
local_name!("cellspacing") => {
self.cellspacing.set(
mutation
.new_value(attr)
.and_then(|value| parse_unsigned_integer(value.chars()).ok()),
);
},
_ => {},
}
}
fn parse_plain_attribute(&self, local_name: &LocalName, value: DOMString) -> AttrValue {
match *local_name {
local_name!("border") => AttrValue::from_u32(value.into(), 1),
local_name!("width") => AttrValue::from_nonzero_dimension(value.into()),
local_name!("bgcolor") => AttrValue::from_legacy_color(value.into()),
_ => self
.super_type()
.unwrap()
.parse_plain_attribute(local_name, value),
}
}
}<|fim▁end|> | if let Some(caption) = new_caption {
let node = self.upcast::<Node>(); |
<|file_name|>ex2-comments-and-pound-characters.py<|end_file_name|><|fim▁begin|># A comment, this is so you can read your program later.
# Anything after the # is ignored by python.
print "I could have code like this." # and the comment after is ignored
# You can also use a comment to "disable" or comment out a piece of code:
# print "This won't run."
<|fim▁hole|>print "This will run."<|fim▁end|> | |
<|file_name|>_managed_database_queries_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
resource_group_name: str,
managed_instance_name: str,
database_name: str,
query_id: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"managedInstanceName": _SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
"databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
"queryId": _SERIALIZER.url("query_id", query_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_by_query_request(
resource_group_name: str,
managed_instance_name: str,
database_name: str,
query_id: str,
subscription_id: str,
*,
start_time: Optional[str] = None,
end_time: Optional[str] = None,
interval: Optional[Union[str, "_models.QueryTimeGrainType"]] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-11-01-preview"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}/statistics')
path_format_arguments = {
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str'),
"managedInstanceName": _SERIALIZER.url("managed_instance_name", managed_instance_name, 'str'),
"databaseName": _SERIALIZER.url("database_name", database_name, 'str'),
"queryId": _SERIALIZER.url("query_id", query_id, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if start_time is not None:
query_parameters['startTime'] = _SERIALIZER.query("start_time", start_time, 'str')
if end_time is not None:
query_parameters['endTime'] = _SERIALIZER.query("end_time", end_time, 'str')
if interval is not None:
query_parameters['interval'] = _SERIALIZER.query("interval", interval, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class ManagedDatabaseQueriesOperations(object):
"""ManagedDatabaseQueriesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.sql.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
query_id: str,
**kwargs: Any
) -> "_models.ManagedInstanceQuery":
"""Get query by query id.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param managed_instance_name: The name of the managed instance.
:type managed_instance_name: str
:param database_name: The name of the database.
:type database_name: str
:param query_id:
:type query_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedInstanceQuery, or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.ManagedInstanceQuery
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedInstanceQuery"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
database_name=database_name,
query_id=query_id,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagedInstanceQuery', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}'} # type: ignore
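    # Illustrative usage (resource names are hypothetical):
    #   query = client.managed_database_queries.get(
    #       resource_group_name="my-rg", managed_instance_name="my-mi",
    #       database_name="my-db", query_id="42")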
@distributed_trace
def list_by_query(
self,
resource_group_name: str,
managed_instance_name: str,
database_name: str,
query_id: str,
start_time: Optional[str] = None,
end_time: Optional[str] = None,
interval: Optional[Union[str, "_models.QueryTimeGrainType"]] = None,
**kwargs: Any
) -> Iterable["_models.ManagedInstanceQueryStatistics"]:<|fim▁hole|>
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal.
:type resource_group_name: str
:param managed_instance_name: The name of the managed instance.
:type managed_instance_name: str
:param database_name: The name of the database.
:type database_name: str
:param query_id:
:type query_id: str
:param start_time: Start time for observed period.
:type start_time: str
:param end_time: End time for observed period.
:type end_time: str
:param interval: The time step to be used to summarize the metric values.
:type interval: str or ~azure.mgmt.sql.models.QueryTimeGrainType
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagedInstanceQueryStatistics or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.sql.models.ManagedInstanceQueryStatistics]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedInstanceQueryStatistics"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_query_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
database_name=database_name,
query_id=query_id,
subscription_id=self._config.subscription_id,
start_time=start_time,
end_time=end_time,
interval=interval,
template_url=self.list_by_query.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_query_request(
resource_group_name=resource_group_name,
managed_instance_name=managed_instance_name,
database_name=database_name,
query_id=query_id,
subscription_id=self._config.subscription_id,
start_time=start_time,
end_time=end_time,
interval=interval,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ManagedInstanceQueryStatistics", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_query.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/managedInstances/{managedInstanceName}/databases/{databaseName}/queries/{queryId}/statistics'} # type: ignore<|fim▁end|> | """Get query execution statistics by query id. |
<|file_name|>mathLawObject.ts<|end_file_name|><|fim▁begin|>import { MathObject } from "../index";
export abstract class MathLawObject implements MathObject {
item: MathObject;
args: any[];
constructor(item: MathObject, args: any[]) {
this.item = item;
this.args = args;
}
abstract isApplicable(): boolean;
abstract apply(): void;
abstract explanation(): string;
abstract definition(): string;<|fim▁hole|> }
}<|fim▁end|> |
toLiteral(): string {
return this.definition(); |
<|file_name|>termcolors.py<|end_file_name|><|fim▁begin|>"""
termcolors.py
"""
from django.utils import six
color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
foreground = {color_names[x]: '3%s' % x for x in range(8)}
background = {color_names[x]: '4%s' % x for x in range(8)}
RESET = '0'
opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
def colorize(text='', opts=(), **kwargs):
"""
Returns your text, enclosed in ANSI graphics codes.
Depends on the keyword arguments 'fg' and 'bg', and the contents of
the opts tuple/list.
Returns the RESET code if no parameters are given.
Valid colors:
'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
Valid options:
'bold'
'underscore'
'blink'
'reverse'
'conceal'
'noreset' - string will not be auto-terminated with the RESET code
Examples:
colorize('hello', fg='red', bg='blue', opts=('blink',))
colorize()
colorize('goodbye', opts=('underscore',))
print(colorize('first line', fg='red', opts=('noreset',)))
print('this should be red too')
print(colorize('and so should this'))
print('this should not be red')
"""
code_list = []
if text == '' and len(opts) == 1 and opts[0] == 'reset':
return '\x1b[%sm' % RESET
for k, v in six.iteritems(kwargs):
if k == 'fg':
code_list.append(foreground[v])
elif k == 'bg':
code_list.append(background[v])
for o in opts:
if o in opt_dict:
code_list.append(opt_dict[o])
if 'noreset' not in opts:
text = '%s\x1b[%sm' % (text or '', RESET)
return '%s%s' % (('\x1b[%sm' % ';'.join(code_list)), text or '')
def make_style(opts=(), **kwargs):
"""
Returns a function with default parameters for colorize()
Example:
bold_red = make_style(opts=('bold',), fg='red')
print(bold_red('hello'))
KEYWORD = make_style(fg='yellow')
COMMENT = make_style(fg='blue', opts=('bold',))
"""
return lambda text: colorize(text, opts, **kwargs)
NOCOLOR_PALETTE = 'nocolor'
DARK_PALETTE = 'dark'
LIGHT_PALETTE = 'light'
PALETTES = {
NOCOLOR_PALETTE: {
'ERROR': {},
'WARNING': {},
'NOTICE': {},
'SQL_FIELD': {},
'SQL_COLTYPE': {},
'SQL_KEYWORD': {},
'SQL_TABLE': {},
'HTTP_INFO': {},
'HTTP_SUCCESS': {},
'HTTP_REDIRECT': {},
'HTTP_NOT_MODIFIED': {},
'HTTP_BAD_REQUEST': {},
'HTTP_NOT_FOUND': {},
'HTTP_SERVER_ERROR': {},
'MIGRATE_HEADING': {},
'MIGRATE_LABEL': {},
'MIGRATE_SUCCESS': {},
'MIGRATE_FAILURE': {},
},
DARK_PALETTE: {
'ERROR': {'fg': 'red', 'opts': ('bold',)},
'WARNING': {'fg': 'yellow', 'opts': ('bold',)},
'NOTICE': {'fg': 'red'},
'SQL_FIELD': {'fg': 'green', 'opts': ('bold',)},
'SQL_COLTYPE': {'fg': 'green'},
'SQL_KEYWORD': {'fg': 'yellow'},
'SQL_TABLE': {'opts': ('bold',)},
'HTTP_INFO': {'opts': ('bold',)},
'HTTP_SUCCESS': {},
'HTTP_REDIRECT': {'fg': 'green'},
'HTTP_NOT_MODIFIED': {'fg': 'cyan'},
'HTTP_BAD_REQUEST': {'fg': 'red', 'opts': ('bold',)},
'HTTP_NOT_FOUND': {'fg': 'yellow'},
'HTTP_SERVER_ERROR': {'fg': 'magenta', 'opts': ('bold',)},
'MIGRATE_HEADING': {'fg': 'cyan', 'opts': ('bold',)},
'MIGRATE_LABEL': {'opts': ('bold',)},
'MIGRATE_SUCCESS': {'fg': 'green', 'opts': ('bold',)},
'MIGRATE_FAILURE': {'fg': 'red', 'opts': ('bold',)},
},
LIGHT_PALETTE: {
'ERROR': {'fg': 'red', 'opts': ('bold',)},
'WARNING': {'fg': 'yellow', 'opts': ('bold',)},
'NOTICE': {'fg': 'red'},
'SQL_FIELD': {'fg': 'green', 'opts': ('bold',)},
'SQL_COLTYPE': {'fg': 'green'},
'SQL_KEYWORD': {'fg': 'blue'},
'SQL_TABLE': {'opts': ('bold',)},
'HTTP_INFO': {'opts': ('bold',)},
'HTTP_SUCCESS': {},
'HTTP_REDIRECT': {'fg': 'green', 'opts': ('bold',)},
'HTTP_NOT_MODIFIED': {'fg': 'green'},
'HTTP_BAD_REQUEST': {'fg': 'red', 'opts': ('bold',)},
'HTTP_NOT_FOUND': {'fg': 'red'},
'HTTP_SERVER_ERROR': {'fg': 'magenta', 'opts': ('bold',)},
'MIGRATE_HEADING': {'fg': 'cyan', 'opts': ('bold',)},
'MIGRATE_LABEL': {'opts': ('bold',)},
'MIGRATE_SUCCESS': {'fg': 'green', 'opts': ('bold',)},
'MIGRATE_FAILURE': {'fg': 'red', 'opts': ('bold',)},
}
}
DEFAULT_PALETTE = DARK_PALETTE
def parse_color_setting(config_string):
"""Parse a DJANGO_COLORS environment variable to produce the system palette
    The general form of a palette definition is:
"palette;role=fg;role=fg/bg;role=fg,option,option;role=fg/bg,option,option"
where:
palette is a named palette; one of 'light', 'dark', or 'nocolor'.
role is a named style used by Django
        fg is a foreground color.
bg is a background color.
        option is a display option.<|fim▁hole|>
Valid roles:
'error', 'notice', 'sql_field', 'sql_coltype', 'sql_keyword', 'sql_table',
'http_info', 'http_success', 'http_redirect', 'http_bad_request',
'http_not_found', 'http_server_error'
Valid colors:
'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
Valid options:
'bold', 'underscore', 'blink', 'reverse', 'conceal'
"""
if not config_string:
return PALETTES[DEFAULT_PALETTE]
# Split the color configuration into parts
parts = config_string.lower().split(';')
palette = PALETTES[NOCOLOR_PALETTE].copy()
for part in parts:
if part in PALETTES:
# A default palette has been specified
palette.update(PALETTES[part])
elif '=' in part:
# Process a palette defining string
definition = {}
# Break the definition into the role,
# plus the list of specific instructions.
# The role must be in upper case
role, instructions = part.split('=')
role = role.upper()
styles = instructions.split(',')
styles.reverse()
# The first instruction can contain a slash
# to break apart fg/bg.
colors = styles.pop().split('/')
colors.reverse()
fg = colors.pop()
if fg in color_names:
definition['fg'] = fg
if colors and colors[-1] in color_names:
definition['bg'] = colors[-1]
# All remaining instructions are options
opts = tuple(s for s in styles if s in opt_dict.keys())
if opts:
definition['opts'] = opts
# The nocolor palette has all available roles.
# Use that palette as the basis for determining
# if the role is valid.
if role in PALETTES[NOCOLOR_PALETTE] and definition:
palette[role] = definition
# If there are no colors specified, return the empty palette.
if palette == PALETTES[NOCOLOR_PALETTE]:
return None
return palette<|fim▁end|> |
Specifying a named palette is the same as manually specifying the individual
    definitions for each role. Any individual definitions following the palette
definition will augment the base palette definition. |
<|file_name|>test_detci-4.py<|end_file_name|><|fim▁begin|>import pytest
from forte.solvers import solver_factory, HF, ActiveSpaceSolver<|fim▁hole|> """CASCI test of Forte DETCI using the SparseList algorithm to build the sigma vector"""
ref_hf_energy = -99.977636678461636
ref_fci_energy = -100.113732484560970
xyz = """
F
H 1 1.0
"""
input = solver_factory(molecule=xyz, basis='6-31g')
state = input.state(charge=0, multiplicity=1, sym='a1')
hf = HF(input, state=state, e_convergence=1.0e-12, d_convergence=1.0e-8)
# create a detci solver
fci = ActiveSpaceSolver(
hf,
type='detci',
states=state,
mo_spaces=input.mo_spaces(frozen_docc=[1, 0, 0, 0]),
options={'active_ref_type': 'cas'}
)
fci.run()
# check results
assert hf.value('hf energy') == pytest.approx(ref_hf_energy, 1.0e-10)
assert fci.value('active space energy')[state] == pytest.approx([ref_fci_energy], 1.0e-10)
if __name__ == "__main__":
test_detci_4()<|fim▁end|> |
def test_detci_4(): |
<|file_name|>info.cpp<|end_file_name|><|fim▁begin|>#include <ctype.h>
#include "driver.h"
#include "sound/samples.h"
#include "info.h"
#include "datafile.h"
/* Output format indentation */
/* Indentation */
#define INDENT "\t"
/* Possible output format */
#define OUTPUT_FORMAT_UNFORMATTED 0
#define OUTPUT_FORMAT_ONE_LEVEL 1
#define OUTPUT_FORMAT_TWO_LEVEL 2
/* Output format */
#define OUTPUT_FORMAT OUTPUT_FORMAT_ONE_LEVEL
/* Output format configuration
L list
1,2 levels
B,S,E Begin, Separator, End
*/
#if OUTPUT_FORMAT == OUTPUT_FORMAT_UNFORMATTED
#define L1B "("
#define L1P " "
#define L1N ""
#define L1E ")"
#define L2B "("
#define L2P " "
#define L2N ""
#define L2E ")"
#elif OUTPUT_FORMAT == OUTPUT_FORMAT_ONE_LEVEL
#define L1B " (\n"
#define L1P INDENT
#define L1N "\n"
#define L1E ")\n\n"
#define L2B " ("
#define L2P " "
#define L2N ""
#define L2E " )"
#elif OUTPUT_FORMAT == OUTPUT_FORMAT_TWO_LEVEL
#define L1B " (\n"
#define L1P INDENT
#define L1N "\n"
#define L1E ")\n\n"
#define L2B " (\n"
#define L2P INDENT INDENT
#define L2N "\n"
#define L2E INDENT ")"
#else
#error Wrong OUTPUT_FORMAT
#endif
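/* With OUTPUT_FORMAT_ONE_LEVEL selected above, a record is emitted roughly
 * as (illustrative):
 *     game (
 *         name puckman
 *         description "PuckMan"
 *     )
 * i.e. one L1 item per line, with L2 sub-lists kept on a single line.
 */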
/* Print a string in C format */
static void print_c_string(FILE* out, const char* s) {
fprintf(out, "\"");
if (s) {
while (*s) {
switch (*s) {
case '\a' : fprintf(out, "\\a"); break;
case '\b' : fprintf(out, "\\b"); break;
case '\f' : fprintf(out, "\\f"); break;
case '\n' : fprintf(out, "\\n"); break;
case '\r' : fprintf(out, "\\r"); break;
case '\t' : fprintf(out, "\\t"); break;
case '\v' : fprintf(out, "\\v"); break;
case '\\' : fprintf(out, "\\\\"); break;
case '\"' : fprintf(out, "\\\""); break;
default:
if (*s>=' ' && *s<='~')
fprintf(out, "%c", *s);
else
fprintf(out, "\\x%02x", (unsigned)(unsigned char)*s);
}
++s;
}
}
fprintf(out, "\"");
}
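/* For instance, print_c_string(out, "a\tb") writes "a\tb" with the tab as a
   two-character escape, and any byte outside the printable range as \xNN. */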
/* Print a string in statement format (remove space, parenthesis, ") */
static void print_statement_string(FILE* out, const char* s) {
if (s) {
while (*s) {
if (isspace(*s)) {
fprintf(out, "_");
} else {
switch (*s) {
case '(' :
case ')' :
case '"' :
fprintf(out, "_");
break;
default:
fprintf(out, "%c", *s);
}
}
++s;
}
} else {
fprintf(out, "null");
}
}
static void print_game_switch(FILE* out, const struct GameDriver* game) {
const struct InputPortTiny* input = game->input_ports;
while ((input->type & ~IPF_MASK) != IPT_END) {
if ((input->type & ~IPF_MASK)==IPT_DIPSWITCH_NAME) {
int def = input->default_value;
const char* def_name = 0;
fprintf(out, L1P "dipswitch" L2B);
fprintf(out, L2P "name " );
print_c_string(out,input->name);
fprintf(out, "%s", L2N);
++input;
while ((input->type & ~IPF_MASK)==IPT_DIPSWITCH_SETTING) {
if (def == input->default_value)
def_name = input->name;
fprintf(out, L2P "entry " );
print_c_string(out,input->name);
fprintf(out, "%s", L2N);
++input;
}
if (def_name) {
fprintf(out, L2P "default ");
print_c_string(out,def_name);
fprintf(out, "%s", L2N);
}
fprintf(out, L2E L1N);
}
else
++input;
}
}
static void print_game_input(FILE* out, const struct GameDriver* game) {
const struct InputPortTiny* input = game->input_ports;
int nplayer = 0;
const char* control = 0;
int nbutton = 0;
int ncoin = 0;
const char* service = 0;
const char* tilt = 0;
while ((input->type & ~IPF_MASK) != IPT_END) {
switch (input->type & IPF_PLAYERMASK) {
case IPF_PLAYER1:
if (nplayer<1) nplayer = 1;
break;
case IPF_PLAYER2:
if (nplayer<2) nplayer = 2;
break;
case IPF_PLAYER3:
if (nplayer<3) nplayer = 3;
break;
case IPF_PLAYER4:
if (nplayer<4) nplayer = 4;
break;
}
switch (input->type & ~IPF_MASK) {
case IPT_JOYSTICK_UP:
case IPT_JOYSTICK_DOWN:
case IPT_JOYSTICK_LEFT:
case IPT_JOYSTICK_RIGHT:
if (input->type & IPF_2WAY)
control = "joy2way";
else if (input->type & IPF_4WAY)
control = "joy4way";
else
control = "joy8way";
break;
case IPT_JOYSTICKRIGHT_UP:
case IPT_JOYSTICKRIGHT_DOWN:
case IPT_JOYSTICKRIGHT_LEFT:
case IPT_JOYSTICKRIGHT_RIGHT:
case IPT_JOYSTICKLEFT_UP:
case IPT_JOYSTICKLEFT_DOWN:
case IPT_JOYSTICKLEFT_LEFT:
case IPT_JOYSTICKLEFT_RIGHT:
if (input->type & IPF_2WAY)
control = "doublejoy2way";
else if (input->type & IPF_4WAY)
control = "doublejoy4way";
else
control = "doublejoy8way";
break;
case IPT_BUTTON1:
if (nbutton<1) nbutton = 1;
break;
case IPT_BUTTON2:
if (nbutton<2) nbutton = 2;
break;
case IPT_BUTTON3:
if (nbutton<3) nbutton = 3;
break;
case IPT_BUTTON4:
if (nbutton<4) nbutton = 4;
break;
case IPT_BUTTON5:
if (nbutton<5) nbutton = 5;
break;
case IPT_BUTTON6:
if (nbutton<6) nbutton = 6;
break;
case IPT_BUTTON7:
if (nbutton<7) nbutton = 7;
break;
case IPT_BUTTON8:
if (nbutton<8) nbutton = 8;
break;
case IPT_PADDLE:
control = "paddle";
break;
case IPT_DIAL:
control = "dial";
break;
case IPT_TRACKBALL_X:
case IPT_TRACKBALL_Y:
control = "trackball";
break;
case IPT_AD_STICK_X:
case IPT_AD_STICK_Y:
control = "stick";
break;
case IPT_COIN1:
if (ncoin < 1) ncoin = 1;
break;
case IPT_COIN2:
if (ncoin < 2) ncoin = 2;
break;
case IPT_COIN3:
if (ncoin < 3) ncoin = 3;
break;
case IPT_COIN4:
if (ncoin < 4) ncoin = 4;
break;
case IPT_SERVICE :
service = "yes";
break;
case IPT_TILT :
tilt = "yes";
break;
}
++input;
}
fprintf(out, L1P "input" L2B);
fprintf(out, L2P "players %d" L2N, nplayer );
if (control)
fprintf(out, L2P "control %s" L2N, control );
if (nbutton)
fprintf(out, L2P "buttons %d" L2N, nbutton );
if (ncoin)
fprintf(out, L2P "coins %d" L2N, ncoin );
if (service)
fprintf(out, L2P "service %s" L2N, service );
if (tilt)
fprintf(out, L2P "tilt %s" L2N, tilt );
fprintf(out, L2E L1N);
}
static void print_game_rom(FILE* out, const struct GameDriver* game) {
const struct RomModule *rom = game->rom, *p_rom = NULL;
extern struct GameDriver driver_0;
if (!rom) return;
if (game->clone_of && game->clone_of != &driver_0) {
fprintf(out, L1P "romof %s" L1N, game->clone_of->name);
}
while (rom->name || rom->offset || rom->length) {
int region = rom->crc;
rom++;
while (rom->length) {
char name[100];
int offset, length, crc, in_parent;
sprintf(name,rom->name,game->name);
offset = rom->offset;
crc = rom->crc;
in_parent = 0;
length = 0;
do {
if (rom->name == (char *)-1)
length = 0; /* restart */
length += rom->length & ~ROMFLAG_MASK;
rom++;
} while (rom->length && (rom->name == 0 || rom->name == (char *)-1));
if(game->clone_of && crc)
{
p_rom = game->clone_of->rom;
if (p_rom)
while( !in_parent && (p_rom->name || p_rom->offset || p_rom->length) )
{
p_rom++;
while(!in_parent && p_rom->length) {
do {
if (p_rom->crc == crc)
in_parent = 1;
else
p_rom++;
} while (!in_parent && p_rom->length && (p_rom->name == 0 || p_rom->name == (char *)-1));
}
}
}
fprintf(out, L1P "rom" L2B);
if (*name)
fprintf(out, L2P "name %s" L2N, name);
if(in_parent && p_rom && p_rom->name)
fprintf(out, L2P "merge %s" L2N, p_rom->name);
fprintf(out, L2P "size %d" L2N, length);
fprintf(out, L2P "crc %08x" L2N, crc);
switch (region & ~REGIONFLAG_MASK)
{
case REGION_CPU1: fprintf(out, L2P "region cpu1" L2N); break;
case REGION_CPU2: fprintf(out, L2P "region cpu2" L2N); break;
case REGION_CPU3: fprintf(out, L2P "region cpu3" L2N); break;
case REGION_CPU4: fprintf(out, L2P "region cpu4" L2N); break;
case REGION_CPU5: fprintf(out, L2P "region cpu5" L2N); break;
case REGION_CPU6: fprintf(out, L2P "region cpu6" L2N); break;
case REGION_CPU7: fprintf(out, L2P "region cpu7" L2N); break;
case REGION_CPU8: fprintf(out, L2P "region cpu8" L2N); break;
case REGION_GFX1: fprintf(out, L2P "region gfx1" L2N); break;
case REGION_GFX2: fprintf(out, L2P "region gfx2" L2N); break;
case REGION_GFX3: fprintf(out, L2P "region gfx3" L2N); break;
case REGION_GFX4: fprintf(out, L2P "region gfx4" L2N); break;
case REGION_GFX5: fprintf(out, L2P "region gfx5" L2N); break;
case REGION_GFX6: fprintf(out, L2P "region gfx6" L2N); break;
case REGION_GFX7: fprintf(out, L2P "region gfx7" L2N); break;
case REGION_GFX8: fprintf(out, L2P "region gfx8" L2N); break;
case REGION_PROMS: fprintf(out, L2P "region proms" L2N); break;
case REGION_SOUND1: fprintf(out, L2P "region sound1" L2N); break;
case REGION_SOUND2: fprintf(out, L2P "region sound2" L2N); break;
case REGION_SOUND3: fprintf(out, L2P "region sound3" L2N); break;
case REGION_SOUND4: fprintf(out, L2P "region sound4" L2N); break;
case REGION_SOUND5: fprintf(out, L2P "region sound5" L2N); break;
case REGION_SOUND6: fprintf(out, L2P "region sound6" L2N); break;
case REGION_SOUND7: fprintf(out, L2P "region sound7" L2N); break;
case REGION_SOUND8: fprintf(out, L2P "region sound8" L2N); break;
case REGION_USER1: fprintf(out, L2P "region user1" L2N); break;
case REGION_USER2: fprintf(out, L2P "region user2" L2N); break;
case REGION_USER3: fprintf(out, L2P "region user3" L2N); break;
case REGION_USER4: fprintf(out, L2P "region user4" L2N); break;
case REGION_USER5: fprintf(out, L2P "region user5" L2N); break;
case REGION_USER6: fprintf(out, L2P "region user6" L2N); break;
case REGION_USER7: fprintf(out, L2P "region user7" L2N); break;
case REGION_USER8: fprintf(out, L2P "region user8" L2N); break;
default: fprintf(out, L2P "region 0x%x" L2N, region & ~REGIONFLAG_MASK);
}
switch (region & REGIONFLAG_MASK)
{
case 0:
break;
case REGIONFLAG_SOUNDONLY:
fprintf(out, L2P "flags soundonly" L2N);
break;
case REGIONFLAG_DISPOSE:
fprintf(out, L2P "flags dispose" L2N);
break;
default:
fprintf(out, L2P "flags 0x%x" L2N, region & REGIONFLAG_MASK);
}
fprintf(out, L2P "offs %x", offset);
fprintf(out, L2E L1N);
}
}
}
static void print_game_sample(FILE* out, const struct GameDriver* game) {
#if (HAS_SAMPLES)
int i;
for( i = 0; game->drv->sound[i].sound_type && i < MAX_SOUND; i++ )
{
const char **samplenames = NULL;
if( game->drv->sound[i].sound_type != SOUND_SAMPLES )
continue;
samplenames = ((struct Samplesinterface *)game->drv->sound[i].sound_interface)->samplenames;
if (samplenames != 0 && samplenames[0] != 0) {
int k = 0;
if (samplenames[k][0]=='*') {
/* output sampleof only if different from game name */
if (strcmp(samplenames[k] + 1, game->name)!=0) {
fprintf(out, L1P "sampleof %s" L1N, samplenames[k] + 1);
}
++k;
}
while (samplenames[k] != 0) {
/* Check if is not empty */
if (*samplenames[k]) {
/* Check if sample is duplicate */
int l = 0;
while (l<k && strcmp(samplenames[k],samplenames[l])!=0)
++l;
if (l==k) {
fprintf(out, L1P "sample %s" L1N, samplenames[k]);
}
}
++k;
}
}
}
#endif
}
static void print_game_micro(FILE* out, const struct GameDriver* game)
{
const struct MachineDriver* driver = game->drv;
const struct MachineCPU* cpu = driver->cpu;
const struct MachineSound* sound = driver->sound;
int j;
for(j=0;j<MAX_CPU;++j)
{
if (cpu[j].cpu_type!=0)
{
fprintf(out, L1P "chip" L2B);
if (cpu[j].cpu_type & CPU_AUDIO_CPU)
fprintf(out, L2P "type cpu flags audio" L2N);
else
fprintf(out, L2P "type cpu" L2N);
fprintf(out, L2P "name ");
print_statement_string(out, cputype_name(cpu[j].cpu_type));
fprintf(out, "%s", L2N);
fprintf(out, L2P "clock %d" L2N, cpu[j].cpu_clock);
fprintf(out, L2E L1N);
}
}
for(j=0;j<MAX_SOUND;++j) if (sound[j].sound_type)
{
if (sound[j].sound_type)
{
int num = sound_num(&sound[j]);
int l;
if (num == 0) num = 1;
for(l=0;l<num;++l)
{
fprintf(out, L1P "chip" L2B);
fprintf(out, L2P "type audio" L2N);
fprintf(out, L2P "name ");
print_statement_string(out, sound_name(&sound[j]));
fprintf(out, "%s", L2N);
if (sound_clock(&sound[j]))
fprintf(out, L2P "clock %d" L2N, sound_clock(&sound[j]));
fprintf(out, L2E L1N);
}
}
}
}
static void print_game_video(FILE* out, const struct GameDriver* game)
{
const struct MachineDriver* driver = game->drv;
int dx;
int dy;
int showxy;
int orientation;
fprintf(out, L1P "video" L2B);
if (driver->video_attributes & VIDEO_TYPE_VECTOR)
{
fprintf(out, L2P "screen vector" L2N);
showxy = 0;
}
else
{
fprintf(out, L2P "screen raster" L2N);
showxy = 1;
}
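/* Games flagged ORIENTATION_SWAP_XY store their visible area rotated, so
   the width and height are swapped before being reported. */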
if (game->flags & ORIENTATION_SWAP_XY)
{
dx = driver->default_visible_area.max_y - driver->default_visible_area.min_y + 1;
dy = driver->default_visible_area.max_x - driver->default_visible_area.min_x + 1;
orientation = 1;
}
else
{
dx = driver->default_visible_area.max_x - driver->default_visible_area.min_x + 1;
dy = driver->default_visible_area.max_y - driver->default_visible_area.min_y + 1;
orientation = 0;
}
fprintf(out, L2P "orientation %s" L2N, orientation ? "vertical" : "horizontal" );
if (showxy)
{
fprintf(out, L2P "x %d" L2N, dx);
fprintf(out, L2P "y %d" L2N, dy);
}
fprintf(out, L2P "colors %d" L2N, driver->total_colors);
fprintf(out, L2P "freq %f" L2N, driver->frames_per_second);
fprintf(out, L2E L1N);
}
static void print_game_sound(FILE* out, const struct GameDriver* game) {
const struct MachineDriver* driver = game->drv;
const struct MachineCPU* cpu = driver->cpu;
const struct MachineSound* sound = driver->sound;
/* check if the game has sound emulation */
int has_sound = 0;
int i;
i = 0;
while (i < MAX_SOUND && !has_sound)
{
if (sound[i].sound_type)
has_sound = 1;
++i;
}
i = 0;
while (i < MAX_CPU && !has_sound)
{
if ((cpu[i].cpu_type & CPU_AUDIO_CPU)!=0)
has_sound = 1;
++i;
}
fprintf(out, L1P "sound" L2B);
/* sound channel */
if (has_sound) {
if (driver->sound_attributes & SOUND_SUPPORTS_STEREO)
fprintf(out, L2P "channels 2" L2N);
else
fprintf(out, L2P "channels 1" L2N);
} else
fprintf(out, L2P "channels 0" L2N);
fprintf(out, L2E L1N);
}
#define HISTORY_BUFFER_MAX 16384
static void print_game_history(FILE* out, const struct GameDriver* game) {
char buffer[HISTORY_BUFFER_MAX];
if (load_driver_history(game,buffer,HISTORY_BUFFER_MAX)==0) {
fprintf(out, L1P "history ");
print_c_string(out, buffer);
fprintf(out, "%s", L1N);
}
}
static void print_game_driver(FILE* out, const struct GameDriver* game) {
fprintf(out, L1P "driver" L2B);
if (game->flags & GAME_NOT_WORKING)
fprintf(out, L2P "status preliminary" L2N);
else
fprintf(out, L2P "status good" L2N);
if (game->flags & GAME_WRONG_COLORS)
fprintf(out, L2P "color preliminary" L2N);
else if (game->flags & GAME_IMPERFECT_COLORS)
fprintf(out, L2P "color imperfect" L2N);
else
fprintf(out, L2P "color good" L2N);
if (game->flags & GAME_NO_SOUND)
fprintf(out, L2P "sound preliminary" L2N);
else if (game->flags & GAME_IMPERFECT_SOUND)
fprintf(out, L2P "sound imperfect" L2N);
else
fprintf(out, L2P "sound good" L2N);
if (game->flags & GAME_REQUIRES_16BIT)
fprintf(out, L2P "colordeep 16" L2N);
else
fprintf(out, L2P "colordeep 8" L2N);
fprintf(out, L2E L1N);
}
/* Print the MAME info record for a game */
static void print_game_info(FILE* out, const struct GameDriver* game) {
#ifndef MESS
fprintf(out, "game" L1B );
#else
fprintf(out, "machine" L1B );
#endif
fprintf(out, L1P "name %s" L1N, game->name );
if (game->description) {
fprintf(out, L1P "description ");
print_c_string(out, game->description );
fprintf(out, "%s", L1N);
}
/* print the year only if it is a number */
<|fim▁hole|> if (game->year && strspn(game->year,"0123456789")==strlen(game->year)) {
fprintf(out, L1P "year %s" L1N, game->year );
}
if (game->manufacturer) {
fprintf(out, L1P "manufacturer ");
print_c_string(out, game->manufacturer );
fprintf(out, "%s", L1N);
}
print_game_history(out,game);
if (game->clone_of && !(game->clone_of->flags & NOT_A_DRIVER)) {
fprintf(out, L1P "cloneof %s" L1N, game->clone_of->name);
}
print_game_rom(out,game);
print_game_sample(out,game);
print_game_micro(out,game);
print_game_video(out,game);
print_game_sound(out,game);
print_game_input(out,game);
print_game_switch(out,game);
print_game_driver(out,game);
fprintf(out, L1E);
}
/* Print all the MAME info database */
void print_mame_info(FILE* out, const struct GameDriver* games[]) {
int j;
for(j=0;games[j];++j)
print_game_info( out, games[j] );
#ifndef MESS
/* additional fixed record */
fprintf(out, "resource" L1B);
fprintf(out, L1P "name neogeo" L1N);
fprintf(out, L1P "description \"Neo Geo BIOS\"" L1N);
fprintf(out, L1P "rom" L2B);
fprintf(out, L2P "name neo-geo.rom" L2N);
fprintf(out, L2P "size 131072" L2N);
fprintf(out, L2P "crc 9036d879" L2N);
fprintf(out, L2E L1N);
fprintf(out, L1P "rom" L2B);
fprintf(out, L2P "name ng-sm1.rom" L2N);
fprintf(out, L2P "size 131072" L2N);
fprintf(out, L2P "crc 97cf998b" L2N);
fprintf(out, L2E L1N);
fprintf(out, L1P "rom" L2B);
fprintf(out, L2P "name ng-sfix.rom" L2N);
fprintf(out, L2P "size 131072" L2N);
fprintf(out, L2P "crc 354029fc" L2N);
fprintf(out, L2E L1N);
fprintf(out, L1E);
#endif
}<|fim▁end|> | |
<|file_name|>match-range-fail.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
match "wow" {<|fim▁hole|> //~| start type: &'static str
//~| end type: &'static str
match "wow" {
10 ... "what" => ()
};
//~^^ ERROR only char and numeric types are allowed in range
//~| start type: _
//~| end type: &'static str
match 5 {
'c' ... 100 => { }
_ => { }
};
//~^^^ ERROR mismatched types in range
//~| expected char
//~| found integral variable
}<|fim▁end|> | "bar" ... "foo" => { }
};
//~^^ ERROR only char and numeric types are allowed in range |
<|file_name|>AuthTokenProvider.js<|end_file_name|><|fim▁begin|>"use strict";
/**
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
Object.defineProperty(exports, "__esModule", { value: true });
var util_1 = require("./util/util");
/**
* Abstraction around FirebaseApp's token fetching capabilities.
*/
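// Illustrative usage sketch -- the `app` object is assumed to be an already
// initialized FirebaseApp instance; it is not defined in this file:
//
//   var provider = new AuthTokenProvider(app);
//   provider.getToken(false).then(function (tokenData) {
//     // tokenData is null or a FirebaseAuthTokenData object
//   });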
var AuthTokenProvider = /** @class */ (function () {
/**
* @param {!FirebaseApp} app_
*/
function AuthTokenProvider(app_) {
this.app_ = app_;
}
/**
* @param {boolean} forceRefresh
* @return {!Promise<FirebaseAuthTokenData>}
*/
AuthTokenProvider.prototype.getToken = function (forceRefresh) {
return this.app_['INTERNAL']['getToken'](forceRefresh).then(null,
// .catch
function (error) {
// TODO: Need to figure out all the cases this is raised and whether
// this makes sense.
if (error && error.code === 'auth/token-not-initialized') {
util_1.log('Got auth/token-not-initialized error. Treating as null token.');
return null;
}
else {
return Promise.reject(error);
}
});
};
AuthTokenProvider.prototype.addTokenChangeListener = function (listener) {
// TODO: We might want to wrap the listener and call it with no args to
// avoid a leaky abstraction, but that makes removing the listener harder.<|fim▁hole|> };
AuthTokenProvider.prototype.notifyForInvalidToken = function () {
var errorMessage = 'Provided authentication credentials for the app named "' +
this.app_.name +
'" are invalid. This usually indicates your app was not ' +
'initialized correctly. ';
if ('credential' in this.app_.options) {
errorMessage +=
'Make sure the "credential" property provided to initializeApp() ' +
'is authorized to access the specified "databaseURL" and is from the correct ' +
'project.';
}
else if ('serviceAccount' in this.app_.options) {
errorMessage +=
'Make sure the "serviceAccount" property provided to initializeApp() ' +
'is authorized to access the specified "databaseURL" and is from the correct ' +
'project.';
}
else {
errorMessage +=
'Make sure the "apiKey" and "databaseURL" properties provided to ' +
'initializeApp() match the values provided for your app at ' +
'https://console.firebase.google.com/.';
}
util_1.warn(errorMessage);
};
return AuthTokenProvider;
}());
exports.AuthTokenProvider = AuthTokenProvider;
//# sourceMappingURL=AuthTokenProvider.js.map<|fim▁end|> | this.app_['INTERNAL']['addAuthTokenListener'](listener);
};
AuthTokenProvider.prototype.removeTokenChangeListener = function (listener) {
this.app_['INTERNAL']['removeAuthTokenListener'](listener); |
<|file_name|>module_dumper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
#
# module_dumper.py - WIDS/WIPS framework file dumper module
# Copyright (C) 2009 Peter Krebs, Herbert Haas
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the
# Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, see http://www.gnu.org/licenses/gpl-2.0.html
"""Dumper module
Test module which outputs any input values in a file.
"""
# Imports
#
# Custom modules
import fw_modules.module_template
from fw_modules.module_exceptions import *
# Standard modules
import time
# Third-party modules
class DumperClass(fw_modules.module_template.ModuleClass):
"""DumperClass
Receives messages and dumps them into file.
"""
def __init__(self, controller_reference, parameter_dictionary, module_logger):
"""Constructor
"""
fw_modules.module_template.ModuleClass.__init__(self, controller=controller_reference, param_dict=parameter_dictionary, logger=module_logger)
# Default values.
try:
self.dumpfile_path = self.param_dict['dumpfile']
except KeyError:
self.module_logger.error("No dumpfile specified")
raise FwModuleSetupError, self.module_identifier + ": ERROR: No dumpfile specified"<|fim▁hole|>
def after_run(self):
"""after_run()
Closes dumpfile.
"""
try:
self.DUMPFILE.close()
except IOError:
self.module_logger.warning("Couldn't close dumpfile properly")
def before_run(self):
"""before_run()
Opens dumpfile.
"""
try:
self.DUMPFILE = open(self.dumpfile_path, "w")
except IOError:
self.module_logger.error("Couldn't open file " + str(self.dumpfile_path))
return False
else:
return True
def dump_to_file(self, data):
"""dump_to_file()
Dumps input to file.
"""
self.module_logger.debug("Dumped data: " + str(data))
try:
self.DUMPFILE.write(data + "\n")
self.DUMPFILE.flush()
except IOError as err:
self.module_logger.warning("Couldn't dump to file; details: " + err.__str__())
def process(self, input):
"""process()
Main action.
"""
self.module_logger.debug("Raw input: " + str(input))
self.dump_to_file(input)
def main(controller_reference, parameter_dictionary, module_logger):
dumper_class = DumperClass(controller_reference, parameter_dictionary, module_logger)
return dumper_class
if __name__ == "__main__":
print "Warning: This module is not intended to be executed directly. Only do this for test purposes."<|fim▁end|> | # Helper values.
self.DUMPFILE = None |
<|file_name|>LocationCursor.java<|end_file_name|><|fim▁begin|>package com.daviancorp.android.data.database;
import android.database.Cursor;<|fim▁hole|>/**
* A convenience class to wrap a cursor that returns rows from the "locations"
* table. The {@link #getLocation()} method will give you a Location instance
* representing the current row.
*/
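// Illustrative usage sketch; the raw cursor and the Location accessors are
// assumed here, not defined in this file:
//
//   LocationCursor cursor = new LocationCursor(rawCursor);
//   for (cursor.moveToFirst(); !cursor.isAfterLast(); cursor.moveToNext()) {
//       Location location = cursor.getLocation();
//       // ... use the populated Location object ...
//   }
//   cursor.close();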
public class LocationCursor extends CursorWrapper {
public LocationCursor(Cursor c) {
super(c);
}
/**
* Returns a Location object configured for the current row, or null if the
* current row is invalid.
*/
public Location getLocation() {
if (isBeforeFirst() || isAfterLast())
return null;
Location location = new Location();
long locationId = getLong(getColumnIndex(S.COLUMN_LOCATIONS_ID));
String name = getString(getColumnIndex(S.COLUMN_LOCATIONS_NAME));
String fileLocation = getString(getColumnIndex(S.COLUMN_LOCATIONS_MAP));
location.setId(locationId);
location.setName(name);
location.setFileLocation(fileLocation);
return location;
}
}<|fim▁end|> | import android.database.CursorWrapper;
import com.daviancorp.android.data.classes.Location;
|
<|file_name|>plans.py<|end_file_name|><|fim▁begin|>'''
plans.py
'''
from forex_python.converter import CurrencyCodes
from .base import Base
class Plan(Base):
'''
Plan class for making payment plans
'''
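# Illustrative usage (values are made up, not taken from this module):
#
#   plan = Plan(name="Premium", interval="monthly", amount=50000)
#   str(plan)                      # -> "Premium plan"
#   Plan("Bad", "yearly", 100)     # raises ValueError: interval not allowed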
interval = None
name = None
amount = None
plan_code = None
currency = None
id = None
send_sms = True
send_invoices = True<|fim▁hole|>
def __init__(self, name, interval, amount, currency='NGN', plan_code=None,
id=None, send_sms=None, send_invoices=None, description=None):
super().__init__()
# Check that the supplied currency is valid
if not CurrencyCodes().get_symbol(currency.upper()):
raise ValueError("Invalid currency supplied")
if interval.lower() not in self.__interval_values:
raise ValueError("Interval should be one of 'hourly',"
"'daily', 'weekly', 'monthly','annually'"
)
try:
amount = int(amount)
except ValueError:
raise ValueError("Invalid amount")
else:
self.interval = interval.lower()
self.name = name
self.amount = amount
self.currency = currency
self.plan_code = plan_code
self.id = id
self.send_sms = send_sms
self.send_invoices = send_invoices
self.description = description
def __str__(self):
return "%s plan" % self.name<|fim▁end|> | description = None
__interval_values = ('hourly', 'daily', 'weekly', 'monthly', 'annually') |