prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>default.rs<|end_file_name|><|fim▁begin|>use malachite_nz::integer::Integer;
#[test]
fn test_default() {<|fim▁hole|> assert!(default.is_valid());
assert_eq!(default, 0);
assert_eq!(default.to_string(), "0");
}<|fim▁end|>
|
let default = Integer::default();
|
<|file_name|>MountainRangeColors.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2018.
*
* This file is part of AvaIre.
*
* AvaIre is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* AvaIre is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with AvaIre. If not, see <https://www.gnu.org/licenses/>.
*
*
*/
package com.avairebot.imagegen.colors.ranks;
import com.avairebot.contracts.imagegen.BackgroundRankColors;
import javax.annotation.Nonnull;
import java.awt.*;<|fim▁hole|>
public class MountainRangeColors extends BackgroundRankColors {
@Nonnull
@Override
public Color getBackgroundColor() {
return makeColor(55, 55, 70);
}
@Nonnull
@Override
public Color getMainTextColor() {
return makeColor(226, 226, 229, 85);
}
@Nonnull
@Override
public Color getSecondaryTextColor() {
return makeColor(166, 166, 166, 85);
}
@Nonnull
@Override
public Color getExperienceBackgroundColor() {
return makeColor(38, 39, 59, 60);
}
@Nonnull
@Override
public Color getExperienceForegroundColor() {
return makeColor(96, 132, 186, 80);
}
@Nonnull
@Override
public Color getExperienceSeparatorColor() {
return makeColor(83, 180, 201, 65);
}
}<|fim▁end|>
| |
<|file_name|>collections.ts<|end_file_name|><|fim▁begin|>import { List, Set, fromJS, OrderedMap } from 'immutable';
import { get, escapeRegExp } from 'lodash';
import { stringTemplate } from 'netlify-cms-lib-widgets';
import consoleError from '../lib/consoleError';
import { CONFIG_SUCCESS } from '../actions/config';
import { FILES, FOLDER } from '../constants/collectionTypes';
import { COMMIT_DATE, COMMIT_AUTHOR } from '../constants/commitProps';
import { INFERABLE_FIELDS, IDENTIFIER_FIELDS, SORTABLE_FIELDS } from '../constants/fieldInference';
import { formatExtensions } from '../formats/formats';
import { selectMediaFolder } from './entries';
import { summaryFormatter } from '../lib/formatters';
import type {
Collection,
Collections,
CollectionFiles,
EntryField,
EntryMap,
ViewFilter,
ViewGroup,
CmsConfig,
} from '../types/redux';
import type { ConfigAction } from '../actions/config';
import type { Backend } from '../backend';
const { keyToPathArray } = stringTemplate;
const defaultState: Collections = fromJS({});
function collections(state = defaultState, action: ConfigAction) {
switch (action.type) {
case CONFIG_SUCCESS: {
const collections = action.payload.collections;
let newState = OrderedMap({});
collections.forEach(collection => {
newState = newState.set(collection.name, fromJS(collection));
});
return newState;
}
default:
return state;
}
}
const selectors = {
[FOLDER]: {
entryExtension(collection: Collection) {
return (
collection.get('extension') ||
get(formatExtensions, collection.get('format') || 'frontmatter')
).replace(/^\./, '');
},
fields(collection: Collection) {
return collection.get('fields');
},
entryPath(collection: Collection, slug: string) {
const folder = (collection.get('folder') as string).replace(/\/$/, '');
return `${folder}/${slug}.${this.entryExtension(collection)}`;
},
entrySlug(collection: Collection, path: string) {
const folder = (collection.get('folder') as string).replace(/\/$/, '');
const slug = path
.split(folder + '/')
.pop()
?.replace(new RegExp(`\\.${escapeRegExp(this.entryExtension(collection))}$`), '');
return slug;
},
allowNewEntries(collection: Collection) {
return collection.get('create');
},
allowDeletion(collection: Collection) {
return collection.get('delete', true);
},
templateName(collection: Collection) {
return collection.get('name');
},
},
[FILES]: {
fileForEntry(collection: Collection, slug: string) {
const files = collection.get('files');
return files && files.filter(f => f?.get('name') === slug).get(0);
},
fields(collection: Collection, slug: string) {
const file = this.fileForEntry(collection, slug);
return file && file.get('fields');
},
entryPath(collection: Collection, slug: string) {
const file = this.fileForEntry(collection, slug);
return file && file.get('file');
},
entrySlug(collection: Collection, path: string) {
const file = (collection.get('files') as CollectionFiles)
.filter(f => f?.get('file') === path)
.get(0);
return file && file.get('name');
},
entryLabel(collection: Collection, slug: string) {
const file = this.fileForEntry(collection, slug);
return file && file.get('label');
},
allowNewEntries() {
return false;
},
allowDeletion(collection: Collection) {
return collection.get('delete', false);
},
templateName(_collection: Collection, slug: string) {
return slug;
},
},
};
function getFieldsWithMediaFolders(fields: EntryField[]) {
const fieldsWithMediaFolders = fields.reduce((acc, f) => {
if (f.has('media_folder')) {
acc = [...acc, f];
}
if (f.has('fields')) {
const fields = f.get('fields')?.toArray() as EntryField[];
acc = [...acc, ...getFieldsWithMediaFolders(fields)];
} else if (f.has('field')) {
const field = f.get('field') as EntryField;
acc = [...acc, ...getFieldsWithMediaFolders([field])];
} else if (f.has('types')) {
const types = f.get('types')?.toArray() as EntryField[];
acc = [...acc, ...getFieldsWithMediaFolders(types)];
}
return acc;
}, [] as EntryField[]);
return fieldsWithMediaFolders;
}
export function getFileFromSlug(collection: Collection, slug: string) {
return collection
.get('files')
?.toArray()
.find(f => f.get('name') === slug);
}
export function selectFieldsWithMediaFolders(collection: Collection, slug: string) {
if (collection.has('folder')) {
const fields = collection.get('fields').toArray();
return getFieldsWithMediaFolders(fields);
} else if (collection.has('files')) {
const fields = getFileFromSlug(collection, slug)?.get('fields').toArray() || [];
return getFieldsWithMediaFolders(fields);
}
return [];
}
export function selectMediaFolders(config: CmsConfig, collection: Collection, entry: EntryMap) {
const fields = selectFieldsWithMediaFolders(collection, entry.get('slug'));
const folders = fields.map(f => selectMediaFolder(config, collection, entry, f));
if (collection.has('files')) {
const file = getFileFromSlug(collection, entry.get('slug'));
if (file) {
folders.unshift(selectMediaFolder(config, collection, entry, undefined));
}
}
if (collection.has('media_folder')) {
// stop evaluating media folders at collection level
collection = collection.delete('files');
folders.unshift(selectMediaFolder(config, collection, entry, undefined));
}
return Set(folders).toArray();
}
export function selectFields(collection: Collection, slug: string) {
return selectors[collection.get('type')].fields(collection, slug);
}
export function selectFolderEntryExtension(collection: Collection) {
return selectors[FOLDER].entryExtension(collection);
}
export function selectFileEntryLabel(collection: Collection, slug: string) {
return selectors[FILES].entryLabel(collection, slug);
}
export function selectEntryPath(collection: Collection, slug: string) {
return selectors[collection.get('type')].entryPath(collection, slug);
}
export function selectEntrySlug(collection: Collection, path: string) {
return selectors[collection.get('type')].entrySlug(collection, path);
}
export function selectAllowNewEntries(collection: Collection) {
return selectors[collection.get('type')].allowNewEntries(collection);
}
export function selectAllowDeletion(collection: Collection) {
return selectors[collection.get('type')].allowDeletion(collection);
}
export function selectTemplateName(collection: Collection, slug: string) {
return selectors[collection.get('type')].templateName(collection, slug);
}
export function getFieldsNames(fields: EntryField[], prefix = '') {
let names = fields.map(f => `${prefix}${f.get('name')}`);
fields.forEach((f, index) => {
if (f.has('fields')) {
const fields = f.get('fields')?.toArray() as EntryField[];
names = [...names, ...getFieldsNames(fields, `${names[index]}.`)];
} else if (f.has('field')) {
const field = f.get('field') as EntryField;
names = [...names, ...getFieldsNames([field], `${names[index]}.`)];
} else if (f.has('types')) {
const types = f.get('types')?.toArray() as EntryField[];
names = [...names, ...getFieldsNames(types, `${names[index]}.`)];
}
});
return names;
}
export function selectField(collection: Collection, key: string) {
const array = keyToPathArray(key);
let name: string | undefined;
let field;
let fields = collection.get('fields', List<EntryField>()).toArray();
while ((name = array.shift()) && fields) {
field = fields.find(f => f.get('name') === name);
if (field?.has('fields')) {
fields = field?.get('fields')?.toArray() as EntryField[];
} else if (field?.has('field')) {
fields = [field?.get('field') as EntryField];
} else if (field?.has('types')) {
fields = field?.get('types')?.toArray() as EntryField[];
}
}
return field;
}
export function traverseFields(<|fim▁hole|> if (done()) {
return fields;
}
fields = fields
.map(f => {
const field = updater(f as EntryField);
if (done()) {
return field;
} else if (field.has('fields')) {
return field.set('fields', traverseFields(field.get('fields')!, updater, done));
} else if (field.has('field')) {
return field.set(
'field',
traverseFields(List([field.get('field')!]), updater, done).get(0),
);
} else if (field.has('types')) {
return field.set('types', traverseFields(field.get('types')!, updater, done));
} else {
return field;
}
})
.toList() as List<EntryField>;
return fields;
}
export function updateFieldByKey(
collection: Collection,
key: string,
updater: (field: EntryField) => EntryField,
) {
const selected = selectField(collection, key);
if (!selected) {
return collection;
}
let updated = false;
function updateAndBreak(f: EntryField) {
const field = f as EntryField;
if (field === selected) {
updated = true;
return updater(field);
} else {
return field;
}
}
collection = collection.set(
'fields',
traverseFields(collection.get('fields', List<EntryField>()), updateAndBreak, () => updated),
);
return collection;
}
export function selectIdentifier(collection: Collection) {
const identifier = collection.get('identifier_field');
const identifierFields = identifier ? [identifier, ...IDENTIFIER_FIELDS] : [...IDENTIFIER_FIELDS];
const fieldNames = getFieldsNames(collection.get('fields', List()).toArray());
return identifierFields.find(id =>
fieldNames.find(name => name.toLowerCase().trim() === id.toLowerCase().trim()),
);
}
export function selectInferedField(collection: Collection, fieldName: string) {
if (fieldName === 'title' && collection.get('identifier_field')) {
return selectIdentifier(collection);
}
const inferableField = (
INFERABLE_FIELDS as Record<
string,
{
type: string;
synonyms: string[];
secondaryTypes: string[];
fallbackToFirstField: boolean;
showError: boolean;
}
>
)[fieldName];
const fields = collection.get('fields');
let field;
// If collection has no fields or fieldName is not defined within inferables list, return null
if (!fields || !inferableField) return null;
// Try to return a field of the specified type with one of the synonyms
const mainTypeFields = fields
.filter(f => f?.get('widget', 'string') === inferableField.type)
.map(f => f?.get('name'));
field = mainTypeFields.filter(f => inferableField.synonyms.indexOf(f as string) !== -1);
if (field && field.size > 0) return field.first();
// Try to return a field for each of the specified secondary types
const secondaryTypeFields = fields
.filter(f => inferableField.secondaryTypes.indexOf(f?.get('widget', 'string') as string) !== -1)
.map(f => f?.get('name'));
field = secondaryTypeFields.filter(f => inferableField.synonyms.indexOf(f as string) !== -1);
if (field && field.size > 0) return field.first();
// Try to return the first field of the specified type
if (inferableField.fallbackToFirstField && mainTypeFields.size > 0) return mainTypeFields.first();
// Couldn't infer the field. Show error and return null.
if (inferableField.showError) {
consoleError(
`The Field ${fieldName} is missing for the collection “${collection.get('name')}”`,
`Netlify CMS tries to infer the entry ${fieldName} automatically, but one couldn't be found for entries of the collection “${collection.get(
'name',
)}”. Please check your site configuration.`,
);
}
return null;
}
export function selectEntryCollectionTitle(collection: Collection, entry: EntryMap) {
// prefer formatted summary over everything else
const summaryTemplate = collection.get('summary');
if (summaryTemplate) return summaryFormatter(summaryTemplate, entry, collection);
// if the collection is a file collection return the label of the entry
if (collection.get('type') == FILES) {
const label = selectFileEntryLabel(collection, entry.get('slug'));
if (label) return label;
}
// try to infer a title field from the entry data
const entryData = entry.get('data');
const titleField = selectInferedField(collection, 'title');
const result = titleField && entryData.getIn(keyToPathArray(titleField));
// if the custom field does not yield a result, fall back to 'title'
if (!result && titleField !== 'title') {
return entryData.getIn(keyToPathArray('title'));
}
return result;
}
export function selectDefaultSortableFields(
collection: Collection,
backend: Backend,
hasIntegration: boolean,
) {
let defaultSortable = SORTABLE_FIELDS.map((type: string) => {
const field = selectInferedField(collection, type);
if (backend.isGitBackend() && type === 'author' && !field && !hasIntegration) {
// default to commit author if no author field is found
return COMMIT_AUTHOR;
}
return field;
}).filter(Boolean);
if (backend.isGitBackend() && !hasIntegration) {
// always have commit date by default
defaultSortable = [COMMIT_DATE, ...defaultSortable];
}
return defaultSortable as string[];
}
export function selectSortableFields(collection: Collection, t: (key: string) => string) {
const fields = collection
.get('sortable_fields')
.toArray()
.map(key => {
if (key === COMMIT_DATE) {
return { key, field: { name: key, label: t('collection.defaultFields.updatedOn.label') } };
}
const field = selectField(collection, key);
if (key === COMMIT_AUTHOR && !field) {
return { key, field: { name: key, label: t('collection.defaultFields.author.label') } };
}
return { key, field: field?.toJS() };
})
.filter(item => !!item.field)
.map(item => ({ ...item.field, key: item.key }));
return fields;
}
export function selectSortDataPath(collection: Collection, key: string) {
if (key === COMMIT_DATE) {
return 'updatedOn';
} else if (key === COMMIT_AUTHOR && !selectField(collection, key)) {
return 'author';
} else {
return `data.${key}`;
}
}
export function selectViewFilters(collection: Collection) {
const viewFilters = collection.get('view_filters').toJS() as ViewFilter[];
return viewFilters;
}
export function selectViewGroups(collection: Collection) {
const viewGroups = collection.get('view_groups').toJS() as ViewGroup[];
return viewGroups;
}
export function selectFieldsComments(collection: Collection, entryMap: EntryMap) {
let fields: EntryField[] = [];
if (collection.has('folder')) {
fields = collection.get('fields').toArray();
} else if (collection.has('files')) {
const file = collection.get('files')!.find(f => f?.get('name') === entryMap.get('slug'));
fields = file.get('fields').toArray();
}
const comments: Record<string, string> = {};
const names = getFieldsNames(fields);
names.forEach(name => {
const field = selectField(collection, name);
if (field?.has('comment')) {
comments[name] = field.get('comment')!;
}
});
return comments;
}
export function selectHasMetaPath(collection: Collection) {
return (
collection.has('folder') &&
collection.get('type') === FOLDER &&
collection.has('meta') &&
collection.get('meta')?.has('path')
);
}
export default collections;<|fim▁end|>
|
fields: List<EntryField>,
updater: (field: EntryField) => EntryField,
done = () => false,
) {
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># coding=utf-8
from setuptools import setup
from setuptools.command.test import test
class TestHook(test):
def run_tests(self):
import nose<|fim▁hole|> name='lxml-asserts',
version='0.1.2',
description='Handy functions for testing lxml etree objects for equality and compatibility',
url='https://github.com/SuminAndrew/lxml-asserts',
author='Andrew Sumin',
author_email='[email protected]',
classifiers=[
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Software Development :: Testing',
],
license="http://www.apache.org/licenses/LICENSE-2.0",
cmdclass={
'test': TestHook
},
packages=[
'lxml_asserts'
],
install_requires=[
'lxml',
],
test_suite='tests',
tests_require=[
'nose',
'pycodestyle == 2.3.1'
],
zip_safe=False
)<|fim▁end|>
|
nose.main(argv=['nosetests', 'tests/', '-v', '--logging-clear-handlers'])
setup(
|
<|file_name|>0021_auto__del_customer__del_unique_customer_company_name_contact_name_cont.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing unique constraint on 'Customer', fields ['company_name', 'contact_name', 'contact_mail']
db.delete_unique('readings_customer', ['company_name', 'contact_name', 'contact_mail'])
# Deleting model 'Customer'
db.delete_table('readings_customer')
# Deleting model 'CustomerCallLog'
db.delete_table('readings_customercalllog')
def backwards(self, orm):
# Adding model 'Customer'
db.create_table('readings_customer', (
('payment_status', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
('contact_mail', self.gf('django.db.models.fields.CharField')(max_length=100)),
('contact_address', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('contact_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('payment_confirmation', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('customer_type', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
('company_name', self.gf('django.db.models.fields.CharField')(max_length=255)),
('api_key', self.gf('django.db.models.fields.CharField')(max_length=255)),
('contact_phone', self.gf('django.db.models.fields.CharField')(max_length=25, null=True, blank=True)),
))
db.send_create_signal('readings', ['Customer'])
# Adding unique constraint on 'Customer', fields ['company_name', 'contact_name', 'contact_mail']
db.create_unique('readings_customer', ['company_name', 'contact_name', 'contact_mail'])
# Adding model 'CustomerCallLog'
db.create_table('readings_customercalllog', (
('customer', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['readings.Customer'])),
('max_longitude', self.gf('django.db.models.fields.FloatField')()),
('min_longitude', self.gf('django.db.models.fields.FloatField')()),
('start_time', self.gf('django.db.models.fields.BigIntegerField')()),
('results_limit', self.gf('django.db.models.fields.IntegerField')()),
('processing_time', self.gf('django.db.models.fields.FloatField')()),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('min_latitude', self.gf('django.db.models.fields.FloatField')()),
('data_format', self.gf('django.db.models.fields.CharField')(max_length=10)),
('max_latitude', self.gf('django.db.models.fields.FloatField')()),
('since_last_call', self.gf('django.db.models.fields.BooleanField')(default=False)),
('use_utc', self.gf('django.db.models.fields.BooleanField')(default=False)),
('end_time', self.gf('django.db.models.fields.BigIntegerField')()),
('results_returned', self.gf('django.db.models.fields.IntegerField')()),
('global_data', self.gf('django.db.models.fields.BooleanField')(default=False)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
))
db.send_create_signal('readings', ['CustomerCallLog'])
models = {
'readings.reading': {
'Meta': {'unique_together': "(('latitude', 'longitude', 'daterecorded', 'user_id'),)", 'object_name': 'Reading'},
'altitude': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'client_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'daterecorded': ('django.db.models.fields.BigIntegerField', [], {'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'db_index': 'True'}),
'location_accuracy': ('django.db.models.fields.FloatField', [], {}),
'longitude': ('django.db.models.fields.FloatField', [], {'db_index': 'True'}),
'observation_type': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'observation_unit': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),<|fim▁hole|> 'sharing': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'tzoffset': ('django.db.models.fields.BigIntegerField', [], {}),
'user_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
},
'readings.readingsync': {
'Meta': {'object_name': 'ReadingSync'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'processing_time': ('django.db.models.fields.FloatField', [], {}),
'readings': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['readings']<|fim▁end|>
|
'provider': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'reading': ('django.db.models.fields.FloatField', [], {}),
'reading_accuracy': ('django.db.models.fields.FloatField', [], {}),
|
<|file_name|>_make.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, division, print_function
import hashlib
import linecache
import sys
import warnings
from operator import itemgetter
from . import _config
from ._compat import PY2, isclass, iteritems, metadata_proxy, set_closure_cell
from .exceptions import (
DefaultAlreadySetError, FrozenInstanceError, NotAnAttrsClassError,
UnannotatedAttributeError
)
# This is used at least twice, so cache it here.
_obj_setattr = object.__setattr__
_init_converter_pat = "__attr_converter_{}"
_init_factory_pat = "__attr_factory_{}"
_tuple_property_pat = " {attr_name} = property(itemgetter({index}))"
_empty_metadata_singleton = metadata_proxy({})
class _Nothing(object):
"""
Sentinel class to indicate the lack of a value when ``None`` is ambiguous.
All instances of `_Nothing` are equal.
"""
def __copy__(self):
return self
def __deepcopy__(self, _):
return self
def __eq__(self, other):
return other.__class__ == _Nothing
def __ne__(self, other):
return not self == other
def __repr__(self):<|fim▁hole|> return "NOTHING"
def __hash__(self):
return 0xdeadbeef
NOTHING = _Nothing()
"""
Sentinel to indicate the lack of a value when ``None`` is ambiguous.
"""
def attrib(default=NOTHING, validator=None,
repr=True, cmp=True, hash=None, init=True,
convert=None, metadata=None, type=None, converter=None):
"""
Create a new attribute on a class.
.. warning::
Does *not* do anything unless the class is also decorated with
:func:`attr.s`!
:param default: A value that is used if an ``attrs``-generated ``__init__``
is used and no value is passed while instantiating or the attribute is
excluded using ``init=False``.
If the value is an instance of :class:`Factory`, its callable will be
used to construct a new value (useful for mutable data types like lists
or dicts).
If a default is not set (or set manually to ``attr.NOTHING``), a value
*must* be supplied when instantiating; otherwise a :exc:`TypeError`
will be raised.
The default can also be set using decorator notation as shown below.
:type default: Any value.
:param validator: :func:`callable` that is called by ``attrs``-generated
``__init__`` methods after the instance has been initialized. They
receive the initialized instance, the :class:`Attribute`, and the
passed value.
The return value is *not* inspected so the validator has to throw an
exception itself.
If a ``list`` is passed, its items are treated as validators and must
all pass.
Validators can be globally disabled and re-enabled using
:func:`get_run_validators`.
The validator can also be set using decorator notation as shown below.
:type validator: ``callable`` or a ``list`` of ``callable``\ s.
:param bool repr: Include this attribute in the generated ``__repr__``
method.
:param bool cmp: Include this attribute in the generated comparison methods
(``__eq__`` et al).
:param hash: Include this attribute in the generated ``__hash__``
method. If ``None`` (default), mirror *cmp*'s value. This is the
correct behavior according to the Python spec. Setting this value to
anything else than ``None`` is *discouraged*.
:type hash: ``bool`` or ``None``
:param bool init: Include this attribute in the generated ``__init__``
method. It is possible to set this to ``False`` and set a default
value. In that case this attribute is unconditionally initialized
with the specified default value or factory.
:param callable converter: :func:`callable` that is called by
``attrs``-generated ``__init__`` methods to convert the attribute's value
to the desired format. It is given the passed-in value, and the
returned value will be used as the new value of the attribute. The
value is converted before being passed to the validator, if any.
:param metadata: An arbitrary mapping, to be used by third-party
components. See :ref:`extending_metadata`.
:param type: The type of the attribute. In Python 3.6 or greater, the
preferred method to specify the type is using a variable annotation
(see `PEP 526 <https://www.python.org/dev/peps/pep-0526/>`_).
This argument is provided for backward compatibility.
Regardless of the approach used, the type will be stored on
``Attribute.type``.
.. versionadded:: 15.2.0 *convert*
.. versionadded:: 16.3.0 *metadata*
.. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
.. versionchanged:: 17.1.0
*hash* is ``None`` and therefore mirrors *cmp* by default.
.. versionadded:: 17.3.0 *type*
.. deprecated:: 17.4.0 *convert*
.. versionadded:: 17.4.0 *converter* as a replacement for the deprecated
*convert* to achieve consistency with other noun-based arguments.
"""
if hash is not None and hash is not True and hash is not False:
raise TypeError(
"Invalid value for hash. Must be True, False, or None."
)
if convert is not None:
if converter is not None:
raise RuntimeError(
"Can't pass both `convert` and `converter`. "
"Please use `converter` only."
)
warnings.warn(
"The `convert` argument is deprecated in favor of `converter`. "
"It will be removed after 2019/01.",
DeprecationWarning, stacklevel=2
)
converter = convert
if metadata is None:
metadata = {}
return _CountingAttr(
default=default,
validator=validator,
repr=repr,
cmp=cmp,
hash=hash,
init=init,
converter=converter,
metadata=metadata,
type=type,
)
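# Illustrative sketch, not part of the original module: the docstring above says the
# default and the validator "can also be set using decorator notation as shown below",
# but those examples were stripped. A minimal example, assuming the public ``attr.s`` /
# ``attr.ib`` aliases the package exports for ``attrs`` / ``attrib``; the class and
# attribute names are made up:
#
#     import attr
#
#     @attr.s
#     class Point(object):
#         x = attr.ib(default=0)
#         y = attr.ib()
#
#         @y.default
#         def _y_default(self):
#             # wrapped in Factory(..., takes_self=True), see _CountingAttr.default
#             return self.x + 1
#
#         @x.validator
#         def _check_x(self, attribute, value):
#             if value < 0:
#                 raise ValueError("x must be non-negative")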
def _make_attr_tuple_class(cls_name, attr_names):
"""
Create a tuple subclass to hold `Attribute`s for an `attrs` class.
The subclass is a bare tuple with properties for names.
class MyClassAttributes(tuple):
__slots__ = ()
x = property(itemgetter(0))
"""
attr_class_name = "{}Attributes".format(cls_name)
attr_class_template = [
"class {}(tuple):".format(attr_class_name),
" __slots__ = ()",
]
if attr_names:
for i, attr_name in enumerate(attr_names):
attr_class_template.append(_tuple_property_pat.format(
index=i,
attr_name=attr_name,
))
else:
attr_class_template.append(" pass")
globs = {"itemgetter": itemgetter}
eval(compile("\n".join(attr_class_template), "", "exec"), globs)
return globs[attr_class_name]
# Tuple class for extracted attributes from a class definition.
# `super_attrs` is a subset of `attrs`.
_Attributes = _make_attr_tuple_class("_Attributes", [
"attrs", # all attributes to build dunder methods for
"super_attrs", # attributes that have been inherited from super classes
])
def _is_class_var(annot):
"""
Check whether *annot* is a typing.ClassVar.
The implementation is gross but importing `typing` is slow and there are
discussions to remove it from the stdlib altogether.
"""
return str(annot).startswith("typing.ClassVar")
def _get_annotations(cls):
"""
Get annotations for *cls*.
"""
anns = getattr(cls, "__annotations__", None)
if anns is None:
return {}
# Verify that the annotations aren't merely inherited.
for super_cls in cls.__mro__[1:]:
if anns is getattr(super_cls, "__annotations__", None):
return {}
return anns
def _transform_attrs(cls, these, auto_attribs):
"""
Transform all `_CountingAttr`s on a class into `Attribute`s.
If *these* is passed, use that and don't look for them on the class.
Return an `_Attributes`.
"""
cd = cls.__dict__
anns = _get_annotations(cls)
if these is not None:
ca_list = sorted((
(name, ca)
for name, ca
in iteritems(these)
), key=lambda e: e[1].counter)
elif auto_attribs is True:
ca_names = {
name
for name, attr
in cd.items()
if isinstance(attr, _CountingAttr)
}
ca_list = []
annot_names = set()
for attr_name, type in anns.items():
if _is_class_var(type):
continue
annot_names.add(attr_name)
a = cd.get(attr_name, NOTHING)
if not isinstance(a, _CountingAttr):
if a is NOTHING:
a = attrib()
else:
a = attrib(default=a)
ca_list.append((attr_name, a))
unannotated = ca_names - annot_names
if len(unannotated) > 0:
raise UnannotatedAttributeError(
"The following `attr.ib`s lack a type annotation: " +
", ".join(sorted(
unannotated,
key=lambda n: cd.get(n).counter
)) + "."
)
else:
ca_list = sorted((
(name, attr)
for name, attr
in cd.items()
if isinstance(attr, _CountingAttr)
), key=lambda e: e[1].counter)
own_attrs = [
Attribute.from_counting_attr(
name=attr_name,
ca=ca,
type=anns.get(attr_name),
)
for attr_name, ca
in ca_list
]
super_attrs = []
taken_attr_names = {a.name: a for a in own_attrs}
# Traverse the MRO and collect attributes.
for super_cls in cls.__mro__[1:-1]:
sub_attrs = getattr(super_cls, "__attrs_attrs__", None)
if sub_attrs is not None:
for a in sub_attrs:
prev_a = taken_attr_names.get(a.name)
# Only add an attribute if it hasn't been defined before. This
# allows for overwriting attribute definitions by subclassing.
if prev_a is None:
super_attrs.append(a)
taken_attr_names[a.name] = a
attr_names = [a.name for a in super_attrs + own_attrs]
AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
attrs = AttrsClass(
super_attrs + [
Attribute.from_counting_attr(
name=attr_name,
ca=ca,
type=anns.get(attr_name)
)
for attr_name, ca
in ca_list
]
)
had_default = False
for a in attrs:
if had_default is True and a.default is NOTHING and a.init is True:
raise ValueError(
"No mandatory attributes allowed after an attribute with a "
"default value or factory. Attribute in question: {a!r}"
.format(a=a)
)
elif had_default is False and \
a.default is not NOTHING and \
a.init is not False:
had_default = True
return _Attributes((attrs, super_attrs))
def _frozen_setattrs(self, name, value):
"""
Attached to frozen classes as __setattr__.
"""
raise FrozenInstanceError()
def _frozen_delattrs(self, name):
"""
Attached to frozen classes as __delattr__.
"""
raise FrozenInstanceError()
class _ClassBuilder(object):
"""
Iteratively build *one* class.
"""
__slots__ = (
"_cls", "_cls_dict", "_attrs", "_super_names", "_attr_names", "_slots",
"_frozen", "_has_post_init",
)
def __init__(self, cls, these, slots, frozen, auto_attribs):
attrs, super_attrs = _transform_attrs(cls, these, auto_attribs)
self._cls = cls
self._cls_dict = dict(cls.__dict__) if slots else {}
self._attrs = attrs
self._super_names = set(a.name for a in super_attrs)
self._attr_names = tuple(a.name for a in attrs)
self._slots = slots
self._frozen = frozen or _has_frozen_superclass(cls)
self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
self._cls_dict["__attrs_attrs__"] = self._attrs
if frozen:
self._cls_dict["__setattr__"] = _frozen_setattrs
self._cls_dict["__delattr__"] = _frozen_delattrs
def __repr__(self):
return "<_ClassBuilder(cls={cls})>".format(cls=self._cls.__name__)
def build_class(self):
"""
Finalize class based on the accumulated configuration.
Builder cannot be used anymore after calling this method.
"""
if self._slots is True:
return self._create_slots_class()
else:
return self._patch_original_class()
def _patch_original_class(self):
"""
Apply accumulated methods and return the class.
"""
cls = self._cls
super_names = self._super_names
# Clean class of attribute definitions (`attr.ib()`s).
for name in self._attr_names:
if name not in super_names and \
getattr(cls, name, None) is not None:
delattr(cls, name)
# Attach our dunder methods.
for name, value in self._cls_dict.items():
setattr(cls, name, value)
return cls
def _create_slots_class(self):
"""
Build and return a new class with a `__slots__` attribute.
"""
super_names = self._super_names
cd = {
k: v
for k, v in iteritems(self._cls_dict)
if k not in tuple(self._attr_names) + ("__dict__",)
}
# We only add the names of attributes that aren't inherited.
# Setting __slots__ to inherited attributes wastes memory.
cd["__slots__"] = tuple(
name
for name in self._attr_names
if name not in super_names
)
qualname = getattr(self._cls, "__qualname__", None)
if qualname is not None:
cd["__qualname__"] = qualname
attr_names = tuple(self._attr_names)
def slots_getstate(self):
"""
Automatically created by attrs.
"""
return tuple(getattr(self, name) for name in attr_names)
def slots_setstate(self, state):
"""
Automatically created by attrs.
"""
__bound_setattr = _obj_setattr.__get__(self, Attribute)
for name, value in zip(attr_names, state):
__bound_setattr(name, value)
# slots and frozen require __getstate__/__setstate__ to work
cd["__getstate__"] = slots_getstate
cd["__setstate__"] = slots_setstate
# Create new class based on old class and our methods.
cls = type(self._cls)(
self._cls.__name__,
self._cls.__bases__,
cd,
)
# The following is a fix for
# https://github.com/python-attrs/attrs/issues/102. On Python 3,
# if a method mentions `__class__` or uses the no-arg super(), the
# compiler will bake a reference to the class in the method itself
# as `method.__closure__`. Since we replace the class with a
# clone, we rewrite these references so it keeps working.
for item in cls.__dict__.values():
if isinstance(item, (classmethod, staticmethod)):
# Class- and staticmethods hide their functions inside.
# These might need to be rewritten as well.
closure_cells = getattr(item.__func__, "__closure__", None)
else:
closure_cells = getattr(item, "__closure__", None)
if not closure_cells: # Catch None or the empty list.
continue
for cell in closure_cells:
if cell.cell_contents is self._cls:
set_closure_cell(cell, cls)
return cls
def add_repr(self, ns):
self._cls_dict["__repr__"] = self._add_method_dunders(
_make_repr(self._attrs, ns=ns)
)
return self
def add_str(self):
repr = self._cls_dict.get("__repr__")
if repr is None:
raise ValueError(
"__str__ can only be generated if a __repr__ exists."
)
def __str__(self):
return self.__repr__()
self._cls_dict["__str__"] = self._add_method_dunders(__str__)
return self
def make_unhashable(self):
self._cls_dict["__hash__"] = None
return self
def add_hash(self):
self._cls_dict["__hash__"] = self._add_method_dunders(
_make_hash(self._attrs)
)
return self
def add_init(self):
self._cls_dict["__init__"] = self._add_method_dunders(
_make_init(
self._attrs,
self._has_post_init,
self._frozen,
)
)
return self
def add_cmp(self):
cd = self._cls_dict
cd["__eq__"], cd["__ne__"], cd["__lt__"], cd["__le__"], cd["__gt__"], \
cd["__ge__"] = (
self._add_method_dunders(meth)
for meth in _make_cmp(self._attrs)
)
return self
def _add_method_dunders(self, method):
"""
Add __module__ and __qualname__ to a *method* if possible.
"""
try:
method.__module__ = self._cls.__module__
except AttributeError:
pass
try:
method.__qualname__ = ".".join(
(self._cls.__qualname__, method.__name__,)
)
except AttributeError:
pass
return method
def attrs(maybe_cls=None, these=None, repr_ns=None,
repr=True, cmp=True, hash=None, init=True,
slots=False, frozen=False, str=False, auto_attribs=False):
r"""
A class decorator that adds `dunder
<https://wiki.python.org/moin/DunderAlias>`_\ -methods according to the
specified attributes using :func:`attr.ib` or the *these* argument.
:param these: A dictionary of name to :func:`attr.ib` mappings. This is
useful to avoid the definition of your attributes within the class body
because you can't (e.g. if you want to add ``__repr__`` methods to
Django models) or don't want to.
If *these* is not ``None``, ``attrs`` will *not* search the class body
for attributes.
:type these: :class:`dict` of :class:`str` to :func:`attr.ib`
:param str repr_ns: When using nested classes, there's no way in Python 2
to automatically detect that. Therefore it's possible to set the
namespace explicitly for a more meaningful ``repr`` output.
:param bool repr: Create a ``__repr__`` method with a human readable
representation of ``attrs`` attributes.
:param bool str: Create a ``__str__`` method that is identical to
``__repr__``. This is usually not necessary except for
:class:`Exception`\ s.
:param bool cmp: Create ``__eq__``, ``__ne__``, ``__lt__``, ``__le__``,
``__gt__``, and ``__ge__`` methods that compare the class as if it were
a tuple of its ``attrs`` attributes. But the attributes are *only*
compared, if the type of both classes is *identical*!
:param hash: If ``None`` (default), the ``__hash__`` method is generated
according to how *cmp* and *frozen* are set.
1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you.
2. If *cmp* is True and *frozen* is False, ``__hash__`` will be set to
None, marking it unhashable (which it is).
3. If *cmp* is False, ``__hash__`` will be left untouched meaning the
``__hash__`` method of the superclass will be used (if superclass is
``object``, this means it will fall back to id-based hashing).
Although not recommended, you can decide for yourself and force
``attrs`` to create one (e.g. if the class is immutable even though you
didn't freeze it programmatically) by passing ``True`` or not. Both of
these cases are rather special and should be used carefully.
See the `Python documentation \
<https://docs.python.org/3/reference/datamodel.html#object.__hash__>`_
and the `GitHub issue that led to the default behavior \
<https://github.com/python-attrs/attrs/issues/136>`_ for more details.
:type hash: ``bool`` or ``None``
:param bool init: Create a ``__init__`` method that initializes the
``attrs`` attributes. Leading underscores are stripped for the
argument name. If a ``__attrs_post_init__`` method exists on the
class, it will be called after the instance is fully initialized.
:param bool slots: Create a slots_-style class that's more
memory-efficient. See :ref:`slots` for further ramifications.
:param bool frozen: Make instances immutable after initialization. If
someone attempts to modify a frozen instance,
:exc:`attr.exceptions.FrozenInstanceError` is raised.
Please note:
1. This is achieved by installing a custom ``__setattr__`` method
on your class, so you can't implement your own.
2. True immutability is impossible in Python.
3. This *does* have a minor runtime performance :ref:`impact
<how-frozen>` when initializing new instances. In other words:
``__init__`` is slightly slower with ``frozen=True``.
4. If a class is frozen, you cannot modify ``self`` in
``__attrs_post_init__`` or a self-written ``__init__``. You can
circumvent that limitation by using
``object.__setattr__(self, "attribute_name", value)``.
.. _slots: https://docs.python.org/3/reference/datamodel.html#slots
:param bool auto_attribs: If True, collect `PEP 526`_-annotated attributes
(Python 3.6 and later only) from the class body.
In this case, you **must** annotate every field. If ``attrs``
encounters a field that is set to an :func:`attr.ib` but lacks a type
annotation, an :exc:`attr.exceptions.UnannotatedAttributeError` is
raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't
want to set a type.
If you assign a value to those attributes (e.g. ``x: int = 42``), that
value becomes the default value like if it were passed using
``attr.ib(default=42)``. Passing an instance of :class:`Factory` also
works as expected.
Attributes annotated as :data:`typing.ClassVar` are **ignored**.
.. _`PEP 526`: https://www.python.org/dev/peps/pep-0526/
.. versionadded:: 16.0.0 *slots*
.. versionadded:: 16.1.0 *frozen*
.. versionadded:: 16.3.0 *str*, and support for ``__attrs_post_init__``.
.. versionchanged::
17.1.0 *hash* supports ``None`` as value which is also the default
now.
.. versionadded:: 17.3.0 *auto_attribs*
"""
def wrap(cls):
if getattr(cls, "__class__", None) is None:
raise TypeError("attrs only works with new-style classes.")
builder = _ClassBuilder(cls, these, slots, frozen, auto_attribs)
if repr is True:
builder.add_repr(repr_ns)
if str is True:
builder.add_str()
if cmp is True:
builder.add_cmp()
if hash is not True and hash is not False and hash is not None:
# Can't use `hash in` because 1 == True for example.
raise TypeError(
"Invalid value for hash. Must be True, False, or None."
)
elif hash is False or (hash is None and cmp is False):
pass
elif hash is True or (hash is None and cmp is True and frozen is True):
builder.add_hash()
else:
builder.make_unhashable()
if init is True:
builder.add_init()
return builder.build_class()
# maybe_cls's type depends on the usage of the decorator. It's a class
# if it's used as `@attrs` but ``None`` if used as `@attrs()`.
if maybe_cls is None:
return wrap
else:
return wrap(maybe_cls)
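# Illustrative sketch, not part of the original module: a minimal use of the decorator
# documented above via the public ``attr.s`` alias; the class and field names are
# made up:
#
#     import attr
#
#     @attr.s(frozen=True, slots=True)
#     class Coordinates(object):
#         lat = attr.ib()
#         lon = attr.ib()
#
#     c = Coordinates(1.0, 2.0)        # generated __init__
#     repr(c)                          # 'Coordinates(lat=1.0, lon=2.0)'
#     c == Coordinates(1.0, 2.0)       # True, via the generated __eq__
#     hash(c)                          # generated because cmp=True and frozen=True
#     # c.lat = 3.0                    # would raise attr.exceptions.FrozenInstanceError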
_attrs = attrs
"""
Internal alias so we can use it in functions that take an argument called
*attrs*.
"""
if PY2:
def _has_frozen_superclass(cls):
"""
Check whether *cls* has a frozen ancestor by looking at its
__setattr__.
"""
return (
getattr(
cls.__setattr__, "__module__", None
) == _frozen_setattrs.__module__ and
cls.__setattr__.__name__ == _frozen_setattrs.__name__
)
else:
def _has_frozen_superclass(cls):
"""
Check whether *cls* has a frozen ancestor by looking at its
__setattr__.
"""
return cls.__setattr__ == _frozen_setattrs
def _attrs_to_tuple(obj, attrs):
"""
Create a tuple of all values of *obj*'s *attrs*.
"""
return tuple(getattr(obj, a.name) for a in attrs)
def _make_hash(attrs):
attrs = tuple(
a
for a in attrs
if a.hash is True or (a.hash is None and a.cmp is True)
)
# We cache the generated hash methods for the same kinds of attributes.
sha1 = hashlib.sha1()
sha1.update(repr(attrs).encode("utf-8"))
unique_filename = "<attrs generated hash %s>" % (sha1.hexdigest(),)
type_hash = hash(unique_filename)
lines = [
"def __hash__(self):",
" return hash((",
" %d," % (type_hash,),
]
for a in attrs:
lines.append(" self.%s," % (a.name))
lines.append(" ))")
script = "\n".join(lines)
globs = {}
locs = {}
bytecode = compile(script, unique_filename, "exec")
eval(bytecode, globs, locs)
# In order for debuggers like PDB to be able to step through the code,
# we add a fake linecache entry.
linecache.cache[unique_filename] = (
len(script),
None,
script.splitlines(True),
unique_filename,
)
return locs["__hash__"]
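# Illustrative sketch: for a class whose hashed attributes are ``x`` and ``y``, the
# ``lines`` list assembled above compiles to roughly
#
#     def __hash__(self):
#         return hash((
#             <type_hash>,
#             self.x,
#             self.y,
#         ))
#
# where ``<type_hash>`` stands for ``hash(unique_filename)``, mixed into the tuple as
# a salt before the attribute values.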
def _add_hash(cls, attrs):
"""
Add a hash method to *cls*.
"""
cls.__hash__ = _make_hash(attrs)
return cls
def __ne__(self, other):
"""
Check equality and either forward a NotImplemented or return the result
negated.
"""
result = self.__eq__(other)
if result is NotImplemented:
return NotImplemented
return not result
def _make_cmp(attrs):
attrs = [a for a in attrs if a.cmp]
# We cache the generated eq methods for the same kinds of attributes.
sha1 = hashlib.sha1()
sha1.update(repr(attrs).encode("utf-8"))
unique_filename = "<attrs generated eq %s>" % (sha1.hexdigest(),)
lines = [
"def __eq__(self, other):",
" if other.__class__ is not self.__class__:",
" return NotImplemented",
]
# We can't just do a big self.x = other.x and... clause due to
# irregularities like nan == nan is false but (nan,) == (nan,) is true.
if attrs:
lines.append(" return (")
others = [
" ) == (",
]
for a in attrs:
lines.append(" self.%s," % (a.name,))
others.append(" other.%s," % (a.name,))
lines += others + [" )"]
else:
lines.append(" return True")
script = "\n".join(lines)
globs = {}
locs = {}
bytecode = compile(script, unique_filename, "exec")
eval(bytecode, globs, locs)
# In order for debuggers like PDB to be able to step through the code,
# we add a fake linecache entry.
linecache.cache[unique_filename] = (
len(script),
None,
script.splitlines(True),
unique_filename,
)
eq = locs["__eq__"]
ne = __ne__
def attrs_to_tuple(obj):
"""
Save us some typing.
"""
return _attrs_to_tuple(obj, attrs)
def __lt__(self, other):
"""
Automatically created by attrs.
"""
if isinstance(other, self.__class__):
return attrs_to_tuple(self) < attrs_to_tuple(other)
else:
return NotImplemented
def __le__(self, other):
"""
Automatically created by attrs.
"""
if isinstance(other, self.__class__):
return attrs_to_tuple(self) <= attrs_to_tuple(other)
else:
return NotImplemented
def __gt__(self, other):
"""
Automatically created by attrs.
"""
if isinstance(other, self.__class__):
return attrs_to_tuple(self) > attrs_to_tuple(other)
else:
return NotImplemented
def __ge__(self, other):
"""
Automatically created by attrs.
"""
if isinstance(other, self.__class__):
return attrs_to_tuple(self) >= attrs_to_tuple(other)
else:
return NotImplemented
return eq, ne, __lt__, __le__, __gt__, __ge__
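# Illustrative sketch: for attributes ``x`` and ``y`` the generated ``__eq__`` source
# assembled above is roughly
#
#     def __eq__(self, other):
#         if other.__class__ is not self.__class__:
#             return NotImplemented
#         return (
#             self.x,
#             self.y,
#         ) == (
#             other.x,
#             other.y,
#         )
#
# i.e. a tuple comparison, which sidesteps the nan != nan irregularity mentioned above.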
def _add_cmp(cls, attrs=None):
"""
Add comparison methods to *cls*.
"""
if attrs is None:
attrs = cls.__attrs_attrs__
cls.__eq__, cls.__ne__, cls.__lt__, cls.__le__, cls.__gt__, cls.__ge__ = \
_make_cmp(attrs)
return cls
def _make_repr(attrs, ns):
"""
Make a repr method for *attrs*, adding *ns* to the full class name.
"""
attr_names = tuple(
a.name
for a in attrs
if a.repr
)
def __repr__(self):
"""
Automatically created by attrs.
"""
real_cls = self.__class__
if ns is None:
qualname = getattr(real_cls, "__qualname__", None)
if qualname is not None:
class_name = qualname.rsplit(">.", 1)[-1]
else:
class_name = real_cls.__name__
else:
class_name = ns + "." + real_cls.__name__
return "{0}({1})".format(
class_name,
", ".join(
name + "=" + repr(getattr(self, name, NOTHING))
for name in attr_names
)
)
return __repr__
def _add_repr(cls, ns=None, attrs=None):
"""
Add a repr method to *cls*.
"""
if attrs is None:
attrs = cls.__attrs_attrs__
cls.__repr__ = _make_repr(attrs, ns)
return cls
def _make_init(attrs, post_init, frozen):
attrs = [
a
for a in attrs
if a.init or a.default is not NOTHING
]
# We cache the generated init methods for the same kinds of attributes.
sha1 = hashlib.sha1()
sha1.update(repr(attrs).encode("utf-8"))
unique_filename = "<attrs generated init {0}>".format(
sha1.hexdigest()
)
script, globs = _attrs_to_init_script(
attrs,
frozen,
post_init,
)
locs = {}
bytecode = compile(script, unique_filename, "exec")
attr_dict = dict((a.name, a) for a in attrs)
globs.update({
"NOTHING": NOTHING,
"attr_dict": attr_dict,
})
if frozen is True:
# Save the lookup overhead in __init__ if we need to circumvent
# immutability.
globs["_cached_setattr"] = _obj_setattr
eval(bytecode, globs, locs)
# In order for debuggers like PDB to be able to step through the code,
# we add a fake linecache entry.
linecache.cache[unique_filename] = (
len(script),
None,
script.splitlines(True),
unique_filename,
)
return locs["__init__"]
def _add_init(cls, frozen):
"""
Add a __init__ method to *cls*. If *frozen* is True, make it immutable.
"""
cls.__init__ = _make_init(
cls.__attrs_attrs__,
getattr(cls, "__attrs_post_init__", False),
frozen,
)
return cls
def fields(cls):
"""
Returns the tuple of ``attrs`` attributes for a class.
The tuple also allows accessing the fields by their names (see below for
examples).
:param type cls: Class to introspect.
:raise TypeError: If *cls* is not a class.
:raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
class.
:rtype: tuple (with name accessors) of :class:`attr.Attribute`
.. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
by name.
"""
if not isclass(cls):
raise TypeError("Passed object must be a class.")
attrs = getattr(cls, "__attrs_attrs__", None)
if attrs is None:
raise NotAnAttrsClassError(
"{cls!r} is not an attrs-decorated class.".format(cls=cls)
)
return attrs
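# Illustrative sketch, not part of the original module: the docstring above points to
# examples "below" that were stripped. With a made-up class:
#
#     import attr
#
#     @attr.s
#     class C(object):
#         x = attr.ib()
#         y = attr.ib(default=3)
#
#     attr.fields(C)            # (Attribute(name='x', ...), Attribute(name='y', ...))
#     attr.fields(C).y          # same object as attr.fields(C)[1], via the name property
#     attr.fields(C).y.default  # 3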
def validate(inst):
"""
Validate all attributes on *inst* that have a validator.
Lets all exceptions propagate.
:param inst: Instance of a class with ``attrs`` attributes.
"""
if _config._run_validators is False:
return
for a in fields(inst.__class__):
v = a.validator
if v is not None:
v(inst, a, getattr(inst, a.name))
def _attrs_to_init_script(attrs, frozen, post_init):
"""
Return a script of an initializer for *attrs* and a dict of globals.
The globals are expected by the generated script.
If *frozen* is True, we cannot set the attributes directly so we use
a cached ``object.__setattr__``.
"""
lines = []
if frozen is True:
lines.append(
# Circumvent the __setattr__ descriptor to save one lookup per
# assignment.
"_setattr = _cached_setattr.__get__(self, self.__class__)"
)
def fmt_setter(attr_name, value_var):
return "_setattr('%(attr_name)s', %(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return "_setattr('%(attr_name)s', %(conv)s(%(value_var)s))" % {
"attr_name": attr_name,
"value_var": value_var,
"conv": conv_name,
}
else:
def fmt_setter(attr_name, value):
return "self.%(attr_name)s = %(value)s" % {
"attr_name": attr_name,
"value": value,
}
def fmt_setter_with_converter(attr_name, value_var):
conv_name = _init_converter_pat.format(attr_name)
return "self.%(attr_name)s = %(conv)s(%(value_var)s)" % {
"attr_name": attr_name,
"value_var": value_var,
"conv": conv_name,
}
args = []
attrs_to_validate = []
# This is a dictionary of names to validator and converter callables.
# Injecting this into __init__ globals lets us avoid lookups.
names_for_globals = {}
for a in attrs:
if a.validator:
attrs_to_validate.append(a)
attr_name = a.name
arg_name = a.name.lstrip("_")
has_factory = isinstance(a.default, Factory)
if has_factory and a.default.takes_self:
maybe_self = "self"
else:
maybe_self = ""
if a.init is False:
if has_factory:
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(fmt_setter_with_converter(
attr_name,
init_factory_name + "({0})".format(maybe_self)))
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter
else:
lines.append(fmt_setter(
attr_name,
init_factory_name + "({0})".format(maybe_self)
))
names_for_globals[init_factory_name] = a.default.factory
else:
if a.converter is not None:
lines.append(fmt_setter_with_converter(
attr_name,
"attr_dict['{attr_name}'].default"
.format(attr_name=attr_name)
))
conv_name = _init_converter_pat.format(a.name)
names_for_globals[conv_name] = a.converter
else:
lines.append(fmt_setter(
attr_name,
"attr_dict['{attr_name}'].default"
.format(attr_name=attr_name)
))
elif a.default is not NOTHING and not has_factory:
args.append(
"{arg_name}=attr_dict['{attr_name}'].default".format(
arg_name=arg_name,
attr_name=attr_name,
)
)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[_init_converter_pat.format(a.name)] = (
a.converter
)
else:
lines.append(fmt_setter(attr_name, arg_name))
elif has_factory:
args.append("{arg_name}=NOTHING".format(arg_name=arg_name))
lines.append("if {arg_name} is not NOTHING:"
.format(arg_name=arg_name))
init_factory_name = _init_factory_pat.format(a.name)
if a.converter is not None:
lines.append(" " + fmt_setter_with_converter(
attr_name, arg_name
))
lines.append("else:")
lines.append(" " + fmt_setter_with_converter(
attr_name,
init_factory_name + "({0})".format(maybe_self)
))
names_for_globals[_init_converter_pat.format(a.name)] = (
a.converter
)
else:
lines.append(" " + fmt_setter(attr_name, arg_name))
lines.append("else:")
lines.append(" " + fmt_setter(
attr_name,
init_factory_name + "({0})".format(maybe_self)
))
names_for_globals[init_factory_name] = a.default.factory
else:
args.append(arg_name)
if a.converter is not None:
lines.append(fmt_setter_with_converter(attr_name, arg_name))
names_for_globals[_init_converter_pat.format(a.name)] = (
a.converter
)
else:
lines.append(fmt_setter(attr_name, arg_name))
if attrs_to_validate: # we can skip this if there are no validators.
names_for_globals["_config"] = _config
lines.append("if _config._run_validators is True:")
for a in attrs_to_validate:
val_name = "__attr_validator_{}".format(a.name)
attr_name = "__attr_{}".format(a.name)
lines.append(" {}(self, {}, self.{})".format(
val_name, attr_name, a.name))
names_for_globals[val_name] = a.validator
names_for_globals[attr_name] = a
if post_init:
lines.append("self.__attrs_post_init__()")
return """\
def __init__(self, {args}):
{lines}
""".format(
args=", ".join(args),
lines="\n ".join(lines) if lines else "pass",
), names_for_globals
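# Illustrative sketch: for a non-frozen class with a mandatory ``x``, a plain default
# on ``y`` and a validator on ``x``, the script returned above looks roughly like
#
#     def __init__(self, x, y=attr_dict['y'].default):
#         self.x = x
#         self.y = y
#         if _config._run_validators is True:
#             __attr_validator_x(self, __attr_x, self.x)
#
# ``attr_dict``, ``_config`` and the ``__attr_*`` names are injected through the
# returned ``names_for_globals`` dict when ``_make_init`` evals the script.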
class Attribute(object):
"""
*Read-only* representation of an attribute.
:attribute name: The name of the attribute.
Plus *all* arguments of :func:`attr.ib`.
For the version history of the fields, see :func:`attr.ib`.
"""
__slots__ = (
"name", "default", "validator", "repr", "cmp", "hash", "init",
"metadata", "type", "converter",
)
def __init__(self, name, default, validator, repr, cmp, hash, init,
convert=None, metadata=None, type=None, converter=None):
# Cache this descriptor here to speed things up later.
bound_setattr = _obj_setattr.__get__(self, Attribute)
# Despite the big red warning, people *do* instantiate `Attribute`
# themselves.
if convert is not None:
if converter is not None:
raise RuntimeError(
"Can't pass both `convert` and `converter`. "
"Please use `converter` only."
)
warnings.warn(
"The `convert` argument is deprecated in favor of `converter`."
" It will be removed after 2019/01.",
DeprecationWarning, stacklevel=2
)
converter = convert
bound_setattr("name", name)
bound_setattr("default", default)
bound_setattr("validator", validator)
bound_setattr("repr", repr)
bound_setattr("cmp", cmp)
bound_setattr("hash", hash)
bound_setattr("init", init)
bound_setattr("converter", converter)
bound_setattr("metadata", (
metadata_proxy(metadata) if metadata
else _empty_metadata_singleton
))
bound_setattr("type", type)
def __setattr__(self, name, value):
raise FrozenInstanceError()
@property
def convert(self):
warnings.warn(
"The `convert` attribute is deprecated in favor of `converter`. "
"It will be removed after 2019/01.",
DeprecationWarning, stacklevel=2,
)
return self.converter
@classmethod
def from_counting_attr(cls, name, ca, type=None):
# type holds the annotated value. Deal with conflicts:
if type is None:
type = ca.type
elif ca.type is not None:
raise ValueError(
"Type annotation and type argument cannot both be present"
)
inst_dict = {
k: getattr(ca, k)
for k
in Attribute.__slots__
if k not in (
"name", "validator", "default", "type", "convert",
) # exclude methods and deprecated alias
}
return cls(
name=name, validator=ca._validator, default=ca._default, type=type,
**inst_dict
)
# Don't use _add_pickle since fields(Attribute) doesn't work
def __getstate__(self):
"""
Play nice with pickle.
"""
return tuple(getattr(self, name) if name != "metadata"
else dict(self.metadata)
for name in self.__slots__)
def __setstate__(self, state):
"""
Play nice with pickle.
"""
bound_setattr = _obj_setattr.__get__(self, Attribute)
for name, value in zip(self.__slots__, state):
if name != "metadata":
bound_setattr(name, value)
else:
bound_setattr(name, metadata_proxy(value) if value else
_empty_metadata_singleton)
_a = [
Attribute(name=name, default=NOTHING, validator=None,
repr=True, cmp=True, hash=(name != "metadata"), init=True)
for name in Attribute.__slots__
if name != "convert" # XXX: remove once `convert` is gone
]
Attribute = _add_hash(
_add_cmp(_add_repr(Attribute, attrs=_a), attrs=_a),
attrs=[a for a in _a if a.hash]
)
class _CountingAttr(object):
"""
Intermediate representation of attributes that uses a counter to preserve
the order in which the attributes have been defined.
    *Internal* data structure of the attrs library. Running into it is most
    likely the result of a bug like a forgotten `@attr.s` decorator.
"""
__slots__ = ("counter", "_default", "repr", "cmp", "hash", "init",
"metadata", "_validator", "converter", "type")
__attrs_attrs__ = tuple(
Attribute(name=name, default=NOTHING, validator=None,
repr=True, cmp=True, hash=True, init=True)
for name
in ("counter", "_default", "repr", "cmp", "hash", "init",)
) + (
Attribute(name="metadata", default=None, validator=None,
repr=True, cmp=True, hash=False, init=True),
)
cls_counter = 0
def __init__(self, default, validator, repr, cmp, hash, init, converter,
metadata, type):
_CountingAttr.cls_counter += 1
self.counter = _CountingAttr.cls_counter
self._default = default
# If validator is a list/tuple, wrap it using helper validator.
if validator and isinstance(validator, (list, tuple)):
self._validator = and_(*validator)
else:
self._validator = validator
self.repr = repr
self.cmp = cmp
self.hash = hash
self.init = init
self.converter = converter
self.metadata = metadata
self.type = type
def validator(self, meth):
"""
Decorator that adds *meth* to the list of validators.
Returns *meth* unchanged.
.. versionadded:: 17.1.0
"""
if self._validator is None:
self._validator = meth
else:
self._validator = and_(self._validator, meth)
return meth
def default(self, meth):
"""
        Decorator that allows setting the default for an attribute.
Returns *meth* unchanged.
:raises DefaultAlreadySetError: If default has been set before.
.. versionadded:: 17.1.0
"""
if self._default is not NOTHING:
raise DefaultAlreadySetError()
self._default = Factory(meth, takes_self=True)
return meth
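# Usage sketch for the two decorators above (assumes the public ``attr.s`` /
# ``attr.ib`` entry points; the class and values are purely illustrative):
#
#     @attr.s
#     class C(object):
#         x = attr.ib()
#
#         @x.validator
#         def _check_x(self, attribute, value):
#             if value < 0:
#                 raise ValueError("x must be non-negative")
#
#         @x.default
#         def _default_x(self):
#             return 0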
_CountingAttr = _add_cmp(_add_repr(_CountingAttr))
@attrs(slots=True, init=False, hash=True)
class Factory(object):
"""
Stores a factory callable.
If passed as the default value to :func:`attr.ib`, the factory is used to
generate a new value.
:param callable factory: A callable that takes either none or exactly one
mandatory positional argument depending on *takes_self*.
:param bool takes_self: Pass the partially initialized instance that is
being initialized as a positional argument.
.. versionadded:: 17.1.0 *takes_self*
"""
factory = attrib()
takes_self = attrib()
def __init__(self, factory, takes_self=False):
"""
`Factory` is part of the default machinery so if we want a default
value here, we have to implement it ourselves.
"""
self.factory = factory
self.takes_self = takes_self
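# Sketch of how ``Factory`` is typically supplied as a default (``attr.ib`` is
# the public helper; the attribute names are illustrative only):
#
#     items = attr.ib(default=Factory(list))
#     copy_of_items = attr.ib(
#         default=Factory(lambda self: list(self.items), takes_self=True))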
def make_class(name, attrs, bases=(object,), **attributes_arguments):
"""
A quick way to create a new class called *name* with *attrs*.
:param name: The name for the new class.
:type name: str
:param attrs: A list of names or a dictionary of mappings of names to
attributes.
:type attrs: :class:`list` or :class:`dict`
:param tuple bases: Classes that the new class will subclass.
:param attributes_arguments: Passed unmodified to :func:`attr.s`.
:return: A new class with *attrs*.
:rtype: type
.. versionadded:: 17.1.0 *bases*
"""
if isinstance(attrs, dict):
cls_dict = attrs
elif isinstance(attrs, (list, tuple)):
cls_dict = dict((a, attrib()) for a in attrs)
else:
raise TypeError("attrs argument must be a dict or a list.")
post_init = cls_dict.pop("__attrs_post_init__", None)
type_ = type(
name,
bases,
{} if post_init is None else {"__attrs_post_init__": post_init}
)
# For pickling to work, the __module__ variable needs to be set to the
# frame where the class is created. Bypass this step in environments where
# sys._getframe is not defined (Jython for example) or sys._getframe is not
# defined for arguments greater than 0 (IronPython).
try:
type_.__module__ = sys._getframe(1).f_globals.get(
"__name__", "__main__",
)
except (AttributeError, ValueError):
pass
return _attrs(these=cls_dict, **attributes_arguments)(type_)
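# Example sketch of the two accepted shapes for *attrs* (class and attribute
# names are hypothetical):
#
#     Point = make_class("Point", ["x", "y"])
#     Line = make_class("Line", {"a": attrib(default=0), "b": attrib(default=1)})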
# These are required by within this module so we define them here and merely
# import into .validators.
@attrs(slots=True, hash=True)
class _AndValidator(object):
"""
Compose many validators to a single one.
"""
_validators = attrib()
def __call__(self, inst, attr, value):
for v in self._validators:
v(inst, attr, value)
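# Minimal sketch of composing validators (``and_`` below flattens nested
# ``_AndValidator`` instances; the callables here are hypothetical):
#
#     def _positive(inst, attr, value):
#         if value <= 0:
#             raise ValueError("expected a positive value")
#
#     def _below_100(inst, attr, value):
#         if value >= 100:
#             raise ValueError("expected a value below 100")
#
#     composed = and_(_positive, _below_100)  # runs both checks in order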
def and_(*validators):
"""
A validator that composes multiple validators into one.
When called on a value, it runs all wrapped validators.
:param validators: Arbitrary number of validators.
:type validators: callables
.. versionadded:: 17.1.0
"""
vals = []
for validator in validators:
vals.extend(
validator._validators if isinstance(validator, _AndValidator)
else [validator]
)
return _AndValidator(tuple(vals))<|fim▁end|>
| |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-<|fim▁hole|>#それらのフロールールで指定していたMatchにIPアドレス情報(サブネット)を追加
# While mitigating, the catch-all Packet-In rule is not needed, so it is left deleted
# After mitigation, re-install rules starting from the catch-all Packet-In rule
import logging
import socket
import struct
import ipaddress
# Needed by packet_in_handler to check whether the received packet's IPv4 address belongs to the subnet
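# Usage sketch for the helper defined below (Python 2 style, matching the
# ``.decode("utf-8")`` call; the addresses are illustrative):
#
#     is_ipv4_belongs_to_network(u"10.0.1.5", ("10.0.1.0", "255.255.255.0"))
#     # -> True, because 10.0.1.0/24 contains 10.0.1.5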
def is_ipv4_belongs_to_network(ipv4, network):
# netmask -> CIDR
network, netmask = network
network_address = socket.inet_pton(socket.AF_INET, netmask)
cidr_value = bin(struct.unpack('!L', network_address)[0])[2:].index('0')
cidr = "{network}/{cidr_value}".format(**locals())
#check
ipv4 = ipaddress.ip_address(ipv4)
ipv4_network = ipaddress.ip_network(cidr.decode("utf-8"))
return ipv4 in ipv4_network<|fim▁end|>
|
# First, delete all existing flow rules
|
<|file_name|>emit_notices.py<|end_file_name|><|fim▁begin|>import logging
from django.core.management.base import BaseCommand
from notifications.engine import send_all<|fim▁hole|>
class Command(BaseCommand):
help = "Emit queued notices."
def handle(self, *args, **options):
logging.basicConfig(level=logging.DEBUG, format="%(message)s")
logging.info("-" * 72)
send_all(*args)<|fim▁end|>
| |
<|file_name|>dropout.rs<|end_file_name|><|fim▁begin|>use prelude::*;
use kernels::ffi::*;
use densearray::prelude::*;
use operator::prelude::*;
use rng::xorshift::{Xorshiftplus128Rng};
use rand::{Rng, thread_rng};
//use rand::distributions::{IndependentSample};
//use rand::distributions::range::{Range};
use std::cell::{RefCell};
use std::cmp::{max};
use std::rc::{Rc};
#[derive(Clone, Debug)]
pub struct DropoutOperatorConfig {
pub batch_sz: usize,
pub dim: usize,
pub drop_frac: f32,
}
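// Illustrative only: a config for a 1024-dim layer that drops half of the
// activations during learning (the values are hypothetical):
//
//     let cfg = DropoutOperatorConfig { batch_sz: 32, dim: 1024, drop_frac: 0.5 };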
pub struct DropoutOperator<S, IoBuf: ?Sized> {<|fim▁hole|> cfg: DropoutOperatorConfig,
node: OperatorNode,
in_op: Rc<RefCell<DiffOperator<S, IoBuf>>>,
in_: CommonOutput,
out: CommonOutput,
mask: Vec<f32>,
rng: Xorshiftplus128Rng,
//dist: Range<f32>,
}
impl<S, IoBuf: ?Sized> DropoutOperator<S, IoBuf> {
pub fn new<InOp>(cfg: DropoutOperatorConfig, cap: OpCapability, prev_op: Rc<RefCell<InOp>>, prev_arm: usize) -> Rc<RefCell<DropoutOperator<S, IoBuf>>> where InOp: 'static + CommonOperator + DiffOperator<S, IoBuf> {
let in_ = prev_op.borrow()._output(prev_arm);
let out = CommonOutput::new(cfg.batch_sz, cfg.dim, cap);
let mut mask = Vec::with_capacity(cfg.batch_sz * cfg.dim);
mask.resize(cfg.batch_sz * cfg.dim, 0.0);
Rc::new(RefCell::new(DropoutOperator{
cfg: cfg,
node: OperatorNode::default(),
in_op: prev_op,
in_: in_,
out: out,
mask: mask,
rng: Xorshiftplus128Rng::new(&mut thread_rng()),
//dist: Range::new(0.0, 1.0),
}))
}
}
impl<S, IoBuf: ?Sized> Operator for DropoutOperator<S, IoBuf> {
fn _next(&self) -> u64 {
self.node._next()
}
}
impl<S, IoBuf: ?Sized> CommonOperator for DropoutOperator<S, IoBuf> {
fn _output(&self, arm: usize) -> CommonOutput {
assert_eq!(0, arm);
self.out.clone()
}
}
impl<S, IoBuf: ?Sized> DiffOperatorData<S> for DropoutOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperatorIo<IoBuf> for DropoutOperator<S, IoBuf> {
}
impl<S, IoBuf: ?Sized> DiffOperator<S, IoBuf> for DropoutOperator<S, IoBuf> {
fn _traverse_fwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
self.in_op.borrow_mut()._traverse_fwd(epoch, apply);
apply(self);
self.node.pop(epoch);
}
fn _traverse_bwd(&mut self, epoch: u64, apply: &mut FnMut(&mut DiffOperator<S, IoBuf>)) {
self.node.push(epoch);
assert!(self.node.limit(1));
apply(self);
self.in_op.borrow_mut()._traverse_bwd(epoch, apply);
self.node.pop(epoch);
}
fn _forward(&mut self, phase: OpPhase) {
let batch_size = self.in_.batch_sz.get();
assert!(batch_size <= self.cfg.batch_sz);
self.out.batch_sz.set(batch_size);
match phase {
OpPhase::Inference => {
self.out.buf.borrow_mut()[ .. batch_size * self.cfg.dim]
.copy_from_slice(&self.in_.buf.borrow()[ .. batch_size * self.cfg.dim]);
}
OpPhase::Learning => {
let in_buf = &self.in_.buf.borrow()[ .. batch_size * self.cfg.dim];
let out_buf = &mut self.out.buf.borrow_mut()[ .. batch_size * self.cfg.dim];
for p in 0 .. batch_size * self.cfg.dim {
let u: f32 = self.rng.gen();
if u < self.cfg.drop_frac {
self.mask[p] = 0.0;
out_buf[p] = 0.0;
} else {
self.mask[p] = 1.0;
out_buf[p] = in_buf[p];
}
}
}
}
}
fn _backward(&mut self) {
let batch_size = self.out.batch_sz.get();
if let Some(in_grad) = self.in_.grad.as_ref() {
let in_grad = &mut in_grad.borrow_mut()[ .. batch_size * self.cfg.dim];
let out_grad = &self.out.grad.as_ref().unwrap().borrow()[ .. batch_size * self.cfg.dim];
for p in 0 .. batch_size * self.cfg.dim {
in_grad[p] = self.mask[p] * out_grad[p];
}
}
}
}<|fim▁end|>
| |
<|file_name|>simple_executor.rs<|end_file_name|><|fim▁begin|>use crate::task::Task;
use alloc::collections::VecDeque;
use core::task::{RawWaker, RawWakerVTable, Waker};
pub struct SimpleExecutor {
task_queue: VecDeque<Task>,
}
impl SimpleExecutor {
pub fn new() -> SimpleExecutor {
SimpleExecutor {
task_queue: VecDeque::new(),
}
}
pub fn spawn(&mut self, task: Task) { self.task_queue.push_back(task) }
}
<|fim▁hole|>
let vtable = &RawWakerVTable::new(clone, no_op, no_op, no_op);
RawWaker::new(0 as *const (), vtable)
}
fn dummy_waker() -> Waker { unsafe { Waker::from_raw(dummy_raw_waker()) } }<|fim▁end|>
|
fn dummy_raw_waker() -> RawWaker {
fn no_op(_: *const ()) {}
fn clone(_: *const ()) -> RawWaker { dummy_raw_waker() }
|
<|file_name|>app.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import division
from builtins import str
from past.utils import old_div
import copy
from datetime import datetime, timedelta
import dateutil.parser
from functools import wraps
import inspect
import json
import logging
import os
import socket
import sys
import time
from flask._compat import PY2
from flask import (
Flask, url_for, Markup, Blueprint, redirect,
flash, Response, render_template)
from flask.ext.admin import Admin, BaseView, expose, AdminIndexView
from flask.ext.admin.form import DateTimePickerWidget
from flask.ext.admin import base
from flask.ext.admin.contrib.sqla import ModelView
from flask.ext.cache import Cache
from flask import request
import sqlalchemy as sqla
from wtforms import (
widgets,
Form, DateTimeField, SelectField, TextAreaField, PasswordField, StringField)
from pygments import highlight, lexers
from pygments.formatters import HtmlFormatter
import chartkick
import jinja2
import markdown
from sqlalchemy import or_
import airflow
from airflow import jobs, login, models, settings, utils
from airflow.configuration import conf
from airflow.models import State
from airflow.settings import Session
from airflow.utils import AirflowException
from airflow.www import utils as wwwutils
login_required = login.login_required
current_user = login.current_user
logout_user = login.logout_user
from airflow import default_login as login
if conf.getboolean('webserver', 'AUTHENTICATE'):
try:
# Environment specific login
import airflow_login as login
except ImportError:
logging.error(
"authenticate is set to True in airflow.cfg, "
"but airflow_login failed to import")
login_required = login.login_required
current_user = login.current_user
logout_user = login.logout_user
AUTHENTICATE = conf.getboolean('webserver', 'AUTHENTICATE')
if AUTHENTICATE is False:
login_required = lambda x: x
FILTER_BY_OWNER = False
if conf.getboolean('webserver', 'FILTER_BY_OWNER'):
# filter_by_owner if authentication is enabled and filter_by_owner is true
FILTER_BY_OWNER = AUTHENTICATE
class VisiblePasswordInput(widgets.PasswordInput):
def __init__(self, hide_value=False):
self.hide_value = hide_value
class VisiblePasswordField(PasswordField):
widget = VisiblePasswordInput()
def superuser_required(f):
'''
Decorator for views requiring superuser access
'''
@wraps(f)
def decorated_function(*args, **kwargs):
if (
not AUTHENTICATE or
(not current_user.is_anonymous() and current_user.is_superuser())
):
return f(*args, **kwargs)
else:
flash("This page requires superuser privileges", "error")
return redirect(url_for('admin.index'))
return decorated_function
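# Usage sketch (hypothetical view): the decorator wraps an admin view method so
# that non-superusers get a flash message and are redirected to the index:
#
#     @expose('/secret')
#     @superuser_required
#     def secret(self):
#         return self.render('airflow/secret.html')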
def data_profiling_required(f):
'''
Decorator for views requiring data profiling access
'''
@wraps(f)
def decorated_function(*args, **kwargs):
if (
not AUTHENTICATE or
(not current_user.is_anonymous() and current_user.data_profiling())
):
return f(*args, **kwargs)
else:
flash("This page requires data profiling privileges", "error")
return redirect(url_for('admin.index'))
return decorated_function
QUERY_LIMIT = 100000
CHART_LIMIT = 200000
def pygment_html_render(s, lexer=lexers.TextLexer):
return highlight(
s,
lexer(),
HtmlFormatter(linenos=True),
)
def wrapped_markdown(s):
return '<div class="rich_doc">' + markdown.markdown(s) + "</div>"
attr_renderer = {
'bash_command': lambda x: pygment_html_render(x, lexers.BashLexer),
'hql': lambda x: pygment_html_render(x, lexers.SqlLexer),
'sql': lambda x: pygment_html_render(x, lexers.SqlLexer),
'doc': lambda x: pygment_html_render(x, lexers.TextLexer),
'doc_json': lambda x: pygment_html_render(x, lexers.JsonLexer),
'doc_rst': lambda x: pygment_html_render(x, lexers.RstLexer),
'doc_yaml': lambda x: pygment_html_render(x, lexers.YamlLexer),
'doc_md': wrapped_markdown,
'python_callable': lambda x: pygment_html_render(
inspect.getsource(x), lexers.PythonLexer),
}
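# Sketch of how these renderers are used further down (field name and snippet
# are illustrative): attr_renderer['sql']("SELECT 1") returns the snippet
# highlighted as HTML via Pygments, while attr_renderer['doc_md'] wraps the
# rendered Markdown in a <div class="rich_doc"> container.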
dagbag = models.DagBag(os.path.expanduser(conf.get('core', 'DAGS_FOLDER')))
utils.pessimistic_connection_handling()
app = Flask(__name__)
app.config['SQLALCHEMY_POOL_RECYCLE'] = 3600
app.secret_key = conf.get('webserver', 'SECRET_KEY')
login.login_manager.init_app(app)
cache = Cache(
app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})
# Init for chartkick, the python wrapper for highcharts
ck = Blueprint(
'ck_page', __name__,
static_folder=chartkick.js(), static_url_path='/static')
app.register_blueprint(ck, url_prefix='/ck')
app.jinja_env.add_extension("chartkick.ext.charts")
@app.context_processor
def jinja_globals():
return {
'hostname': socket.gethostname(),
}
class DateTimeForm(Form):
# Date filter form needed for gantt and graph view
execution_date = DateTimeField(
"Execution date", widget=DateTimePickerWidget())
class GraphForm(Form):
execution_date = DateTimeField(
"Execution date", widget=DateTimePickerWidget())
arrange = SelectField("Layout", choices=(
('LR', "Left->Right"),
('RL', "Right->Left"),
('TB', "Top->Bottom"),
('BT', "Bottom->Top"),
))
class TreeForm(Form):
base_date = DateTimeField(
"Anchor date", widget=DateTimePickerWidget(), default=datetime.now())
num_runs = SelectField("Number of runs", default=25, choices=(
(5, "5"),
(25, "25"),
(50, "50"),
(100, "100"),
(365, "365"),
))
@app.route('/')
def index():
return redirect(url_for('admin.index'))
@app.route('/health')
def health():
""" We can add an array of tests here to check the server's health """
content = Markup(markdown.markdown("The server is healthy!"))
return content
@app.teardown_appcontext
def shutdown_session(exception=None):
settings.Session.remove()
def dag_link(v, c, m, p):
url = url_for(
'airflow.graph',
dag_id=m.dag_id)
return Markup(
'<a href="{url}">{m.dag_id}</a>'.format(**locals()))
class DagModelView(wwwutils.SuperUserMixin, ModelView):
column_list = ('dag_id', 'owners')
column_editable_list = ('is_paused',)
form_excluded_columns = ('is_subdag', 'is_active')
column_searchable_list = ('dag_id',)
column_filters = (
'dag_id', 'owners', 'is_paused', 'is_active', 'is_subdag',
'last_scheduler_run', 'last_expired')
form_widget_args = {
'last_scheduler_run': {'disabled': True},
'fileloc': {'disabled': True},
'is_paused': {'disabled': True},
'last_pickled': {'disabled': True},
'pickle_id': {'disabled': True},
'last_loaded': {'disabled': True},
'last_expired': {'disabled': True},
'pickle_size': {'disabled': True},
'scheduler_lock': {'disabled': True},
'owners': {'disabled': True},
}
column_formatters = dict(
dag_id=dag_link,
)
can_delete = False
can_create = False
page_size = 50
list_template = 'airflow/list_dags.html'
named_filter_urls = True
def get_query(self):
"""
Default filters for model
"""
return (
super(DagModelView, self)
.get_query()
.filter(or_(models.DagModel.is_active, models.DagModel.is_paused))
.filter(~models.DagModel.is_subdag)
)
def get_count_query(self):
"""
Default filters for model
"""
return (
super(DagModelView, self)
.get_count_query()
.filter(models.DagModel.is_active)
.filter(~models.DagModel.is_subdag)
)
class HomeView(AdminIndexView):
@expose("/")
@login_required
def index(self):
session = Session()
DM = models.DagModel
qry = None
# filter the dags if filter_by_owner and current user is not superuser
do_filter = FILTER_BY_OWNER and (not current_user.is_superuser())
if do_filter:
qry = session.query(DM).filter(~DM.is_subdag, DM.is_active, DM.owners == current_user.username).all()
else:
qry = session.query(DM).filter(~DM.is_subdag, DM.is_active).all()
orm_dags = {dag.dag_id: dag for dag in qry}
import_errors = session.query(models.ImportError).all()
for ie in import_errors:
flash(
"Broken DAG: [{ie.filename}] {ie.stacktrace}".format(ie=ie),
"error")
session.expunge_all()
session.commit()
session.close()
dags = dagbag.dags.values()
if do_filter:
dags = {dag.dag_id: dag for dag in dags if (dag.owner == current_user.username and (not dag.parent_dag))}
else:
dags = {dag.dag_id: dag for dag in dags if not dag.parent_dag}
all_dag_ids = sorted(set(orm_dags.keys()) | set(dags.keys()))
return self.render(
'airflow/dags.html',
dags=dags,
orm_dags=orm_dags,
all_dag_ids=all_dag_ids)
admin = Admin(
app,
name="Airflow",
index_view=HomeView(name="DAGs"),
template_mode='bootstrap3')
class Airflow(BaseView):
def is_visible(self):
return False
@expose('/')
@login_required
def index(self):
return self.render('airflow/dags.html')
@expose('/chart_data')
@data_profiling_required
@wwwutils.gzipped
# @cache.cached(timeout=3600, key_prefix=wwwutils.make_cache_key)
def chart_data(self):
session = settings.Session()
chart_id = request.args.get('chart_id')
csv = request.args.get('csv') == "true"
chart = session.query(models.Chart).filter_by(id=chart_id).all()[0]
db = session.query(
models.Connection).filter_by(conn_id=chart.conn_id).all()[0]
session.expunge_all()
session.commit()
session.close()
payload = {}
payload['state'] = 'ERROR'
payload['error'] = ''
# Processing templated fields
try:
args = eval(chart.default_params)
            if type(args) is not dict:
raise AirflowException('Not a dict')
except:
args = {}
payload['error'] += (
"Default params is not valid, string has to evaluate as "
"a Python dictionary. ")
request_dict = {k: request.args.get(k) for k in request.args}
from airflow import macros
args.update(request_dict)
args['macros'] = macros
sql = jinja2.Template(chart.sql).render(**args)
label = jinja2.Template(chart.label).render(**args)
payload['sql_html'] = Markup(highlight(
sql,
lexers.SqlLexer(), # Lexer call
HtmlFormatter(noclasses=True))
)
payload['label'] = label
import pandas as pd
pd.set_option('display.max_colwidth', 100)
hook = db.get_hook()
try:
df = hook.get_pandas_df(wwwutils.limit_sql(sql, CHART_LIMIT, conn_type=db.conn_type))
df = df.fillna(0)
except Exception as e:
payload['error'] += "SQL execution failed. Details: " + str(e)
if csv:
return Response(
response=df.to_csv(index=False),
status=200,
mimetype="application/text")
if not payload['error'] and len(df) == CHART_LIMIT:
payload['warning'] = (
"Data has been truncated to {0}"
" rows. Expect incomplete results.").format(CHART_LIMIT)
def date_handler(obj):
return obj.isoformat() if hasattr(obj, 'isoformat') else obj
if not payload['error'] and len(df) == 0:
payload['error'] += "Empty result set. "
elif (
not payload['error'] and
chart.sql_layout == 'series' and
chart.chart_type != "datatable" and
len(df.columns) < 3):
payload['error'] += "SQL needs to return at least 3 columns. "
elif (
not payload['error'] and
                chart.sql_layout == 'columns' and
len(df.columns) < 2):
payload['error'] += "SQL needs to return at least 2 columns. "
elif not payload['error']:
import numpy as np
chart_type = chart.chart_type
data = None
if chart_type == "datatable":
chart.show_datatable = True
if chart.show_datatable:
data = df.to_dict(orient="split")
data['columns'] = [{'title': c} for c in data['columns']]
# Trying to convert time to something Highcharts likes
x_col = 1 if chart.sql_layout == 'series' else 0
if chart.x_is_date:
try:
# From string to datetime
df[df.columns[x_col]] = pd.to_datetime(
df[df.columns[x_col]])
except Exception as e:
raise AirflowException(str(e))
df[df.columns[x_col]] = df[df.columns[x_col]].apply(
lambda x: int(x.strftime("%s")) * 1000)
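        # e.g. a timestamp for 2015-01-01 becomes roughly 1420070400000, i.e.
        # milliseconds since the epoch, which is what Highcharts expects on a
        # datetime axis (strftime("%s") is platform-dependent and uses the
        # local timezone).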
series = []
colorAxis = None
if chart_type == 'datatable':
payload['data'] = data
payload['state'] = 'SUCCESS'
return Response(
response=json.dumps(
payload, indent=4, default=date_handler),
status=200,
mimetype="application/json")
elif chart_type == 'para':
df.rename(columns={
df.columns[0]: 'name',
df.columns[1]: 'group',
}, inplace=True)
return Response(
response=df.to_csv(index=False),
status=200,
mimetype="application/text")
elif chart_type == 'heatmap':
color_perc_lbound = float(
request.args.get('color_perc_lbound', 0))
color_perc_rbound = float(
request.args.get('color_perc_rbound', 1))
color_scheme = request.args.get('color_scheme', 'blue_red')
if color_scheme == 'blue_red':
stops = [
[color_perc_lbound, '#00D1C1'],
[
color_perc_lbound +
((color_perc_rbound - color_perc_lbound)/2),
'#FFFFCC'
],
[color_perc_rbound, '#FF5A5F']
]
elif color_scheme == 'blue_scale':
stops = [
[color_perc_lbound, '#FFFFFF'],
[color_perc_rbound, '#2222FF']
]
elif color_scheme == 'fire':
diff = float(color_perc_rbound - color_perc_lbound)
stops = [
[color_perc_lbound, '#FFFFFF'],
[color_perc_lbound + 0.33*diff, '#FFFF00'],
[color_perc_lbound + 0.66*diff, '#FF0000'],
[color_perc_rbound, '#000000']
]
else:
stops = [
[color_perc_lbound, '#FFFFFF'],
[
color_perc_lbound +
((color_perc_rbound - color_perc_lbound)/2),
'#888888'
],
[color_perc_rbound, '#000000'],
]
xaxis_label = df.columns[1]
yaxis_label = df.columns[2]
data = []
for row in df.itertuples():
data.append({
'x': row[2],
'y': row[3],
'value': row[4],
})
x_format = '{point.x:%Y-%m-%d}' \
if chart.x_is_date else '{point.x}'
series.append({
'data': data,
'borderWidth': 0,
'colsize': 24 * 36e5,
'turboThreshold': sys.float_info.max,
'tooltip': {
'headerFormat': '',
'pointFormat': (
df.columns[1] + ': ' + x_format + '<br/>' +
df.columns[2] + ': {point.y}<br/>' +
df.columns[3] + ': <b>{point.value}</b>'
),
},
})
colorAxis = {
'stops': stops,
'minColor': '#FFFFFF',
'maxColor': '#000000',
'min': 50,
'max': 2200,
}
else:
if chart.sql_layout == 'series':
# User provides columns (series, x, y)
xaxis_label = df.columns[1]
yaxis_label = df.columns[2]
df[df.columns[2]] = df[df.columns[2]].astype(np.float)
df = df.pivot_table(
index=df.columns[1],
columns=df.columns[0],
values=df.columns[2], aggfunc=np.sum)
else:
# User provides columns (x, y, metric1, metric2, ...)
xaxis_label = df.columns[0]
yaxis_label = 'y'
df.index = df[df.columns[0]]
df = df.sort(df.columns[0])
del df[df.columns[0]]
for col in df.columns:
df[col] = df[col].astype(np.float)
for col in df.columns:
series.append({
'name': col,
'data': [
(k, df[col][k])
for k in df[col].keys()
if not np.isnan(df[col][k])]
})
series = [serie for serie in sorted(
series, key=lambda s: s['data'][0][1], reverse=True)]
if chart_type == "stacked_area":
stacking = "normal"
chart_type = 'area'
elif chart_type == "percent_area":
stacking = "percent"
chart_type = 'area'
else:
stacking = None
hc = {
'chart': {
'type': chart_type
},
'plotOptions': {
'series': {
'marker': {
'enabled': False
}
},
'area': {'stacking': stacking},
},
'title': {'text': ''},
'xAxis': {
'title': {'text': xaxis_label},
'type': 'datetime' if chart.x_is_date else None,
},
'yAxis': {
'title': {'text': yaxis_label},
},
'colorAxis': colorAxis,
'tooltip': {
'useHTML': True,
'backgroundColor': None,
'borderWidth': 0,
},
'series': series,
}
if chart.y_log_scale:
hc['yAxis']['type'] = 'logarithmic'
hc['yAxis']['minorTickInterval'] = 0.1
if 'min' in hc['yAxis']:
del hc['yAxis']['min']
payload['state'] = 'SUCCESS'
payload['hc'] = hc
payload['data'] = data
payload['request_dict'] = request_dict
return Response(
response=json.dumps(payload, indent=4, default=date_handler),
status=200,
mimetype="application/json")
@expose('/chart')
@data_profiling_required
def chart(self):
session = settings.Session()
chart_id = request.args.get('chart_id')
embed = request.args.get('embed')
chart = session.query(models.Chart).filter_by(id=chart_id).all()[0]
session.expunge_all()
session.commit()
session.close()
if chart.chart_type == 'para':
return self.render('airflow/para/para.html', chart=chart)
sql = ""
if chart.show_sql:
sql = Markup(highlight(
chart.sql,
lexers.SqlLexer(), # Lexer call
HtmlFormatter(noclasses=True))
)
return self.render(
'airflow/highchart.html',
chart=chart,
title="Airflow - Chart",
sql=sql,
label=chart.label,
embed=embed)
@expose('/dag_stats')
@login_required
def dag_stats(self):
states = [
State.SUCCESS,
State.RUNNING,
State.FAILED,
State.UPSTREAM_FAILED,
State.UP_FOR_RETRY,
State.QUEUED,
]
task_ids = []
for dag in dagbag.dags.values():
task_ids += dag.task_ids
TI = models.TaskInstance
session = Session()
qry = (
session.query(TI.dag_id, TI.state, sqla.func.count(TI.task_id))
.filter(TI.task_id.in_(task_ids))
.group_by(TI.dag_id, TI.state)
)
data = {}
for dag_id, state, count in qry:
if dag_id not in data:
data[dag_id] = {}
data[dag_id][state] = count
session.commit()
session.close()
payload = {}
for dag in dagbag.dags.values():
payload[dag.dag_id] = []
for state in states:
try:
count = data[dag.dag_id][state]
except:
count = 0
d = {
'state': state,
'count': count,
'dag_id': dag.dag_id,
'color': State.color(state)
}
payload[dag.dag_id].append(d)
return Response(
response=json.dumps(payload, indent=4),
status=200, mimetype="application/json")
@expose('/code')
@login_required
def code(self):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
code = "".join(open(dag.full_filepath, 'r').readlines())
title = dag.filepath
html_code = highlight(
code, lexers.PythonLexer(), HtmlFormatter(linenos=True))
return self.render(
'airflow/dag_code.html', html_code=html_code, dag=dag, title=title,
root=request.args.get('root'),
demo_mode=conf.getboolean('webserver', 'demo_mode'))
@app.errorhandler(404)
def circles(self):
return render_template('airflow/circles.html'), 404
@expose('/sandbox')
@login_required
def sandbox(self):
from airflow import configuration
title = "Sandbox Suggested Configuration"
cfg_loc = configuration.AIRFLOW_CONFIG + '.sandbox'
f = open(cfg_loc, 'r')
config = f.read()
f.close()
code_html = Markup(highlight(
config,
lexers.IniLexer(), # Lexer call
HtmlFormatter(noclasses=True))
)
return self.render(
'airflow/code.html',
code_html=code_html, title=title, subtitle=cfg_loc)
@expose('/noaccess')
def noaccess(self):
return self.render('airflow/noaccess.html')
@expose('/headers')
def headers(self):
d = {k: v for k, v in request.headers}
if hasattr(current_user, 'is_superuser'):
d['is_superuser'] = current_user.is_superuser()
d['data_profiling'] = current_user.data_profiling()
d['is_anonymous'] = current_user.is_anonymous()
d['is_authenticated'] = current_user.is_authenticated()
return Response(
response=json.dumps(d, indent=4),
status=200, mimetype="application/json")
@expose('/login')
def login(self):
return login.login(self, request)
@expose('/logout')
def logout(self):
logout_user()
return redirect(url_for('admin.index'))
@expose('/rendered')
@login_required
def rendered(self):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = dateutil.parser.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
dag = dagbag.get_dag(dag_id)
task = copy.copy(dag.get_task(task_id))
ti = models.TaskInstance(task=task, execution_date=dttm)
try:
ti.render_templates()
except Exception as e:
flash("Error rendering template: " + str(e), "error")
title = "Rendered Template"
html_dict = {}
for template_field in task.__class__.template_fields:
content = getattr(task, template_field)
if template_field in attr_renderer:
html_dict[template_field] = attr_renderer[template_field](content)
else:
html_dict[template_field] = (
"<pre><code>" + str(content) + "</pre></code>")
return self.render(
'airflow/ti_code.html',
html_dict=html_dict,
dag=dag,
task_id=task_id,
execution_date=execution_date,
form=form,
title=title,)
@expose('/log')
@login_required
def log(self):
BASE_LOG_FOLDER = os.path.expanduser(
conf.get('core', 'BASE_LOG_FOLDER'))
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dag = dagbag.get_dag(dag_id)
log_relative = "/{dag_id}/{task_id}/{execution_date}".format(
**locals())
loc = BASE_LOG_FOLDER + log_relative
loc = loc.format(**locals())
log = ""
TI = models.TaskInstance
session = Session()
dttm = dateutil.parser.parse(execution_date)
ti = session.query(TI).filter(
TI.dag_id == dag_id, TI.task_id == task_id,
TI.execution_date == dttm).first()
dttm = dateutil.parser.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
if ti:
host = ti.hostname
if socket.gethostname() == host:
try:
f = open(loc)
log += "".join(f.readlines())
f.close()
except:
log = "Log file isn't where expected.\n".format(loc)
else:
WORKER_LOG_SERVER_PORT = \
conf.get('celery', 'WORKER_LOG_SERVER_PORT')
url = (
"http://{host}:{WORKER_LOG_SERVER_PORT}/log"
"{log_relative}").format(**locals())
log += "Log file isn't local.\n"
log += "Fetching here: {url}\n".format(**locals())
try:
import requests
log += requests.get(url).text
except:
log += "Failed to fetch log file.".format(**locals())
session.commit()
session.close()
log = log.decode('utf-8') if PY2 else log
title = "Log"
return self.render(
'airflow/ti_code.html',
code=log, dag=dag, title=title, task_id=task_id,
execution_date=execution_date, form=form)
@expose('/task')
@login_required
def task(self):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
# Carrying execution_date through, even though it's irrelevant for
# this context
execution_date = request.args.get('execution_date')
dttm = dateutil.parser.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
dag = dagbag.get_dag(dag_id)
if not dag or task_id not in dag.task_ids:
flash(
"Task [{}.{}] doesn't seem to exist"
" at the moment".format(dag_id, task_id),
"error")
return redirect('/admin/')
task = dag.get_task(task_id)
task = copy.copy(task)
task.resolve_template_files()
attributes = []
for attr_name in dir(task):
if not attr_name.startswith('_'):
attr = getattr(task, attr_name)
if type(attr) != type(self.task) and \
attr_name not in attr_renderer:
attributes.append((attr_name, str(attr)))
title = "Task Details"
# Color coding the special attributes that are code
special_attrs_rendered = {}
for attr_name in attr_renderer:
if hasattr(task, attr_name):
source = getattr(task, attr_name)
special_attrs_rendered[attr_name] = attr_renderer[attr_name](source)
return self.render(
'airflow/task.html',
attributes=attributes,
task_id=task_id,
execution_date=execution_date,
special_attrs_rendered=special_attrs_rendered,
form=form,
dag=dag, title=title)
@expose('/action')
@login_required
def action(self):
action = request.args.get('action')
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
origin = request.args.get('origin')
dag = dagbag.get_dag(dag_id)
task = dag.get_task(task_id)
execution_date = request.args.get('execution_date')
execution_date = dateutil.parser.parse(execution_date)
confirmed = request.args.get('confirmed') == "true"
upstream = request.args.get('upstream') == "true"
downstream = request.args.get('downstream') == "true"
if action == "run":
from airflow.executors import DEFAULT_EXECUTOR as executor
from airflow.executors import CeleryExecutor
if not isinstance(executor, CeleryExecutor):
flash("Only works with the CeleryExecutor, sorry", "error")
return redirect(origin)
force = request.args.get('force') == "true"
deps = request.args.get('deps') == "true"
ti = models.TaskInstance(task=task, execution_date=execution_date)
executor.start()
executor.queue_task_instance(
ti, force=force, ignore_dependencies=deps)
executor.heartbeat()
flash(
"Sent {} to the message queue, "
"it should start any moment now.".format(ti))
return redirect(origin)
elif action == 'clear':
future = request.args.get('future') == "true"
past = request.args.get('past') == "true"
dag = dag.sub_dag(
task_regex=r"^{0}$".format(task_id),
include_downstream=downstream,
include_upstream=upstream)
end_date = execution_date if not future else None
start_date = execution_date if not past else None
if confirmed:
count = dag.clear(
start_date=start_date,
end_date=end_date)
flash("{0} task instances have been cleared".format(count))
return redirect(origin)
else:
tis = dag.clear(
start_date=start_date,
end_date=end_date,
dry_run=True)
if not tis:
flash("No task instances to clear", 'error')
response = redirect(origin)
else:
details = "\n".join([str(t) for t in tis])
response = self.render(
'airflow/confirm.html',
message=(
"Here's the list of task instances you are about "
"to clear:"),
details=details,)
return response
elif action == 'success':
# Flagging tasks as successful
session = settings.Session()
task_ids = [task_id]
if downstream:
task_ids += [
t.task_id
for t in task.get_flat_relatives(upstream=False)]
if upstream:
task_ids += [
t.task_id
for t in task.get_flat_relatives(upstream=True)]
TI = models.TaskInstance
tis = session.query(TI).filter(
TI.dag_id == dag_id,
TI.execution_date == execution_date,
TI.task_id.in_(task_ids)).all()
if confirmed:
updated_task_ids = []
for ti in tis:
updated_task_ids.append(ti.task_id)
ti.state = State.SUCCESS
session.commit()
to_insert = list(set(task_ids) - set(updated_task_ids))
for task_id in to_insert:
ti = TI(
task=dag.get_task(task_id),
execution_date=execution_date,
state=State.SUCCESS)
session.add(ti)
session.commit()
session.commit()
session.close()
flash("Marked success on {} task instances".format(
len(task_ids)))
return redirect(origin)
else:
if not task_ids:
flash("No task instances to mark as successful", 'error')
response = redirect(origin)
else:
tis = []
for task_id in task_ids:
tis.append(TI(
task=dag.get_task(task_id),
execution_date=execution_date,
state=State.SUCCESS))
details = "\n".join([str(t) for t in tis])
response = self.render(
'airflow/confirm.html',
message=(
"Here's the list of task instances you are about "
"to mark as successful:"),
details=details,)
return response
@expose('/tree')
@login_required
@wwwutils.gzipped
def tree(self):
dag_id = request.args.get('dag_id')
blur = conf.getboolean('webserver', 'demo_mode')
dag = dagbag.get_dag(dag_id)
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_downstream=False,
include_upstream=True)
session = settings.Session()
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else 25
if not base_date:
base_date = dag.latest_execution_date or datetime.now()
else:
base_date = dateutil.parser.parse(base_date)
base_date = utils.round_time(base_date, dag.schedule_interval)
form = TreeForm(data={'base_date': base_date, 'num_runs': num_runs})
start_date = dag.start_date
if not start_date and 'start_date' in dag.default_args:
start_date = dag.default_args['start_date']
if start_date:
difference = base_date - start_date
offset = timedelta(seconds=int(difference.total_seconds() % dag.schedule_interval.total_seconds()))
base_date -= offset
base_date -= timedelta(microseconds=base_date.microsecond)
from_date = (base_date - (num_runs * dag.schedule_interval))
dates = utils.date_range(
from_date, base_date, dag.schedule_interval)
task_instances = {}
for ti in dag.get_task_instances(session, from_date):
task_instances[(ti.task_id, ti.execution_date)] = ti
expanded = []
# The default recursion traces every path so that tree view has full
# expand/collapse functionality. After 5,000 nodes we stop and fall
# back on a quick DFS search for performance. See PR #320.
node_count = [0]
node_limit = 5000 / len(dag.roots)
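        # e.g. with 4 root tasks the shared budget drops to 1250 nodes; once it
        # is exhausted, tasks that were already visited are not traced again.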
def recurse_nodes(task, visited):
visited.add(task)
node_count[0] += 1
children = [
recurse_nodes(t, visited) for t in task.upstream_list
if node_count[0] < node_limit or t not in visited]
# D3 tree uses children vs _children to define what is
# expanded or not. The following block makes it such that
# repeated nodes are collapsed by default.
children_key = 'children'
if task.task_id not in expanded:
expanded.append(task.task_id)
elif children:
children_key = "_children"
return {
'name': task.task_id,
'instances': [
utils.alchemy_to_dict(
task_instances.get((task.task_id, d))) or {
'execution_date': d.isoformat(),
'task_id': task.task_id
}
for d in dates],
children_key: children,
'num_dep': len(task.upstream_list),
'operator': task.task_type,
'retries': task.retries,
'owner': task.owner,
'start_date': task.start_date,
'end_date': task.end_date,
'depends_on_past': task.depends_on_past,
'ui_color': task.ui_color,
}
if len(dag.roots) > 1:
# d3 likes a single root
data = {
'name': 'root',
'instances': [],
'children': [recurse_nodes(t, set()) for t in dag.roots]
}
elif len(dag.roots) == 1:
data = recurse_nodes(dag.roots[0], set())
else:
flash("No tasks found.", "error")
data = []
data = json.dumps(data, indent=4, default=utils.json_ser)
session.commit()
session.close()
return self.render(
'airflow/tree.html',
operators=sorted(
list(set([op.__class__ for op in dag.tasks])),
key=lambda x: x.__name__<|fim▁hole|> form=form,
dag=dag, data=data, blur=blur)
@expose('/graph')
@login_required
@wwwutils.gzipped
def graph(self):
session = settings.Session()
dag_id = request.args.get('dag_id')
blur = conf.getboolean('webserver', 'demo_mode')
arrange = request.args.get('arrange', "LR")
dag = dagbag.get_dag(dag_id)
if dag_id not in dagbag.dags:
flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
return redirect('/admin/')
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
nodes = []
edges = []
for task in dag.tasks:
nodes.append({
'id': task.task_id,
'value': {
'label': task.task_id,
'labelStyle': "fill:{0};".format(task.ui_fgcolor),
'style': "fill:{0};".format(task.ui_color),
}
})
def get_upstream(task):
for t in task.upstream_list:
edge = {
'u': t.task_id,
'v': task.task_id,
}
if edge not in edges:
edges.append(edge)
get_upstream(t)
for t in dag.roots:
get_upstream(t)
dttm = request.args.get('execution_date')
if dttm:
dttm = dateutil.parser.parse(dttm)
else:
dttm = dag.latest_execution_date or datetime.now().date()
form = GraphForm(data={'execution_date': dttm, 'arrange': arrange})
task_instances = {
ti.task_id: utils.alchemy_to_dict(ti)
for ti in dag.get_task_instances(session, dttm, dttm)
}
tasks = {
t.task_id: {
'dag_id': t.dag_id,
'task_type': t.task_type,
}
for t in dag.tasks
}
if not tasks:
flash("No tasks found", "error")
session.commit()
session.close()
doc_md = markdown.markdown(dag.doc_md) if hasattr(dag, 'doc_md') else ''
return self.render(
'airflow/graph.html',
dag=dag,
form=form,
width=request.args.get('width', "100%"),
height=request.args.get('height', "800"),
execution_date=dttm.isoformat(),
doc_md=doc_md,
arrange=arrange,
operators=sorted(
list(set([op.__class__ for op in dag.tasks])),
key=lambda x: x.__name__
),
blur=blur,
root=root or '',
task_instances=json.dumps(task_instances, indent=2),
tasks=json.dumps(tasks, indent=2),
nodes=json.dumps(nodes, indent=2),
edges=json.dumps(edges, indent=2),)
@expose('/duration')
@login_required
def duration(self):
session = settings.Session()
dag_id = request.args.get('dag_id')
days = int(request.args.get('days', 30))
dag = dagbag.get_dag(dag_id)
from_date = (datetime.today()-timedelta(days)).date()
from_date = datetime.combine(from_date, datetime.min.time())
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
all_data = []
for task in dag.tasks:
data = []
for ti in task.get_task_instances(session, from_date):
if ti.duration:
data.append([
ti.execution_date.isoformat(),
float(ti.duration) / (60*60)
])
if data:
all_data.append({'data': data, 'name': task.task_id})
session.commit()
session.close()
return self.render(
'airflow/chart.html',
dag=dag,
data=all_data,
chart_options={'yAxis': {'title': {'text': 'hours'}}},
height="700px",
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
)
@expose('/landing_times')
@login_required
def landing_times(self):
session = settings.Session()
dag_id = request.args.get('dag_id')
days = int(request.args.get('days', 30))
dag = dagbag.get_dag(dag_id)
from_date = (datetime.today()-timedelta(days)).date()
from_date = datetime.combine(from_date, datetime.min.time())
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
all_data = []
for task in dag.tasks:
data = []
for ti in task.get_task_instances(session, from_date):
if ti.end_date:
data.append([
ti.execution_date.isoformat(), old_div((
ti.end_date - (
ti.execution_date + task.schedule_interval)
).total_seconds(),(60*60))
])
all_data.append({'data': data, 'name': task.task_id})
session.commit()
session.close()
return self.render(
'airflow/chart.html',
dag=dag,
data=all_data,
height="700px",
chart_options={'yAxis': {'title': {'text': 'hours after 00:00'}}},
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
)
@expose('/paused')
@login_required
def paused(self):
DagModel = models.DagModel
dag_id = request.args.get('dag_id')
session = settings.Session()
orm_dag = session.query(
DagModel).filter(DagModel.dag_id == dag_id).first()
if request.args.get('is_paused') == 'false':
orm_dag.is_paused = True
else:
orm_dag.is_paused = False
session.merge(orm_dag)
session.commit()
session.close()
dagbag.get_dag(dag_id)
return "OK"
@expose('/refresh')
@login_required
def refresh(self):
DagModel = models.DagModel
dag_id = request.args.get('dag_id')
session = settings.Session()
orm_dag = session.query(
DagModel).filter(DagModel.dag_id == dag_id).first()
if orm_dag:
orm_dag.last_expired = datetime.now()
session.merge(orm_dag)
session.commit()
session.close()
dagbag.get_dag(dag_id)
flash("DAG [{}] is now fresh as a daisy".format(dag_id))
return redirect('/')
@expose('/refresh_all')
@login_required
def refresh_all(self):
dagbag.collect_dags(only_if_updated=False)
flash("All DAGs are now up to date")
return redirect('/')
@expose('/gantt')
@login_required
def gantt(self):
session = settings.Session()
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
demo_mode = conf.getboolean('webserver', 'demo_mode')
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
dttm = request.args.get('execution_date')
if dttm:
dttm = dateutil.parser.parse(dttm)
else:
dttm = dag.latest_execution_date or datetime.now().date()
form = DateTimeForm(data={'execution_date': dttm})
tis = [
ti
for ti in dag.get_task_instances(session, dttm, dttm)
if ti.start_date]
tis = sorted(tis, key=lambda ti: ti.start_date)
tasks = []
data = []
for i, ti in enumerate(tis):
end_date = ti.end_date or datetime.now()
tasks += [ti.task_id]
color = State.color(ti.state)
data.append({
'x': i,
'low': int(ti.start_date.strftime('%s')) * 1000,
'high': int(end_date.strftime('%s')) * 1000,
'color': color,
})
height = (len(tis) * 25) + 50
session.commit()
session.close()
hc = {
'chart': {
'type': 'columnrange',
'inverted': True,
'height': height,
},
'xAxis': {'categories': tasks},
'yAxis': {'type': 'datetime'},
'title': {
'text': None
},
'plotOptions': {
'series': {
'cursor': 'pointer',
'minPointLength': 4,
},
},
'legend': {
'enabled': False
},
'series': [{
'data': data
}]
}
return self.render(
'airflow/gantt.html',
dag=dag,
execution_date=dttm.isoformat(),
form=form,
hc=json.dumps(hc, indent=4),
height=height,
demo_mode=demo_mode,
root=root,
)
@expose('/variables/<form>', methods=["GET", "POST"])
@login_required
def variables(self, form):
try:
if request.method == 'POST':
data = request.json
if data:
session = settings.Session()
var = models.Variable(key=form, val=json.dumps(data))
session.add(var)
session.commit()
return ""
else:
return self.render(
'airflow/variables/{}.html'.format(form)
)
except:
return ("Error: form airflow/variables/{}.html "
"not found.").format(form), 404
admin.add_view(Airflow(name='DAGs'))
class QueryView(wwwutils.DataProfilingMixin, BaseView):
@expose('/')
@wwwutils.gzipped
def query(self):
session = settings.Session()
dbs = session.query(models.Connection).order_by(
models.Connection.conn_id).all()
session.expunge_all()
db_choices = list(
((db.conn_id, db.conn_id) for db in dbs if db.get_hook()))
conn_id_str = request.args.get('conn_id')
csv = request.args.get('csv') == "true"
sql = request.args.get('sql')
class QueryForm(Form):
conn_id = SelectField("Layout", choices=db_choices)
sql = TextAreaField("SQL", widget=wwwutils.AceEditorWidget())
data = {
'conn_id': conn_id_str,
'sql': sql,
}
results = None
has_data = False
error = False
if conn_id_str:
db = [db for db in dbs if db.conn_id == conn_id_str][0]
hook = db.get_hook()
try:
df = hook.get_pandas_df(wwwutils.limit_sql(sql, QUERY_LIMIT, conn_type=db.conn_type))
# df = hook.get_pandas_df(sql)
has_data = len(df) > 0
df = df.fillna('')
results = df.to_html(
classes="table table-bordered table-striped no-wrap",
index=False,
na_rep='',
) if has_data else ''
except Exception as e:
flash(str(e), 'error')
error = True
if has_data and len(df) == QUERY_LIMIT:
flash(
"Query output truncated at " + str(QUERY_LIMIT) +
" rows", 'info')
if not has_data and error:
flash('No data', 'error')
if csv:
return Response(
response=df.to_csv(index=False),
status=200,
mimetype="application/text")
form = QueryForm(request.form, data=data)
session.commit()
session.close()
return self.render(
'airflow/query.html', form=form,
title="Ad Hoc Query",
results=results or '',
has_data=has_data)
admin.add_view(QueryView(name='Ad Hoc Query', category="Data Profiling"))
class AirflowModelView(ModelView):
list_template = 'airflow/model_list.html'
edit_template = 'airflow/model_edit.html'
create_template = 'airflow/model_create.html'
page_size = 500
class ModelViewOnly(wwwutils.LoginMixin, AirflowModelView):
"""
Modifying the base ModelView class for non edit, browse only operations
"""
named_filter_urls = True
can_create = False
can_edit = False
can_delete = False
column_display_pk = True
def log_link(v, c, m, p):
url = url_for(
'airflow.log',
dag_id=m.dag_id,
task_id=m.task_id,
execution_date=m.execution_date.isoformat())
return Markup(
'<a href="{url}">'
' <span class="glyphicon glyphicon-book" aria-hidden="true">'
'</span></a>').format(**locals())
def task_instance_link(v, c, m, p):
url = url_for(
'airflow.task',
dag_id=m.dag_id,
task_id=m.task_id,
execution_date=m.execution_date.isoformat())
url_root = url_for(
'airflow.graph',
dag_id=m.dag_id,
root=m.task_id,
execution_date=m.execution_date.isoformat())
return Markup(
"""
<span style="white-space: nowrap;">
<a href="{url}">{m.task_id}</a>
<a href="{url_root}" title="Filter on this task and upstream">
<span class="glyphicon glyphicon-filter" style="margin-left: 0px;"
aria-hidden="true"></span>
</a>
</span>
""".format(**locals()))
def state_f(v, c, m, p):
color = State.color(m.state)
return Markup(
'<span class="label" style="background-color:{color};">'
'{m.state}</span>'.format(**locals()))
def duration_f(v, c, m, p):
if m.end_date and m.duration:
return timedelta(seconds=m.duration)
def datetime_f(v, c, m, p):
attr = getattr(m, p)
dttm = attr.isoformat() if attr else ''
if datetime.now().isoformat()[:4] == dttm[:4]:
dttm = dttm[5:]
return Markup("<nobr>{}</nobr>".format(dttm))
def nobr_f(v, c, m, p):
return Markup("<nobr>{}</nobr>".format(getattr(m, p)))
class JobModelView(ModelViewOnly):
verbose_name_plural = "jobs"
verbose_name = "job"
column_default_sort = ('start_date', True)
column_filters = (
'job_type', 'dag_id', 'state',
'unixname', 'hostname', 'start_date', 'end_date', 'latest_heartbeat')
column_formatters = dict(
start_date=datetime_f,
end_date=datetime_f,
hostname=nobr_f,
state=state_f,
latest_heartbeat=datetime_f)
mv = JobModelView(jobs.BaseJob, Session, name="Jobs", category="Browse")
admin.add_view(mv)
class LogModelView(ModelViewOnly):
verbose_name_plural = "logs"
verbose_name = "log"
column_default_sort = ('dttm', True)
column_filters = ('dag_id', 'task_id', 'execution_date')
column_formatters = dict(
dttm=datetime_f, execution_date=datetime_f, dag_id=dag_link)
mv = LogModelView(
models.Log, Session, name="Logs", category="Browse")
admin.add_view(mv)
class TaskInstanceModelView(ModelViewOnly):
verbose_name_plural = "task instances"
verbose_name = "task instance"
column_filters = (
'state', 'dag_id', 'task_id', 'execution_date', 'hostname',
'queue', 'pool', 'operator')
named_filter_urls = True
column_formatters = dict(
log=log_link, task_id=task_instance_link,
hostname=nobr_f,
state=state_f,
execution_date=datetime_f,
start_date=datetime_f,
end_date=datetime_f,
dag_id=dag_link, duration=duration_f)
column_searchable_list = ('dag_id', 'task_id', 'state')
column_default_sort = ('start_date', True)
column_list = (
'state', 'dag_id', 'task_id', 'execution_date', 'operator',
'start_date', 'end_date', 'duration', 'job_id', 'hostname',
'unixname', 'priority_weight', 'queued_dttm', 'log')
can_delete = True
page_size = 500
mv = TaskInstanceModelView(
models.TaskInstance, Session, name="Task Instances", category="Browse")
admin.add_view(mv)
mv = DagModelView(
models.DagModel, Session, name=None)
admin.add_view(mv)
# Hack to not add this view to the menu
admin._menu = admin._menu[:-1]
class ConnectionModelView(wwwutils.SuperUserMixin, AirflowModelView):
create_template = 'airflow/conn_create.html'
edit_template = 'airflow/conn_edit.html'
list_template = 'airflow/conn_list.html'
form_columns = (
'conn_id',
'conn_type',
'host',
'schema',
'login',
'password',
'port',
'extra',
)
verbose_name = "Connection"
verbose_name_plural = "Connections"
column_default_sort = ('conn_id', False)
column_list = ('conn_id', 'conn_type', 'host', 'port', 'is_encrypted',)
form_overrides = dict(_password=VisiblePasswordField)
form_widget_args = {
'is_encrypted': {'disabled': True},
}
# Used to customized the form, the forms elements get rendered
# and results are stored in the extra field as json. All of these
# need to be prefixed with extra__ and then the conn_type ___ as in
# extra__{conn_type}__name. You can also hide form elements and rename
# others from the connection_form.js file
form_extra_fields = {
'extra__jdbc__drv_path' : StringField('Driver Path'),
'extra__jdbc__drv_clsname': StringField('Driver Class'),
}
form_choices = {
'conn_type': [
('ftp', 'FTP',),
('hdfs', 'HDFS',),
('http', 'HTTP',),
('hive_cli', 'Hive Client Wrapper',),
('hive_metastore', 'Hive Metastore Thrift',),
('hiveserver2', 'Hive Server 2 Thrift',),
('jdbc', 'Jdbc Connection',),
('mysql', 'MySQL',),
('postgres', 'Postgres',),
('oracle', 'Oracle',),
('presto', 'Presto',),
('s3', 'S3',),
('samba', 'Samba',),
('sqlite', 'Sqlite',),
('mssql', 'Microsoft SQL Server'),
]
}
def on_model_change(self, form, model, is_created):
formdata = form.data
if formdata['conn_type'] in ['jdbc']:
extra = {
key:formdata[key]
for key in self.form_extra_fields.keys() if key in formdata}
model.extra = json.dumps(extra)
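        # e.g. for a JDBC connection this stores something like
        # {"extra__jdbc__drv_path": "/opt/drivers/foo.jar",
        #  "extra__jdbc__drv_clsname": "com.example.Driver"} (values are illustrative).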
@classmethod
def is_secure(self):
"""
        Whether passwords can be encrypted: True only if the cryptography
        package and a fernet key are available. Used by the Connection list
        view to decide whether to warn that passwords are stored unencrypted.
"""
is_secure = False
try:
import cryptography
conf.get('core', 'fernet_key')
is_secure = True
except:
pass
return is_secure
def on_form_prefill(self, form, id):
try:
d = json.loads(form.data.get('extra', '{}'))
except Exception as e:
d = {}
for field in list(self.form_extra_fields.keys()):
value = d.get(field, '')
if value:
field = getattr(form, field)
field.data = value
mv = ConnectionModelView(
models.Connection, Session,
name="Connections", category="Admin")
admin.add_view(mv)
class UserModelView(wwwutils.SuperUserMixin, AirflowModelView):
verbose_name = "User"
verbose_name_plural = "Users"
column_default_sort = 'username'
mv = UserModelView(models.User, Session, name="Users", category="Admin")
admin.add_view(mv)
class ConfigurationView(wwwutils.SuperUserMixin, BaseView):
@expose('/')
def conf(self):
from airflow import configuration
raw = request.args.get('raw') == "true"
title = "Airflow Configuration"
subtitle = configuration.AIRFLOW_CONFIG
if conf.getboolean("webserver", "expose_config"):
with open(configuration.AIRFLOW_CONFIG, 'r') as f:
config = f.read()
else:
config = (
"# You Airflow administrator chose not to expose the "
"configuration, most likely for security reasons.")
if raw:
return Response(
response=config,
status=200,
mimetype="application/text")
else:
code_html = Markup(highlight(
config,
lexers.IniLexer(), # Lexer call
HtmlFormatter(noclasses=True))
)
return self.render(
'airflow/code.html',
pre_subtitle=settings.HEADER + " v" + airflow.__version__,
code_html=code_html, title=title, subtitle=subtitle)
admin.add_view(ConfigurationView(name='Configuration', category="Admin"))
def label_link(v, c, m, p):
try:
default_params = eval(m.default_params)
except:
default_params = {}
url = url_for(
'airflow.chart', chart_id=m.id, iteration_no=m.iteration_no,
**default_params)
return Markup("<a href='{url}'>{m.label}</a>".format(**locals()))
class ChartModelView(wwwutils.DataProfilingMixin, AirflowModelView):
verbose_name = "chart"
verbose_name_plural = "charts"
form_columns = (
'label',
'owner',
'conn_id',
'chart_type',
'show_datatable',
'x_is_date',
'y_log_scale',
'show_sql',
'height',
'sql_layout',
'sql',
'default_params',)
column_list = (
'label', 'conn_id', 'chart_type', 'owner', 'last_modified',)
column_formatters = dict(label=label_link, last_modified=datetime_f)
column_default_sort = ('last_modified', True)
create_template = 'airflow/chart/create.html'
edit_template = 'airflow/chart/edit.html'
column_filters = ('label', 'owner.username', 'conn_id')
column_searchable_list = ('owner.username', 'label', 'sql')
column_descriptions = {
'label': "Can include {{ templated_fields }} and {{ macros }}",
'chart_type': "The type of chart to be displayed",
'sql': "Can include {{ templated_fields }} and {{ macros }}.",
'height': "Height of the chart, in pixels.",
'conn_id': "Source database to run the query against",
'x_is_date': (
"Whether the X axis should be casted as a date field. Expect most "
"intelligible date formats to get casted properly."
),
'owner': (
"The chart's owner, mostly used for reference and filtering in "
"the list view."
),
'show_datatable':
"Whether to display an interactive data table under the chart.",
'default_params': (
'A dictionary of {"key": "values",} that define what the '
'templated fields (parameters) values should be by default. '
'To be valid, it needs to "eval" as a Python dict. '
'The key values will show up in the url\'s querystring '
'and can be altered there.'
),
'show_sql': "Whether to display the SQL statement as a collapsible "
"section in the chart page.",
'y_log_scale': "Whether to use a log scale for the Y axis.",
'sql_layout': (
"Defines the layout of the SQL that the application should "
"expect. Depending on the tables you are sourcing from, it may "
"make more sense to pivot / unpivot the metrics."
),
}
column_labels = {
'sql': "SQL",
'height': "Chart Height",
'sql_layout': "SQL Layout",
'show_sql': "Display the SQL Statement",
'default_params': "Default Parameters",
}
form_choices = {
'chart_type': [
('line', 'Line Chart'),
('spline', 'Spline Chart'),
('bar', 'Bar Chart'),
('para', 'Parallel Coordinates'),
('column', 'Column Chart'),
('area', 'Overlapping Area Chart'),
('stacked_area', 'Stacked Area Chart'),
('percent_area', 'Percent Area Chart'),
('heatmap', 'Heatmap'),
('datatable', 'No chart, data table only'),
],
'sql_layout': [
('series', 'SELECT series, x, y FROM ...'),
('columns', 'SELECT x, y (series 1), y (series 2), ... FROM ...'),
],
'conn_id': [
(c.conn_id, c.conn_id)
for c in (
Session().query(models.Connection.conn_id)
.group_by(models.Connection.conn_id)
)
]
}
def on_model_change(self, form, model, is_created=True):
if model.iteration_no is None:
model.iteration_no = 0
else:
model.iteration_no += 1
if AUTHENTICATE and not model.user_id and current_user:
model.user_id = current_user.id
model.last_modified = datetime.now()
mv = ChartModelView(
models.Chart, Session,
name="Charts", category="Data Profiling")
admin.add_view(mv)
admin.add_link(
base.MenuLink(
category='Docs',
name='Documentation',
url='http://pythonhosted.org/airflow/'))
admin.add_link(
base.MenuLink(
category='Docs',
name='Github',
url='https://github.com/airbnb/airflow'))
class KnowEventView(wwwutils.DataProfilingMixin, AirflowModelView):
verbose_name = "known event"
verbose_name_plural = "known events"
form_columns = (
'label',
'event_type',
'start_date',
'end_date',
'reported_by',
'description')
column_list = (
'label', 'event_type', 'start_date', 'end_date', 'reported_by')
column_default_sort = ("start_date", True)
mv = KnowEventView(
models.KnownEvent, Session, name="Known Events", category="Data Profiling")
admin.add_view(mv)
class KnowEventTypeView(wwwutils.DataProfilingMixin, AirflowModelView):
pass
'''
# For debugging / troubleshooting
mv = KnowEventTypeView(
models.KnownEventType,
Session, name="Known Event Types", category="Manage")
admin.add_view(mv)
class DagPickleView(SuperUserMixin, ModelView):
pass
mv = DagPickleView(
models.DagPickle,
Session, name="Pickles", category="Manage")
admin.add_view(mv)
'''
class VariableView(wwwutils.LoginMixin, AirflowModelView):
verbose_name = "Variable"
verbose_name_plural = "Variables"
column_list = ('key',)
column_filters = ('key', 'val')
column_searchable_list = ('key', 'val')
form_widget_args = {
'val': {
'rows': 20,
}
}
mv = VariableView(
models.Variable, Session, name="Variables", category="Admin")
admin.add_view(mv)
def pool_link(v, c, m, p):
url = '/admin/taskinstance/?flt1_pool_equals=' + m.pool
return Markup("<a href='{url}'>{m.pool}</a>".format(**locals()))
def fused_slots(v, c, m, p):
url = (
'/admin/taskinstance/' +
'?flt1_pool_equals=' + m.pool +
'&flt2_state_equals=running')
return Markup("<a href='{0}'>{1}</a>".format(url, m.used_slots()))
def fqueued_slots(v, c, m, p):
url = (
'/admin/taskinstance/' +
'?flt1_pool_equals=' + m.pool +
'&flt2_state_equals=queued&sort=10&desc=1')
return Markup("<a href='{0}'>{1}</a>".format(url, m.queued_slots()))
class PoolModelView(wwwutils.SuperUserMixin, AirflowModelView):
column_list = ('pool', 'slots', 'used_slots', 'queued_slots')
column_formatters = dict(
pool=pool_link, used_slots=fused_slots, queued_slots=fqueued_slots)
named_filter_urls = True
mv = PoolModelView(models.Pool, Session, name="Pools", category="Admin")
admin.add_view(mv)
class SlaMissModelView(wwwutils.SuperUserMixin, ModelViewOnly):
verbose_name_plural = "SLA misses"
verbose_name = "SLA miss"
column_list = (
'dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp')
column_formatters = dict(
task_id=task_instance_link,
execution_date=datetime_f,
timestamp=datetime_f,
dag_id=dag_link)
named_filter_urls = True
column_searchable_list = ('dag_id', 'task_id',)
column_filters = (
'dag_id', 'task_id', 'email_sent', 'timestamp', 'execution_date')
form_widget_args = {
'email_sent': {'disabled': True},
'timestamp': {'disabled': True},
}
mv = SlaMissModelView(
models.SlaMiss, Session, name="SLA Misses", category="Browse")
admin.add_view(mv)
def integrate_plugins():
"""Integrate plugins to the context"""
from airflow.plugins_manager import (
admin_views, flask_blueprints, menu_links)
for v in admin_views:
admin.add_view(v)
for bp in flask_blueprints:
print(bp)
app.register_blueprint(bp)
for ml in menu_links:
admin.add_link(ml)
integrate_plugins()<|fim▁end|>
|
),
root=root,
|
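# --- Editor's illustrative sketch (not from the original source) ---
# integrate_plugins() above registers the admin_views, flask_blueprints and
# menu_links discovered by airflow.plugins_manager. A minimal plugin module
# that this hook could pick up might look like the sketch below; the imports,
# class names and view body are assumptions for illustration, not a definitive
# API reference for this Airflow version.
from flask import Blueprint
from flask_admin import BaseView, expose  # assumed flask-admin import path
from airflow.plugins_manager import AirflowPlugin

class HelloView(BaseView):
    @expose('/')
    def index(self):
        # any Flask response works; a plain string keeps the sketch minimal
        return "Hello from a plugin view"

class HelloPlugin(AirflowPlugin):
    name = "hello_plugin"
    admin_views = [HelloView(category="Plugins", name="Hello")]
    flask_blueprints = [Blueprint("hello_plugin", __name__)]
    menu_links = []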
<|file_name|>brightcovePlayer.py<|end_file_name|><|fim▁begin|>import httplib
from pyamf import AMF0, AMF3
from pyamf import remoting
from pyamf.remoting.client import RemotingService
height = 1080
def build_amf_request(const, playerID, videoPlayer, publisherID):
env = remoting.Envelope(amfVersion=3)
env.bodies.append(
(
"/1",
remoting.Request(
target="com.brightcove.player.runtime.PlayerMediaFacade.findMediaById",
body=[const, playerID, videoPlayer, publisherID],
envelope=env
)
)
)
return env
def get_clip_info(const, playerID, videoPlayer, publisherID, playerKey):
conn = httplib.HTTPConnection("c.brightcove.com")
envelope = build_amf_request(const, playerID, videoPlayer, publisherID)
conn.request("POST", "/services/messagebroker/amf?playerKey=" + playerKey, str(remoting.encode(envelope).read()), {'content-type': 'application/x-amf'})
response = conn.getresponse().read()
response = remoting.decode(response).bodies[0][1].body
return response
def play(const, playerID, videoPlayer, publisherID, playerKey):
rtmpdata = get_clip_info(const, playerID, videoPlayer, publisherID, playerKey)
streamName = ""
streamUrl = rtmpdata['FLVFullLengthURL'];
for item in sorted(rtmpdata['renditions'], key=lambda item:item['frameHeight'], reverse=False):
streamHeight = item['frameHeight']<|fim▁hole|> if streamHeight <= height:
streamUrl = item['defaultURL']
streamName = streamName + rtmpdata['displayName']
return [streamName, streamUrl];<|fim▁end|>
| |
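# --- Editor's illustrative usage sketch (not from the original source) ---
# The module above POSTs an AMF-encoded findMediaById call and then keeps the
# largest rendition whose frameHeight does not exceed the module-level
# `height` cap. A hypothetical call looks like this; every value below is a
# placeholder, not a real Brightcove token, player or publisher id.
import brightcovePlayer  # assumes the module above is importable by this name

const_token = "9f79c50d-placeholder"   # assumed media-request token (`const`)
player_id = 1234567890                 # assumed playerID
video_player = 9876543210              # assumed video id passed as videoPlayer
publisher_id = 1122334455              # assumed publisherID
player_key = "AQ~~,AAAA-placeholder"   # assumed playerKey
stream_name, stream_url = brightcovePlayer.play(
    const_token, player_id, video_player, publisher_id, player_key)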
<|file_name|>plot_dop_A.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
================================================
Following the Metal to Mott insulator Transition
================================================
Sequence of plots showing the transfer of spectral weight for a Hubbard
Model in the Bethe Lattice as the local doping is increased.
"""
# Code source: Óscar Nájera
# License: BSD 3 clause
from __future__ import division, absolute_import, print_function
import matplotlib.pyplot as plt
import numpy as np
from slaveparticles.quantum import dos
axis = 'real'
u = 8.0
beta = 1e3
dop = [0.25, 0.5, 0.75, 0.9, 0.99]
out_file = axis+'_dop_b{}_U{}'.format(beta, u)
res = np.load(out_file+'.npy')
f, axes = plt.subplots(len(dop), sharex=True)
axes[0].set_title(r'$A(\omega)$ under doping U={} at '
'$\\beta=${}'.format(u, beta))
axes[-1].set_xlabel('$\\omega / t$')
f.subplots_adjust(hspace=0)
for ax, n in zip(axes, dop):
ind = np.abs(res[:, 0] - n).argmin()<|fim▁hole|> s = sim.GF[r'$\Sigma$']
ra = w + sim.mu - s
rho = dos.bethe_lattice(ra, sim.t)
ax.plot(w, rho,
label='n={:.2f}'.format(sim.ocupations().sum()))
ax.set_xlim([-6, 6])
ax.set_ylim([0, 0.36])
ax.set_yticks([])
ax.set_ylabel('n={:.2f}'.format(sim.ocupations().sum()))
ax.legend(loc=0, handlelength=0)<|fim▁end|>
|
sim = res[ind, 1]
w = sim.omega
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from timeline import views
urlpatterns = [<|fim▁hole|><|fim▁end|>
|
url(r'^$', views.timelines, name='timelines'),
]
|
<|file_name|>reset_database_parameters_request_response.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016, 2018, 2021, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
package databasemanagement
import (
"github.com/oracle/oci-go-sdk/v46/common"
"net/http"
)
// ResetDatabaseParametersRequest wrapper for the ResetDatabaseParameters operation
//
// See also
//
// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/databasemanagement/ResetDatabaseParameters.go.html to see an example of how to use ResetDatabaseParametersRequest.
type ResetDatabaseParametersRequest struct {
// The OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the Managed Database.
ManagedDatabaseId *string `mandatory:"true" contributesTo:"path" name:"managedDatabaseId"`
// The details required to reset database parameters.
ResetDatabaseParametersDetails `contributesTo:"body"`
// The client request ID for tracing.
OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"`
// A token that uniquely identifies a request so it can be retried in case of a timeout or
// server error without risk of executing that same action again. Retry tokens expire after 24
// hours, but can be invalidated before then due to conflicting operations. For example, if a resource
// has been deleted and purged from the system, then a retry of the original creation request
// might be rejected.
OpcRetryToken *string `mandatory:"false" contributesTo:"header" name:"opc-retry-token"`
// Metadata about the request. This information will not be transmitted to the service, but
// represents information that the SDK will consume to drive retry behavior.<|fim▁hole|> return common.PointerString(request)
}
// HTTPRequest implements the OCIRequest interface
func (request ResetDatabaseParametersRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) {
return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders)
}
// BinaryRequestBody implements the OCIRequest interface
func (request ResetDatabaseParametersRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) {
return nil, false
}
// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.
func (request ResetDatabaseParametersRequest) RetryPolicy() *common.RetryPolicy {
return request.RequestMetadata.RetryPolicy
}
// ResetDatabaseParametersResponse wrapper for the ResetDatabaseParameters operation
type ResetDatabaseParametersResponse struct {
// The underlying http response
RawResponse *http.Response
// The UpdateDatabaseParametersResult instance
UpdateDatabaseParametersResult `presentIn:"body"`
// Unique Oracle-assigned identifier for the request. If you need to contact
// Oracle about a particular request, please provide the request ID.
OpcRequestId *string `presentIn:"header" name:"opc-request-id"`
}
func (response ResetDatabaseParametersResponse) String() string {
return common.PointerString(response)
}
// HTTPResponse implements the OCIResponse interface
func (response ResetDatabaseParametersResponse) HTTPResponse() *http.Response {
return response.RawResponse
}<|fim▁end|>
|
RequestMetadata common.RequestMetadata
}
func (request ResetDatabaseParametersRequest) String() string {
|
<|file_name|>watch.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp'),
config = require('../config.js');
require('./browserify.js');<|fim▁hole|>
gulp.task('watch', ['default'], function() {
gulp.watch(config.paths.sass.src, ['sass']);
gulp.start('watch:js');
});<|fim▁end|>
|
require('./default.js');
require('./sass.js');
|
<|file_name|>fold.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast::*;
use ast;
use codemap::{Span, Spanned};
use parse::token;
use opt_vec::OptVec;
// We may eventually want to be able to fold over type parameters, too.
pub trait ast_fold {
fn fold_crate(&self, c: &Crate) -> Crate {
noop_fold_crate(c, self)
}
fn fold_meta_items(&self, meta_items: &[@MetaItem]) -> ~[@MetaItem] {
meta_items.map(|x| fold_meta_item_(*x, self))
}
fn fold_view_paths(&self, view_paths: &[@view_path]) -> ~[@view_path] {
view_paths.map(|view_path| {
let inner_view_path = match view_path.node {
view_path_simple(ref ident, ref path, node_id) => {
view_path_simple(ident.clone(),
self.fold_path(path),
self.new_id(node_id))
}
view_path_glob(ref path, node_id) => {
view_path_glob(self.fold_path(path), self.new_id(node_id))
}
view_path_list(ref path, ref path_list_idents, node_id) => {
view_path_list(self.fold_path(path),
path_list_idents.map(|path_list_ident| {
let id = self.new_id(path_list_ident.node
.id);
Spanned {
node: path_list_ident_ {
name: path_list_ident.node
.name
.clone(),
id: id,
},
span: self.new_span(
path_list_ident.span)
}
}),
self.new_id(node_id))
}
};
@Spanned {
node: inner_view_path,
span: self.new_span(view_path.span),
}
})
}
fn fold_view_item(&self, vi: &view_item) -> view_item {
let inner_view_item = match vi.node {
view_item_extern_mod(ref ident,
string,
ref meta_items,
node_id) => {
view_item_extern_mod(ident.clone(),
string,
self.fold_meta_items(*meta_items),
self.new_id(node_id))
}
view_item_use(ref view_paths) => {
view_item_use(self.fold_view_paths(*view_paths))
}
};
view_item {
node: inner_view_item,
attrs: vi.attrs.map(|a| fold_attribute_(*a, self)),
vis: vi.vis,
span: self.new_span(vi.span),
}
}
fn fold_foreign_item(&self, ni: @foreign_item) -> @foreign_item {
let fold_attribute = |x| fold_attribute_(x, self);
@ast::foreign_item {
ident: self.fold_ident(ni.ident),
attrs: ni.attrs.map(|x| fold_attribute(*x)),
node:
match ni.node {
foreign_item_fn(ref fdec, ref generics) => {
foreign_item_fn(
ast::fn_decl {
inputs: fdec.inputs.map(|a| fold_arg_(a,
self)),
output: self.fold_ty(&fdec.output),
cf: fdec.cf,
},
fold_generics(generics, self))
}
foreign_item_static(ref t, m) => {
foreign_item_static(self.fold_ty(t), m)
}
},
id: self.new_id(ni.id),
span: self.new_span(ni.span),
vis: ni.vis,
}
}
fn fold_item(&self, i: @item) -> Option<@item> {
noop_fold_item(i, self)
}
fn fold_struct_field(&self, sf: @struct_field) -> @struct_field {
let fold_attribute = |x| fold_attribute_(x, self);
@Spanned {
node: ast::struct_field_ {
kind: sf.node.kind,
id: self.new_id(sf.node.id),
ty: self.fold_ty(&sf.node.ty),
attrs: sf.node.attrs.map(|e| fold_attribute(*e))
},
span: self.new_span(sf.span)
}
}
fn fold_item_underscore(&self, i: &item_) -> item_ {
noop_fold_item_underscore(i, self)
}
fn fold_type_method(&self, m: &TypeMethod) -> TypeMethod {
noop_fold_type_method(m, self)
}
fn fold_method(&self, m: @method) -> @method {
@ast::method {
ident: self.fold_ident(m.ident),
attrs: m.attrs.map(|a| fold_attribute_(*a, self)),
generics: fold_generics(&m.generics, self),
explicit_self: m.explicit_self,
purity: m.purity,
decl: fold_fn_decl(&m.decl, self),
body: self.fold_block(&m.body),
id: self.new_id(m.id),
span: self.new_span(m.span),
self_id: self.new_id(m.self_id),
vis: m.vis,
}
}
fn fold_block(&self, b: &Block) -> Block {
noop_fold_block(b, self)
}
fn fold_stmt(&self, s: &Stmt) -> Option<@Stmt> {
noop_fold_stmt(s, self)
}<|fim▁hole|> pats: a.pats.map(|x| self.fold_pat(*x)),
guard: a.guard.map_move(|x| self.fold_expr(x)),
body: self.fold_block(&a.body),
}
}
fn fold_pat(&self, p: @Pat) -> @Pat {
let node = match p.node {
PatWild => PatWild,
PatIdent(binding_mode, ref pth, ref sub) => {
PatIdent(binding_mode,
self.fold_path(pth),
sub.map_move(|x| self.fold_pat(x)))
}
PatLit(e) => PatLit(self.fold_expr(e)),
PatEnum(ref pth, ref pats) => {
PatEnum(self.fold_path(pth),
pats.map(|pats| pats.map(|x| self.fold_pat(*x))))
}
PatStruct(ref pth, ref fields, etc) => {
let pth_ = self.fold_path(pth);
let fs = do fields.map |f| {
ast::FieldPat {
ident: f.ident,
pat: self.fold_pat(f.pat)
}
};
PatStruct(pth_, fs, etc)
}
PatTup(ref elts) => PatTup(elts.map(|x| self.fold_pat(*x))),
PatBox(inner) => PatBox(self.fold_pat(inner)),
PatUniq(inner) => PatUniq(self.fold_pat(inner)),
PatRegion(inner) => PatRegion(self.fold_pat(inner)),
PatRange(e1, e2) => {
PatRange(self.fold_expr(e1), self.fold_expr(e2))
},
PatVec(ref before, ref slice, ref after) => {
PatVec(before.map(|x| self.fold_pat(*x)),
slice.map_move(|x| self.fold_pat(x)),
after.map(|x| self.fold_pat(*x)))
}
};
@Pat {
id: self.new_id(p.id),
span: self.new_span(p.span),
node: node,
}
}
fn fold_decl(&self, d: @Decl) -> Option<@Decl> {
let node = match d.node {
DeclLocal(ref l) => Some(DeclLocal(self.fold_local(*l))),
DeclItem(it) => {
match self.fold_item(it) {
Some(it_folded) => Some(DeclItem(it_folded)),
None => None,
}
}
};
node.map_move(|node| {
@Spanned {
node: node,
span: d.span,
}
})
}
fn fold_expr(&self, e: @Expr) -> @Expr {
noop_fold_expr(e, self)
}
fn fold_ty(&self, t: &Ty) -> Ty {
let node = match t.node {
ty_nil | ty_bot | ty_infer => t.node.clone(),
ty_box(ref mt) => ty_box(fold_mt(mt, self)),
ty_uniq(ref mt) => ty_uniq(fold_mt(mt, self)),
ty_vec(ref mt) => ty_vec(fold_mt(mt, self)),
ty_ptr(ref mt) => ty_ptr(fold_mt(mt, self)),
ty_rptr(region, ref mt) => ty_rptr(region, fold_mt(mt, self)),
ty_closure(ref f) => {
ty_closure(@TyClosure {
sigil: f.sigil,
purity: f.purity,
region: f.region,
onceness: f.onceness,
bounds: fold_opt_bounds(&f.bounds, self),
decl: fold_fn_decl(&f.decl, self),
lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
})
}
ty_bare_fn(ref f) => {
ty_bare_fn(@TyBareFn {
lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
purity: f.purity,
abis: f.abis,
decl: fold_fn_decl(&f.decl, self)
})
}
ty_tup(ref tys) => ty_tup(tys.map(|ty| self.fold_ty(ty))),
ty_path(ref path, ref bounds, id) => {
ty_path(self.fold_path(path),
fold_opt_bounds(bounds, self),
self.new_id(id))
}
ty_fixed_length_vec(ref mt, e) => {
ty_fixed_length_vec(fold_mt(mt, self), self.fold_expr(e))
}
ty_mac(ref mac) => ty_mac(self.fold_mac(mac)),
ty_typeof(expr) => ty_typeof(self.fold_expr(expr)),
};
Ty {
id: self.new_id(t.id),
span: self.new_span(t.span),
node: node,
}
}
fn fold_mod(&self, m: &_mod) -> _mod {
noop_fold_mod(m, self)
}
fn fold_foreign_mod(&self, nm: &foreign_mod) -> foreign_mod {
ast::foreign_mod {
sort: nm.sort,
abis: nm.abis,
view_items: nm.view_items
.iter()
.map(|x| self.fold_view_item(x))
.collect(),
items: nm.items
.iter()
.map(|x| self.fold_foreign_item(*x))
.collect(),
}
}
fn fold_variant(&self, v: &variant) -> variant {
let kind;
match v.node.kind {
tuple_variant_kind(ref variant_args) => {
kind = tuple_variant_kind(variant_args.map(|x|
fold_variant_arg_(x, self)))
}
struct_variant_kind(ref struct_def) => {
kind = struct_variant_kind(@ast::struct_def {
fields: struct_def.fields.iter()
.map(|f| self.fold_struct_field(*f)).collect(),
ctor_id: struct_def.ctor_id.map(|c| self.new_id(*c))
})
}
}
let fold_attribute = |x| fold_attribute_(x, self);
let attrs = v.node.attrs.map(|x| fold_attribute(*x));
let de = match v.node.disr_expr {
Some(e) => Some(self.fold_expr(e)),
None => None
};
let node = ast::variant_ {
name: v.node.name,
attrs: attrs,
kind: kind,
id: self.new_id(v.node.id),
disr_expr: de,
vis: v.node.vis,
};
Spanned {
node: node,
span: self.new_span(v.span),
}
}
fn fold_ident(&self, i: Ident) -> Ident {
i
}
fn fold_path(&self, p: &Path) -> Path {
ast::Path {
span: self.new_span(p.span),
global: p.global,
segments: p.segments.map(|segment| ast::PathSegment {
identifier: self.fold_ident(segment.identifier),
lifetime: segment.lifetime,
types: segment.types.map(|typ| self.fold_ty(typ)),
})
}
}
fn fold_local(&self, l: @Local) -> @Local {
@Local {
is_mutbl: l.is_mutbl,
ty: self.fold_ty(&l.ty),
pat: self.fold_pat(l.pat),
init: l.init.map_move(|e| self.fold_expr(e)),
id: self.new_id(l.id),
span: self.new_span(l.span),
}
}
fn fold_mac(&self, macro: &mac) -> mac {
Spanned {
node: match macro.node {
mac_invoc_tt(ref p, ref tts, ctxt) => {
mac_invoc_tt(self.fold_path(p),
fold_tts(*tts, self),
ctxt)
}
},
span: self.new_span(macro.span)
}
}
fn map_exprs(&self, f: &fn(@Expr) -> @Expr, es: &[@Expr]) -> ~[@Expr] {
es.map(|x| f(*x))
}
fn new_id(&self, i: NodeId) -> NodeId {
i
}
fn new_span(&self, sp: Span) -> Span {
sp
}
}
/* some little folds that probably aren't useful to have in ast_fold itself*/
//used in noop_fold_item and noop_fold_crate and noop_fold_crate_directive
fn fold_meta_item_<T:ast_fold>(mi: @MetaItem, fld: &T) -> @MetaItem {
@Spanned {
node:
match mi.node {
MetaWord(id) => MetaWord(id),
MetaList(id, ref mis) => {
let fold_meta_item = |x| fold_meta_item_(x, fld);
MetaList(
id,
mis.map(|e| fold_meta_item(*e))
)
}
MetaNameValue(id, s) => MetaNameValue(id, s)
},
span: fld.new_span(mi.span) }
}
//used in noop_fold_item and noop_fold_crate
fn fold_attribute_<T:ast_fold>(at: Attribute, fld: &T) -> Attribute {
Spanned {
span: fld.new_span(at.span),
node: ast::Attribute_ {
style: at.node.style,
value: fold_meta_item_(at.node.value, fld),
is_sugared_doc: at.node.is_sugared_doc
}
}
}
//used in noop_fold_foreign_item and noop_fold_fn_decl
fn fold_arg_<T:ast_fold>(a: &arg, fld: &T) -> arg {
ast::arg {
is_mutbl: a.is_mutbl,
ty: fld.fold_ty(&a.ty),
pat: fld.fold_pat(a.pat),
id: fld.new_id(a.id),
}
}
// build a new vector of tts by applying the ast_fold's fold_ident to
// all of the identifiers in the token trees.
pub fn fold_tts<T:ast_fold>(tts: &[token_tree], fld: &T) -> ~[token_tree] {
do tts.map |tt| {
match *tt {
tt_tok(span, ref tok) =>
tt_tok(span,maybe_fold_ident(tok,fld)),
tt_delim(ref tts) => tt_delim(@mut fold_tts(**tts, fld)),
tt_seq(span, ref pattern, ref sep, is_optional) =>
tt_seq(span,
@mut fold_tts(**pattern, fld),
sep.map(|tok|maybe_fold_ident(tok,fld)),
is_optional),
tt_nonterminal(sp,ref ident) =>
tt_nonterminal(sp,fld.fold_ident(*ident))
}
}
}
// apply ident folder if it's an ident, otherwise leave it alone
fn maybe_fold_ident<T:ast_fold>(t: &token::Token, fld: &T) -> token::Token {
match *t {
token::IDENT(id, followed_by_colons) => {
token::IDENT(fld.fold_ident(id), followed_by_colons)
}
_ => (*t).clone()
}
}
pub fn fold_fn_decl<T:ast_fold>(decl: &ast::fn_decl, fld: &T)
-> ast::fn_decl {
ast::fn_decl {
inputs: decl.inputs.map(|x| fold_arg_(x, fld)), // bad copy
output: fld.fold_ty(&decl.output),
cf: decl.cf,
}
}
fn fold_ty_param_bound<T:ast_fold>(tpb: &TyParamBound, fld: &T)
-> TyParamBound {
match *tpb {
TraitTyParamBound(ref ty) => TraitTyParamBound(fold_trait_ref(ty, fld)),
RegionTyParamBound => RegionTyParamBound
}
}
pub fn fold_ty_param<T:ast_fold>(tp: &TyParam, fld: &T) -> TyParam {
TyParam {
ident: tp.ident,
id: fld.new_id(tp.id),
bounds: tp.bounds.map(|x| fold_ty_param_bound(x, fld)),
}
}
pub fn fold_ty_params<T:ast_fold>(tps: &OptVec<TyParam>, fld: &T)
-> OptVec<TyParam> {
tps.map(|tp| fold_ty_param(tp, fld))
}
pub fn fold_lifetime<T:ast_fold>(l: &Lifetime, fld: &T) -> Lifetime {
Lifetime {
id: fld.new_id(l.id),
span: fld.new_span(l.span),
ident: l.ident
}
}
pub fn fold_lifetimes<T:ast_fold>(lts: &OptVec<Lifetime>, fld: &T)
-> OptVec<Lifetime> {
lts.map(|l| fold_lifetime(l, fld))
}
pub fn fold_generics<T:ast_fold>(generics: &Generics, fld: &T) -> Generics {
Generics {ty_params: fold_ty_params(&generics.ty_params, fld),
lifetimes: fold_lifetimes(&generics.lifetimes, fld)}
}
fn fold_struct_def<T:ast_fold>(struct_def: @ast::struct_def, fld: &T)
-> @ast::struct_def {
@ast::struct_def {
fields: struct_def.fields.map(|f| fold_struct_field(*f, fld)),
ctor_id: struct_def.ctor_id.map(|cid| fld.new_id(*cid)),
}
}
fn noop_fold_view_item(vi: &view_item_, fld: @ast_fold) -> view_item_ {
match *vi {
view_item_extern_mod(ident, name, ref meta_items, node_id) => {
view_item_extern_mod(ident,
name,
fld.fold_meta_items(*meta_items),
fld.new_id(node_id))
}
view_item_use(ref view_paths) => {
view_item_use(fld.fold_view_paths(*view_paths))
}
}
}
fn fold_trait_ref<T:ast_fold>(p: &trait_ref, fld: &T) -> trait_ref {
ast::trait_ref {
path: fld.fold_path(&p.path),
ref_id: fld.new_id(p.ref_id),
}
}
fn fold_struct_field<T:ast_fold>(f: @struct_field, fld: &T) -> @struct_field {
@Spanned {
node: ast::struct_field_ {
kind: f.node.kind,
id: fld.new_id(f.node.id),
ty: fld.fold_ty(&f.node.ty),
attrs: f.node.attrs.map(|a| fold_attribute_(*a, fld)),
},
span: fld.new_span(f.span),
}
}
fn fold_field_<T:ast_fold>(field: Field, folder: &T) -> Field {
ast::Field {
ident: folder.fold_ident(field.ident),
expr: folder.fold_expr(field.expr),
span: folder.new_span(field.span),
}
}
fn fold_mt<T:ast_fold>(mt: &mt, folder: &T) -> mt {
mt {
ty: ~folder.fold_ty(mt.ty),
mutbl: mt.mutbl,
}
}
fn fold_field<T:ast_fold>(f: TypeField, folder: &T) -> TypeField {
ast::TypeField {
ident: folder.fold_ident(f.ident),
mt: fold_mt(&f.mt, folder),
span: folder.new_span(f.span),
}
}
fn fold_opt_bounds<T:ast_fold>(b: &Option<OptVec<TyParamBound>>, folder: &T)
-> Option<OptVec<TyParamBound>> {
do b.map |bounds| {
do bounds.map |bound| {
fold_ty_param_bound(bound, folder)
}
}
}
fn fold_variant_arg_<T:ast_fold>(va: &variant_arg, folder: &T)
-> variant_arg {
ast::variant_arg {
ty: folder.fold_ty(&va.ty),
id: folder.new_id(va.id)
}
}
pub fn noop_fold_block<T:ast_fold>(b: &Block, folder: &T) -> Block {
let view_items = b.view_items.map(|x| folder.fold_view_item(x));
let mut stmts = ~[];
for stmt in b.stmts.iter() {
match folder.fold_stmt(*stmt) {
None => {}
Some(stmt) => stmts.push(stmt)
}
}
ast::Block {
view_items: view_items,
stmts: stmts,
expr: b.expr.map(|x| folder.fold_expr(*x)),
id: folder.new_id(b.id),
rules: b.rules,
span: folder.new_span(b.span),
}
}
pub fn noop_fold_item_underscore<T:ast_fold>(i: &item_, folder: &T) -> item_ {
match *i {
item_static(ref t, m, e) => {
item_static(folder.fold_ty(t), m, folder.fold_expr(e))
}
item_fn(ref decl, purity, abi, ref generics, ref body) => {
item_fn(
fold_fn_decl(decl, folder),
purity,
abi,
fold_generics(generics, folder),
folder.fold_block(body)
)
}
item_mod(ref m) => item_mod(folder.fold_mod(m)),
item_foreign_mod(ref nm) => {
item_foreign_mod(folder.fold_foreign_mod(nm))
}
item_ty(ref t, ref generics) => {
item_ty(folder.fold_ty(t),
fold_generics(generics, folder))
}
item_enum(ref enum_definition, ref generics) => {
item_enum(
ast::enum_def {
variants: do enum_definition.variants.map |x| {
folder.fold_variant(x)
},
},
fold_generics(generics, folder))
}
item_struct(ref struct_def, ref generics) => {
let struct_def = fold_struct_def(*struct_def, folder);
item_struct(struct_def, fold_generics(generics, folder))
}
item_impl(ref generics, ref ifce, ref ty, ref methods) => {
item_impl(fold_generics(generics, folder),
ifce.map(|p| fold_trait_ref(p, folder)),
folder.fold_ty(ty),
methods.map(|x| folder.fold_method(*x))
)
}
item_trait(ref generics, ref traits, ref methods) => {
let methods = do methods.map |method| {
match *method {
required(ref m) => required(folder.fold_type_method(m)),
provided(method) => provided(folder.fold_method(method))
}
};
item_trait(fold_generics(generics, folder),
traits.map(|p| fold_trait_ref(p, folder)),
methods)
}
item_mac(ref m) => item_mac(folder.fold_mac(m)),
}
}
pub fn noop_fold_type_method<T:ast_fold>(m: &TypeMethod, fld: &T)
-> TypeMethod {
TypeMethod {
ident: fld.fold_ident(m.ident),
attrs: m.attrs.map(|a| fold_attribute_(*a, fld)),
purity: m.purity,
decl: fold_fn_decl(&m.decl, fld),
generics: fold_generics(&m.generics, fld),
explicit_self: m.explicit_self,
id: fld.new_id(m.id),
span: fld.new_span(m.span),
}
}
pub fn noop_fold_mod<T:ast_fold>(m: &_mod, folder: &T) -> _mod {
ast::_mod {
view_items: m.view_items
.iter()
.map(|x| folder.fold_view_item(x)).collect(),
items: m.items.iter().filter_map(|x| folder.fold_item(*x)).collect(),
}
}
pub fn noop_fold_crate<T:ast_fold>(c: &Crate, folder: &T) -> Crate {
let fold_meta_item = |x| fold_meta_item_(x, folder);
let fold_attribute = |x| fold_attribute_(x, folder);
Crate {
module: folder.fold_mod(&c.module),
attrs: c.attrs.map(|x| fold_attribute(*x)),
config: c.config.map(|x| fold_meta_item(*x)),
span: folder.new_span(c.span),
}
}
pub fn noop_fold_item<T:ast_fold>(i: @ast::item, folder: &T)
-> Option<@ast::item> {
let fold_attribute = |x| fold_attribute_(x, folder);
Some(@ast::item {
ident: folder.fold_ident(i.ident),
attrs: i.attrs.map(|e| fold_attribute(*e)),
id: folder.new_id(i.id),
node: folder.fold_item_underscore(&i.node),
vis: i.vis,
span: folder.new_span(i.span)
})
}
pub fn noop_fold_expr<T:ast_fold>(e: @ast::Expr, folder: &T) -> @ast::Expr {
let fold_field = |x| fold_field_(x, folder);
let node = match e.node {
ExprVstore(e, v) => {
ExprVstore(folder.fold_expr(e), v)
}
ExprVec(ref exprs, mutt) => {
ExprVec(folder.map_exprs(|x| folder.fold_expr(x), *exprs), mutt)
}
ExprRepeat(expr, count, mutt) => {
ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count), mutt)
}
ExprTup(ref elts) => ExprTup(elts.map(|x| folder.fold_expr(*x))),
ExprCall(f, ref args, blk) => {
ExprCall(folder.fold_expr(f),
folder.map_exprs(|x| folder.fold_expr(x), *args),
blk)
}
ExprMethodCall(callee_id, f, i, ref tps, ref args, blk) => {
ExprMethodCall(
folder.new_id(callee_id),
folder.fold_expr(f),
folder.fold_ident(i),
tps.map(|x| folder.fold_ty(x)),
folder.map_exprs(|x| folder.fold_expr(x), *args),
blk
)
}
ExprBinary(callee_id, binop, lhs, rhs) => {
ExprBinary(folder.new_id(callee_id),
binop,
folder.fold_expr(lhs),
folder.fold_expr(rhs))
}
ExprUnary(callee_id, binop, ohs) => {
ExprUnary(folder.new_id(callee_id), binop, folder.fold_expr(ohs))
}
ExprDoBody(f) => ExprDoBody(folder.fold_expr(f)),
ExprLit(_) => e.node.clone(),
ExprCast(expr, ref ty) => {
ExprCast(folder.fold_expr(expr), folder.fold_ty(ty))
}
ExprAddrOf(m, ohs) => ExprAddrOf(m, folder.fold_expr(ohs)),
ExprIf(cond, ref tr, fl) => {
ExprIf(folder.fold_expr(cond),
folder.fold_block(tr),
fl.map_move(|x| folder.fold_expr(x)))
}
ExprWhile(cond, ref body) => {
ExprWhile(folder.fold_expr(cond), folder.fold_block(body))
}
ExprForLoop(pat, iter, ref body, ref maybe_ident) => {
ExprForLoop(folder.fold_pat(pat),
folder.fold_expr(iter),
folder.fold_block(body),
maybe_ident.map_move(|i| folder.fold_ident(i)))
}
ExprLoop(ref body, opt_ident) => {
ExprLoop(folder.fold_block(body),
opt_ident.map_move(|x| folder.fold_ident(x)))
}
ExprMatch(expr, ref arms) => {
ExprMatch(folder.fold_expr(expr),
arms.map(|x| folder.fold_arm(x)))
}
ExprFnBlock(ref decl, ref body) => {
ExprFnBlock(
fold_fn_decl(decl, folder),
folder.fold_block(body)
)
}
ExprBlock(ref blk) => ExprBlock(folder.fold_block(blk)),
ExprAssign(el, er) => {
ExprAssign(folder.fold_expr(el), folder.fold_expr(er))
}
ExprAssignOp(callee_id, op, el, er) => {
ExprAssignOp(folder.new_id(callee_id),
op,
folder.fold_expr(el),
folder.fold_expr(er))
}
ExprField(el, id, ref tys) => {
ExprField(folder.fold_expr(el), folder.fold_ident(id),
tys.map(|x| folder.fold_ty(x)))
}
ExprIndex(callee_id, el, er) => {
ExprIndex(folder.new_id(callee_id),
folder.fold_expr(el),
folder.fold_expr(er))
}
ExprPath(ref pth) => ExprPath(folder.fold_path(pth)),
ExprSelf => ExprSelf,
ExprLogLevel => ExprLogLevel,
ExprBreak(opt_ident) => ExprBreak(opt_ident),
ExprAgain(opt_ident) => ExprAgain(opt_ident),
ExprRet(ref e) => {
ExprRet(e.map_move(|x| folder.fold_expr(x)))
}
ExprInlineAsm(ref a) => {
ExprInlineAsm(inline_asm {
inputs: a.inputs.map(|&(c, input)| (c, folder.fold_expr(input))),
outputs: a.outputs.map(|&(c, out)| (c, folder.fold_expr(out))),
.. (*a).clone()
})
}
ExprMac(ref mac) => ExprMac(folder.fold_mac(mac)),
ExprStruct(ref path, ref fields, maybe_expr) => {
ExprStruct(folder.fold_path(path),
fields.map(|x| fold_field(*x)),
maybe_expr.map_move(|x| folder.fold_expr(x)))
},
ExprParen(ex) => ExprParen(folder.fold_expr(ex))
};
@Expr {
id: folder.new_id(e.id),
node: node,
span: folder.new_span(e.span),
}
}
pub fn noop_fold_stmt<T:ast_fold>(s: &Stmt, folder: &T) -> Option<@Stmt> {
let node = match s.node {
StmtDecl(d, nid) => {
match folder.fold_decl(d) {
Some(d) => Some(StmtDecl(d, folder.new_id(nid))),
None => None,
}
}
StmtExpr(e, nid) => {
Some(StmtExpr(folder.fold_expr(e), folder.new_id(nid)))
}
StmtSemi(e, nid) => {
Some(StmtSemi(folder.fold_expr(e), folder.new_id(nid)))
}
StmtMac(ref mac, semi) => Some(StmtMac(folder.fold_mac(mac), semi))
};
node.map_move(|node| @Spanned {
node: node,
span: folder.new_span(s.span),
})
}
#[cfg(test)]
mod test {
use ast;
use util::parser_testing::{string_to_crate, matches_codepattern};
use parse::token;
use print::pprust;
use super::*;
// this version doesn't care about getting comments or docstrings in.
fn fake_print_crate(s: @pprust::ps, crate: &ast::Crate) {
pprust::print_mod(s, &crate.module, crate.attrs);
}
// change every identifier to "zz"
struct ToZzIdentFolder;
impl ast_fold for ToZzIdentFolder {
fn fold_ident(&self, _: ast::Ident) -> ast::Ident {
token::str_to_ident("zz")
}
}
// maybe add to expand.rs...
macro_rules! assert_pred (
($pred:expr, $predname:expr, $a:expr , $b:expr) => (
{
let pred_val = $pred;
let a_val = $a;
let b_val = $b;
if !(pred_val(a_val,b_val)) {
fail!("expected args satisfying %s, got %? and %?",
$predname, a_val, b_val);
}
}
)
)
// make sure idents get transformed everywhere
#[test] fn ident_transformation () {
let zz_fold = ToZzIdentFolder;
let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
assert_pred!(matches_codepattern,
"matches_codepattern",
pprust::to_str(&zz_fold.fold_crate(ast),fake_print_crate,
token::get_ident_interner()),
~"#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}");
}
// even inside macro defs....
#[test] fn ident_transformation_in_defs () {
let zz_fold = ToZzIdentFolder;
let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+
=> (g $(d $d $e)+))} ");
assert_pred!(matches_codepattern,
"matches_codepattern",
pprust::to_str(&zz_fold.fold_crate(ast),fake_print_crate,
token::get_ident_interner()),
~"zz!zz((zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+)))");
}
}<|fim▁end|>
|
fn fold_arm(&self, a: &Arm) -> Arm {
Arm {
|
<|file_name|>observable-simple.component.ts<|end_file_name|><|fim▁begin|>/*
* MIT License
*
* Copyright (c) 2017-2018 Stefano Cappa
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import { Component } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/observable/of';
import 'rxjs/add/operator/delay';
import { Image } from 'angular-modal-gallery';
import * as _ from 'lodash';
import { IMAGES_ARRAY } from '../images';
import { TitleService } from '../../../core/services/title.service';
@Component({
selector: 'mmw-observable-simple-page',
templateUrl: 'observable-simple.html',
styleUrls: ['observable-simple.scss']
})
export class ObservableSimpleComponent {
// observable of an array of images with a delay to simulate a network request
imagesObservable: Observable<Array<Image>> = Observable.of(_.cloneDeep(IMAGES_ARRAY)).delay(300);
constructor(private titleService: TitleService) {
this.titleService.titleEvent.emit('Demo - Observable simple');
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>WebServiceClientTest.js<|end_file_name|><|fim▁begin|>window.WebServiceClientTest = new Class( {
Implements : [Events, JsTestClass, Options],
Binds : ['onDocumentReady', 'onDocumentError'],
<|fim▁hole|>
options : {
testMethods : [
{ method : 'initialize_', isAsynchron : false }]
},
constants : {
},
initialize : function( options ) {
this.setOptions( options );
this.webServiceClient;
},
beforeEachTest : function(){
this.webServiceClient = new WebServiceClient({ });
},
afterEachTest : function (){
this.webServiceClient = null;
},
initialize_ : function() {
assertThat( this.webServiceClient, JsHamcrest.Matchers.instanceOf( WebServiceClient ));
}
});<|fim▁end|>
| |
<|file_name|>htmllinkelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::Parser as CssParser;
use dom::attr::Attr;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListBinding::DOMTokenListMethods;
use dom::bindings::codegen::Bindings::HTMLLinkElementBinding;
use dom::bindings::codegen::Bindings::HTMLLinkElementBinding::HTMLLinkElementMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{MutNullableJS, Root, RootedReference};
use dom::bindings::str::DOMString;
use dom::cssstylesheet::CSSStyleSheet;
use dom::document::Document;
use dom::domtokenlist::DOMTokenList;
use dom::element::{AttributeMutation, Element, ElementCreator};
use dom::element::{cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute};
use dom::globalscope::GlobalScope;
use dom::htmlelement::HTMLElement;
use dom::node::{Node, UnbindContext, document_from_node, window_from_node};
use dom::stylesheet::StyleSheet as DOMStyleSheet;
use dom::virtualmethods::VirtualMethods;
use html5ever_atoms::LocalName;
use net_traits::ReferrerPolicy;
use script_traits::{MozBrowserEvent, ScriptMsg as ConstellationMsg};
use std::ascii::AsciiExt;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::default::Default;
use std::sync::Arc;
use style::attr::AttrValue;
use style::media_queries::parse_media_query_list;
use style::str::HTML_SPACE_CHARACTERS;
use style::stylesheets::Stylesheet;
use stylesheet_loader::{StylesheetLoader, StylesheetContextSource, StylesheetOwner};
unsafe_no_jsmanaged_fields!(Stylesheet);
#[derive(JSTraceable, PartialEq, Clone, Copy, HeapSizeOf)]
pub struct RequestGenerationId(u32);
impl RequestGenerationId {
fn increment(self) -> RequestGenerationId {
RequestGenerationId(self.0 + 1)
}
}
#[dom_struct]
pub struct HTMLLinkElement {
htmlelement: HTMLElement,
rel_list: MutNullableJS<DOMTokenList>,
#[ignore_heap_size_of = "Arc"]
stylesheet: DOMRefCell<Option<Arc<Stylesheet>>>,
cssom_stylesheet: MutNullableJS<CSSStyleSheet>,
/// https://html.spec.whatwg.org/multipage/#a-style-sheet-that-is-blocking-scripts
parser_inserted: Cell<bool>,
/// The number of loads that this link element has triggered (could be more
/// than one because of imports) and have not yet finished.
pending_loads: Cell<u32>,
/// Whether any of the loads have failed.
any_failed_load: Cell<bool>,
/// A monotonically increasing counter that keeps track of which stylesheet to apply.
request_generation_id: Cell<RequestGenerationId>,
}
impl HTMLLinkElement {
fn new_inherited(local_name: LocalName, prefix: Option<DOMString>, document: &Document,
creator: ElementCreator) -> HTMLLinkElement {
HTMLLinkElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
rel_list: Default::default(),
parser_inserted: Cell::new(creator.is_parser_created()),
stylesheet: DOMRefCell::new(None),
cssom_stylesheet: MutNullableJS::new(None),
pending_loads: Cell::new(0),
any_failed_load: Cell::new(false),
request_generation_id: Cell::new(RequestGenerationId(0)),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<DOMString>,
document: &Document,
creator: ElementCreator) -> Root<HTMLLinkElement> {
Node::reflect_node(box HTMLLinkElement::new_inherited(local_name, prefix, document, creator),
document,
HTMLLinkElementBinding::Wrap)
}
pub fn get_request_generation_id(&self) -> RequestGenerationId {
self.request_generation_id.get()
}
pub fn set_stylesheet(&self, s: Arc<Stylesheet>) {
assert!(self.stylesheet.borrow().is_none()); // Useful for catching timing issues.
*self.stylesheet.borrow_mut() = Some(s);
}
pub fn get_stylesheet(&self) -> Option<Arc<Stylesheet>> {
self.stylesheet.borrow().clone()
}
pub fn get_cssom_stylesheet(&self) -> Option<Root<CSSStyleSheet>> {
self.get_stylesheet().map(|sheet| {
self.cssom_stylesheet.or_init(|| {
CSSStyleSheet::new(&window_from_node(self),
self.upcast::<Element>(),
"text/css".into(),
None, // todo handle location
None, // todo handle title
sheet)
})
})
}
pub fn is_alternate(&self) -> bool {
let rel = get_attr(self.upcast(), &local_name!("rel"));
match rel {
Some(ref value) => {
value.split(HTML_SPACE_CHARACTERS)
.any(|s| s.eq_ignore_ascii_case("alternate"))
},
None => false,
}
}
}
fn get_attr(element: &Element, local_name: &LocalName) -> Option<String> {
let elem = element.get_attribute(&ns!(), local_name);
elem.map(|e| {
let value = e.value();
(**value).to_owned()
})
}
fn string_is_stylesheet(value: &Option<String>) -> bool {
match *value {
Some(ref value) => {
value.split(HTML_SPACE_CHARACTERS)
.any(|s| s.eq_ignore_ascii_case("stylesheet"))
},
None => false,
}
}
/// Favicon spec usage in accordance with CEF implementation:
/// only url of icon is required/used
/// https://html.spec.whatwg.org/multipage/#rel-icon
fn is_favicon(value: &Option<String>) -> bool {
match *value {
Some(ref value) => {
value.split(HTML_SPACE_CHARACTERS)
.any(|s| s.eq_ignore_ascii_case("icon") || s.eq_ignore_ascii_case("apple-touch-icon"))
},
None => false,
}
}
impl VirtualMethods for HTMLLinkElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
if !self.upcast::<Node>().is_in_doc() || mutation == AttributeMutation::Removed {
return;
}
let rel = get_attr(self.upcast(), &local_name!("rel"));
match attr.local_name() {
&local_name!("href") => {
if string_is_stylesheet(&rel) {
self.handle_stylesheet_url(&attr.value());
} else if is_favicon(&rel) {
let sizes = get_attr(self.upcast(), &local_name!("sizes"));
self.handle_favicon_url(rel.as_ref().unwrap(), &attr.value(), &sizes);
}
},
&local_name!("sizes") => {
if is_favicon(&rel) {
if let Some(ref href) = get_attr(self.upcast(), &local_name!("href")) {
self.handle_favicon_url(rel.as_ref().unwrap(), href, &Some(attr.value().to_string()));
}
}
},
&local_name!("media") => {
if string_is_stylesheet(&rel) {
if let Some(href) = self.upcast::<Element>().get_attribute(&ns!(), &local_name!("href")) {
self.handle_stylesheet_url(&href.value());
}
}
},
_ => {},
}
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match name {
&local_name!("rel") => AttrValue::from_serialized_tokenlist(value.into()),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(tree_in_doc);
}
if tree_in_doc {
let element = self.upcast();
let rel = get_attr(element, &local_name!("rel"));
let href = get_attr(element, &local_name!("href"));
let sizes = get_attr(self.upcast(), &local_name!("sizes"));
match href {
Some(ref href) if string_is_stylesheet(&rel) => {
self.handle_stylesheet_url(href);
}
Some(ref href) if is_favicon(&rel) => {
self.handle_favicon_url(rel.as_ref().unwrap(), href, &sizes);
}
_ => {}
}
}
}
fn unbind_from_tree(&self, context: &UnbindContext) {
if let Some(ref s) = self.super_type() {
s.unbind_from_tree(context);
}
let document = document_from_node(self);
document.invalidate_stylesheets();
}
}
impl HTMLLinkElement {
/// https://html.spec.whatwg.org/multipage/#concept-link-obtain
fn handle_stylesheet_url(&self, href: &str) {
let document = document_from_node(self);
if document.browsing_context().is_none() {<|fim▁hole|> if href.is_empty() {
return;
}
// Step 2.
let url = match document.base_url().join(href) {
Ok(url) => url,
Err(e) => {
debug!("Parsing url {} failed: {}", href, e);
return;
}
};
let element = self.upcast::<Element>();
// Step 3
let cors_setting = cors_setting_for_element(element);
let mq_attribute = element.get_attribute(&ns!(), &local_name!("media"));
let value = mq_attribute.r().map(|a| a.value());
let mq_str = match value {
Some(ref value) => &***value,
None => "",
};
let mut css_parser = CssParser::new(&mq_str);
let media = parse_media_query_list(&mut css_parser);
let im_attribute = element.get_attribute(&ns!(), &local_name!("integrity"));
let integrity_val = im_attribute.r().map(|a| a.value());
let integrity_metadata = match integrity_val {
Some(ref value) => &***value,
None => "",
};
self.request_generation_id.set(self.request_generation_id.get().increment());
// TODO: #8085 - Don't load external stylesheets if the node's mq
// doesn't match.
let loader = StylesheetLoader::for_element(self.upcast());
loader.load(StylesheetContextSource::LinkElement {
url: url,
media: Some(media),
}, cors_setting, integrity_metadata.to_owned());
}
fn handle_favicon_url(&self, rel: &str, href: &str, sizes: &Option<String>) {
let document = document_from_node(self);
match document.base_url().join(href) {
Ok(url) => {
let event = ConstellationMsg::NewFavicon(url.clone());
document.window().upcast::<GlobalScope>().constellation_chan().send(event).unwrap();
let mozbrowser_event = match *sizes {
Some(ref sizes) => MozBrowserEvent::IconChange(rel.to_owned(), url.to_string(), sizes.to_owned()),
None => MozBrowserEvent::IconChange(rel.to_owned(), url.to_string(), "".to_owned())
};
document.trigger_mozbrowser_event(mozbrowser_event);
}
Err(e) => debug!("Parsing url {} failed: {}", href, e)
}
}
}
impl StylesheetOwner for HTMLLinkElement {
fn increment_pending_loads_count(&self) {
self.pending_loads.set(self.pending_loads.get() + 1)
}
fn load_finished(&self, succeeded: bool) -> Option<bool> {
assert!(self.pending_loads.get() > 0, "What finished?");
if !succeeded {
self.any_failed_load.set(true);
}
self.pending_loads.set(self.pending_loads.get() - 1);
if self.pending_loads.get() != 0 {
return None;
}
let any_failed = self.any_failed_load.get();
self.any_failed_load.set(false);
Some(any_failed)
}
fn parser_inserted(&self) -> bool {
self.parser_inserted.get()
}
fn referrer_policy(&self) -> Option<ReferrerPolicy> {
if self.RelList().Contains("noreferrer".into()) {
return Some(ReferrerPolicy::NoReferrer)
}
None
}
fn set_origin_clean(&self, origin_clean: bool) {
if let Some(stylesheet) = self.get_cssom_stylesheet() {
stylesheet.set_origin_clean(origin_clean);
}
}
}
impl HTMLLinkElementMethods for HTMLLinkElement {
// https://html.spec.whatwg.org/multipage/#dom-link-href
make_url_getter!(Href, "href");
// https://html.spec.whatwg.org/multipage/#dom-link-href
make_setter!(SetHref, "href");
// https://html.spec.whatwg.org/multipage/#dom-link-rel
make_getter!(Rel, "rel");
// https://html.spec.whatwg.org/multipage/#dom-link-rel
fn SetRel(&self, rel: DOMString) {
self.upcast::<Element>().set_tokenlist_attribute(&local_name!("rel"), rel);
}
// https://html.spec.whatwg.org/multipage/#dom-link-media
make_getter!(Media, "media");
// https://html.spec.whatwg.org/multipage/#dom-link-media
make_setter!(SetMedia, "media");
// https://html.spec.whatwg.org/multipage/#dom-link-integrity
make_getter!(Integrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-link-integrity
make_setter!(SetIntegrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-link-hreflang
make_getter!(Hreflang, "hreflang");
// https://html.spec.whatwg.org/multipage/#dom-link-hreflang
make_setter!(SetHreflang, "hreflang");
// https://html.spec.whatwg.org/multipage/#dom-link-type
make_getter!(Type, "type");
// https://html.spec.whatwg.org/multipage/#dom-link-type
make_setter!(SetType, "type");
// https://html.spec.whatwg.org/multipage/#dom-link-rellist
fn RelList(&self) -> Root<DOMTokenList> {
self.rel_list.or_init(|| DOMTokenList::new(self.upcast(), &local_name!("rel")))
}
// https://html.spec.whatwg.org/multipage/#dom-link-charset
make_getter!(Charset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-link-charset
make_setter!(SetCharset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-link-rev
make_getter!(Rev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-link-rev
make_setter!(SetRev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-link-target
make_getter!(Target, "target");
// https://html.spec.whatwg.org/multipage/#dom-link-target
make_setter!(SetTarget, "target");
// https://html.spec.whatwg.org/multipage/#dom-link-crossorigin
fn GetCrossOrigin(&self) -> Option<DOMString> {
reflect_cross_origin_attribute(self.upcast::<Element>())
}
// https://html.spec.whatwg.org/multipage/#dom-link-crossorigin
fn SetCrossOrigin(&self, value: Option<DOMString>) {
set_cross_origin_attribute(self.upcast::<Element>(), value);
}
// https://drafts.csswg.org/cssom/#dom-linkstyle-sheet
fn GetSheet(&self) -> Option<Root<DOMStyleSheet>> {
self.get_cssom_stylesheet().map(Root::upcast)
}
}<|fim▁end|>
|
return;
}
// Step 1.
|
<|file_name|>app_banner_debug_log.cc<|end_file_name|><|fim▁begin|>// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/banners/app_banner_debug_log.h"
#include "content/public/browser/render_frame_host.h"
#include "content/public/browser/web_contents.h"
namespace banners {
const char kRendererRequestCancel[] =
"renderer has requested the banner prompt be cancelled";
const char kManifestEmpty[] =
"manifest could not be fetched, is empty, or could not be parsed";
const char kNoManifest[] = "site has no manifest <link> URL";
const char kCannotDetermineBestIcon[] =
"could not determine the best icon to use";
const char kNoMatchingServiceWorker[] =
"no matching service worker detected. You may need to reload the page, or "
"check that the service worker for the current page also controls the "
"start URL from the manifest";
const char kNoIconAvailable[] = "no icon available to display";
const char kUserNavigatedBeforeBannerShown[] =
"the user navigated before the banner could be shown";
const char kStartURLNotValid[] = "start URL in manifest is not valid";
const char kManifestMissingNameOrShortName[] =
"one of manifest name or short name must be specified";
const char kManifestMissingSuitableIcon[] =
"manifest does not contain a suitable icon - PNG format of at least "
"144x144px is required, and the sizes attribute must be set";
const char kNotLoadedInMainFrame[] = "page not loaded in the main frame";
const char kNotServedFromSecureOrigin[] =
"page not served from a secure origin";
// The leading space is intentional as another string is prepended.
const char kIgnoredNotSupportedOnAndroid[] =<|fim▁hole|>const char kIgnoredIdsDoNotMatch[] =
"play application ignored: app URL and id fields were specified in the "
"manifest, but they do not match";
void OutputDeveloperNotShownMessage(content::WebContents* web_contents,
const std::string& message,
bool is_debug_mode) {
OutputDeveloperDebugMessage(web_contents, "not shown: " + message,
is_debug_mode);
}
void OutputDeveloperDebugMessage(content::WebContents* web_contents,
const std::string& message,
bool is_debug_mode) {
if (!is_debug_mode || !web_contents)
return;
web_contents->GetMainFrame()->AddMessageToConsole(
content::CONSOLE_MESSAGE_LEVEL_DEBUG, "App banner " + message);
}
} // namespace banners<|fim▁end|>
|
" application ignored: not supported on Android";
const char kIgnoredNoId[] = "play application ignored: no id provided";
|
<|file_name|>pagination-nav.spec.js<|end_file_name|><|fim▁begin|>import VueRouter from 'vue-router'
import { mount } from '@vue/test-utils'
import { waitNT, waitRAF } from '../../../tests/utils'
import { Vue } from '../../vue'
import { BPaginationNav } from './pagination-nav'
Vue.use(VueRouter)
// The majority of tests for the core of pagination mixin are performed
// in pagination.spec.js. Here we just test the differences that
// <pagination-nav> has
// We use a (currently) undocumented wrapper method `destroy()` at the end
// of each test to remove the VM and DOM from the JSDOM document, as
// the wrappers and instances remain after each test completes
describe('pagination-nav', () => {
it('renders with correct basic structure for root elements', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 1,
value: 1
}
})
await waitNT(wrapper.vm)
await waitRAF()
// <pagination-nav> has an outer wrapper of nav
expect(wrapper.element.tagName).toBe('NAV')
const $ul = wrapper.find('ul.pagination')
expect($ul.exists()).toBe(true)
// NAV Attributes
expect(wrapper.attributes('aria-hidden')).toBe('false')
expect(wrapper.attributes('aria-label')).toBe('Pagination')
// UL Classes
expect($ul.classes()).toContain('pagination')
expect($ul.classes()).toContain('b-pagination')
expect($ul.classes()).not.toContain('pagination-sm')
expect($ul.classes()).not.toContain('pagination-lg')
expect($ul.classes()).not.toContain('justify-content-center')
expect($ul.classes()).not.toContain('justify-content-end')
// UL Attributes
expect($ul.attributes('role')).not.toBe('menubar')
expect($ul.attributes('aria-disabled')).toBe('false')
expect($ul.attributes('aria-label')).not.toBe('Pagination')
wrapper.destroy()
})
it('renders with correct default HREF', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 5,
value: 3,
limit: 10
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
// Default base URL is '/', and link will be the page number
expect($links.at(0).attributes('href')).toBe('/1')
expect($links.at(1).attributes('href')).toBe('/2')
expect($links.at(2).attributes('href')).toBe('/1')
expect($links.at(3).attributes('href')).toBe('/2')
expect($links.at(4).attributes('href')).toBe('/3')
expect($links.at(5).attributes('href')).toBe('/4')
expect($links.at(6).attributes('href')).toBe('/5')
expect($links.at(7).attributes('href')).toBe('/4')
expect($links.at(8).attributes('href')).toBe('/5')
wrapper.destroy()
})
it('renders with correct default page button text', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 5,
value: 3,
limit: 10
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
expect($links.at(2).text()).toBe('1')
expect($links.at(3).text()).toBe('2')
expect($links.at(4).text()).toBe('3')
expect($links.at(5).text()).toBe('4')
expect($links.at(6).text()).toBe('5')
wrapper.destroy()
})
it('disabled renders correct', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 1,
value: 1,
disabled: true
}
})
await waitNT(wrapper.vm)
await waitRAF()
// <pagination-nav> has an outer wrapper of nav
expect(wrapper.element.tagName).toBe('NAV')
const $ul = wrapper.find('ul.pagination')
expect($ul.exists()).toBe(true)
// NAV Attributes
expect(wrapper.attributes('aria-hidden')).toBe('true')
expect(wrapper.attributes('aria-disabled')).toBe('true')
// UL Classes
expect($ul.classes()).toContain('pagination')
expect($ul.classes()).toContain('b-pagination')
// UL Attributes
expect($ul.attributes('role')).not.toBe('menubar')
expect($ul.attributes('aria-disabled')).toBe('true')
// LI classes
expect(wrapper.findAll('li').length).toBe(5)
expect(wrapper.findAll('li.page-item').length).toBe(5)
expect(wrapper.findAll('li.disabled').length).toBe(5)
// LI Inner should be span elements
expect(wrapper.findAll('li > span').length).toBe(5)
expect(wrapper.findAll('li > span.page-link').length).toBe(5)
expect(wrapper.findAll('li > span[aria-disabled="true"').length).toBe(5)
wrapper.destroy()
})
it('reacts to changes in number-of-pages', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 3,
value: 2,
limit: 10
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
let $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(7)
await wrapper.setProps({
numberOfPages: 5
})
await waitNT(wrapper.vm)
$links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
wrapper.destroy()
})
it('renders with correct HREF when base-url specified', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 5,
value: 3,
limit: 10,
baseUrl: '/foo/'
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
// Default base URL is '/', and link will be the page number
expect($links.at(0).attributes('href')).toBe('/foo/1')
expect($links.at(1).attributes('href')).toBe('/foo/2')
expect($links.at(2).attributes('href')).toBe('/foo/1')
expect($links.at(3).attributes('href')).toBe('/foo/2')
expect($links.at(4).attributes('href')).toBe('/foo/3')
expect($links.at(5).attributes('href')).toBe('/foo/4')
expect($links.at(6).attributes('href')).toBe('/foo/5')
expect($links.at(7).attributes('href')).toBe('/foo/4')
expect($links.at(8).attributes('href')).toBe('/foo/5')
wrapper.destroy()
})
it('renders with correct HREF when link-gen function provided', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 5,
value: 3,
limit: 10,
linkGen: page => `?${page}`
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
    // Links are generated by the link-gen function as '?<page number>'
expect($links.at(0).attributes('href')).toBe('?1')
expect($links.at(1).attributes('href')).toBe('?2')
expect($links.at(2).attributes('href')).toBe('?1')
expect($links.at(3).attributes('href')).toBe('?2')
expect($links.at(4).attributes('href')).toBe('?3')
expect($links.at(5).attributes('href')).toBe('?4')
expect($links.at(6).attributes('href')).toBe('?5')
expect($links.at(7).attributes('href')).toBe('?4')
expect($links.at(8).attributes('href')).toBe('?5')
wrapper.destroy()
})
it('renders with correct HREF when link-gen function returns object', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 5,
value: 3,
limit: 10,
linkGen: page => ({ path: `/baz?${page}` })
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
    // Links are generated by the link-gen function returning location objects with a path
expect($links.at(0).attributes('href')).toBe('/baz?1')
expect($links.at(1).attributes('href')).toBe('/baz?2')
expect($links.at(2).attributes('href')).toBe('/baz?1')
expect($links.at(3).attributes('href')).toBe('/baz?2')
expect($links.at(4).attributes('href')).toBe('/baz?3')
expect($links.at(5).attributes('href')).toBe('/baz?4')
expect($links.at(6).attributes('href')).toBe('/baz?5')
expect($links.at(7).attributes('href')).toBe('/baz?4')
expect($links.at(8).attributes('href')).toBe('/baz?5')
wrapper.destroy()
})
it('renders with correct page button text when page-gen function provided', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 5,
value: 3,
limit: 10,
pageGen: page => `Page ${page}`
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
expect($links.at(2).text()).toBe('Page 1')
expect($links.at(3).text()).toBe('Page 2')
expect($links.at(4).text()).toBe('Page 3')
expect($links.at(5).text()).toBe('Page 4')
expect($links.at(6).text()).toBe('Page 5')
wrapper.destroy()
})
it('renders with correct HREF when array of links set via pages prop', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
value: 3,
limit: 10,
pages: ['/baz?1', '/baz?2', '/baz?3', '/baz?4', '/baz?5']
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
    // Links come directly from the strings in the pages array
expect($links.at(0).attributes('href')).toBe('/baz?1')
expect($links.at(1).attributes('href')).toBe('/baz?2')
expect($links.at(2).attributes('href')).toBe('/baz?1')
expect($links.at(3).attributes('href')).toBe('/baz?2')
expect($links.at(4).attributes('href')).toBe('/baz?3')
expect($links.at(5).attributes('href')).toBe('/baz?4')
expect($links.at(6).attributes('href')).toBe('/baz?5')
expect($links.at(7).attributes('href')).toBe('/baz?4')
expect($links.at(8).attributes('href')).toBe('/baz?5')
// Page buttons have correct content
expect($links.at(2).text()).toBe('1')
expect($links.at(3).text()).toBe('2')
expect($links.at(4).text()).toBe('3')
expect($links.at(5).text()).toBe('4')
expect($links.at(6).text()).toBe('5')
wrapper.destroy()
})
it('renders with correct HREF when array of links and text set via pages prop', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
value: 3,
limit: 10,
pages: [
{ link: '/baz?1', text: 'one' },
{ link: '/baz?2', text: 'two' },
{ link: '/baz?3', text: 'three' },
{ link: '/baz?4', text: 'four' },
{ link: '/baz?5', text: 'five' }
]
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
const $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(9)
    // Links come directly from the link fields of the pages array entries
expect($links.at(0).attributes('href')).toBe('/baz?1')
expect($links.at(1).attributes('href')).toBe('/baz?2')
expect($links.at(2).attributes('href')).toBe('/baz?1')
expect($links.at(3).attributes('href')).toBe('/baz?2')
expect($links.at(4).attributes('href')).toBe('/baz?3')
expect($links.at(5).attributes('href')).toBe('/baz?4')
expect($links.at(6).attributes('href')).toBe('/baz?5')
expect($links.at(7).attributes('href')).toBe('/baz?4')
expect($links.at(8).attributes('href')).toBe('/baz?5')
// Page buttons have correct content
expect($links.at(2).text()).toBe('one')
expect($links.at(3).text()).toBe('two')
expect($links.at(4).text()).toBe('three')
expect($links.at(5).text()).toBe('four')
expect($links.at(6).text()).toBe('five')
wrapper.destroy()
})
it('reacts to changes in pages array length', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
value: 2,
limit: 10,
pages: ['/baz?1', '/baz?2', '/baz?3']
}
})
await waitNT(wrapper.vm)
await waitRAF()
expect(wrapper.element.tagName).toBe('NAV')
let $links = wrapper.findAll('a.page-link')
expect($links.length).toBe(7)
expect($links.at(0).attributes('href')).toBe('/baz?1')
expect($links.at(1).attributes('href')).toBe('/baz?1')
expect($links.at(2).attributes('href')).toBe('/baz?1')
expect($links.at(3).attributes('href')).toBe('/baz?2')
expect($links.at(4).attributes('href')).toBe('/baz?3')
expect($links.at(5).attributes('href')).toBe('/baz?3')
expect($links.at(6).attributes('href')).toBe('/baz?3')
// Add extra page
await wrapper.setProps({
pages: ['/baz?1', '/baz?2', '/baz?3', '/baz?4']
})
await waitNT(wrapper.vm)
$links = wrapper.findAll('a.page-link')
expect($links.length).toBe(8)
expect($links.at(0).attributes('href')).toBe('/baz?1')
expect($links.at(1).attributes('href')).toBe('/baz?1')
expect($links.at(2).attributes('href')).toBe('/baz?1')
expect($links.at(3).attributes('href')).toBe('/baz?2')
expect($links.at(4).attributes('href')).toBe('/baz?3')
expect($links.at(5).attributes('href')).toBe('/baz?4')
expect($links.at(6).attributes('href')).toBe('/baz?3')
expect($links.at(7).attributes('href')).toBe('/baz?4')
wrapper.destroy()
})
it('clicking buttons updates the v-model', async () => {
const App = {
compatConfig: { MODE: 3, RENDER_FUNCTION: 'suppress-warning' },
methods: {
onPageClick(bvEvent, page) {
// Prevent 3rd page from being selected
if (page === 3) {
bvEvent.preventDefault()
}
}
},
render(h) {
return h(BPaginationNav, {
props: {
baseUrl: '#', // Needed to prevent JSDOM errors
numberOfPages: 5,
value: 1,
limit: 10
},
on: { 'page-click': this.onPageClick }
})
}
}
const wrapper = mount(App)
expect(wrapper).toBeDefined()
const paginationNav = wrapper.findComponent(BPaginationNav)
expect(paginationNav).toBeDefined()
expect(paginationNav.element.tagName).toBe('NAV')
// Grab the page links
const lis = paginationNav.findAll('li')
expect(lis.length).toBe(9)
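    // li layout: 0 = goto-first, 1 = goto-prev, 2-6 = pages 1-5, 7 = goto-next, 8 = goto-last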
expect(paginationNav.vm.computedCurrentPage).toBe(1)
expect(paginationNav.emitted('input')).toBeUndefined()
expect(paginationNav.emitted('change')).toBeUndefined()
expect(paginationNav.emitted('page-click')).toBeUndefined()
// Click on current (1st) page link (does nothing)
await lis
.at(2)
.find('a')
.trigger('click')
await waitRAF()
expect(paginationNav.vm.computedCurrentPage).toBe(1)
expect(paginationNav.emitted('input')).toBeUndefined()
expect(paginationNav.emitted('change')).toBeUndefined()
expect(paginationNav.emitted('page-click')).toBeUndefined()
// Click on 2nd page link
await lis
.at(3)
.find('a')
.trigger('click')
await waitRAF()
expect(paginationNav.vm.computedCurrentPage).toBe(2)
expect(paginationNav.emitted('input')).toBeDefined()
expect(paginationNav.emitted('change')).toBeDefined()
expect(paginationNav.emitted('page-click')).toBeDefined()
expect(paginationNav.emitted('input')[0][0]).toBe(2)
expect(paginationNav.emitted('change')[0][0]).toBe(2)
expect(paginationNav.emitted('page-click').length).toBe(1)
// Click goto last page link<|fim▁hole|> await waitRAF()
expect(paginationNav.vm.computedCurrentPage).toBe(5)
expect(paginationNav.emitted('input')[1][0]).toBe(5)
expect(paginationNav.emitted('change')[1][0]).toBe(5)
expect(paginationNav.emitted('page-click').length).toBe(2)
// Click prev page link
await lis
.at(1)
.find('a')
.trigger('click')
await waitRAF()
expect(paginationNav.vm.computedCurrentPage).toBe(4)
expect(paginationNav.emitted('input')[2][0]).toBe(4)
expect(paginationNav.emitted('change')[2][0]).toBe(4)
expect(paginationNav.emitted('page-click').length).toBe(3)
// Click on 3rd page link (prevented)
await lis
.at(4)
.find('a')
.trigger('click')
await waitRAF()
expect(paginationNav.vm.computedCurrentPage).toBe(4)
expect(paginationNav.emitted('input').length).toBe(3)
expect(paginationNav.emitted('change').length).toBe(3)
expect(paginationNav.emitted('page-click').length).toBe(4)
wrapper.destroy()
})
describe('auto-detect page', () => {
// Note: JSDOM only works with hash URL updates out of the box
beforeEach(() => {
      // Make sure the JSDOM URL is at '/', as the JSDOM instance is shared across tests!
window.history.pushState({}, '', '/')
})
it('detects current page without $router', async () => {
const wrapper = mount(BPaginationNav, {
propsData: {
numberOfPages: 3,
value: null,
linkGen: page => (page === 2 ? '/' : `/#${page}`)
}
})
await waitNT(wrapper.vm)
await waitRAF()
await waitNT(wrapper.vm)
expect(wrapper.vm.$router).toBeUndefined()
expect(wrapper.vm.$route).toBeUndefined()
expect(wrapper.element.tagName).toBe('NAV')
const $ul = wrapper.find('ul.pagination')
expect($ul.exists()).toBe(true)
// Emitted current page (2)
expect(wrapper.emitted('input')).toBeDefined()
expect(wrapper.emitted('input').length).toBe(1)
      expect(wrapper.emitted('input')[0][0]).toBe(2) // Page 2 maps to the current URL '/'
wrapper.destroy()
})
it('works with $router to detect path and linkGen returns location object', async () => {
const App = {
compatConfig: { MODE: 3, COMPONENT_FUNCTIONAL: 'suppress-warning' },
components: { BPaginationNav },
methods: {
linkGen(page) {
// We make page #2 "home" for testing
// We return a to prop to auto trigger use of $router
// if using strings, we would need to set use-router=true
return page === 2 ? { path: '/' } : { path: '/' + page }
}
},
template: `
<div>
<b-pagination-nav :number-of-pages="3" :link-gen="linkGen"></b-pagination-nav>
<router-view></router-view>
</div>
`
}
// Our router view component
const FooRoute = {
compatConfig: { MODE: 3, RENDER_FUNCTION: 'suppress-warning' },
render(h) {
return h('div', { class: 'foo-content' }, ['stub'])
}
}
// Create router instance
const router = new VueRouter({
routes: [{ path: '/', component: FooRoute }, { path: '/:page', component: FooRoute }]
})
const wrapper = mount(App, { router })
expect(wrapper).toBeDefined()
// Wait for the router to initialize
await new Promise(resolve => router.onReady(resolve))
// Wait for the guessCurrentPage to complete
await waitNT(wrapper.vm)
await waitRAF()
await waitNT(wrapper.vm)
// The pagination-nav component should exist
expect(wrapper.findComponent(BPaginationNav).exists()).toBe(true)
// And should be on page 2
expect(wrapper.findComponent(BPaginationNav).vm.currentPage).toBe(2)
// Push router to a new page
wrapper.vm.$router.push('/3')
// Wait for the guessCurrentPage to complete
await waitNT(wrapper.vm)
await waitRAF()
await waitNT(wrapper.vm)
// The pagination-nav component should exist
expect(wrapper.findComponent(BPaginationNav).exists()).toBe(true)
// And should be on page 3
expect(wrapper.findComponent(BPaginationNav).vm.currentPage).toBe(3)
wrapper.destroy()
})
it('works with $router to detect path and use-router set and linkGen returns string', async () => {
const App = {
compatConfig: { MODE: 3, COMPONENT_FUNCTIONAL: 'suppress-warning' },
components: { BPaginationNav },
methods: {
linkGen(page) {
// We make page #2 "home" for testing
// We return a to prop to auto trigger use of $router
// if using strings, we would need to set use-router=true
return page === 2 ? '/' : `/${page}`
}
},
template: `
<div>
<b-pagination-nav :number-of-pages="3" :link-gen="linkGen" use-router></b-pagination-nav>
<router-view></router-view>
</div>
`
}
// Our router view component
const FooRoute = {
compatConfig: { MODE: 3, RENDER_FUNCTION: 'suppress-warning' },
render(h) {
return h('div', { class: 'foo-content' }, ['stub'])
}
}
// Create router instance
const router = new VueRouter({
routes: [{ path: '/', component: FooRoute }, { path: '/:page', component: FooRoute }]
})
const wrapper = mount(App, { router })
expect(wrapper).toBeDefined()
// Wait for the router to initialize
await new Promise(resolve => router.onReady(resolve))
// Wait for the guessCurrentPage to complete
await waitNT(wrapper.vm)
await waitRAF()
await waitNT(wrapper.vm)
// The <pagination-nav> component should exist
expect(wrapper.findComponent(BPaginationNav).exists()).toBe(true)
// And should be on page 2
expect(wrapper.findComponent(BPaginationNav).vm.currentPage).toBe(2)
// Push router to a new page
wrapper.vm.$router.push('/3')
// Wait for the guessCurrentPage to complete
await waitNT(wrapper.vm)
await waitRAF()
await waitNT(wrapper.vm)
// The pagination-nav component should exist
expect(wrapper.findComponent(BPaginationNav).exists()).toBe(true)
// And should be on page 3
expect(wrapper.findComponent(BPaginationNav).vm.currentPage).toBe(3)
wrapper.destroy()
})
})
})<|fim▁end|>
|
await lis
.at(8)
.find('a')
.trigger('click')
|
<|file_name|>caption.js<|end_file_name|><|fim▁begin|>import React from "react";
import { Text, View } from "react-native";
import { defaultProps, propTypes } from "./caption-prop-types";
import styles from "./styles";
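// Renders the credits line in uppercase, or nothing when no credits are provided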
const renderCredits = (style, credits) => {
if (!credits || credits === "") {
return null;
}
return (
<Text style={[styles.text, styles.credits, style.text, style.credits]}>
{credits.toUpperCase()}
</Text>
);
};
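// Renders the caption text, or nothing when no text is provided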
const renderText = (style, text) => {
if (!text || text === "") {
return null;
}
return <Text style={[styles.text, style.text, style.caption]}>{text}</Text>;
};
const Caption = ({ children, credits, style, text }) => (
<View>
{children}
<View style={[styles.container, style.container]}>
{renderText(style, text)}
{renderCredits(style, credits)}
</View>
</View>
);
Caption.propTypes = propTypes;
Caption.defaultProps = defaultProps;<|fim▁hole|>export default Caption;
export { default as CentredCaption } from "./centred-caption";<|fim▁end|>
| |
<|file_name|>embedding_ops_test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for ops used with embeddings."""
import itertools
import math
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging
from tensorflow.python.util import compat
def _AsLong(array):
"""Casts arrays elements to long type. Used to convert from numpy tf."""
return [int(x) for x in array]
class ScatterAddSubTest(test.TestCase):
def _TestCase(self, shape, indices, scatter_op=state_ops.scatter_add):
"""Run a random test case with the given shape and indices.
Args:
shape: Shape of the parameters array.
indices: One-dimensional array of ints, the indices of the last dimension
of the parameters to update.
scatter_op: ScatterAdd or ScatterSub.
"""
super(ScatterAddSubTest, self).setUp()
with self.cached_session(use_gpu=False):
# Create a random parameter array of given shape
p_init = np.random.rand(*shape).astype("f")
# Create the shape of the update array. All dimensions except the last
# match the parameter array, the last dimension equals the # of indices.
vals_shape = [len(indices)] + shape[1:]
vals_init = np.random.rand(*vals_shape).astype("f")
v_i = [float(x) for x in vals_init.ravel()]
p = variables.Variable(p_init)
vals = constant_op.constant(v_i, shape=vals_shape, name="vals")
ind = constant_op.constant(indices, dtype=dtypes.int32)
p2 = scatter_op(p, ind, vals, name="updated_p")
# p = init
self.evaluate(variables.global_variables_initializer())
# p += vals
result = self.evaluate(p2)
# Compute the expected 'p' using numpy operations.
for i, ind in enumerate(indices):
if scatter_op == state_ops.scatter_add:
p_init.reshape(shape[0], -1)[ind, :] += (vals_init.reshape(
vals_shape[0], -1)[i, :])
else:
p_init.reshape(shape[0], -1)[ind, :] -= (vals_init.reshape(
vals_shape[0], -1)[i, :])
self.assertTrue(all((p_init == result).ravel()))
@test_util.run_deprecated_v1
def testNoRepetitions(self):
self._TestCase([2, 2], [1])
self._TestCase([4, 4, 4], [2, 0])
self._TestCase([43, 20, 10, 10], [42, 5, 6, 1, 3, 5, 7, 9])
@test_util.run_deprecated_v1
def testWithRepetitions(self):
self._TestCase([2, 2], [1, 1])
self._TestCase([5, 3, 9, 5], [2, 0, 4, 1, 3, 1, 4, 0, 4, 3])
self._TestCase([32, 4, 4], [31] * 8)
@test_util.run_deprecated_v1
def testRandom(self):
# Random shapes of rank 4, random indices
for _ in range(5):
shape = np.random.randint(1, 20, size=4)
indices = np.random.randint(shape[0], size=2 * shape[0])
self._TestCase(_AsLong(list(shape)), list(indices))
@test_util.run_deprecated_v1
def testSubRandom(self):
# Random shapes of rank 4, random indices
for _ in range(5):
shape = np.random.randint(1, 20, size=4)
indices = np.random.randint(shape[0], size=2 * shape[0])
self._TestCase(_AsLong(list(shape)), list(indices), state_ops.scatter_sub)
@test_util.run_deprecated_v1
def testWrongShape(self):
# Indices and values mismatch.
var = variables.Variable(
array_ops.zeros(shape=[1024, 64, 64], dtype=dtypes.float32))
indices = array_ops.placeholder(dtypes.int32, shape=[32])
values = array_ops.placeholder(dtypes.float32, shape=[33, 64, 64])
with self.assertRaises(ValueError):
state_ops.scatter_add(var, indices, values)
# Var and values mismatch.
values = array_ops.placeholder(dtypes.float32, shape=[32, 64, 63])
with self.assertRaises(ValueError):
state_ops.scatter_add(var, indices, values)
def _PName(param_id):
return "p" + str(param_id)
def _EmbeddingParams(num_shards,
vocab_size,
dtype=dtypes.float32,
shape=None,
use_shapeless_placeholder=False):
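  # Builds `num_shards` embedding shards (constants, or shapeless placeholders when
  # requested) along with the numpy values used to feed them in the tests.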
p = []
params = {}
feed_dict = {}
if not shape:
shape = [10]
for i in range(num_shards):
shard_shape = [vocab_size // num_shards] + shape
if i < vocab_size % num_shards: # Excess goes evenly on the first shards
shard_shape[0] += 1
param_name = _PName(i)
if use_shapeless_placeholder:
param = array_ops.placeholder(dtype, shape=None, name=param_name)
else:
param = constant_op.constant(
1.0, shape=shard_shape, dtype=dtype, name=param_name)
p.append(param)
np_type = "f" if dtype == dtypes.float32 else "d"
val = (np.random.rand(*shard_shape).astype(np_type)) + 1
params[param_name + ":0"] = val
feed_dict[param.name] = val
return p, params, feed_dict
def _EmbeddingParamsAsPartitionedVariable(num_shards,
vocab_size,
dtype=dtypes.float32,
shape=None,
use_resource=False):
p, params, feed_dict = _EmbeddingParams(
num_shards, vocab_size, dtype=dtype, shape=shape)
shape = shape or [10]
partitioned_variable = variable_scope.get_variable(
"p",
shape=[vocab_size] + shape,
initializer=array_ops.concat([params[p_i.name] for p_i in p], 0),
partitioner=partitioned_variables.min_max_variable_partitioner(
max_partitions=num_shards, min_slice_size=1),
use_resource=use_resource)
return p, partitioned_variable, params, feed_dict
def _EmbeddingResult(params,
id_vals,
num_shards,
vocab_size,
partition_strategy="mod",
weight_vals=None):
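  # Numpy reference implementation: aggregates embedding vectors, weight sums and
  # squared-weight sums per batch entry, mirroring the "mod" or "div" partition strategy.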
if weight_vals is None:
weight_vals = np.copy(id_vals)
weight_vals.fill(1)
values = []
weights = []
weights_squared = []
for ids, wts in zip(id_vals, weight_vals):
value_aggregation = None
weight_aggregation = None
squared_weight_aggregation = None
if isinstance(ids, compat.integral_types):
ids = [ids]
wts = [wts]
for i, weight_value in zip(ids, wts):
if partition_strategy == "mod":
val = np.copy(params[_PName(i % num_shards) + ":0"][
i // num_shards, :]) * weight_value
elif partition_strategy == "div":<|fim▁hole|> offset = i % (ids_per_partition + 1)
else:
partition = extras + (i - threshold) // ids_per_partition
offset = (i - threshold) % ids_per_partition
val = np.copy(
params[_PName(partition) + ":0"][offset, :]) * weight_value
else:
assert False
if value_aggregation is None:
assert weight_aggregation is None
assert squared_weight_aggregation is None
value_aggregation = val
weight_aggregation = weight_value
squared_weight_aggregation = weight_value * weight_value
else:
assert weight_aggregation is not None
assert squared_weight_aggregation is not None
value_aggregation += val
weight_aggregation += weight_value
squared_weight_aggregation += weight_value * weight_value
values.append(value_aggregation)
weights.append(weight_aggregation)
weights_squared.append(squared_weight_aggregation)
values = np.array(values).astype(np.float32)
weights = np.array(weights).astype(np.float32)
weights_squared = np.array(weights_squared).astype(np.float32)
return values, weights, weights_squared
class EmbeddingLookupTest(test.TestCase):
# This test looks up [0, 0] in a parameter matrix sharded 2 ways. Since
# both the ids are in the first shard, one of the resulting lookup
# vector is going to be empty. The subsequent DivOp fails because of that.
# TODO(keveman): Disabling the test until the underlying problem is fixed.
@test_util.run_deprecated_v1
def testSimpleSharded(self):
with self.cached_session():
num_shards = 2
vocab_size = 4
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
id_vals = np.array([0, 0])
ids = constant_op.constant(list(id_vals), dtype=dtypes.int32)
print("Construct ids", ids.get_shape())
embedding = embedding_ops.embedding_lookup(p, ids)
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testMaxNorm(self):
with self.cached_session():
embeddings = constant_op.constant([[2.0]])
ids = constant_op.constant([0], dtype=dtypes.int32)
embedding = embedding_ops.embedding_lookup(
[embeddings], ids, max_norm=1.0)
self.assertAllEqual(embedding, [[1.0]])
@test_util.run_deprecated_v1
def testMaxNormNontrivial(self):
with self.cached_session():
embeddings = constant_op.constant([[2.0, 4.0], [3.0, 1.0]])
ids = constant_op.constant([0, 1], dtype=dtypes.int32)
embedding = embedding_ops.embedding_lookup(
[embeddings], ids, max_norm=2.0)
norms = math_ops.sqrt(
math_ops.reduce_sum(embeddings * embeddings, axis=1))
normalized = embeddings / array_ops.stack([norms, norms], axis=1)
self.assertAllClose(embedding, 2 * self.evaluate(normalized))
@test_util.run_deprecated_v1
def testSimpleShardedPartitionedVariable(self):
with self.cached_session() as sess:
num_shards = 2
vocab_size = 4
p, p_variable, params, feed_dict = _EmbeddingParamsAsPartitionedVariable(
num_shards, vocab_size)
id_vals = np.array([0, 0])
ids = constant_op.constant(list(id_vals), dtype=dtypes.int32)
print("Construct ids", ids.get_shape())
embedding = embedding_ops.embedding_lookup(p_variable, ids)
self.evaluate(variables.global_variables_initializer())
params_values = [params[p_i.name] for p_i in p]
# Test that the PartitionedVariable components equal the list in p
p_var_val = self.evaluate(list(p_variable))
# Actual test
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(params_values, p_var_val)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testSimpleShardedPartitionedResourceVariable(self):
with self.cached_session() as sess:
num_shards = 2
vocab_size = 4
p, p_variable, params, _ = _EmbeddingParamsAsPartitionedVariable(
num_shards, vocab_size, use_resource=True)
id_vals = np.array([0, 0])
ids = constant_op.constant(list(id_vals), dtype=dtypes.int32)
print("Construct ids", ids.get_shape())
embedding = embedding_ops.embedding_lookup(p_variable, ids)
self.evaluate(variables.global_variables_initializer())
params_values = [params[p_i.name] for p_i in p]
# Test that the PartitionedVariable components equal the list in p
p_var_val = self.evaluate(list(p_variable))
# Actual test
print(ops.get_default_graph().as_graph_def())
tf_result = self.evaluate(embedding)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(params_values, p_var_val)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testShardedModPartitioningInt32Ids(self):
with self.cached_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = constant_op.constant(list(id_vals), dtype=dtypes.int32)
embedding = embedding_ops.embedding_lookup(p, ids)
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testShardedModPartitioningInt64Ids(self):
with self.cached_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = constant_op.constant(list(id_vals), dtype=dtypes.int64)
embedding = embedding_ops.embedding_lookup(p, ids)
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(params, id_vals, num_shards, vocab_size)
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testShardedDivPartitioningInt32Ids(self):
with self.cached_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = constant_op.constant(list(id_vals), dtype=dtypes.int32)
embedding = embedding_ops.embedding_lookup(
p, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testShardedDivPartitioningInt32IdsPartitionedVariable(self):
with self.cached_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
_, p_variable, params, feed_dict = _EmbeddingParamsAsPartitionedVariable(
num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = constant_op.constant(list(id_vals), dtype=dtypes.int32)
self.evaluate(variables.global_variables_initializer())
embedding = embedding_ops.embedding_lookup(
p_variable, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testShardedDivPartitioningInt64Ids(self):
with self.cached_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
p, params, feed_dict = _EmbeddingParams(num_shards, vocab_size)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = constant_op.constant(list(id_vals), dtype=dtypes.int64)
embedding = embedding_ops.embedding_lookup(
p, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
self.assertShapeEqual(np_result, embedding)
@test_util.run_deprecated_v1
def testShardedDivPartitioningUnknownParamShape(self):
with self.cached_session():
num_shards = 5
vocab_size = 13
# Embedding dimensions is 10. The vocab_size x 10 embedding
# parameters are spread in num_shards matrices, so the first
# 3 shards are 3 x 10 and the last 2 shards are 2 x 10.
# We clear parameter shapes, to test when shape is not statically known.
p, params, feed_dict = _EmbeddingParams(
num_shards, vocab_size, use_shapeless_placeholder=True)
num_vals = 30
# Fetch num_vals embeddings for random word ids. Since
# num_vals > vocab_size, this ought to have repetitions, so
# will test that aspect.
id_vals = np.random.randint(vocab_size, size=num_vals)
ids = constant_op.constant(list(id_vals), dtype=dtypes.int64)
embedding = embedding_ops.embedding_lookup(
p, ids, partition_strategy="div")
tf_result = embedding.eval(feed_dict=feed_dict)
np_result, _, _ = _EmbeddingResult(
params, id_vals, num_shards, vocab_size, partition_strategy="div")
self.assertAllEqual(np_result, tf_result)
@test_util.run_deprecated_v1
def testGradientsEmbeddingLookup(self):
vocab_size = 9
num_ids = 10
id_vals = list(np.random.randint(vocab_size, size=num_ids))
tf_logging.vlog(1, id_vals)
for ids_shape in [(10,), (2, 5)]:
for num_shards in [1, 3]:
with self.cached_session():
ids = constant_op.constant(
id_vals, shape=ids_shape, dtype=dtypes.int32)
x, params, _ = _EmbeddingParams(num_shards, vocab_size, shape=[2])
y = embedding_ops.embedding_lookup(x, ids)
y_shape = ids_shape + tuple(params[_PName(0) + ":0"].shape[1:])
x_name = [_PName(i) for i in range(num_shards)]
x_init_value = [params[x_n + ":0"] for x_n in x_name]
x_shape = [i.shape for i in x_init_value]
err = gradient_checker.compute_gradient_error(
x, x_shape, y, y_shape, x_init_value=x_init_value)
self.assertLess(err, 1e-4)
@test_util.run_deprecated_v1
def testGradientsEmbeddingLookupWithComputedParams(self):
vocab_size = 9
num_ids = 5
id_vals = list(np.random.randint(vocab_size, size=num_ids))
tf_logging.vlog(1, id_vals)
for num_shards in [1, 3]:
with self.cached_session():
ids = constant_op.constant(id_vals, dtype=dtypes.int32)
x, params, _ = _EmbeddingParams(num_shards, vocab_size, shape=[2])
# This will force a conversion from IndexedSlices to Tensor.
x_squared = [math_ops.square(elem) for elem in x]
y = embedding_ops.embedding_lookup(x_squared, ids)
y_shape = [num_ids] + list(params[_PName(0) + ":0"].shape[1:])
x_name = [_PName(i) for i in range(num_shards)]
x_init_value = [params[x_n + ":0"] for x_n in x_name]
x_shape = [i.shape for i in x_init_value]
err = gradient_checker.compute_gradient_error(
x, x_shape, y, y_shape, x_init_value=x_init_value)
self.assertLess(err, 1e-3)
def testConstructionNonSharded(self):
with ops.Graph().as_default():
p = variables.Variable(
array_ops.zeros(shape=[100, 100], dtype=dtypes.float32))
ids = constant_op.constant([0, 1, 1, 7], dtype=dtypes.int32)
embedding_ops.embedding_lookup([p], ids)
def testConstructionSharded(self):
with ops.Graph().as_default():
p = []
for _ in range(2):
p += [
variables.Variable(
array_ops.zeros(shape=[100, 100], dtype=dtypes.float32))
]
ids = constant_op.constant([0, 1, 1, 17], dtype=dtypes.int32)
embedding_ops.embedding_lookup(p, ids)
@test_util.run_deprecated_v1
def testHigherRank(self):
np.random.seed(8)
with self.cached_session():
for params_shape in (12,), (6, 3):
params = np.random.randn(*params_shape)
for ids_shape in (3, 2), (4, 3):
ids = np.random.randint(
params.shape[0], size=np.prod(ids_shape)).reshape(ids_shape)
# Compare nonsharded to gather
simple = embedding_ops.embedding_lookup(params, ids)
self.assertAllEqual(simple, array_ops.gather(params, ids))
# Run a few random sharded versions
for procs in 1, 2, 3:
stride = procs * math_ops.range(params.shape[0] // procs)
split_params = [
array_ops.gather(params, stride + p) for p in range(procs)
]
sharded = embedding_ops.embedding_lookup(split_params, ids)
self.assertAllEqual(simple, sharded)
@test_util.run_deprecated_v1
def testHigherRankMaxNorm(self):
np.random.seed(8)
with self.cached_session():
for params_shape in (12,), (6, 3), (6, 2, 3):
# Test embedding rank 0, 1, 2.
# Note: the first dimension must be a common multiple of procs below.
params = 2 * np.ones(params_shape)
params_norm = params / np.sqrt(
np.sum(
params * params, tuple(range(params.ndim)[1:]), keepdims=True))
for ids_shape in (), (3), (4, 3), (2, 3, 4):
ids = np.random.randint(
params.shape[0], size=np.prod(ids_shape,
dtype=np.int64)).reshape(ids_shape)
# Compare nonsharded to gather
simple = embedding_ops.embedding_lookup(params, ids, max_norm=1.0)
# assertAllClose is used here as different implementations of sqrt may
# be used to compute each of the values being compared. For example,
# on AVX512 builds the embedding operation makes use of Eigen's fast
# vectorized square root algorithm for doubles. These different
# implementations of sqrt are not guaranteed to produce exactly the
# same results. Therefore, an exact comparison cannot be made.
self.assertAllClose(simple, array_ops.gather(params_norm, ids))
# Run a few different sharded versions.
for procs in 1, 2, 3:
stride = procs * math_ops.range(params.shape[0] // procs)
split_params = [
array_ops.gather(params, stride + p) for p in range(procs)
]
sharded = embedding_ops.embedding_lookup(
split_params, ids, max_norm=1.0)
self.assertAllEqual(simple, sharded)
@test_util.run_deprecated_v1
def testTransform(self):
# This tests all combinations of:
# - ids rank 0, 1, >1
# - params sharded/unsharded
# It always applies max_norm.
np.random.seed(8)
l2_norm = 2.
with self.cached_session():
# Param values are in [l2_norm, l2_norm+1) so it will always clip.
params = np.random.rand(6, 3) + l2_norm
params_norm = l2_norm * params / np.sqrt(
np.sum(params * params, axis=1, keepdims=True))
# Compute the norm of each embedding. This will change the embedding
# rank to 0.
params_norm = np.linalg.norm(params_norm, axis=1)
transform = lambda x: linalg_ops.norm(x, axis=1)
for ids_shape in (), (3), (4, 3), (2, 3, 4):
# Test ids rank 0, 1, 2, 3.
ids = np.random.randint(
params.shape[0], size=np.prod(ids_shape,
dtype=np.int64)).reshape(ids_shape)
# Compare nonsharded to gather.
simple = embedding_ops._embedding_lookup_and_transform(
params, ids, max_norm=l2_norm, transform_fn=transform)
self.assertAllClose(simple, array_ops.gather(params_norm, ids))
# Run a few different sharded versions.
for procs in 1, 2, 3:
stride = procs * math_ops.range(params.shape[0] // procs)
split_params = [
array_ops.gather(params, stride + p) for p in range(procs)
]
sharded = embedding_ops._embedding_lookup_and_transform(
split_params, ids, max_norm=l2_norm, transform_fn=transform)
# assertAllClose is used here as different implementations of sqrt may
# be used to compute each of the values being compared. For example,
# on AVX512 builds the embedding operation makes use of Eigen's fast
# vectorized square root algorithm for doubles. These different
# implementations of sqrt are not guaranteed to produce exactly the
# same results. Therefore, an exact comparison cannot be made.
self.assertAllClose(simple, sharded)
def testRaggedMaxNorm(self):
embeddings = constant_op.constant([[2.0]])
ids = ragged_factory_ops.constant([[0, 0], [0]], dtype=dtypes.int32)
embedding = embedding_ops.embedding_lookup([embeddings], ids, max_norm=1.0)
self.assertAllEqual(embedding, [[[1.0], [1.0]], [[1.0]]])
class EmbeddingLookupSparseTest(test.TestCase):
def _RandomIdsAndWeights(self, batch_size, vocab_size):
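    # Builds a ragged batch of ids/weights as SparseTensors, plus the flat numpy
    # copies used to compute the expected results.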
max_val_per_entry = 6
vals_per_batch_entry = np.random.randint(
1, max_val_per_entry, size=batch_size)
num_vals = np.sum(vals_per_batch_entry)
ids = np.random.randint(vocab_size, size=num_vals)
weights = 1 + np.random.rand(num_vals)
indices = []
for batch_entry, num_val in enumerate(vals_per_batch_entry):
for val_index in range(num_val):
indices.append([batch_entry, val_index])
shape = [batch_size, max_val_per_entry]
sp_ids = sparse_tensor.SparseTensor(
constant_op.constant(indices, dtypes.int64),
constant_op.constant(ids, dtypes.int32),
constant_op.constant(shape, dtypes.int64))
sp_weights = sparse_tensor.SparseTensor(
constant_op.constant(indices, dtypes.int64),
constant_op.constant(weights, dtypes.float32),
constant_op.constant(shape, dtypes.int64))
return sp_ids, sp_weights, ids, weights, vals_per_batch_entry
def _GroupByBatchEntry(self, vals, vals_per_batch_entry):
grouped_vals = []
index = 0
for num_val in vals_per_batch_entry:
grouped_vals.append(list(vals[index:(index + num_val)]))
index += num_val
return grouped_vals
@test_util.run_deprecated_v1
def testEmbeddingLookupSparse(self):
vocab_size = 13
batch_size = 10
param_shape = [2, 5]
expected_lookup_result_shape = [None] + param_shape
sp_ids, sp_weights, ids, weights, vals_per_batch_entry = (
self._RandomIdsAndWeights(batch_size, vocab_size))
grouped_ids = self._GroupByBatchEntry(ids, vals_per_batch_entry)
grouped_weights = self._GroupByBatchEntry(weights, vals_per_batch_entry)
grouped_ignored_weights = self._GroupByBatchEntry(
np.ones(np.sum(vals_per_batch_entry)), vals_per_batch_entry)
for num_shards, combiner, dtype, ignore_weights in itertools.product(
[1, 5], ["sum", "mean", "sqrtn"],
[dtypes.float16, dtypes.bfloat16, dtypes.float32, dtypes.float64],
[True, False]):
with self.cached_session():
p, params, feed_dict = _EmbeddingParams(
num_shards, vocab_size, shape=param_shape, dtype=dtype)
embedding_sum = embedding_ops.embedding_lookup_sparse(
p,
sp_ids,
None if ignore_weights else sp_weights,
combiner=combiner)
self.assertEqual(embedding_sum.get_shape().as_list(),
expected_lookup_result_shape)
self.assertEqual(embedding_sum.dtype, dtype)
tf_embedding_sum = embedding_sum.eval(feed_dict=feed_dict)
np_embedding_sum, np_weight_sum, np_weight_sq_sum = _EmbeddingResult(
params,
grouped_ids,
num_shards,
vocab_size,
weight_vals=grouped_ignored_weights
if ignore_weights else grouped_weights)
if combiner == "mean":
np_embedding_sum /= np.reshape(np_weight_sum, (batch_size, 1, 1))
if combiner == "sqrtn":
np_embedding_sum /= np.reshape(
np.sqrt(np_weight_sq_sum), (batch_size, 1, 1))
rtol = 1e-6
if dtype == dtypes.bfloat16:
rtol = 1e-2
elif dtype == dtypes.float16:
rtol = 1e-3
atol = rtol
self.assertAllClose(np_embedding_sum, tf_embedding_sum, rtol, atol)
def testMissingInSparseIds(self):
    # GitHub issue 36359
with self.test_session():
x = array_ops.ones((4, 5))
sp_ids = sparse_tensor.SparseTensor(
constant_op.constant([[1, 0], [3, 0]], dtypes.int64),
constant_op.constant([0, 2], dtypes.int32),
constant_op.constant([4, 1], dtypes.int64))
sp_weights = sparse_tensor.SparseTensor(
constant_op.constant([[1, 0], [3, 0]], dtypes.int64),
constant_op.constant([1, 1], dtypes.float32),
constant_op.constant([4, 1], dtypes.int64))
for combiner in ["sum", "mean", "sqrtn"]:
embedding_sum = embedding_ops.embedding_lookup_sparse(
x, sp_ids, sp_weights, combiner=combiner)
tf_embedding_sum = ops.convert_to_tensor(embedding_sum)
self.assertAllClose(tf_embedding_sum[0], np.zeros(5))
self.assertAllClose(tf_embedding_sum[1], np.ones(5))
self.assertAllClose(tf_embedding_sum[2], np.zeros(5))
self.assertAllClose(tf_embedding_sum[3], np.ones(5))
@test_util.run_deprecated_v1
def testGradientsEmbeddingLookupSparse(self):
vocab_size = 12
batch_size = 4
param_shape = [2, 3]
sp_ids, sp_weights, _, _, _ = (self._RandomIdsAndWeights(
batch_size, vocab_size))
for num_shards, combiner, dtype, ignore_weights in itertools.product(
[1, 3], ["sum", "mean", "sqrtn"], [dtypes.float32,
dtypes.float64], [True, False]):
with self.cached_session():
x, params, _ = _EmbeddingParams(
num_shards, vocab_size, shape=param_shape, dtype=dtype)
y = embedding_ops.embedding_lookup_sparse(
x,
sp_ids,
None if ignore_weights else sp_weights,
combiner=combiner)
x_name = [_PName(i) for i in range(num_shards)]
x_init_value = [params[x_n + ":0"] for x_n in x_name]
x_shape = [i.shape for i in x_init_value]
y_shape = [batch_size] + list(params[_PName(0) + ":0"].shape[1:])
err = gradient_checker.compute_gradient_error(
x, x_shape, y, y_shape, x_init_value=x_init_value)
self.assertLess(err, 1e-5 if dtype == dtypes.float64 else 2e-3)
@test_util.run_deprecated_v1
def testIncompatibleShapes(self):
with self.cached_session():
x, _, _ = _EmbeddingParams(1, 10, dtype=dtypes.float32)
sp_ids = sparse_tensor.SparseTensor(
constant_op.constant([[0, 0], [0, 1], [1, 0]], dtypes.int64),
constant_op.constant([0, 1, 2], dtypes.int32),
constant_op.constant([2, 2], dtypes.int64))
sp_weights = sparse_tensor.SparseTensor(
constant_op.constant([[0, 0], [0, 1]], dtypes.int64),
constant_op.constant([12.0, 5.0], dtypes.float32),
constant_op.constant([1, 2], dtypes.int64))
with self.assertRaises(ValueError):
embedding_ops.embedding_lookup_sparse(
x, sp_ids, sp_weights, combiner="mean")
class SafeEmbeddingLookupSparseTest(test.TestCase):
def _random_weights(self, vocab_size=4, embed_dim=4, num_shards=1):
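    # Creates a sharded embedding variable, initializes it and returns the shard
    # values as numpy arrays.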
assert vocab_size > 0
assert embed_dim > 0
assert num_shards > 0
assert num_shards <= vocab_size
initializer = init_ops.truncated_normal_initializer(
mean=0.0, stddev=1.0 / math.sqrt(vocab_size), dtype=dtypes.float32)
embedding_weights = list(variable_scope.get_variable(
name="embedding_weights",
shape=[vocab_size, embed_dim],
partitioner=partitioned_variables.fixed_size_partitioner(num_shards),
initializer=initializer))
for w in embedding_weights:
self.evaluate(w.initializer)
embedding_weights = [self.evaluate(w) for w in embedding_weights]
return embedding_weights
def _ids_and_weights_2d(self):
# Each row demonstrates a test case:
# Row 0: multiple valid ids, 1 invalid id, weighted mean
# Row 1: all ids are invalid (leaving no valid ids after pruning)
# Row 2: no ids to begin with
# Row 3: single id
# Row 4: all ids have <=0 weight
indices = [[0, 0], [0, 1], [0, 2], [1, 0], [3, 0], [4, 0], [4, 1]]
ids = [0, 1, -1, -1, 2, 0, 1]
weights = [1.0, 2.0, 1.0, 1.0, 3.0, 0.0, -0.5]
shape = [5, 4]
sparse_ids = sparse_tensor.SparseTensor(
constant_op.constant(indices, dtypes.int64),
constant_op.constant(ids, dtypes.int64),
constant_op.constant(shape, dtypes.int64))
sparse_weights = sparse_tensor.SparseTensor(
constant_op.constant(indices, dtypes.int64),
constant_op.constant(weights, dtypes.float32),
constant_op.constant(shape, dtypes.int64))
return sparse_ids, sparse_weights
def _ids_and_weights_3d(self):
# Each (2-D) index demonstrates a test case:
# Index 0, 0: multiple valid ids, 1 invalid id, weighted mean
# Index 0, 1: all ids are invalid (leaving no valid ids after pruning)
# Index 0, 2: no ids to begin with
# Index 1, 0: single id
# Index 1, 1: all ids have <=0 weight
# Index 1, 2: no ids to begin with
indices = [[0, 0, 0], [0, 0, 1], [0, 0, 2], [0, 1, 0], [1, 0, 0], [1, 1, 0],
[1, 1, 1]]
ids = [0, 1, -1, -1, 2, 0, 1]
weights = [1.0, 2.0, 1.0, 1.0, 3.0, 0.0, -0.5]
shape = [2, 3, 4]
sparse_ids = sparse_tensor.SparseTensor(
constant_op.constant(indices, dtypes.int64),
constant_op.constant(ids, dtypes.int64),
constant_op.constant(shape, dtypes.int64))
sparse_weights = sparse_tensor.SparseTensor(
constant_op.constant(indices, dtypes.int64),
constant_op.constant(weights, dtypes.float32),
constant_op.constant(shape, dtypes.int64))
return sparse_ids, sparse_weights
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_return_zero_vector(self):
with self.cached_session():
embedding_weights = self._random_weights()
sparse_ids, sparse_weights = self._ids_and_weights_2d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(embedding_weights,
sparse_ids,
sparse_weights))
self.assertAllClose(
embedding_lookup_result,
[(1.0 * embedding_weights[0][0] + 2.0 * embedding_weights[0][1]) /
3.0, [0] * 4, [0] * 4, embedding_weights[0][2], [0] * 4])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_return_special_vector(self):
with self.cached_session():
embedding_weights = self._random_weights()
sparse_ids, sparse_weights = self._ids_and_weights_2d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(
embedding_weights, sparse_ids, sparse_weights, default_id=3))
self.assertAllClose(
embedding_lookup_result,
[(1.0 * embedding_weights[0][0] + 2.0 * embedding_weights[0][1]) /
3.0, embedding_weights[0][3], embedding_weights[0][3],
embedding_weights[0][2], embedding_weights[0][3]])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_no_weights(self):
with self.cached_session():
embedding_weights = self._random_weights()
sparse_ids, _ = self._ids_and_weights_2d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(embedding_weights,
sparse_ids, None))
self.assertAllClose(
embedding_lookup_result,
[(embedding_weights[0][0] + embedding_weights[0][1]) / 2.0, [0] * 4,
[0] * 4, embedding_weights[0][2], (
embedding_weights[0][0] + embedding_weights[0][1]) / 2.0])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_partitioned(self):
with self.cached_session():
embedding_weights = self._random_weights(num_shards=3)
sparse_ids, _ = self._ids_and_weights_2d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(embedding_weights,
sparse_ids, None))
embedding_weights = list(itertools.chain(*embedding_weights))
self.assertAllClose(embedding_lookup_result,
[(embedding_weights[0] + embedding_weights[1]) / 2.0,
[0] * 4, [0] * 4, embedding_weights[2],
(embedding_weights[0] + embedding_weights[1]) / 2.0])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_partitioned_inconsistent_weights(self):
with self.cached_session():
embedding_weights = self._random_weights(num_shards=3)
sparse_ids, sparse_weights = self._ids_and_weights_2d()
embedding_weights[1] = embedding_weights[1].astype(np.float64)
self.assertRaises(TypeError, embedding_ops.safe_embedding_lookup_sparse,
embedding_weights, sparse_ids)
embedding_weights = [
constant_op.constant(w, dtype=dtypes.float64)
for w in embedding_weights
]
self.assertRaises(ValueError, embedding_ops.safe_embedding_lookup_sparse,
embedding_weights, sparse_ids, sparse_weights)
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_3d_return_zero_vector(self):
with self.cached_session():
embedding_weights = self._random_weights()
sparse_ids, sparse_weights = self._ids_and_weights_3d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(embedding_weights,
sparse_ids,
sparse_weights))
self.assertAllClose(embedding_lookup_result, [[
(1.0 * embedding_weights[0][0] + 2.0 * embedding_weights[0][1]) / 3.0,
[0] * 4, [0] * 4
], [embedding_weights[0][2], [0] * 4, [0] * 4]])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_3d_return_special_vector(self):
with self.cached_session():
embedding_weights = self._random_weights()
sparse_ids, sparse_weights = self._ids_and_weights_3d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(
embedding_weights, sparse_ids, sparse_weights, default_id=3))
self.assertAllClose(
embedding_lookup_result,
[[(1.0 * embedding_weights[0][0] + 2.0 * embedding_weights[0][1]) /
3.0, embedding_weights[0][3], embedding_weights[0][3]], [
embedding_weights[0][2], embedding_weights[0][3],
embedding_weights[0][3]
]])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_3d_no_weights(self):
with self.cached_session():
embedding_weights = self._random_weights()
sparse_ids, _ = self._ids_and_weights_3d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(embedding_weights,
sparse_ids, None))
self.assertAllClose(embedding_lookup_result, [[(
embedding_weights[0][0] + embedding_weights[0][1]) / 2.0, [0] * 4, [
0
] * 4], [
embedding_weights[0][2],
(embedding_weights[0][0] + embedding_weights[0][1]) / 2.0, [0] * 4
]])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_3d_partitioned(self):
with self.cached_session():
embedding_weights = self._random_weights(num_shards=3)
sparse_ids, _ = self._ids_and_weights_3d()
embedding_lookup_result = (
embedding_ops.safe_embedding_lookup_sparse_v2(embedding_weights,
sparse_ids, None))
embedding_weights = list(itertools.chain(*embedding_weights))
self.assertAllClose(embedding_lookup_result, [[
(embedding_weights[0] + embedding_weights[1]) / 2.0, [0] * 4, [0] * 4
], [
embedding_weights[2],
(embedding_weights[0] + embedding_weights[1]) / 2.0, [0] * 4
]])
@test_util.run_deprecated_v1
def test_safe_embedding_lookup_sparse_3d_partitioned_inconsistent_weights(
self):
with self.cached_session():
embedding_weights = self._random_weights(num_shards=3)
sparse_ids, sparse_weights = self._ids_and_weights_3d()
embedding_weights[1] = embedding_weights[1].astype(np.float64)
self.assertRaises(TypeError, embedding_ops.safe_embedding_lookup_sparse,
embedding_weights, sparse_ids)
embedding_weights = [
constant_op.constant(w, dtype=dtypes.float64)
for w in embedding_weights
]
self.assertRaises(ValueError, embedding_ops.safe_embedding_lookup_sparse,
embedding_weights, sparse_ids, sparse_weights)
class DynamicStitchOpTest(test.TestCase):
@test_util.run_deprecated_v1
def testCint32Cpu(self):
with self.session(use_gpu=False):
indices = [
ops.convert_to_tensor([0, 1, 2]),
ops.convert_to_tensor([2, 3])
]
values = [
ops.convert_to_tensor([12, 23, 34]),
ops.convert_to_tensor([1, 2])
]
self.assertAllEqual(
data_flow_ops.dynamic_stitch(indices, values), [12, 23, 1, 2])
@test_util.run_deprecated_v1
def testCint32Gpu(self):
with self.session():
indices = [
ops.convert_to_tensor([0, 1, 2]),
ops.convert_to_tensor([2, 3])
]
values = [
ops.convert_to_tensor([12, 23, 34]),
ops.convert_to_tensor([1, 2])
]
self.assertAllEqual(
data_flow_ops.dynamic_stitch(indices, values), [12, 23, 1, 2])
@test_util.run_deprecated_v1
def testInt32Cpu(self):
with self.session(use_gpu=False):
indices = [
ops.convert_to_tensor([0, 1, 2]),
ops.convert_to_tensor([2, 3])
]
values = [
ops.convert_to_tensor([12, 23, 34]),
ops.convert_to_tensor([1, 2])
]
self.assertAllEqual(
data_flow_ops.dynamic_stitch(indices, values), [12, 23, 1, 2])
@test_util.run_deprecated_v1
def testInt32Gpu(self):
with self.session():
indices = [
ops.convert_to_tensor([0, 1, 2]),
ops.convert_to_tensor([2, 3])
]
values = [
ops.convert_to_tensor([12, 23, 34]),
ops.convert_to_tensor([1, 2])
]
self.assertAllEqual(
data_flow_ops.dynamic_stitch(indices, values), [12, 23, 1, 2])
@test_util.run_deprecated_v1
def testSumGradArgs(self):
with self.session(use_gpu=False):
indices = [
ops.convert_to_tensor([0, 1, 2, 3]),
ops.convert_to_tensor([2, 3])
]
values = [
ops.convert_to_tensor([2, 3, 5, 7]),
ops.convert_to_tensor([1, 1])
]
self.assertAllEqual(
data_flow_ops.dynamic_stitch(indices, values), [2, 3, 1, 1])
# We expect that the values are merged in order.
@test_util.run_deprecated_v1
def testStitchOrder(self):
with self.cached_session():
indices = []
np_values = []
values = []
for _ in range(10):
indices.extend([ops.convert_to_tensor(np.arange(100).astype(np.int32))])
np_values.extend([np.random.uniform(size=100)])
values.extend([ops.convert_to_tensor(np_values[-1])])
stitched = data_flow_ops.dynamic_stitch(indices, values)
self.assertAllEqual(np_values[-1], stitched)
class ParallelDynamicStitchOpTest(test.TestCase):
@test_util.run_deprecated_v1
def testCint32Cpu(self):
with self.session(use_gpu=False):
indices = [
ops.convert_to_tensor([0, 1, 4, 6]),
ops.convert_to_tensor([2, 3, 5])
]
values = [
ops.convert_to_tensor([12, 23, 34, 45]),
ops.convert_to_tensor([1, 2, 3])
]
self.assertAllEqual(
data_flow_ops.parallel_dynamic_stitch(indices, values),
[12, 23, 1, 2, 34, 3, 45])
@test_util.run_deprecated_v1
def testInt32Cpu(self):
with self.session(use_gpu=False):
indices = [
ops.convert_to_tensor([0, 1, 5, 6, 7]),
ops.convert_to_tensor([2, 4, 3])
]
values = [
ops.convert_to_tensor([12, 23, 34, 45, 56]),
ops.convert_to_tensor([1, 3, 2])
]
self.assertAllEqual(
data_flow_ops.parallel_dynamic_stitch(indices, values),
[12, 23, 1, 2, 3, 34, 45, 56])
@test_util.run_deprecated_v1
def testSimple(self):
with self.session(use_gpu=False):
indices = [ops.convert_to_tensor([0, 1]), ops.convert_to_tensor([2, 3])]
values = [ops.convert_to_tensor([2, 3]), ops.convert_to_tensor([1, 1])]
self.assertAllEqual(
data_flow_ops.parallel_dynamic_stitch(indices, values), [2, 3, 1, 1])
if __name__ == "__main__":
test.main()<|fim▁end|>
|
ids_per_partition, extras = divmod(vocab_size, num_shards)
threshold = extras * (ids_per_partition + 1)
if i < threshold:
partition = i // (ids_per_partition + 1)
|
<|file_name|>012_divisable_tri_nums.py<|end_file_name|><|fim▁begin|>## Close
### What is the value of the first triangle number to have over five hundred divisors?<|fim▁hole|><|fim▁end|>
|
print max([len(m) for m in map(lambda k: [n for n in range(1,(k+1)) if k%n == 0], [sum(range(n)) for n in range(1,1000)])])
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*-coding:Utf-8 -*
# Copyright (c) 2014 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le module secondaire diligence."""
from abstraits.module import *
from primaires.format.fonctions import format_nb
from secondaires.diligence import commandes
from secondaires.diligence.diligence import DiligenceMaudite
from secondaires.diligence import editeurs
class Module(BaseModule):
"""Module proposant des zones aléatoires.
Ce module est appelé "diligence", car la diligence maudite est
le premier type de zone semi-aléatoire développée sur ce MUD.
L'idée est de définir une zone modèle et de créer des salles
répliques pour des diligences à déplacement semi-aléatoire. les
salles définies dans le modèle proposent les titres, descriptions,
détails et scripts pour les salles dupliquées.
"""
def __init__(self, importeur):
"""Constructeur du module"""
BaseModule.__init__(self, importeur, "diligence", "secondaire")
self.commandes = []
self.diligences = {}
self.logger = self.importeur.man_logs.creer_logger(
"diligence", "diligence")
def config(self):
"""Configuration du module."""
self.importeur.scripting.a_charger.append(self)
BaseModule.config(self)
def init(self):
"""Chargement des objets du module."""
diligences = self.importeur.supenr.charger_groupe(DiligenceMaudite)
for diligence in diligences:
self.ajouter_diligence(diligence)
self.logger.info(format_nb(len(diligences),
"{nb} diligence{s} maudite{s} récupérée{s}", fem=True))
BaseModule.init(self)
def ajouter_commandes(self):
"""Ajout des commandes dans l'interpréteur"""
self.commandes = [
commandes.diligence.CmdDiligence(),
]
for cmd in self.commandes:
self.importeur.interpreteur.ajouter_commande(cmd)
        # Adding the editors<|fim▁hole|> self.importeur.interpreteur.ajouter_editeur(
editeurs.diledit.EdtDiledit)
@property
def zones(self):
"""Retourne toutes les zones des diligences (actives)."""
cles = [cle + "_" for cle in self.diligences.keys()]
zones = []
for zone in importeur.salle.zones.values():
if any(zone.cle.startswith(c) for c in cles):
zones.append(zone)
return zones
def creer_diligence(self, cle):
"""Crée une diligence."""
if cle in self.diligences:
raise ValueError("la diligence {} existe déjà".format(
repr(cle)))
diligence = DiligenceMaudite(cle)
self.ajouter_diligence(diligence)
return diligence
def ajouter_diligence(self, diligence):
"""Ajoute le diligence."""
if diligence.cle in self.diligences:
raise ValueError("la diligence de clé {} est " \
"déjà définie".format(repr(diligence.cle)))
self.diligences[diligence.cle] = diligence
def supprimer_diligence(self, cle):
"""Supprime une diligence."""
if cle not in self.diligences:
raise ValueError("la diligence {} n'existe pas".format(
repr(cle)))
self.diligences.pop(cle).detruire()<|fim▁end|>
| |
<|file_name|>addeditmoviedlg_ans.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2008-9 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 2 of the License, or
# version 3 of the License, or (at your option) any later version. It is
# provided for educational purposes and is distributed in the hope that
# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
# the GNU General Public License for more details.
from PyQt4.QtCore import (QDate, QString, Qt, SIGNAL, pyqtSignature)
from PyQt4.QtGui import (QApplication, QDialog, QDialogButtonBox)
import moviedata_ans as moviedata
import ui_addeditmoviedlg_ans as ui_addeditmoviedlg
class AddEditMovieDlg(QDialog,
ui_addeditmoviedlg.Ui_AddEditMovieDlg):
def __init__(self, movies, movie=None, parent=None):
super(AddEditMovieDlg, self).__init__(parent)
self.setupUi(self)
self.movies = movies
self.movie = movie
self.acquiredDateEdit.setDisplayFormat(moviedata.DATEFORMAT)
if movie is not None:
self.titleLineEdit.setText(movie.title)
self.yearSpinBox.setValue(movie.year)
self.minutesSpinBox.setValue(movie.minutes)
self.acquiredDateEdit.setDate(movie.acquired)
self.acquiredDateEdit.setEnabled(False)
self.locationLineEdit.setText(movie.location)
self.notesTextEdit.setPlainText(movie.notes)
self.notesTextEdit.setFocus()
self.buttonBox.button(QDialogButtonBox.Ok).setText(
"&Accept")
self.setWindowTitle("My Movies - Edit Movie")
else:
today = QDate.currentDate()
self.acquiredDateEdit.setDateRange(today.addDays(-5),
today)
self.acquiredDateEdit.setDate(today)
self.titleLineEdit.setFocus()
self.on_titleLineEdit_textEdited(QString())
@pyqtSignature("QString")
def on_titleLineEdit_textEdited(self, text):
self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(
not self.titleLineEdit.text().isEmpty())
def accept(self):<|fim▁hole|> minutes = self.minutesSpinBox.value()
location = self.locationLineEdit.text()
notes = self.notesTextEdit.toPlainText()
if self.movie is None:
acquired = self.acquiredDateEdit.date()
self.movie = moviedata.Movie(title, year, minutes,
acquired, location, notes)
self.movies.add(self.movie)
else:
self.movies.updateMovie(self.movie, title, year,
minutes, location, notes)
QDialog.accept(self)
if __name__ == "__main__":
import sys
app = QApplication(sys.argv)
form = AddEditMovieDlg(0)
form.show()
app.exec_()<|fim▁end|>
|
title = self.titleLineEdit.text()
year = self.yearSpinBox.value()
|
<|file_name|>base.ts<|end_file_name|><|fim▁begin|>import Web3 from 'web3';
import { Contract } from 'web3-eth-contract';
import { AbiItem } from 'web3-utils';
import PriceOracleABI from '../../contracts/abi/v0.9.0/price-oracle';
import ExchangeABI from '../../contracts/abi/v0.9.0/exchange';
import { getOracle, getAddress } from '../../contracts/addresses';
import BN from 'bn.js';
import { safeFloatConvert } from '../utils/general-utils';
const tokenDecimals = new BN('18');
const ten = new BN('10');
export default class LayerTwoOracle {
constructor(private layer2Web3: Web3) {}
async convertToSpend(token: string, amount: string): Promise<number> {
let exchange = new this.layer2Web3.eth.Contract(
ExchangeABI as AbiItem[],
await getAddress('exchange', this.layer2Web3)
);
let spendAmount = await exchange.methods.convertToSpend(token, amount).call();
// 1 SPEND == $0.01 USD, and SPEND decimals is 0, so this is safe to
// represent as a javascript number
return Number(spendAmount);
}
async convertFromSpend(token: string, amount: number): Promise<string> {
let exchange = new this.layer2Web3.eth.Contract(
ExchangeABI as AbiItem[],
await getAddress('exchange', this.layer2Web3)
);
return await exchange.methods.convertFromSpend(token, amount.toString()).call();
}
async getRateLock(tokenAddress: string): Promise<string> {
let exchange = new this.layer2Web3.eth.Contract(
ExchangeABI as AbiItem[],
await getAddress('exchange', this.layer2Web3)
);
let { price } = await exchange.methods.exchangeRateOf(tokenAddress).call();
return price;
}
async getUSDConverter(token: string): Promise<(amountInWei: string) => number> {
const oracle = await this.getOracleContract(token);
const usdRawRate = new BN((await oracle.methods.usdPrice().call()).price);
const oracleDecimals = Number(await oracle.methods.decimals().call());
return (amountInWei) => {
let rawAmount = usdRawRate.mul(new BN(amountInWei)).div(ten.pow(tokenDecimals));
return safeFloatConvert(rawAmount, oracleDecimals);
};
}
async getUSDPrice(token: string, amount: string): Promise<number> {
let oracle = await this.getOracleContract(token);
let usdRawRate = new BN((await oracle.methods.usdPrice().call()).price);
let oracleDecimals = Number(await oracle.methods.decimals().call());
let rawAmount = usdRawRate.mul(new BN(amount)).div(ten.pow(tokenDecimals));
return safeFloatConvert(rawAmount, oracleDecimals);
}<|fim▁hole|> async getETHPrice(token: string, amount: string): Promise<string> {
let oracle = await this.getOracleContract(token);
let ethRawRate = new BN((await oracle.methods.ethPrice().call()).price);
let oracleDecimals = new BN(await oracle.methods.decimals().call());
let weiAmount = ethRawRate.mul(new BN(amount)).div(ten.pow(oracleDecimals));
return weiAmount.toString();
}
async getUpdatedAt(token: string): Promise<Date> {
let oracle = await this.getOracleContract(token);
let unixTime = Number((await oracle.methods.usdPrice().call()).updatedAt);
return new Date(unixTime * 1000);
}
private async getOracleContract(token: string): Promise<Contract> {
let address = await getOracle(token, this.layer2Web3);
return new this.layer2Web3.eth.Contract(PriceOracleABI as AbiItem[], address);
}
}<|fim▁end|>
| |
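For the oracle conversions above, the BN arithmetic is plain integer scaling: multiply the raw rate by the amount in the token's smallest unit, divide by 10**tokenDecimals, and only then interpret the remaining 10**oracleDecimals scaling as a float. A hedged worked example with made-up numbers (18 token decimals, 8 oracle decimals, $1.25 per token, 2.5 tokens):

token_decimals = 18
oracle_decimals = 8
usd_raw_rate = 125_000_000                     # $1.25 expressed at 8 oracle decimals
amount_in_wei = 2_500_000_000_000_000_000      # 2.5 tokens in the smallest unit
raw = usd_raw_rate * amount_in_wei // 10 ** token_decimals   # still scaled by 10**8
usd_value = raw / 10 ** oracle_decimals                      # 3.125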
<|file_name|>typings.d.ts<|end_file_name|><|fim▁begin|>interface Date {
now(): number;
}
declare module 'http' {
import { IncomingMessage } from 'http';
export interface Response<TBody> extends IncomingMessage {
body?: TBody;
}
}
declare module 'redis' {
export function createClient(port: number, host?: string, options?: ClientOptions): RedisClient;
export function createClient(unix_socket: string, options?: ClientOptions): RedisClient;
export function createClient(redis_url: string, options?: ClientOptions): RedisClient;
export function createClient(options?: ClientOptions): RedisClient;
<|fim▁hole|> host?: string;
port?: number;
path?: string;
url?: string;
parser?: string;
string_numbers?: boolean;
return_buffers?: boolean;
detect_buffers?: boolean;
socket_keepalive?: boolean;
no_ready_check?: boolean;
enable_offline_queue?: boolean;
retry_max_delay?: number;
connect_timeout?: number;
max_attempts?: number;
retry_unfulfilled_commands?: boolean;
auth_pass?: string;
password?: string;
db?: string;
family?: string;
rename_commands?: { [command: string]: string };
tls?: any;
prefix?: string;
retry_strategy?: Function;
}
export class RedisClient {
expire(key: string, seconds: number): void;
getAsync(key: string): Promise<string>;
setAsync(key: string, value: any): Promise<'OK'>;
delAsync(key: string): Promise<number>;
}
}
declare module 'node-rsa' {
namespace NodeRsa {
}
class NodeRsa {
constructor(options: any);
exportKey(keyType?: string): string;
}
export = NodeRsa;
}<|fim▁end|>
|
export interface ClientOptions {
|
<|file_name|>0053_auto_20161112_2146.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-12 10:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('db', '0052_auto_20161112_2141'),
]
operations = [
migrations.AlterField(
model_name='yearlevel',
name='name',<|fim▁hole|><|fim▁end|>
|
field=models.CharField(blank=True, choices=[(1, '1'), (2, '2'), (3, 'A'), (4, '3B'), (5, '4C'), (6, '5D')], max_length=30, null=True),
),
]
|
<|file_name|>az60.js<|end_file_name|><|fim▁begin|>const gal=[//731-62
'1@001/b4avsq1y2i',
'1@002/50uvpeo7tb',
'1@002/0ypu4wgjxm',
'1@002/b61d80e9pf',
'1@002/f1kb57t4ul',
'1@002/swq38v49nz',
'1@002/zeak367fw1',
'1@003/nx1ld4j9pe',
'1@003/yh0ub5rank',
<|fim▁hole|>'1@005/0asu1qo75n',
'1@005/4c7bn1q5mx',
'1@005/le3vrzbwfs',
'1@006/ek0tq9wvny',
'1@006/ax21m8tjos',
'1@006/w2e3104dp6',
'1@007/bsukxlv9j7',
'1@007/w5cpl0uvy6',
'1@007/l04q3wrucj',
'2@007/s2hr6jv7nc',
'2@009/31yrxp8waj',
'2@009/8josfrbgwi',
'2@009/rt4jie1fg8',
'2@009/p0va85n4gf',
'2@010/i9thzxn50q',
'3@010/a6w84ltj02',
'3@010/mfyevin3so',
'3@010/wdy5qzflnv',
'3@011/06wim8bjdp',
'3@011/avtd46k9cx',
'3@011/80915y2exz',
'3@011/vkt4dalwhb',
'3@011/znudscat9h',
'3@012/18xg4h9s3i',
'3@012/120sub86vt',
'3@012/5gxj7u0om8',
'3@012/95gzec2rsm',
'3@012/dihoerqp40',
'3@012/nif7secp8l',
'3@012/q8awn1iplo',
'3@012/ubzfcjte63',
'3@013/b9atnq1les',
'3@013/zof4tprh73',
'3@013/lvdyctgefs',
'4@013/c7fgqbsxkn',
'4@013/ci9vg76yj5',
'4@013/y3v8tjreas',
'4@014/75krn9ifbp',
'4@014/9tf7n5iexy',
'4@014/boz3y1wauf',
'4@014/ihqxma938n',
'4@014/suxj4yv5w6',
'4@014/o19tbsqjxe',
'4@014/wy8tx24g0c',
'4@015/302u7cs5nx',
'4@015/4bo9c8053u',
'4@015/8clxnh6eao',
'4@015/hbi1d9grwz',
'4@015/w8vmtnod7z',
'5@016/0yeinjua6h',
'5@016/rgpcwv4nym',
'5@016/scxjm7ofar'];<|fim▁end|>
|
'1@004/j29uftobmh',
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.utils import unittest<|fim▁hole|><|fim▁end|>
|
from spacescout_web.test.not_found import NotFound404Test
from spacescout_web.test.url_filtering import URLFiltering
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>const AppError = require('../../../lib/errors/app')
const assert = require('assert')
function doSomethingBad () {
throw new AppError('app error message')
}
it('Error details', function () {
try {
doSomethingBad()
} catch (err) {
assert.strictEqual(
err.name,
'AppError',
"Name property set to error's name"
)
assert(
err instanceof AppError,
'Is an instance of its class'
)
assert(
err instanceof Error,
'Is instance of built-in Error'
)
assert(
require('util').isError(err),
'Should be recognized by Node.js util#isError'
)
assert(
err.stack,
'Should have recorded a stack'
)
assert.strictEqual(
err.toString(),
'AppError: app error message',
'toString should return the default error message formatting'
)
assert.strictEqual(<|fim▁hole|> err.stack.split('\n')[0],
'AppError: app error message',
'Stack should start with the default error message formatting'
)
assert.strictEqual(
err.stack.split('\n')[1].indexOf('doSomethingBad'),
7,
'The first stack frame should be the function where the error was thrown'
)
}
})<|fim▁end|>
| |
<|file_name|>methoddef2.py<|end_file_name|><|fim▁begin|>class Foo(object):
def mm(self, barparam):
'''<|fim▁hole|> '''
f = Foo()
f.mm(barparam=10)<|fim▁end|>
|
@param barparam: this is barparam
|
<|file_name|>cryptauth.py<|end_file_name|><|fim▁begin|># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import httplib
import json
import logging
import pprint
import time
logger = logging.getLogger('proximity_auth.%s' % __name__)
_GOOGLE_APIS_URL = 'www.googleapis.com'
_REQUEST_PATH = '/cryptauth/v1/%s?alt=JSON'
class CryptAuthClient(object):
""" A client for making blocking CryptAuth API calls. """
def __init__(self, access_token, google_apis_url=_GOOGLE_APIS_URL):
self._access_token = access_token
self._google_apis_url = google_apis_url
def GetMyDevices(self):
""" Invokes the GetMyDevices API.
Returns:
A list of devices or None if the API call fails.
Each device is a dictionary of the deserialized JSON returned by
CryptAuth.
"""
request_data = {
'approvedForUnlockRequired': False,
'allowStaleRead': False,
'invocationReason': 13 # REASON_MANUAL
}
response = self._SendRequest('deviceSync/getmydevices', request_data)
return response['devices'] if response is not None else None
def GetUnlockKey(self):
"""
Returns:
The unlock key registered with CryptAuth if it exists or None.
The device is a dictionary of the deserialized JSON returned by CryptAuth.
"""
devices = self.GetMyDevices()
if devices is None:
return None
for device in devices:
if device['unlockKey']:
return device
return None
def ToggleEasyUnlock(self, enable, public_key=''):
""" Calls the ToggleEasyUnlock API.
Args:
enable: True to designate the device specified by |public_key| as an<|fim▁hole|> unlock key.
public_key: The public key of the device to toggle. Ignored if |enable| is
False, which toggles all unlock keys off.
Returns:
True upon success, else False.
"""
request_data = { 'enable': enable, }
if not enable:
request_data['applyToAll'] = True
else:
request_data['publicKey'] = public_key
response = self._SendRequest('deviceSync/toggleeasyunlock', request_data)
return response is not None
def FindEligibleUnlockDevices(self, time_delta_millis=None):
""" Finds devices eligible to be an unlock key.
Args:
time_delta_millis: If specified, then only return eligible devices that
have contacted CryptAuth in the last time delta.
Returns:
A tuple containing two lists, one of eligible devices and the other of
ineligible devices.
Each device is a dictionary of the deserialized JSON returned by
CryptAuth.
"""
request_data = {}
if time_delta_millis is not None:
request_data['maxLastUpdateTimeDeltaMillis'] = time_delta_millis * 1000;
response = self._SendRequest(
'deviceSync/findeligibleunlockdevices', request_data)
if response is None:
return None
eligibleDevices = (
response['eligibleDevices'] if 'eligibleDevices' in response else [])
ineligibleDevices = (
response['ineligibleDevices'] if (
'ineligibleDevices' in response) else [])
return eligibleDevices, ineligibleDevices
def PingPhones(self, timeout_secs=10):
""" Asks CryptAuth to ping registered phones and determine their status.
Args:
timeout_secs: The number of seconds to wait for phones to respond.
Returns:
A tuple containing two lists, one of eligible devices and the other of
ineligible devices.
Each device is a dictionary of the deserialized JSON returned by
CryptAuth.
"""
response = self._SendRequest(
'deviceSync/senddevicesynctickle',
{ 'tickleType': 'updateEnrollment' })
if response is None:
return None
# We wait for phones to update their status with CryptAuth.
logger.info('Waiting for %s seconds for phone status...' % timeout_secs)
time.sleep(timeout_secs)
return self.FindEligibleUnlockDevices(time_delta_millis=timeout_secs)
def _SendRequest(self, function_path, request_data):
""" Sends an HTTP request to CryptAuth and returns the deserialized
response.
"""
conn = httplib.HTTPSConnection(self._google_apis_url)
path = _REQUEST_PATH % function_path
headers = {
'authorization': 'Bearer ' + self._access_token,
'Content-Type': 'application/json'
}
body = json.dumps(request_data)
logger.info('Making request to %s with body:\n%s' % (
path, pprint.pformat(request_data)))
conn.request('POST', path, body, headers)
response = conn.getresponse()
if response.status == 204:
return {}
if response.status != 200:
logger.warning('Request to %s failed: %s' % (path, response.status))
return None
return json.loads(response.read())<|fim▁end|>
| |
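A hedged usage sketch for the CryptAuthClient defined above; the access token is a placeholder, and the only device field the class itself relies on is 'unlockKey', so the 'publicKey' field used below is an assumption about the CryptAuth response:

client = CryptAuthClient('ACCESS_TOKEN_PLACEHOLDER')
devices = client.GetMyDevices()
if devices is not None:
    print('%d devices registered' % len(devices))
unlock_key = client.GetUnlockKey()
if unlock_key is not None and 'publicKey' in unlock_key:
    client.ToggleEasyUnlock(True, unlock_key['publicKey'])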
<|file_name|>nonterminal-expansion.rs<|end_file_name|><|fim▁begin|>// Macros were previously expanded in `Expr` nonterminal tokens, now they are not.
<|fim▁hole|> #[repr(align($n))]
//~^ ERROR expected unsuffixed literal or identifier, found `n!()`
//~| ERROR incorrect `repr(align)` attribute format
struct S;
};
}
macro_rules! n {
() => { 32 };
}
pass_nonterminal!(n!());
fn main() {}<|fim▁end|>
|
macro_rules! pass_nonterminal {
($n:expr) => {
|
<|file_name|>Matrix.cpp<|end_file_name|><|fim▁begin|>/*
Copyright © 2007, 2008, 2009, 2010, 2011 Vladimír Vondruš <[email protected]>
This file is part of Kompas.
Kompas is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License version 3
only, as published by the Free Software Foundation.
Kompas is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License version 3 for more details.
*/
#include "Matrix.h"
#include <algorithm> /* sort() */
using namespace std;
namespace Kompas { namespace Sdl {
/* Disable an item */
template<class Item> void Matrix<Item>::disableItem(itemId item) {
items[item].flags |= DISABLED;
reloadItems();
}
/* Enable an item */
template<class Item> void Matrix<Item>::enableItem(itemId item) {
items[item].flags &= ~DISABLED;
reloadItems();
}
/* Reload the items */
template<class Item> void Matrix<Item>::reloadItems() {
sortedHorizontal.clear(); sortedHorizontal.reserve(items.size());
sortedVertical.clear(); sortedVertical.reserve(items.size());
    /* Fill the sorted vectors with active items only */
for(typename vector<Item>::const_iterator it = items.begin(); it != items.end(); ++it) {
if(!((*it).flags & DISABLED)) {
sortedHorizontal.push_back(it);
sortedVertical.push_back(it);
}
}
sort(sortedHorizontal.begin(), sortedHorizontal.end(), horizontalCompare);
sort(sortedVertical.begin(), sortedVertical.end(), verticalCompare);
if(sortedVertical.size() != 0) actualItem = sortedVertical.front();
else actualItem = items.end();
}
/* Move up */
template<class Item> bool Matrix<Item>::moveUp(void) {
if(sortedHorizontal.size() == 0) return false;
    /* Find the current item */
typename vector<typename vector<Item>::const_iterator>::const_iterator it = sortedHorizontal.begin();
for( ; it != sortedHorizontal.end(); ++it) {
if(*it == actualItem) break;
}
    /* Look for a different row (cycle endlessly until we come back to the starting item) */
do {
if(it-- == sortedHorizontal.begin()) it = sortedHorizontal.end()-1;
} while(*it != actualItem && (**it).y == (*actualItem).y);
    /* No other row exists, we are back at the current item */
    if(*it == actualItem) return false;
    /* Look for a better candidate in that row (y) - closer to x than the previous one */
int y = (**it).y; int x = (*actualItem).x; actualItem = *it;
while(it-- != sortedHorizontal.begin() && (**it).y == y) {
        /* If this item is closer to the current one than the previous candidate, take it */
        if( ((**it).x <= x && (*actualItem).x < (**it).x) ||   /* left of the current item */
            ((**it).x >= x && (*actualItem).x > (**it).x)      /* right of the current item */
        ) actualItem = *it;
        /* The item is farther away than the previous candidate, stop */
else break;
}
return true;
}
/* Move down */
template<class Item> bool Matrix<Item>::moveDown(void) {
if(sortedHorizontal.size() == 0) return false;
    /* Find the current item */
typename vector<typename vector<Item>::const_iterator>::const_iterator it = sortedHorizontal.begin();
for( ; it != sortedHorizontal.end(); ++it) {
if(*it == actualItem) break;
}
    /* Look for a different row (cycle endlessly until we come back to the starting item) */
do {
if(++it == sortedHorizontal.end()) it = sortedHorizontal.begin();
} while(*it != actualItem && (**it).y == (*actualItem).y);
    /* No other row exists, we are back at the current item */
    if(*it == actualItem) return false;
    /* Look for a better candidate in that row (y) - closer to x than the previous one */
int y = (**it).y; int x = (*actualItem).x; actualItem = *it;
while(++it != sortedHorizontal.end() && (**it).y == y) {
        /* If this item is closer to the current one than the previous candidate, take it */
        if( ((**it).x <= x && (*actualItem).x < (**it).x) ||   /* left of the current item */
            ((**it).x >= x && (*actualItem).x > (**it).x)      /* right of the current item */
        ) actualItem = *it;
        /* The item is farther away than the previous candidate, stop */
else break;
}
return true;
}
/* Move left */
template<class Item> bool Matrix<Item>::moveLeft(void) {
if(sortedVertical.size() == 0) return false;
    /* Find the current item */
typename vector<typename vector<Item>::const_iterator>::const_iterator it = sortedVertical.begin();
for( ; it != sortedVertical.end(); ++it) {
if(*it == actualItem) break;
}
    /* Look for a different column (cycle endlessly until we come back to the starting item) */
do {
        if(it-- == sortedVertical.begin()) it = sortedVertical.end()-1;<|fim▁hole|> /* No other column exists, we are back at the current item */
    if(*it == actualItem) return false;
    /* Look for a better candidate in that column (x) - closer to y than the previous one */
int x = (**it).x; int y = (*actualItem).y; actualItem = *it;
while(it-- != sortedVertical.begin() && (**it).x == x) {
        /* If this item is closer to the current one than the previous candidate, take it */
        if( ((**it).y <= y && (*actualItem).y < (**it).y) ||   /* above the current item */
            ((**it).y >= y && (*actualItem).y > (**it).y)      /* below the current item */
        ) actualItem = *it;
        /* The item is farther away than the previous candidate, stop */
else break;
}
return true;
}
/* Move right */
template<class Item> bool Matrix<Item>::moveRight(void) {
if(sortedVertical.size() == 0) return false;
    /* Find the current item */
typename vector<typename vector<Item>::const_iterator>::const_iterator it = sortedVertical.begin();
for( ; it != sortedVertical.end(); ++it) {
if(*it == actualItem) break;
}
    /* Look for a different column (cycle endlessly until we come back to the starting item) */
do {
if(++it == sortedVertical.end()) it = sortedVertical.begin();
} while(*it != actualItem && (**it).x == (*actualItem).x);
    /* No other column exists, we are back at the current item */
    if(*it == actualItem) return false;
    /* Look for a better candidate in that column (x) - closer to y than the previous one */
int x = (**it).x; int y = (*actualItem).y; actualItem = *it;
while(++it != sortedVertical.end() && (**it).x == x) {
        /* If this item is closer to the current one than the previous candidate, take it */
        if( ((**it).y <= y && (*actualItem).y < (**it).y) ||   /* above the current item */
            ((**it).y >= y && (*actualItem).y > (**it).y)      /* below the current item */
        ) actualItem = *it;
        /* The item is farther away than the previous candidate (no better one will be found), stop */
else break;
}
return true;
}
}}<|fim▁end|>
|
} while(*it != actualItem && (**it).x == (*actualItem).x);
|
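The comments in Matrix.cpp describe wrap-around focus movement on a sparse grid: step to the neighbouring row (or column), then keep the item whose other coordinate is closest to the current one. A simplified restatement in Python over (x, y) tuples, illustrative only and not a port of the iterator-based C++:

def move_up(items, current):
    rows = sorted({y for _, y in items})
    if len(rows) < 2:
        return current                                   # no other row to move to
    target_row = rows[rows.index(current[1]) - 1]        # previous row, wrapping to the bottom
    candidates = [p for p in items if p[1] == target_row]
    return min(candidates, key=lambda p: abs(p[0] - current[0]))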
<|file_name|>lbfgs.py<|end_file_name|><|fim▁begin|>from cantilever_divingboard import *
# We need to scale the parameters before applying the optimization algorithm
# Normally there are about 20 orders of magnitude between the dimensions and
# the doping concentration, so this is a critical step
# Run the script
freq_min = 1e3
freq_max = 1e5<|fim▁hole|> 30e-6, 1e-6, 1e-6, 500e-9, 5., 1e15)
constraints = ((30e-6, 100e-6), (500e-9, 20e-6), (1e-6, 10e-6),
(2e-6, 100e-6), (500e-9, 5e-6), (500e-9, 20e-6), (30e-9, 10e-6),
(1., 10.), (1e15, 4e19))
x = optimize_cantilever(initial_guess, constraints, freq_min, freq_max, omega_min)
c = cantilever_divingboard(freq_min, freq_max, x)
c.print_performance()<|fim▁end|>
|
omega_min = 100e3
initial_guess = (50e-6, 1e-6, 1e-6,
|
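The comments in lbfgs.py note that the design variables span roughly 20 orders of magnitude (micrometre geometry versus a ~1e15 doping concentration), which is why the parameters are scaled before optimisation. A hedged sketch of one common approach, min-max scaling against the constraint bounds, with hypothetical helper names; the real cantilever_divingboard code may scale differently:

import numpy as np

def to_unit(x, bounds):
    lo, hi = np.array(bounds).T
    return (np.asarray(x) - lo) / (hi - lo)      # every parameter mapped into [0, 1]

def from_unit(u, bounds):
    lo, hi = np.array(bounds).T
    return lo + np.asarray(u) * (hi - lo)

# an optimiser then works on the unit-scaled vector, and the objective
# unscales it with from_unit before evaluating the cantilever model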
<|file_name|>E0067.rs<|end_file_name|><|fim▁begin|>use std::collections::LinkedList;<|fim▁hole|>fn main() {
LinkedList::new() += 1; //~ ERROR E0368
//~^ ERROR E0067
}<|fim▁end|>
| |
<|file_name|>sgopt.py<|end_file_name|><|fim▁begin|>"""Gradient descent
"""
import numpy as np
from frankenstein.tools.perf_utils import TIMER
from pyscf.lib import logger
""" Helper functions
"""
def get_gHp_fd(get_grad, p, order=1, eps=1.E-4):
""" Compute gradient-Hessian product using finite difference
Inps:
get_grad (callable):
grad(p) --> gradient given a direction p
p (np.ndarray):
initial gradient
order (int, default=1):
            order 1 --> forward FD (err ~ O(eps))
order 2 --> central FD (err ~ O(eps^2))
eps (float, default=1.E-4):
strength of perturbation
"""
p_f = get_grad(eps*p)
if order == 1:
return 2. * (p_f-p) / eps
elif order == 2:
p_b = get_grad(-eps*p)
return (p_f-p_b) / eps
else:
raise ValueError("Invalid order (must be 1 or 2)!")
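# Illustrative check, included for exposition only (not part of the original module):
# for a quadratic f(x) = 0.5 x^T A x the gradient is g(x) = A x, so the textbook
# estimates are Hp ~ (g(x0+eps*p) - g(x0)) / eps (forward) and
# Hp ~ (g(x0+eps*p) - g(x0-eps*p)) / (2*eps) (central). The factors used in
# get_gHp_fd above differ from these textbook forms by a factor of two; that
# scaling is assumed to match the convention of this module's get_grad callers.
def _demo_gHp_fd(eps=1.E-4):
    A = np.array([[2.0, 0.3], [0.3, 1.0]])
    grad = lambda d: A @ d                   # gradient of the quadratic at displacement d
    p = np.array([1.0, -1.0])
    hp_central = (grad(eps * p) - grad(-eps * p)) / (2.0 * eps)
    return hp_central, A @ p                 # the two agree up to rounding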
# Newton-raphson (for debug)
class NR:
def __init__(self, mf, eps=1.E-3, fd=2):
self.verbose = mf.verbose
self.stdout = mf.stdout
self.comment = ""
self.eps = eps
self.fd = fd
try:
stdout = mf.stdout
except:
stdout = None
self.timer = TIMER(4, stdout=stdout)
self.iteration = 0
def next_step(self, mf):
f = mf.get_value_gdm()
g = mf.get_grad_gdm()
# build fd hessian
def dphi(i, eps):
mf.back_to_origin()
mf.ov = np.zeros([mf.ov_size])
mf.ov[i] = eps
mf.update_all()
mf.ov[i] = 0.
return mf.get_grad_gdm()
self.timer.start(0)
mf.save_new_origin()
H = np.zeros([mf.ov_size]*2)
for i in range(mf.ov_size):
if self.fd == 1:
H[i] = (dphi(i,self.eps) - g) / self.eps
elif self.fd == 2:
H[i] = (dphi(i,self.eps) - dphi(i,-self.eps)) / (2.*self.eps)
else:
raise ValueError("fd must be 1 or 2.")
mf.back_to_origin()
self.timer.stop(0)
# get raw NR step
self.timer.start(1)
lbd = 1.E-5
du = -np.linalg.solve(H+lbd*np.eye(H.shape[1]), g)
self.timer.stop(1)
# line search
fc = [0]
def phi(alp):
fc[0] += 1
mf.back_to_origin()
mf.ov = alp * mf.regularize_step_gdm(du)
mf.update_all(skip_grad=True)
return mf.get_value_gdm()
self.timer.start(2)
mf.save_new_origin()
fold = f
dphi0 = g @ du
alp, fnew = scopt_linsrc.scalar_search_armijo(
phi, fold, dphi0, c1=1.E-4, alpha0=1.)
self.timer.stop(2)
fc = fc[0]
if alp is None:
raise RuntimeError("Line search failed.")
if fc == 1:
self.comment = "NR"
else:
self.comment = "LnSr (%d,%.2f)"%(fc,alp)
self.timer.start(3)
mf.update_gdm()
self.timer.stop(3)
self.iteration += 1
def report_timing(self):
self.timer.report(tnames=["hess", "linsolve", "linsrch", "grad"])
# Direct minimization (for debug)
class DM:
def __init__(self, mf, bounds=[-1,0], method="bf", plot=False):
if method == "bf":
self.alps = np.arange(*bounds, 0.05)
elif method == "interpolate":
self.amin = min(bounds)
self.amax = max(bounds)
self.ninter = 5
self.neval = 100
else:
raise ValueError("Unknown method '%s'." % method)
self.method = method
self.plot = plot
self.verbose = mf.verbose
self.stdout = mf.stdout
self.comment = ""
try:
stdout = mf.stdout
except:
stdout = None
self.timer = TIMER(2, stdout=stdout)
self.iteration = 0
def next_step(self, mf):
from scipy import interpolate as itplt
from matplotlib import pyplot as plt
<|fim▁hole|> def phi(alp):
mf.back_to_origin()
mf.ov = alp * g
mf.update_all(skip_grad=True)
mf.ov = np.zeros(mf.ov_size)
return mf.get_value_gdm()
mf.save_new_origin()
E0 = mf.get_value_gdm()
self.timer.start(0)
if self.method == "bf":
alps = self.alps
Es = np.asarray([phi(alp) for alp in alps]) - E0
elif self.method == "interpolate":
amin = self.amin
amax = self.amax
err_g = np.mean(g**2)**0.5
if err_g > 1.E-3:
xs = np.linspace(amin, amax, self.ninter)
ys = np.asarray([phi(x) for x in xs])
xyrep = itplt.splrep(xs, ys)
fp = lambda x: itplt.splev(x, xyrep)
else:
xs = np.linspace(amin, amax, 3)
ys = np.asarray([phi(x) for x in xs])
p = np.polyfit(xs, ys, 2)
fp = np.poly1d(p)
alps = np.linspace(amin, amax, self.neval)
Es = fp(alps)
idmin = np.argmin(Es)
alp = alps[idmin]
E = Es[idmin]
self.timer.stop(0)
if self.plot:
plt.plot(alps, Es, "-")
if self.method == "interpolate": plt.plot(xs, ys, "o")
plt.plot(alp, E, "rx")
plt.show()
self.comment = "alp = % .2f" % alp
self.timer.start(1)
mf.back_to_origin()
mf.ov = alp * g
mf.update_all()
self.timer.stop(1)
self.iteration += 1
def report_timing(self):
self.timer.report(["lnsrch", "update me"])
# Direct inversion of iterative subspace (DIIS)
from pyscf.lib.diis import DIIS as pyDIIS
class DIIS:
def __init__(self, mf, ndiis=50, diis_start=1):
self.adiis = pyDIIS()
self.adiis.space = ndiis
self.adiis.min_space = diis_start
self.iteration = 0
self.comment = ""
try:
stdout = mf.stdout
except:
stdout = None
self.timer = TIMER(4, stdout=stdout)
def next_step(self, mf):
self.iteration += 1
self.timer.start(0)
f = mf.get_fock_diis()
ferr = mf.get_err_diis()
self.timer.stop(0)
self.timer.start(1)
f = self.adiis.update(f, ferr)
self.timer.stop(1)
self.timer.start(2)
if hasattr(mf, "mom_start"):
mom = self.iteration >= mf.mom_start
else:
mom = False
comment = mf.update_diis(f, mom=mom)
self.timer.stop(2)
self.timer.start(3)
mf.update_all()
self.timer.stop(3)
self.comment = "DIIS" if self.iteration > 0 else "Roothaan"
self.comment += " %s" % comment
def report_timing(self):
self.timer.report(tnames=["diis prep", "diis extrap", "roothaan",
"fock build"])<|fim▁end|>
|
g = mf.get_grad_gdm()
|
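The DIIS wrapper in sgopt.py above delegates the extrapolation to pyscf's DIIS object. For exposition, a minimal self-contained sketch of what that extrapolation (Pulay mixing) computes; the names are illustrative and this is not the pyscf API:

import numpy as np

def diis_extrapolate(focks, errors):
    # Choose coefficients c (summing to 1) that minimise |sum_i c_i e_i|,
    # then return the corresponding combination of the stored Fock matrices.
    n = len(focks)
    B = np.empty((n + 1, n + 1))
    B[-1, :] = B[:, -1] = -1.0
    B[-1, -1] = 0.0
    for i in range(n):
        for j in range(n):
            B[i, j] = np.dot(errors[i].ravel(), errors[j].ravel())
    rhs = np.zeros(n + 1)
    rhs[-1] = -1.0
    coeffs = np.linalg.solve(B, rhs)[:n]
    return sum(c * f for c, f in zip(coeffs, focks))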
<|file_name|>lhcb.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>#
# This file is part of CERN Analysis Preservation Framework.
# Copyright (C) 2016 CERN.
#
# CERN Analysis Preservation Framework is free software; you can redistribute
# it and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CERN Analysis Preservation Framework is distributed in the hope that it will
# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERN Analysis Preservation Framework; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""CAP LHCb permissions"""
from invenio_access import DynamicPermission
from cap.modules.experiments.permissions.common import get_collaboration_group_needs, get_superuser_needs
lhcb_group_need = set(
[g for g in get_collaboration_group_needs('LHCb')])
lhcb_group_need |= set([g for g in
get_superuser_needs()])
lhcb_permission = DynamicPermission(*lhcb_group_need)<|fim▁end|>
| |
<|file_name|>dev-server.js<|end_file_name|><|fim▁begin|>const project = require('../config/project.config')<|fim▁hole|>const server = require('../server/index')
const debug = require('debug')('app:bin:dev-server')
server.listen(project.server_port)
debug(`Server is now running at http://localhost:${project.server_port}.`)<|fim▁end|>
| |
<|file_name|>UniformJavaUtilRandomWrapper.java<|end_file_name|><|fim▁begin|>/**
* Copyright by Michael Weiss, [email protected]
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.spectrumauctions.sats.core.util.random;
import java.math.BigDecimal;
import java.util.Date;
import java.util.Random;
public class UniformJavaUtilRandomWrapper implements UniformDistributionRNG {
/**
*
*/
private static final long serialVersionUID = 4285241684660761136L;
private Random rng;
public UniformJavaUtilRandomWrapper() {
this(new Date().getTime());
}
public UniformJavaUtilRandomWrapper(long seed) {
this.rng = new Random(seed);
}
@Override
public int nextInt() {
return rng.nextInt();
}
@Override
public int nextInt(int lowerLimit, int upperLimit) {
if (upperLimit == Integer.MAX_VALUE)
upperLimit--;
return rng.nextInt((upperLimit - lowerLimit) + 1) + lowerLimit;
}
@Override
public int nextInt(IntegerInterval interval) {
return nextInt(interval.getMinValue(), interval.getMaxValue());
}
@Override
public double nextDouble() {
return rng.nextDouble();
}
@Override
public double nextDouble(double lowerLimit, double upperLimit) {
return rng.nextDouble() * (upperLimit - lowerLimit) + lowerLimit;
}
@Override
public long nextLong() {
return rng.nextLong();
}
@Override
public double nextDouble(DoubleInterval interval) {
return nextDouble(interval.getMinValue(), interval.getMaxValue());
}<|fim▁hole|> *
* @see UniformDistributionRNG#nextBigDecimal()
*/
@Override
public BigDecimal nextBigDecimal() {
return new BigDecimal(nextDouble());
}
/*
* (non-Javadoc)
*
* @see UniformDistributionRNG#nextBigDecimal(double, double)
*/
@Override
public BigDecimal nextBigDecimal(double lowerLimit, double upperLimit) {
return new BigDecimal(nextDouble(lowerLimit, upperLimit));
}
/*
* (non-Javadoc)
*
* @see
* UniformDistributionRNG#nextBigDecimal(org.spectrumauctions.sats.core.util
* .random.DoubleInterval)
*/
@Override
public BigDecimal nextBigDecimal(DoubleInterval interval) {
return new BigDecimal(nextDouble(interval));
}
/*
* (non-Javadoc)
*
* @see UniformDistributionRNG#nextInt(int)
*/
@Override
public int nextInt(int upperLimit) {
return rng.nextInt(upperLimit);
}
}<|fim▁end|>
|
/*
* (non-Javadoc)
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from thumbnails.conf import settings
from thumbnails.engines import DummyEngine
from thumbnails.helpers import get_engine, generate_filename, get_cache_backend
from thumbnails.images import SourceFile, Thumbnail
__version__ = '0.5.1'
def get_thumbnail(original, size, **options):
"""
Creates or gets an already created thumbnail for the given image with the given size and
options.
    :param original: File-path, url or base64-encoded string of the image that you want a
thumbnail.
:param size: String with the wanted thumbnail size. On the form: ``200x200``, ``200`` or<|fim▁hole|> :param quality: Overrides ``THUMBNAIL_QUALITY``, will set the quality used by the backend while
saving the thumbnail.
:param scale_up: Overrides ``THUMBNAIL_SCALE_UP``, if set to ``True`` the image will be scaled
up if necessary.
:param colormode: Overrides ``THUMBNAIL_COLORMODE``, The default colormode for thumbnails.
Supports all values supported by pillow. In other engines there is a best
effort translation from pillow modes to the modes supported by the current
engine.
:param format: Overrides the format the thumbnail will be saved in. This will override both the
detected file type as well as the one specified in ``THUMBNAIL_FALLBACK_FORMAT``.
:return: A Thumbnail object
"""
engine = get_engine()
cache = get_cache_backend()
original = SourceFile(original)
crop = options.get('crop', None)
options = engine.evaluate_options(options)
thumbnail_name = generate_filename(original, size, crop)
if settings.THUMBNAIL_DUMMY:
engine = DummyEngine()
return engine.get_thumbnail(thumbnail_name, engine.parse_size(size), crop, options)
cached = cache.get(thumbnail_name)
force = options is not None and 'force' in options and options['force']
if not force and cached:
return cached
thumbnail = Thumbnail(thumbnail_name, engine.get_format(original, options))
if force or not thumbnail.exists:
size = engine.parse_size(size)
thumbnail.image = engine.get_thumbnail(original, size, crop, options)
thumbnail.save(options)
for resolution in settings.THUMBNAIL_ALTERNATIVE_RESOLUTIONS:
resolution_size = engine.calculate_alternative_resolution_size(resolution, size)
image = engine.get_thumbnail(original, resolution_size, crop, options)
thumbnail.save_alternative_resolution(resolution, image, options)
cache.set(thumbnail)
return thumbnail<|fim▁end|>
|
``x200``.
:param crop: Crop settings, should be ``center``, ``top``, ``right``, ``bottom``, ``left``.
:param force: If set to ``True`` the thumbnail will be created even if it exists before.
|
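A hedged usage sketch for get_thumbnail as documented above; the import path follows this package's __init__.py, while the file name and option values are placeholders:

from thumbnails import get_thumbnail

# size accepts '200x200', '200' or 'x200'; crop, quality and force mirror the docstring
thumb = get_thumbnail('photos/portrait.jpg', '200x200', crop='center', quality=85)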
<|file_name|>perf-tool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""Tool to gauge message passing throughput and latencies"""
import logging
import optparse
import time
import uuid
<|fim▁hole|>
from utils import connect_socket
from utils import get_host_port
from utils import process_connection
LOG = logging.getLogger()
LOG.addHandler(logging.StreamHandler())
class ConnectionEventHandler(pyngus.ConnectionEventHandler):
def __init__(self):
super(ConnectionEventHandler, self).__init__()
def connection_failed(self, connection, error):
"""Connection has failed in some way."""
LOG.warn("Connection failed callback: %s", error)
def connection_remote_closed(self, connection, pn_condition):
"""Peer has closed its end of the connection."""
LOG.debug("connection_remote_closed condition=%s", pn_condition)
connection.close()
class SenderHandler(pyngus.SenderEventHandler):
def __init__(self, count):
self._count = count
self._msg = Message()
self.calls = 0
self.total_ack_latency = 0.0
self.stop_time = None
self.start_time = None
def credit_granted(self, sender_link):
if self.start_time is None:
self.start_time = time.time()
self._send_message(sender_link)
def _send_message(self, link):
now = time.time()
self._msg.body = {'tx-timestamp': now}
self._last_send = now
link.send(self._msg, self)
def __call__(self, link, handle, status, error):
now = time.time()
self.total_ack_latency += now - self._last_send
self.calls += 1
if self._count:
self._count -= 1
if self._count == 0:
self.stop_time = now
link.close()
return
self._send_message(link)
def sender_remote_closed(self, sender_link, pn_condition):
LOG.debug("Sender peer_closed condition=%s", pn_condition)
sender_link.close()
def sender_failed(self, sender_link, error):
"""Protocol error occurred."""
LOG.debug("Sender failed error=%s", error)
sender_link.close()
class ReceiverHandler(pyngus.ReceiverEventHandler):
def __init__(self, count, capacity):
self._count = count
self._capacity = capacity
self._msg = Message()
self.receives = 0
self.tx_total_latency = 0.0
def receiver_active(self, receiver_link):
receiver_link.add_capacity(self._capacity)
def receiver_remote_closed(self, receiver_link, pn_condition):
"""Peer has closed its end of the link."""
LOG.debug("receiver_remote_closed condition=%s", pn_condition)
receiver_link.close()
def receiver_failed(self, receiver_link, error):
"""Protocol error occurred."""
LOG.warn("receiver_failed error=%s", error)
receiver_link.close()
def message_received(self, receiver, message, handle):
now = time.time()
receiver.message_accepted(handle)
self.tx_total_latency += now - message.body['tx-timestamp']
self.receives += 1
if self._count:
self._count -= 1
if self._count == 0:
receiver.close()
return
lc = receiver.capacity
cap = self._capacity
if lc < (cap / 2):
receiver.add_capacity(cap - lc)
def main(argv=None):
_usage = """Usage: %prog [options]"""
parser = optparse.OptionParser(usage=_usage)
parser.add_option("-a", dest="server", type="string",
default="amqp://0.0.0.0:5672",
help="The address of the server [amqp://0.0.0.0:5672]")
parser.add_option("--node", type='string', default='amq.topic',
help='Name of source/target node')
parser.add_option("--count", type='int', default=100,
help='Send N messages (send forever if N==0)')
parser.add_option("--debug", dest="debug", action="store_true",
help="enable debug logging")
parser.add_option("--trace", dest="trace", action="store_true",
help="enable protocol tracing")
opts, _ = parser.parse_args(args=argv)
if opts.debug:
LOG.setLevel(logging.DEBUG)
host, port = get_host_port(opts.server)
my_socket = connect_socket(host, port)
# create AMQP Container, Connection, and SenderLink
#
container = pyngus.Container(uuid.uuid4().hex)
conn_properties = {'hostname': host,
'x-server': False}
if opts.trace:
conn_properties["x-trace-protocol"] = True
c_handler = ConnectionEventHandler()
connection = container.create_connection("perf_tool",
c_handler,
conn_properties)
r_handler = ReceiverHandler(opts.count, opts.count or 1000)
receiver = connection.create_receiver(opts.node, opts.node, r_handler)
s_handler = SenderHandler(opts.count)
sender = connection.create_sender(opts.node, opts.node, s_handler)
connection.open()
receiver.open()
while not receiver.active:
process_connection(connection, my_socket)
sender.open()
    # Run until all messages transferred
while not sender.closed or not receiver.closed:
process_connection(connection, my_socket)
connection.close()
while not connection.closed:
process_connection(connection, my_socket)
duration = s_handler.stop_time - s_handler.start_time
thru = s_handler.calls / duration
permsg = duration / s_handler.calls
ack = s_handler.total_ack_latency / s_handler.calls
lat = r_handler.tx_total_latency / r_handler.receives
print("Stats:\n"
" TX Avg Calls/Sec: %f Per Call: %f Ack Latency %f\n"
" RX Latency: %f" % (thru, permsg, ack, lat))
sender.destroy()
receiver.destroy()
connection.destroy()
container.destroy()
my_socket.close()
return 0
if __name__ == "__main__":
main()<|fim▁end|>
|
import pyngus
from proton import Message
|
<|file_name|>search.js<|end_file_name|><|fim▁begin|>// Search script generated by doxygen
// Copyright (C) 2009 by Dimitri van Heesch.
// The code in this file is loosly based on main.js, part of Natural Docs,
// which is Copyright (C) 2003-2008 Greg Valure
// Natural Docs is licensed under the GPL.
var indexSectionsWithContent =
{
0: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111001110111111111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
1: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011010000001010110000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
2: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111100010001110111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
3: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000111111111001110111101010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
4: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000101111111001110111110111100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
5: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010010000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
6: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000011110000000010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
7: "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"
};
var indexSectionNames =
{
0: "all",
1: "classes",
2: "files",
3: "functions",
4: "variables",
5: "typedefs",
6: "defines",
7: "pages"
};
function convertToId(search)
{
var result = '';
for (i=0;i<search.length;i++)
{
var c = search.charAt(i);
var cn = c.charCodeAt(0);
if (c.match(/[a-z0-9]/))
{
result+=c;
}
else if (cn<16)
{
result+="_0"+cn.toString(16);
}
else
{
result+="_"+cn.toString(16);
}
}
return result;
}
function getXPos(item)
{
var x = 0;
if (item.offsetWidth)
{
while (item && item!=document.body)
{
x += item.offsetLeft;
item = item.offsetParent;
}
}
return x;
}
function getYPos(item)
{
var y = 0;
if (item.offsetWidth)
{
while (item && item!=document.body)
{
y += item.offsetTop;
item = item.offsetParent;
}
}
return y;
}
/* A class handling everything associated with the search panel.
Parameters:
name - The name of the global variable that will be
storing this instance. Is needed to be able to set timeouts.
       resultsPath - path to use for external files
*/
function SearchBox(name, resultsPath, inFrame, label)
{
if (!name || !resultsPath) { alert("Missing parameters to SearchBox."); }
// ---------- Instance variables
this.name = name;
this.resultsPath = resultsPath;
this.keyTimeout = 0;
this.keyTimeoutLength = 500;
this.closeSelectionTimeout = 300;
this.lastSearchValue = "";
this.lastResultsPage = "";
this.hideTimeout = 0;
this.searchIndex = 0;
this.searchActive = false;
this.insideFrame = inFrame;
this.searchLabel = label;
// ----------- DOM Elements
this.DOMSearchField = function()
{ return document.getElementById("MSearchField"); }
this.DOMSearchSelect = function()
{ return document.getElementById("MSearchSelect"); }
this.DOMSearchSelectWindow = function()
{ return document.getElementById("MSearchSelectWindow"); }
this.DOMPopupSearchResults = function()
{ return document.getElementById("MSearchResults"); }
this.DOMPopupSearchResultsWindow = function()
{ return document.getElementById("MSearchResultsWindow"); }
this.DOMSearchClose = function()
{ return document.getElementById("MSearchClose"); }
this.DOMSearchBox = function()
{ return document.getElementById("MSearchBox"); }
// ------------ Event Handlers
// Called when focus is added or removed from the search field.
this.OnSearchFieldFocus = function(isActive)
{
this.Activate(isActive);
}
this.OnSearchSelectShow = function()
{
var searchSelectWindow = this.DOMSearchSelectWindow();
var searchField = this.DOMSearchSelect();
if (this.insideFrame)
{
var left = getXPos(searchField);
var top = getYPos(searchField);
left += searchField.offsetWidth + 6;
top += searchField.offsetHeight;
// show search selection popup
searchSelectWindow.style.display='block';
left -= searchSelectWindow.offsetWidth;
searchSelectWindow.style.left = left + 'px';
searchSelectWindow.style.top = top + 'px';
}
else
{
var left = getXPos(searchField);
var top = getYPos(searchField);
top += searchField.offsetHeight;
// show search selection popup
searchSelectWindow.style.display='block';
searchSelectWindow.style.left = left + 'px';
searchSelectWindow.style.top = top + 'px';
}
// stop selection hide timer
if (this.hideTimeout)
{
clearTimeout(this.hideTimeout);
this.hideTimeout=0;
}
return false; // to avoid "image drag" default event
}
this.OnSearchSelectHide = function()
{
this.hideTimeout = setTimeout(this.name +".CloseSelectionWindow()",
this.closeSelectionTimeout);
}
// Called when the content of the search field is changed.
this.OnSearchFieldChange = function(evt)
{
if (this.keyTimeout) // kill running timer
{
clearTimeout(this.keyTimeout);
this.keyTimeout = 0;
}
var e = (evt) ? evt : window.event; // for IE
if (e.keyCode==40 || e.keyCode==13)
{
if (e.shiftKey==1)
{
this.OnSearchSelectShow();
var win=this.DOMSearchSelectWindow();
for (i=0;i<win.childNodes.length;i++)
{
var child = win.childNodes[i]; // get span within a
if (child.className=='SelectItem')
{
child.focus();
return;
}
}
return;
}
else if (window.frames.MSearchResults.searchResults)
{
var elem = window.frames.MSearchResults.searchResults.NavNext(0);
if (elem) elem.focus();
}
}
else if (e.keyCode==27) // Escape out of the search field
{
this.DOMSearchField().blur();
this.DOMPopupSearchResultsWindow().style.display = 'none';
this.DOMSearchClose().style.display = 'none';
this.lastSearchValue = '';
this.Activate(false);
return;
}
// strip whitespaces
var searchValue = this.DOMSearchField().value.replace(/ +/g, "");
if (searchValue != this.lastSearchValue) // search value has changed
{
if (searchValue != "") // non-empty search
{
// set timer for search update
this.keyTimeout = setTimeout(this.name + '.Search()',
this.keyTimeoutLength);
}
else // empty search field
{
this.DOMPopupSearchResultsWindow().style.display = 'none';
this.DOMSearchClose().style.display = 'none';
this.lastSearchValue = '';
}
}
}
this.SelectItemCount = function(id)
{
var count=0;
var win=this.DOMSearchSelectWindow();
for (i=0;i<win.childNodes.length;i++)
{
var child = win.childNodes[i]; // get span within a
if (child.className=='SelectItem')
{
count++;
}
}
return count;
}
this.SelectItemSet = function(id)
{
var i,j=0;
var win=this.DOMSearchSelectWindow();
for (i=0;i<win.childNodes.length;i++)
{
var child = win.childNodes[i]; // get span within a
if (child.className=='SelectItem')
{
var node = child.firstChild;
if (j==id)
{
node.innerHTML='•';
}
else
{
node.innerHTML=' ';
}
j++;
}
}
}
// Called when an search filter selection is made.
// set item with index id as the active item
this.OnSelectItem = function(id)
{
this.searchIndex = id;
this.SelectItemSet(id);
var searchValue = this.DOMSearchField().value.replace(/ +/g, "");
if (searchValue!="" && this.searchActive) // something was found -> do a search
{
this.Search();
}
}
this.OnSearchSelectKey = function(evt)
{
var e = (evt) ? evt : window.event; // for IE
if (e.keyCode==40 && this.searchIndex<this.SelectItemCount()) // Down
{
this.searchIndex++;
this.OnSelectItem(this.searchIndex);
}
else if (e.keyCode==38 && this.searchIndex>0) // Up
{
this.searchIndex--;
this.OnSelectItem(this.searchIndex);
}
else if (e.keyCode==13 || e.keyCode==27)
{
this.OnSelectItem(this.searchIndex);
this.CloseSelectionWindow();
this.DOMSearchField().focus();
}
return false;
}
// --------- Actions
// Closes the results window.
this.CloseResultsWindow = function()
{
this.DOMPopupSearchResultsWindow().style.display = 'none';
this.DOMSearchClose().style.display = 'none';
this.Activate(false);
}
this.CloseSelectionWindow = function()
{
this.DOMSearchSelectWindow().style.display = 'none';
}
// Performs a search.
this.Search = function()
{
this.keyTimeout = 0;
// strip leading whitespace
var searchValue = this.DOMSearchField().value.replace(/^ +/, "");
var code = searchValue.toLowerCase().charCodeAt(0);
var hexCode;
if (code<16)
{
hexCode="0"+code.toString(16);
}
else
{
hexCode=code.toString(16);
}
var resultsPage;
var resultsPageWithSearch;
var hasResultsPage;
if (indexSectionsWithContent[this.searchIndex].charAt(code) == '1')
{
resultsPage = this.resultsPath + '/' + indexSectionNames[this.searchIndex] + '_' + hexCode + '.html';
resultsPageWithSearch = resultsPage+'?'+escape(searchValue);
hasResultsPage = true;
}
else // nothing available for this search term
{
resultsPage = this.resultsPath + '/nomatches.html';
resultsPageWithSearch = resultsPage;
hasResultsPage = false;
}
window.frames.MSearchResults.location = resultsPageWithSearch;
var domPopupSearchResultsWindow = this.DOMPopupSearchResultsWindow();
if (domPopupSearchResultsWindow.style.display!='block')
{
var domSearchBox = this.DOMSearchBox();
this.DOMSearchClose().style.display = 'inline';
if (this.insideFrame)
{
var domPopupSearchResults = this.DOMPopupSearchResults();
domPopupSearchResultsWindow.style.position = 'relative';
domPopupSearchResultsWindow.style.display = 'block';
var width = document.body.clientWidth - 8; // the -8 is for IE :-(
domPopupSearchResultsWindow.style.width = width + 'px';
domPopupSearchResults.style.width = width + 'px';
}
else
{
var domPopupSearchResults = this.DOMPopupSearchResults();
var left = getXPos(domSearchBox) + 150; // domSearchBox.offsetWidth;
var top = getYPos(domSearchBox) + 20; // domSearchBox.offsetHeight + 1;
domPopupSearchResultsWindow.style.display = 'block';
left -= domPopupSearchResults.offsetWidth;
domPopupSearchResultsWindow.style.top = top + 'px';
domPopupSearchResultsWindow.style.left = left + 'px';
}
}
this.lastSearchValue = searchValue;
this.lastResultsPage = resultsPage;
}
// -------- Activation Functions
// Activates or deactivates the search panel, resetting things to
// their default values if necessary.
this.Activate = function(isActive)
{
if (isActive || // open it
this.DOMPopupSearchResultsWindow().style.display == 'block'
)
{
this.DOMSearchBox().className = 'MSearchBoxActive';
var searchField = this.DOMSearchField();
if (searchField.value == this.searchLabel) // clear "Search" term upon entry
{
searchField.value = '';
this.searchActive = true;
}
}
else if (!isActive) // directly remove the panel
{
this.DOMSearchBox().className = 'MSearchBoxInactive';
this.DOMSearchField().value = this.searchLabel;
this.searchActive = false;
this.lastSearchValue = ''
this.lastResultsPage = '';
}
}
}
// -----------------------------------------------------------------------
// The class that handles everything on the search results page.
function SearchResults(name)
{
// The number of matches from the last run of <Search()>.
this.lastMatchCount = 0;
this.lastKey = 0;
this.repeatOn = false;
// Toggles the visibility of the passed element ID.
this.FindChildElement = function(id)
{
var parentElement = document.getElementById(id);
var element = parentElement.firstChild;
while (element && element!=parentElement)
{
if (element.nodeName == 'DIV' && element.className == 'SRChildren')
{
return element;
}
if (element.nodeName == 'DIV' && element.hasChildNodes())
{
element = element.firstChild;
}
else if (element.nextSibling)
{
element = element.nextSibling;
}
else
{
do
{
element = element.parentNode;
}
while (element && element!=parentElement && !element.nextSibling);
if (element && element!=parentElement)
{
element = element.nextSibling;
}
}
}
}
this.Toggle = function(id)
{
var element = this.FindChildElement(id);
if (element)
{
if (element.style.display == 'block')
{
element.style.display = 'none';
}
else
{
element.style.display = 'block';
}
}
}
// Searches for the passed string. If there is no parameter,
// it takes it from the URL query.
//
// Always returns true, since other documents may try to call it
// and that may or may not be possible.
this.Search = function(search)
{
if (!search) // get search word from URL
{
search = window.location.search;
search = search.substring(1); // Remove the leading '?'
search = unescape(search);
}
search = search.replace(/^ +/, ""); // strip leading spaces
search = search.replace(/ +$/, ""); // strip trailing spaces
search = search.toLowerCase();
search = convertToId(search);
var resultRows = document.getElementsByTagName("div");
var matches = 0;
var i = 0;
while (i < resultRows.length)
{
var row = resultRows.item(i);
if (row.className == "SRResult")
{
var rowMatchName = row.id.toLowerCase();
rowMatchName = rowMatchName.replace(/^sr\d*_/, ''); // strip 'sr123_'
if (search.length<=rowMatchName.length &&
rowMatchName.substr(0, search.length)==search)
{
row.style.display = 'block';
matches++;
}
else
{
row.style.display = 'none';
}
}
i++;
}
document.getElementById("Searching").style.display='none';
if (matches == 0) // no results
{
document.getElementById("NoMatches").style.display='block';
}
else // at least one result
{
document.getElementById("NoMatches").style.display='none';
}
this.lastMatchCount = matches;
return true;
}
// return the first item with index index or higher that is visible
this.NavNext = function(index)
{
var focusItem;
while (1)
{
var focusName = 'Item'+index;
focusItem = document.getElementById(focusName);
if (focusItem && focusItem.parentNode.parentNode.style.display=='block')
{
break;
}
else if (!focusItem) // last element
{
break;
}
focusItem=null;
index++;
}
return focusItem;
}
this.NavPrev = function(index)
{
var focusItem;
while (1)
{
var focusName = 'Item'+index;
focusItem = document.getElementById(focusName);
if (focusItem && focusItem.parentNode.parentNode.style.display=='block')
{
break;
}
else if (!focusItem) // last element
{
break;
}
focusItem=null;
index--;
}
return focusItem;
}
this.ProcessKeys = function(e)
{
if (e.type == "keydown")
{
this.repeatOn = false;
this.lastKey = e.keyCode;
}
else if (e.type == "keypress")
{
if (!this.repeatOn)
{
if (this.lastKey) this.repeatOn = true;
return false; // ignore first keypress after keydown
}
}
else if (e.type == "keyup")
{
this.lastKey = 0;
this.repeatOn = false;
}
return this.lastKey!=0;
}
this.Nav = function(evt,itemIndex)
{
var e = (evt) ? evt : window.event; // for IE
if (e.keyCode==13) return true;
if (!this.ProcessKeys(e)) return false;
if (this.lastKey==38) // Up
{
var newIndex = itemIndex-1;
var focusItem = this.NavPrev(newIndex);
if (focusItem)
{
var child = this.FindChildElement(focusItem.parentNode.parentNode.id);
if (child && child.style.display == 'block') // children visible
{
var n=0;
var tmpElem;
while (1) // search for last child
{
tmpElem = document.getElementById('Item'+newIndex+'_c'+n);
if (tmpElem)
{
focusItem = tmpElem;
}
else // found it!
{
break;
}
n++;
}
}
}
if (focusItem)
{
focusItem.focus();
}
else // return focus to search field
{
parent.document.getElementById("MSearchField").focus();
}
}
else if (this.lastKey==40) // Down
{
var newIndex = itemIndex+1;
var focusItem;
var item = document.getElementById('Item'+itemIndex);
var elem = this.FindChildElement(item.parentNode.parentNode.id);
if (elem && elem.style.display == 'block') // children visible
{
focusItem = document.getElementById('Item'+itemIndex+'_c0');
}
if (!focusItem) focusItem = this.NavNext(newIndex);
if (focusItem) focusItem.focus();
}
else if (this.lastKey==39) // Right
{
var item = document.getElementById('Item'+itemIndex);
var elem = this.FindChildElement(item.parentNode.parentNode.id);
if (elem) elem.style.display = 'block';
}
else if (this.lastKey==37) // Left
{
var item = document.getElementById('Item'+itemIndex);
var elem = this.FindChildElement(item.parentNode.parentNode.id);
if (elem) elem.style.display = 'none';
}
else if (this.lastKey==27) // Escape
{
parent.searchBox.CloseResultsWindow();
parent.document.getElementById("MSearchField").focus();
}
else if (this.lastKey==13) // Enter
{
return true;
}
return false;
}
this.NavChild = function(evt,itemIndex,childIndex)
{
var e = (evt) ? evt : window.event; // for IE
if (e.keyCode==13) return true;
if (!this.ProcessKeys(e)) return false;
if (this.lastKey==38) // Up
{
if (childIndex>0)
{
var newIndex = childIndex-1;
document.getElementById('Item'+itemIndex+'_c'+newIndex).focus();
}
else // already at first child, jump to parent
{
document.getElementById('Item'+itemIndex).focus();
}
}
else if (this.lastKey==40) // Down
{
var newIndex = childIndex+1;
var elem = document.getElementById('Item'+itemIndex+'_c'+newIndex);
if (!elem) // last child, jump to next parent
{
elem = this.NavNext(itemIndex+1);
}
if (elem)
{
elem.focus();
}
}
else if (this.lastKey==27) // Escape
{
parent.searchBox.CloseResultsWindow();
parent.document.getElementById("MSearchField").focus();
}
else if (this.lastKey==13) // Enter
{
return true;
}
return false;
}
}
function setKeyActions(elem,action)
{
elem.setAttribute('onkeydown',action);
elem.setAttribute('onkeypress',action);
elem.setAttribute('onkeyup',action);
}
function setClassAttr(elem,attr)
{
elem.setAttribute('class',attr);
elem.setAttribute('className',attr);
}
function createResults()
{
var results = document.getElementById("SRResults");
for (var e=0; e<searchData.length; e++)
{
var id = searchData[e][0];
var srResult = document.createElement('div');
srResult.setAttribute('id','SR_'+id);
setClassAttr(srResult,'SRResult');
var srEntry = document.createElement('div');
setClassAttr(srEntry,'SREntry');
var srLink = document.createElement('a');
srLink.setAttribute('id','Item'+e);
setKeyActions(srLink,'return searchResults.Nav(event,'+e+')');
setClassAttr(srLink,'SRSymbol');
srLink.innerHTML = searchData[e][1][0];
srEntry.appendChild(srLink);
if (searchData[e][1].length==2) // single result
{
srLink.setAttribute('href',searchData[e][1][1][0]);
if (searchData[e][1][1][1])
{
srLink.setAttribute('target','_parent');
}
var srScope = document.createElement('span');
setClassAttr(srScope,'SRScope');<|fim▁hole|> else // multiple results
{
srLink.setAttribute('href','javascript:searchResults.Toggle("SR_'+id+'")');
var srChildren = document.createElement('div');
setClassAttr(srChildren,'SRChildren');
for (var c=0; c<searchData[e][1].length-1; c++)
{
var srChild = document.createElement('a');
srChild.setAttribute('id','Item'+e+'_c'+c);
setKeyActions(srChild,'return searchResults.NavChild(event,'+e+','+c+')');
setClassAttr(srChild,'SRScope');
srChild.setAttribute('href',searchData[e][1][c+1][0]);
if (searchData[e][1][c+1][1])
{
srChild.setAttribute('target','_parent');
}
srChild.innerHTML = searchData[e][1][c+1][2];
srChildren.appendChild(srChild);
}
srEntry.appendChild(srChildren);
}
srResult.appendChild(srEntry);
results.appendChild(srResult);
}
}<|fim▁end|>
|
srScope.innerHTML = searchData[e][1][1][2];
srEntry.appendChild(srScope);
}
|
<|file_name|>shell_config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance<|fim▁hole|>#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import textwrap
LOG = logging.getLogger(__name__)
def do_get_shell_config(self, config_name):
rows = []
headers = ["KEY", "VAlUE"]
if not config_name:
for i in self.global_shell_config.items():
rows.append([i[0], i[1]])
else:
if config_name in self.global_shell_config.keys():
rows.append([config_name, self.global_shell_config[config_name]])
self.generate_output(headers, rows)
def do_set_shell_config(self, config=None):
kv = config.split(" ")
if len(kv) != 2:
self.help_set_shell_config()
return
config_name = kv[0]
config_value = kv[1]
if config_name in self.global_shell_config.keys():
self.global_shell_config[config_name] = config_value
self.do_get_shell_config(config_name=None)
def help_get_shell_config(self):
print textwrap.dedent("""
Usage:
> get_shell_config <config_name> get all shell config
""")
def help_set_shell_config(self):
print textwrap.dedent("""
Usage:
> set_shell_config <config_name> <config_value> sets shell config
""")
def complete_get_shell_config(self, pattern, line, start_index, end_index):
if pattern:
return [
c for c in self.global_shell_config.keys() if c.startswith(pattern)]
else:
return self.CLUSTERS
def complete_set_shell_config(self, pattern, line, start_index, end_index):
if pattern:
return [
c for c in self.global_shell_config.keys() if c.startswith(pattern)]
else:
return self.CLUSTERS<|fim▁end|>
|
# with the License. You may obtain a copy of the License at
|
<|file_name|>server.py<|end_file_name|><|fim▁begin|>import urllib
from flask import Flask, Response, abort, request, send_file
from flask_restful import Resource, Api
from flask_cors import CORS, cross_origin
import datetime
import json
import vacker.file_factory<|fim▁hole|>
app = Flask(__name__)
api = Api(app)
file_factory = vacker.file_factory.FileFactory()
CORS(app, resources={"*": {"origins": "*"}})
class Stats(Resource):
def get(self, query_string):
pass
class Search(Resource):
def get(self):
res = file_factory.query_files(
query_string=request.args.get('q', ''),
start=request.args.get('start', 0),
limit=request.args.get('limit', 10),
sort=request.args.get('sort_field', None),
sort_dir=('asc' if request.args.get('sort_order', '1') == '1' else 'desc'))
for file_ in res['files']:
file_['blob_url'] = '/blob/' + urllib.parse.quote(file_['id'])
return {
'data': [file_ for file_ in res['files']],
'recordsTotal': res['total_results'],
'recordsFiltered': res['total_results']
}
class Blob(Resource):
def get(self, file_id):
file_ = file_factory.get_file_by_id(file_id)
parent = file_.get('a_parent_archive')
if parent:
return send_file(parent)
return send_file(file_.get_path())
# Year API
api.add_resource(Stats, '/stats/<string:query_string>')
api.add_resource(Search, '/search')
#api.add_resource(Blob, '/blob/<string:file_id>')<|fim▁end|>
| |
<|file_name|>example_selfcollector_test.go<|end_file_name|><|fim▁begin|><|fim▁hole|>// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package prometheus_test
import (
"runtime"
"github.com/coreos/etcd/Godeps/_workspace/src/code.google.com/p/goprotobuf/proto"
"github.com/coreos/etcd/Godeps/_workspace/src/github.com/prometheus/client_golang/prometheus"
dto "github.com/coreos/etcd/Godeps/_workspace/src/github.com/prometheus/client_model/go"
)
func NewCallbackMetric(desc *prometheus.Desc, callback func() float64) *CallbackMetric {
result := &CallbackMetric{desc: desc, callback: callback}
result.Init(result) // Initialize the SelfCollector.
return result
}
// TODO: Come up with a better example.
// CallbackMetric is an example for a user-defined Metric that exports the
// result of a function call as a metric of type "untyped" without any
// labels. It uses SelfCollector to turn the Metric into a Collector so that it
// can be registered with Prometheus.
//
// Note that this example is pretty much academic as the prometheus package
// already provides an UntypedFunc type.
type CallbackMetric struct {
prometheus.SelfCollector
desc *prometheus.Desc
callback func() float64
}
func (cm *CallbackMetric) Desc() *prometheus.Desc {
return cm.desc
}
func (cm *CallbackMetric) Write(m *dto.Metric) error {
m.Untyped = &dto.Untyped{Value: proto.Float64(cm.callback())}
return nil
}
func ExampleSelfCollector() {
m := NewCallbackMetric(
prometheus.NewDesc(
"runtime_goroutines_count",
"Total number of goroutines that currently exist.",
nil, nil, // No labels, these must be nil.
),
func() float64 {
return float64(runtime.NumGoroutine())
},
)
prometheus.MustRegister(m)
}<|fim▁end|>
|
// Copyright 2014 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
|
<|file_name|>mouse-push-button.js<|end_file_name|><|fim▁begin|>/*
* default/mouse-push-button.js
*/
"use strict";
var Q = require('q'),
Button = require('./../../button');
var MousePushButton = function (options) {
Button.prototype.constructor.call(this, options);
this.delay = options.delay > 0 ? options.delay : 0;
this.g = null;
if(typeof options.g === 'function') this.g = options.g;
this.promisef = null;
this.boundaries = { minX : 0, maxX : 0, minY : 0, maxY : 0 };
this.leftOrEnded = false;
};
MousePushButton.prototype = (function (proto) {
function F() {};
F.prototype = proto;
return new F();
})(Button.prototype);
MousePushButton.prototype.constructor = MousePushButton;
MousePushButton.prototype.setG = function (g) {
if (typeof g !== 'function')
throw new Error("Button setG method needs a g function as argument.");
this.g = g;
return this;
};
MousePushButton.prototype._isInActiveZone = function (touch) {
var x = touch.clientX,
y = touch.clientY,
b = this.boundaries;
return x < b.maxX && x > b.minX && y < b.maxY && y > b.minY;
};
MousePushButton.prototype.bind = function () {
Button.prototype.bind.call(this);
this.el.addEventListener('mousedown', this, false);
this.binded = true;
return this;
};
MousePushButton.prototype.unbind = function () {
Button.prototype.unbind.call(this);
this.el.removeEventListener('mousedown', this, false);
this.binded = false;
return this;
};
MousePushButton.prototype.handleEvent = function (evt) {
switch (evt.type) {
case 'mousedown':
this.onMousedown(evt);
break;
case 'mousemove':
this.onMousemove(evt);
break;
case 'mouseup':
this.onMouseup(evt);
break;
}
};
MousePushButton.prototype.onMousedown = function (evt) {
if (!this.active) {
if (evt.button === 0) {
evt.preventDefault();
this.setActive(true);
var boundingRect = this.el.getBoundingClientRect();
this.boundaries.minX = boundingRect.left;
this.boundaries.maxX = boundingRect.left + boundingRect.width;
this.boundaries.minY = boundingRect.top;
this.boundaries.maxY = boundingRect.bottom;
this.el.ownerDocument.addEventListener('mousemove', this, false);
this.el.ownerDocument.addEventListener('mouseup', this, false);
this.promisef = Q.delay(evt, this.delay).then(this.f);
}
}
};
MousePushButton.prototype.onMousemove = function (evt) {
if(this.active && !this.leftOrEnded) {
evt.preventDefault();
if (!this._isInActiveZone(evt))
this.onMouseup(evt);
}
};
MousePushButton.prototype.onMouseup = function (evt) {
if(this.active && !this.leftOrEnded) {
this._removeCls();
this.leftOrEnded = true;
this.promisef
.then(evt)
.then(this.g)
.finally(this._done(evt))
.done();<|fim▁hole|> }
};
MousePushButton.prototype._done = function (evt) {
var btn = this;
return function () {
btn.setActive(false);
btn.leftOrEnded = false;
btn.el.ownerDocument.removeEventListener('mousemove', btn, false);
btn.el.ownerDocument.removeEventListener('mouseup', btn, false);
};
};
module.exports = MousePushButton;<|fim▁end|>
| |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Pitot - a customizable aviation information receiver
// Copyright (C) 2017-2018 Datong Sun ([email protected])
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or<|fim▁hole|>// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#[macro_use]
extern crate log;
extern crate chrono;
extern crate env_logger;
extern crate serial;
#[macro_use]
extern crate nom;
extern crate serde_json;
extern crate time;
#[macro_use]
extern crate serde_derive;
extern crate i2cdev_bmp280;
extern crate i2csensors;
extern crate i2cdev;
extern crate icmp;
extern crate inotify;
extern crate libc;
extern crate ws;
#[macro_use]
mod utils;
mod pitot;
mod processor;
mod protocol;
mod sensor;
mod transport;
use pitot::Pitot;
use processor::Processor;
use sensor::Sensor;
fn main() {
env_logger::init().unwrap();
let mut p = Pitot::new(10); // 10 Hz
sensor::gnss::ublox::UbloxGNSSProvider::new().and_then(&mut |g| {
p.link_sensor(g);
Some(())
});
sensor::barometer::bmp280::BMP280BaroProvider::new().and_then(&mut |b| {
p.link_sensor(b);
Some(())
});
sensor::sdr::es::ES::new().and_then(&mut |e| {
p.link_sensor(Box::new(e) as Box<Sensor>);
Some(())
});
sensor::sdr::uat::UAT::new().and_then(&mut |e| {
p.link_sensor(Box::new(e) as Box<Sensor>);
Some(())
});
p.link_processor(processor::ownship::Ownship::new());
p.link_processor(Box::new(processor::clock::Clock::new()) as Box<Processor>);
p.link_processor(Box::new(processor::traffic::Traffic::new()) as Box<Processor>);
p.link_processor(Box::new(processor::fisb::FISB::new()) as Box<Processor>);
p.link_processor(Box::new(processor::gnss::GNSS::new()) as Box<Processor>);
p.link_protocol(protocol::gdl90::GDL90::new());
p.link_protocol(protocol::websocket::WebSocket::new(
"0.0.0.0:9001".to_string(),
));
p.link_transport(transport::udp::UDP::new());
p.run();
}<|fim▁end|>
|
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
|
<|file_name|>task.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Basic multitasking interface.
use core::mem::size_of;
use core::intrinsics::abort;
use hal::cortex_m3::{sched, systick};
use hal::cortex_m3::sched::NoInterrupts;
use os::syscall::syscall;
use hal::stack;
/// Task takes one argument, which is u32.
pub type Task = fn(u32);
mod current_stack_offset {
/// Currently allocated stack memory, growing down, starting at __STACK_BASE.
static mut CurrentStackOffset: u32 = 0;
pub fn get() -> u32 {
unsafe { CurrentStackOffset }
}
pub fn set(val: u32) {
unsafe { CurrentStackOffset = val };
}
}
/// Bytes to reserve in privileged stack based on stack size at the time of task::setup() call.
static ReservedPivilegedStackSize: u32 = 256;
/// Maximum number of tasks.
static MaxTasksCount: uint = 4;
mod defined_tasks_count {
use core::intrinsics::abort;
/// Total defined tasks count.
static mut DefinedTasksCount: uint = 0;
pub fn get() -> uint {
unsafe { DefinedTasksCount }
}
pub fn increase() {
unsafe {
DefinedTasksCount += 1;
if DefinedTasksCount > super::MaxTasksCount {
abort();
}
}
}
}
pub enum Status {
Runnable,
Blocked
}
/// Task descriptor, provides task stack pointer.
pub struct TaskDescriptor {
pub stack_start: u32,
pub stack_end: u32,
pub status: Status
}
impl TaskDescriptor {
pub fn block(&mut self, _: NoInterrupts) {
self.status = Blocked;
sched::switch_context();
}
pub fn unblock(&mut self, _: &NoInterrupts) { self.status = Runnable; }
}
struct TasksCollection {
pub current_task: uint,
pub tasks: [TaskDescriptor, ..MaxTasksCount],
}
<|fim▁hole|>pub static mut Tasks: TasksCollection = TasksCollection {
current_task: 0,
tasks: [TaskDescriptor { stack_start: 0, stack_end: 0, status: Runnable }, ..MaxTasksCount]
};
impl TasksCollection {
pub fn current_task<'a>(&'a mut self) -> &'a mut TaskDescriptor {
&mut self.tasks[self.current_task]
}
fn next_task(&mut self) {
loop {
self.current_task += 1;
if self.current_task == defined_tasks_count::get() {
self.current_task = 0;
}
match self.current_task() {
&task if !task.valid() => {}
&TaskDescriptor {status: Runnable, ..} => break,
_ => {}
}
}
}
fn add_task(&mut self, t: TaskDescriptor) {
self.tasks[defined_tasks_count::get()] = t;
defined_tasks_count::increase();
}
}
/// Initialize and start task manager.
///
/// This function keeps main stack intact. It starts the task scheduler and
/// never returns.
///
/// t should point to initial task.
#[inline(never)]
pub fn setup(t: Task, stack_size: u32) {
systick::setup(::hal::cortex_m3::systick::CALIBRATED, true);
let current_stack = sched::get_current_stack_pointer();
// User tasks start at this current stack size + reserved size aligned by 4
// bytes.
let task_stack_base: u32 = (current_stack as u32 - ReservedPivilegedStackSize) & !3;
current_stack_offset::set(task_stack_base);
let td = define_task(t, 0, stack_size, true);
td.load();
systick::enable();
sched::switch_context();
unsafe { abort() };
}
#[inline(never)]
pub fn define_task(t: Task, arg: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
systick::disable_irq();
let task_base = current_stack_offset::get();
let task_stack_size: u32 = (
stack_size +
8*4 + // hw saved regs
8*4 + // sw saved regs
8*4 // scratch pad for __morestack failure. see note on morestack below.
) & !0b1111;
current_stack_offset::set(task_base - task_stack_size);
let td = TaskDescriptor::new(t, arg, task_base, stack_size, initial);
unsafe { Tasks.add_task(td) };
systick::enable_irq();
td
}
impl TaskDescriptor {
/// Creates a new TaskDescriptor for given task, arg and stack base.
///
/// This function initializes task stack with hw saved registers.
#[inline(never)]
pub fn new(t: Task, arg: u32, stack_base: u32, stack_size: u32, initial: bool) -> TaskDescriptor {
let state = sched::SavedState::new(t, arg);
let mut stack_top: u32 = stack_base - size_of::<sched::SavedState>() as u32;
unsafe { *(stack_top as *mut sched::SavedState) = state };
if !initial {
stack_top -= 8*4;
}
TaskDescriptor {
stack_start: stack_top,
stack_end: stack_base - stack_size,
status: Runnable,
}
}
pub fn load(&self) {
sched::set_task_stack_pointer(self.stack_start);
stack::set_stack_limit(self.stack_end);
}
pub fn save(&mut self) {
self.stack_start = sched::get_task_stack_pointer();
}
pub fn valid(&self) -> bool {
self.stack_end != 0
}
pub fn invalidate(&mut self) {
self.stack_end = 0;
}
}
#[inline(always)]
pub unsafe fn task_scheduler() {
stack::set_stack_limit(stack::stack_base() - ReservedPivilegedStackSize);
Tasks.current_task().save();
Tasks.next_task();
Tasks.current_task().load();
}
// TODO(farcaller): this should not actually use stack!
// At the time of the call of syscall(), the stack is overflown by 4, we still
// have 12 bytes in reserve and 2*8*4 to save the frame in pendsv after kill.
#[no_split_stack]
pub fn morestack() {
let psp = sched::get_task_stack_pointer();
let sp = sched::get_current_stack_pointer();
if psp == sp {
unsafe { syscall(kill_current_task, 0) };
} else {
unsafe { abort() };
}
}
#[inline(never)]
#[no_mangle]
#[no_split_stack]
pub fn kill_current_task(_: u32) {
unsafe { Tasks.current_task().invalidate() };
sched::switch_context();
}<|fim▁end|>
| |
<|file_name|>protomol_functions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env cctools_python
# CCTOOLS_PYTHON_VERSION 2.7 2.6
import random
import math
#-------------------------------Constants-----------------------------------
DEFAULT_MONTE_CARLO_STEPS = 100
DEFAULT_OUTPUT_PATH = '.'
DEFAULT_MDSTEPS = 10000
DEFAULT_BOUNDARY_CONDITIONS = "Vacuum"
DEFAULT_OUTPUT_FREQ = 10000
DEFAULT_PHYSICAL_TEMP = 300
EXECUTABLE = "ProtoMol"
#-----------------------------Global Data-------------------------------------
pdb_file = ""
psf_file = ""
par_file = ""
xyz_file_name = ""
dcd_file_name = ""
boundary_conditions = DEFAULT_BOUNDARY_CONDITIONS
monte_carlo_steps = DEFAULT_MONTE_CARLO_STEPS
md_steps = DEFAULT_MDSTEPS
output_freq = DEFAULT_OUTPUT_FREQ
output_path = DEFAULT_OUTPUT_PATH
replica_list = []
#------------------------Initialize random generator----------------------------
random.seed()
#-------------------------Global functions---------------------------------
#Function to parse the file name from a string holding its location.
def parse_file_name(file_name):
split_name = file_name.split('/')
return split_name[len(split_name)-1]
#Function to parse the file name and leave out its extension.
def remove_trailing_dots(file_name):
split_name = file_name.split('.')
return split_name[0]
#-------------------------Define Replica Object---------------------------------
class Replica(object):
def __init__(self, id, temp):
self.id = id
self.temp = temp
self.exchgd_replica_id = -1
self.potential_energy = None
self.prev_temp = None
self.exch_steps = []
self.running = 0
self.last_seen_step = -1
def __str__(self):
return "Replica %d at temp %f" % (self.id, self.temp)
#Function to generate a config file to send to workqueue. It returns the generated config file name.
def generate_config(output_path, pdb_file, psf_file, par_file, monte_carlo_step, md_steps, output_freq, replica_obj, generate_xyz = False, generate_dcd = False):
#initialize the config file name based on the replica id.
cfg_file_name = "%s/%s/%s/%d/%d-%d.cfg" % ( output_path, "simfiles", "config", replica_obj.id, replica_obj.id, monte_carlo_step)
cfg_file_stream = open(cfg_file_name, "w")
#initialize string that will hold the config file values
write_str = ""
#Parse supplied files so only actual file name is passed, not full path of the file name
input_pdb = "%s.%d-%d.pdb" % (remove_trailing_dots(parse_file_name(pdb_file)), replica_obj.id, monte_carlo_step)
parsed_psf_file = parse_file_name(psf_file)
parsed_par_file = parse_file_name(par_file)
#Start writing the config file parameters and values
write_str += "randomtype 1\n"
write_str += "numsteps %d\n" % md_steps
write_str += "outputfreq %d\n" % output_freq
write_str += "posfile %s\n" % input_pdb
write_str += "psffile %s\n" % parsed_psf_file
write_str += "parfile %s\n" % parsed_par_file
if monte_carlo_step > 0:
write_str += "velfile %s.%d-%d.vel\n" % (remove_trailing_dots(parse_file_name(pdb_file)), replica_obj.id, monte_carlo_step)
write_str += "dofinPDBPosFile true\n"
write_str += "finPDBPosFile %s.%d-%d.pdb\n" % (remove_trailing_dots(parse_file_name(pdb_file)), replica_obj.id, monte_carlo_step+1)
write_str += "finXYZVelFile %s.%d-%d.vel\n" % (remove_trailing_dots(parse_file_name(pdb_file)), replica_obj.id, monte_carlo_step+1)
write_str += "temperature %f\n" % replica_obj.temp
write_str += "boundaryConditions %s\n" % boundary_conditions
write_str += "cellManager Cubic\n"
write_str += "cellsize 69\n"
if generate_xyz:
write_str += "XYZPosFile %d.xyz\n" % replica_obj.id
write_str += "XYZPosFileOutputFreq %d\n" % md_steps
if generate_dcd:
write_str += "DCDFile %d.dcd\n" % replica_obj.id
write_str += "DCDFileOutputFreq %d\n" % output_freq
write_str += "allEnergiesFile %d.eng\n" % replica_obj.id
write_str += "allEnergiesFileOutputFreq %d\n" % output_freq
write_str += "seed %d\n" % random.randint(1, 1000000)
write_str += "shake on\n"
write_str += "integrator {\n"
write_str += "level 0 langevinImpulse {\n"
write_str += "temperature %f\n" % replica_obj.temp<|fim▁hole|> write_str += "force angle\n"
write_str += "force dihedral\n"
write_str += "force improper\n"
write_str += "force LennardJones Coulomb\n"
write_str += " -switchingFunction C2 -switchingFunction C1 -algorithm NonbondedCutoff\n"
write_str += " -switchon 10\n"
write_str += " -cutoff 12\n"
write_str += " -cutoff 12\n"
write_str += " -cutoff 12\n"
write_str += "}\n}"
#Write to the config file
cfg_file_stream.write(write_str)
return cfg_file_name
#Function that to perform metropolis criteria check for two replicas.
def metropolis( u_i, u_j, t_i, t_j ):
# Metropolis for replica i with potential energy u_i, temp t_i
# and replica j with potential energy u_j, temp t_j
K_b = 0.001987191 #Constants.boltzmann()
deltaE = (1 / (K_b * t_i) - 1/ (K_b * t_j) ) - (u_j - u_i)
if( deltaE < 0 ):
return True
acceptProb = math.exp(-deltaE)
randNum = random.random()
if( randNum < acceptProb ):
return True
else:
return False<|fim▁end|>
|
write_str += "gamma 5\n"
write_str += "timestep 2\n"
write_str += "force bond\n"
|
<|file_name|>availability_zones.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the<|fim▁hole|>from novaclient.tests.unit.fixture_data import base
class V1(base.Fixture):
base_url = 'os-availability-zone'
zone_info_key = 'availabilityZoneInfo'
zone_name_key = 'zoneName'
zone_state_key = 'zoneState'
def setUp(self):
super(V1, self).setUp()
get_os_availability_zone = {
self.zone_info_key: [
{
self.zone_name_key: "zone-1",
self.zone_state_key: {"available": True},
"hosts": None
},
{
self.zone_name_key: "zone-2",
self.zone_state_key: {"available": False},
"hosts": None
}
]
}
self.requests.register_uri('GET', self.url(),
json=get_os_availability_zone,
headers=self.json_headers)
get_os_zone_detail = {
self.zone_info_key: [
{
self.zone_name_key: "zone-1",
self.zone_state_key: {"available": True},
"hosts": {
"fake_host-1": {
"nova-compute": {
"active": True,
"available": True,
"updated_at": '2012-12-26 14:45:25'
}
}
}
},
{
self.zone_name_key: "internal",
self.zone_state_key: {"available": True},
"hosts": {
"fake_host-1": {
"nova-sched": {
"active": True,
"available": True,
"updated_at": '2012-12-26 14:45:25'
}
},
"fake_host-2": {
"nova-network": {
"active": True,
"available": False,
"updated_at": '2012-12-26 14:45:24'
}
}
}
},
{
self.zone_name_key: "zone-2",
self.zone_state_key: {"available": False},
"hosts": None
}
]
}
self.requests.register_uri('GET', self.url('detail'),
json=get_os_zone_detail,
headers=self.json_headers)<|fim▁end|>
|
# License for the specific language governing permissions and limitations
# under the License.
|
<|file_name|>saturating_add_mul.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::signeds::PrimitiveSigned;
use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base_test_util::bench::bucketers::triple_max_bit_bucketer;
use malachite_base_test_util::bench::{run_benchmark, BenchmarkType};
use malachite_base_test_util::generators::common::{GenConfig, GenMode};
use malachite_base_test_util::generators::{signed_triple_gen, unsigned_triple_gen_var_19};
use malachite_base_test_util::runner::Runner;
pub(crate) fn register(runner: &mut Runner) {
register_unsigned_demos!(runner, demo_saturating_add_mul_unsigned);
register_signed_demos!(runner, demo_saturating_add_mul_signed);
register_unsigned_demos!(runner, demo_saturating_add_mul_assign_unsigned);
register_signed_demos!(runner, demo_saturating_add_mul_assign_signed);
register_unsigned_benches!(runner, benchmark_saturating_add_mul_unsigned);
register_signed_benches!(runner, benchmark_saturating_add_mul_signed);
register_unsigned_benches!(runner, benchmark_saturating_add_mul_assign_unsigned);
register_signed_benches!(runner, benchmark_saturating_add_mul_assign_signed);
}
fn demo_saturating_add_mul_unsigned<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
) {
for (x, y, z) in unsigned_triple_gen_var_19::<T>()
.get(gm, &config)
.take(limit)
{
println!(
"{}.saturating_add_mul({}, {}) = {}",
x,
y,
z,
x.saturating_add_mul(y, z)
);
}
}
fn demo_saturating_add_mul_signed<T: PrimitiveSigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
) {
for (x, y, z) in signed_triple_gen::<T>().get(gm, &config).take(limit) {
println!(
"{}.saturating_add_mul({}, {}) = {}",
x,
y,
z,
x.saturating_add_mul(y, z)
);
}
}
fn demo_saturating_add_mul_assign_unsigned<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
) {
for (mut x, y, z) in unsigned_triple_gen_var_19::<T>()
.get(gm, &config)
.take(limit)
{
let old_x = x;
x.saturating_add_mul_assign(y, z);
println!(
"x := {}; x.saturating_add_mul_assign({}, {}); x = {}",
old_x, y, z, x
);
}
}
fn demo_saturating_add_mul_assign_signed<T: PrimitiveSigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
) {
for (mut x, y, z) in signed_triple_gen::<T>().get(gm, &config).take(limit) {
let old_x = x;
x.saturating_add_mul_assign(y, z);
println!(
"x := {}; x.saturating_add_mul_assign({}, {}); x = {}",
old_x, y, z, x
);
}
}
fn benchmark_saturating_add_mul_unsigned<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.saturating_add_mul({}, {})", T::NAME, T::NAME, T::NAME),
BenchmarkType::Single,
unsigned_triple_gen_var_19::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_max_bit_bucketer("x", "y", "z"),
&mut [("Malachite", &mut |(x, y, z)| {
no_out!(x.saturating_add_mul(y, z))
})],
);
}
fn benchmark_saturating_add_mul_signed<T: PrimitiveSigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!("{}.saturating_add_mul({}, {})", T::NAME, T::NAME, T::NAME),
BenchmarkType::Single,
signed_triple_gen::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_max_bit_bucketer("x", "y", "z"),
&mut [("Malachite", &mut |(x, y, z)| {
no_out!(x.saturating_add_mul(y, z))
})],
);
}
fn benchmark_saturating_add_mul_assign_unsigned<T: PrimitiveUnsigned>(
gm: GenMode,
config: GenConfig,<|fim▁hole|> run_benchmark(
&format!(
"{}.saturating_add_mul_assign({}, {})",
T::NAME,
T::NAME,
T::NAME
),
BenchmarkType::Single,
unsigned_triple_gen_var_19::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_max_bit_bucketer("x", "y", "z"),
&mut [("Malachite", &mut |(mut x, y, z)| {
x.saturating_add_mul_assign(y, z)
})],
);
}
fn benchmark_saturating_add_mul_assign_signed<T: PrimitiveSigned>(
gm: GenMode,
config: GenConfig,
limit: usize,
file_name: &str,
) {
run_benchmark(
&format!(
"{}.saturating_add_mul_assign({}, {})",
T::NAME,
T::NAME,
T::NAME
),
BenchmarkType::Single,
signed_triple_gen::<T>().get(gm, &config),
gm.name(),
limit,
file_name,
&triple_max_bit_bucketer("x", "y", "z"),
&mut [("Malachite", &mut |(mut x, y, z)| {
x.saturating_add_mul_assign(y, z)
})],
);
}<|fim▁end|>
|
limit: usize,
file_name: &str,
) {
|
<|file_name|>331.py<|end_file_name|><|fim▁begin|>class Solution:
def isValidSerialization(self, preorder):
"""
:type preorder: str<|fim▁hole|> arr_pre_order = preorder.split(',')
stack = []
for node in arr_pre_order:
stack.append(node)
while len(stack) > 1 and stack[-1] == '#' and stack[-2] == '#':
stack.pop()
stack.pop()
if len(stack) < 1:
return False
stack[-1] = '#'
if len(stack) == 1 and stack[0] == '#':
return True
return False<|fim▁end|>
|
:rtype: bool
"""
|
<|file_name|>rainx-monitor.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import urllib2
RAINX_STAT_KEYS = [
("rainx.reqpersec", "total_reqpersec"),
("rainx.reqputpersec", "put_reqpersec"),
("rainx.reqgetpersec", "get_reqpersec"),
("rainx.avreqtime", "total_avreqtime"),
("rainx.avputreqtime", "put_avreqtime"),
("rainx.avgetreqtime", "get_avreqtime"),
]
def parse_info(stream):
data = {}
for line in stream.readlines():
parts = line.split()
if len(parts) > 1:
# try to cast value to int or float
try:
value = int(parts[1])
except ValueError:
try:<|fim▁hole|> data[parts[0]] = value
else:
data[parts[0]] = None
return data
def get_stat_lines(url, stat_keys):
stream = urllib2.urlopen(url)
data = parse_info(stream)
stream.close()
stats = [("stat.%s = %s" % (k[1], str(data[k[0]])))
for k in stat_keys if k[0] in data]
return stats
def main(args):
ip_port = args[1].split("|")[2]
stats_url = "http://%s/stat" % ip_port
for stat in get_stat_lines(stats_url, RAINX_STAT_KEYS):
print stat
if __name__ == "__main__":
main(sys.argv)<|fim▁end|>
|
value = float(parts[1])
except ValueError:
value = parts[1]
|
<|file_name|>ActalogicApp.cpp<|end_file_name|><|fim▁begin|>#include "pch.h"
#include "ActalogicApp.h"
TCHAR ActalogicApp::m_szWindowClass[] = _T("Actalogic");
TCHAR ActalogicApp::m_szTitle[] = _T("Actalogic");
ActalogicApp::ActalogicApp():
m_hWnd(NULL),
m_hInstance(NULL),
m_d2d1Manager(),
m_entityFPS(),
m_entityDebugInfoLayer(),
m_entitySceneContainer(this),
m_inputHelper()
{
m_entityDebugInfoLayer.SetApp(this);
}
ActalogicApp::~ActalogicApp()
{
}
HRESULT ActalogicApp::Initialize(HINSTANCE hInstance, int nCmdShow)
{
HRESULT hresult;
hresult = m_d2d1Manager.CreateDeviceIndependentResources();
if (FAILED(hresult)) {return hresult;}
//TODO: Add device-independent initialization of each entity here
hresult = m_entityDebugInfoLayer.OnCreateDeviceIndependentResources(&m_d2d1Manager);
if (FAILED(hresult)) { return hresult; }
hresult = m_entityFPS.OnCreateDeviceIndependentResources(&m_d2d1Manager);
if (FAILED(hresult)) { return hresult; }
hresult = m_entitySceneContainer.OnCreateDeviceIndependentResources(&m_d2d1Manager);
if (FAILED(hresult)) { return hresult; }
m_hInstance = hInstance;
m_hWnd = InitializeWindow(hInstance, nCmdShow, 800.0F, 600.0F);
return m_hWnd==NULL ? E_FAIL : S_OK;
}
HWND ActalogicApp::InitializeWindow(HINSTANCE hInstance, int nCmdShow, FLOAT width, FLOAT height)
{
WNDCLASSEX wcex;
wcex.cbSize = sizeof(WNDCLASSEX);
wcex.style = CS_HREDRAW | CS_VREDRAW;
wcex.lpfnWndProc = ActalogicApp::WndProc;
wcex.cbClsExtra = 0;
wcex.cbWndExtra = 0;
wcex.hInstance = hInstance;
wcex.hIcon = LoadIcon(hInstance, MAKEINTRESOURCE(IDI_APPLICATION));
wcex.hCursor = LoadCursor(NULL, IDC_ARROW);
wcex.hbrBackground = (HBRUSH)(COLOR_WINDOW + 1);
wcex.lpszMenuName = NULL;
wcex.lpszClassName = m_szWindowClass;
wcex.hIconSm = LoadIcon(wcex.hInstance, MAKEINTRESOURCE(IDI_APPLICATION));
if (!RegisterClassEx(&wcex))
{
MessageBox(NULL,
_T("Call to RegisterClassEx failed!"),
m_szTitle,
NULL);
return NULL;
}
FLOAT dpiX, dpiY;
m_d2d1Manager.GetDesktopDpi(&dpiX, &dpiY);
UINT desktopWidth = static_cast<UINT>(ceil(width * dpiX / 96.f));
UINT desktopHeight = static_cast<UINT>(ceil(height * dpiY / 96.f));
HWND hWnd = CreateWindow(
m_szWindowClass,
m_szTitle,
WS_OVERLAPPEDWINDOW & ~WS_MAXIMIZEBOX & ~WS_SIZEBOX,
CW_USEDEFAULT, CW_USEDEFAULT,
desktopWidth, desktopHeight,
NULL,
NULL,
hInstance,
this
);
if (!hWnd)
{
MessageBox(NULL,
_T("Call to CreateWindow failed!"),
m_szTitle,
NULL);
return NULL;
}
SetClientSize(hWnd, desktopWidth, desktopHeight);
if (nCmdShow == SW_MAXIMIZE)
{
ShowWindow(hWnd, SW_RESTORE);
}
else
{
ShowWindow(hWnd, nCmdShow);
}
UpdateWindow(hWnd);
return hWnd;
}
int ActalogicApp::Run()
{
MSG msg;
for (;;)
{
if (PeekMessage(&msg, NULL, 0, 0, PM_NOREMOVE)) {
if (!GetMessage(&msg, NULL, 0, 0))
{
return msg.wParam;
}
TranslateMessage(&msg);
DispatchMessage(&msg);
}
else
{
if (m_isActive)
{
OnTick();
}
else
{
Sleep(1);
}
}
}
}
void ActalogicApp::Dispose()
{
//TODO: Add release of entity resources here
m_entityDebugInfoLayer.OnDiscardAllResources();
m_entitySceneContainer.OnDiscardAllResources();
m_d2d1Manager.DiscardAllResources();
}
void ActalogicApp::Exit()
{
PostMessage(m_hWnd, WM_DESTROY, 0, 0);
}
///////////////////////////////////////////////////////////////////////////////
void ActalogicApp::OnTick()
{
m_inputHelper.OnTick();
OnPreRender();
OnRender();
OnPostRender();
}
void ActalogicApp::OnPreRender()
{
//TODO: Add pre-render processing here
m_entityDebugInfoLayer.OnPreRender(&m_inputHelper);
m_entityFPS.OnPreRender(&m_inputHelper);
m_entitySceneContainer.OnPreRender(&m_inputHelper);
}
void ActalogicApp::OnRender()
{
HRESULT hresult = S_OK;
hresult = m_d2d1Manager.CreateDeviceResources(m_hWnd);
//TODO: Add device-dependent resource initialization here
if (SUCCEEDED(hresult)){ m_entityDebugInfoLayer.OnCreateDeviceResources(&m_d2d1Manager); }
if (SUCCEEDED(hresult)){ m_entitySceneContainer.OnCreateDeviceResources(&m_d2d1Manager); }
if (SUCCEEDED(hresult))
{
m_d2d1Manager.BeginDraw();
//TODO: Add rendering calls here
m_entitySceneContainer.OnRender(&m_d2d1Manager);
m_entityDebugInfoLayer.OnRender(&m_d2d1Manager);
hresult = m_d2d1Manager.EndDraw();
}
if (FAILED(hresult))
{<|fim▁hole|> //TODO:±±É\[XÌðúðÇÁ
m_entityDebugInfoLayer.OnDiscardDeviceResources();
m_entitySceneContainer.OnDiscardDeviceResources();
m_d2d1Manager.DiscardDeviceResources();
}
}
void ActalogicApp::OnPostRender()
{
//TODO: Add post-render processing here
m_entityDebugInfoLayer.OnPostRender();
m_entitySceneContainer.OnPostRender();
}
void ActalogicApp::OnResize(WORD width, WORD height, BOOL isActive)
{
m_isActive = isActive;
}
///////////////////////////////////////////////////////////////////////////////
LRESULT CALLBACK ActalogicApp::WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
if (message == WM_CREATE)
{
LPCREATESTRUCT pcs = (LPCREATESTRUCT)lParam;
ActalogicApp *pApp = (ActalogicApp *)pcs->lpCreateParams;
SetWindowLongPtrW(hWnd, GWLP_USERDATA, PtrToUlong(pApp));
}
else
{
ActalogicApp *pApp = reinterpret_cast<ActalogicApp *>(static_cast<LONG_PTR>(
::GetWindowLongPtrW(hWnd, GWLP_USERDATA)));
switch (message)
{
case WM_DISPLAYCHANGE:
InvalidateRect(hWnd, NULL, FALSE);
case WM_PAINT:
pApp->OnRender();
ValidateRect(hWnd, NULL);
break;
case WM_SIZE:
{
BOOL isActive = wParam == SIZE_MINIMIZED ? FALSE : TRUE;
WORD width = lParam & 0xFFFF;
WORD height = (lParam >> 16) & 0xFFFF;
pApp->OnResize(width, height, isActive);
break;
}
case WM_DESTROY:
PostQuitMessage(0);
break;
default:
return DefWindowProc(hWnd, message, wParam, lParam);
break;
}
}
return S_OK;
}
VOID ActalogicApp::SetClientSize(HWND hWnd, LONG sx, LONG sy)
{
RECT rc1;
RECT rc2;
GetWindowRect(hWnd, &rc1);
GetClientRect(hWnd, &rc2);
sx += ((rc1.right - rc1.left) - (rc2.right - rc2.left));
sy += ((rc1.bottom - rc1.top) - (rc2.bottom - rc2.top));
SetWindowPos(hWnd, NULL, 0, 0, sx, sy, (SWP_NOZORDER | SWP_NOOWNERZORDER | SWP_NOMOVE));
}<|fim▁end|>
| |
<|file_name|>init-res-into-things.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes, unsafe_destructor)]
use std::cell::Cell;
use std::gc::{Gc, GC};
// Resources can't be copied, but storing into data structures counts
// as a move unless the stored thing is used afterwards.
struct r {
i: Gc<Cell<int>>,
}
struct Box { x: r }
#[unsafe_destructor]
impl Drop for r {
fn drop(&mut self) {
self.i.set(self.i.get() + 1)
}
}
fn r(i: Gc<Cell<int>>) -> r {
r {
i: i
}
}
fn test_box() {
let i = box(GC) Cell::new(0i);
{
let _a = box(GC) r(i);
}
assert_eq!(i.get(), 1);
}
fn test_rec() {
let i = box(GC) Cell::new(0i);
{
let _a = Box {x: r(i)};
}
assert_eq!(i.get(), 1);
}
fn test_tag() {
enum t {
t0(r),
}
let i = box(GC) Cell::new(0i);
{
let _a = t0(r(i));<|fim▁hole|>fn test_tup() {
let i = box(GC) Cell::new(0i);
{
let _a = (r(i), 0i);
}
assert_eq!(i.get(), 1);
}
fn test_unique() {
let i = box(GC) Cell::new(0i);
{
let _a = box r(i);
}
assert_eq!(i.get(), 1);
}
fn test_box_rec() {
let i = box(GC) Cell::new(0i);
{
let _a = box(GC) Box {
x: r(i)
};
}
assert_eq!(i.get(), 1);
}
pub fn main() {
test_box();
test_rec();
test_tag();
test_tup();
test_unique();
test_box_rec();
}<|fim▁end|>
|
}
assert_eq!(i.get(), 1);
}
|
<|file_name|>test_arpcache.py<|end_file_name|><|fim▁begin|># Copyright 2016-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""
Unit tests for the arpcache module
"""
import os
import subprocess
import sys
import unittest
import mock
import moduletests.src.arpcache
try:<|fim▁hole|> from io import StringIO
if sys.hexversion >= 0x3040000:
# contextlib.redirect_stdout was introduced in Python 3.4
import contextlib
else:
# contextlib2 is a backport of contextlib from Python 3.5 and is compatible with Python2/3
import contextlib2 as contextlib
class TestArpcache(unittest.TestCase):
config_file_path = "/etc/sysctl.d/55-arp-gc_thresh1.conf"
def setUp(self):
self.output = StringIO()
def tearDown(self):
self.output.close()
@mock.patch("subprocess.check_output")
def test_detect_noproblem(self, check_output_mock):
check_output_mock.return_value = "net.ipv4.neigh.default.gc_thresh1 = 0"
self.assertFalse(moduletests.src.arpcache.detect())
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output")
def test_detect_problem(self, check_output_mock):
check_output_mock.return_value = "net.ipv4.neigh.default.gc_thresh1 = 1"
self.assertTrue(moduletests.src.arpcache.detect())
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output", side_effect=subprocess.CalledProcessError(
"1", "test", "/etc/sysctl.d/55-arp-gc_thresh1.conf: no such file or directory"))
def test_fix_cpe(self, check_output_mock):
with contextlib.redirect_stdout(self.output):
self.assertRaises(subprocess.CalledProcessError, moduletests.src.arpcache.fix, self.config_file_path)
self.assertTrue(self.output.getvalue().endswith(
"[UNFIXED] 'sysctl -w net.ipv4.neigh.default.gc_thresh1=0' failed for running system\n"))
self.assertTrue(check_output_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[False])
@mock.patch("moduletests.src.arpcache.open", mock.mock_open(read_data="stuff"))
def test_fix_exists_sudo_true(self, check_output_mock, exists_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith(
"[FIXED] set net.ipv4.neigh.default.gc_thresh1=0 for running system\n"
"[FIXED] net.ipv4.neigh.default.gc_thresh1=0 in /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[True])
@mock.patch("moduletests.src.arpcache.open", mock.mock_open(read_data="net.ipv4.neigh.default.gc_thresh1 = 0\n"
"something else\n"))
def test_fix_sudo_true(self, check_output_mock, exists_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith(
"[FIXED] set net.ipv4.neigh.default.gc_thresh1=0 for running system\n"
"[FIXED] net.ipv4.neigh.default.gc_thresh1=0 in /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[True])
@mock.patch("moduletests.src.arpcache.open", mock.mock_open(read_data="net.ipv4.neigh.default.gc_thresh1 = 0\n"
"net.ipv4.neigh.default.gc_thresh1 = 0\n"))
def test_fix_sudo_true_found_twice(self, check_output_mock, exists_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.fix(self.config_file_path))
self.assertTrue(self.output.getvalue().endswith(
"[FIXED] set net.ipv4.neigh.default.gc_thresh1=0 for running system\n"
"[FIXED] net.ipv4.neigh.default.gc_thresh1=0 in /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
@mock.patch("subprocess.check_output")
@mock.patch("moduletests.src.arpcache.os.path.exists", side_effect=[False])
@mock.patch("moduletests.src.arpcache.open", side_effect=IOError)
def test_fix_writefail(self, open_mock, exists_mock, check_output_mock):
check_output_mock.return_value = "True"
with contextlib.redirect_stdout(self.output):
self.assertRaises(IOError, moduletests.src.arpcache.fix, self.config_file_path)
self.assertTrue(check_output_mock.called)
self.assertTrue(exists_mock.called)
self.assertTrue(open_mock.called)
self.assertTrue(self.output.getvalue().endswith(
"[UNFIXED] Failed to write config to /etc/sysctl.d/55-arp-gc_thresh1.conf\n"))
@mock.patch("moduletests.src.arpcache.detect", return_value=False)
def test_run_success(self, detect_mock):
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.run())
self.assertTrue(self.output.getvalue().endswith("Determining if aggressive ARP caching is enabled\n"
"[SUCCESS] Aggressive arp caching is disabled.\n"))
self.assertTrue(detect_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", return_value=True)
def test_run_no_remediate(self, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": False,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
moduletests.src.arpcache.run()
self.assertTrue("[UNFIXED] Remediation impossible without sudo and --remediate.\n"
"-- Running as root/sudo: True\n"
"-- Required --remediate flag specified: False\n"
"[FAILURE] Aggressive arp caching is enabled."
in self.output.getvalue())
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", return_value=True)
@mock.patch("moduletests.src.arpcache.os.path.isfile", return_value=True)
@mock.patch("moduletests.src.arpcache.backup", return_value=True)
@mock.patch("moduletests.src.arpcache.fix", return_value=True)
@mock.patch("moduletests.src.arpcache.restore", return_value=True)
def test_run_failure_isfile(self, restore_mock, fix_mock, backup_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": {self.config_file_path: "/some/path"},
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue("Determining if aggressive ARP caching is enabled\n"
"[FAILURE] Aggressive arp caching is enabled. "
"This can cause issues communicating with instances in the same subnet"
in self.output.getvalue())
self.assertTrue(restore_mock.called)
self.assertTrue(fix_mock.called)
self.assertTrue(backup_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", return_value=True)
@mock.patch("moduletests.src.arpcache.os.path.isfile", return_value=False)
@mock.patch("moduletests.src.arpcache.fix", return_value=True)
def test_run_failure(self, fix_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue("Determining if aggressive ARP caching is enabled\n"
"[FAILURE] Aggressive arp caching is enabled. "
"This can cause issues communicating with instances in the same subnet"
in self.output.getvalue())
self.assertTrue(fix_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", side_effect=(True, False))
@mock.patch("moduletests.src.arpcache.os.path.isfile", return_value=False)
@mock.patch("moduletests.src.arpcache.fix", return_value=True)
def test_run_fix(self, fix_mock, isfile_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": dict(),
"REMEDIATE": True,
"SUDO": True}
with contextlib.redirect_stdout(self.output):
self.assertTrue(moduletests.src.arpcache.run())
self.assertTrue(self.output.getvalue().endswith("Determining if aggressive ARP caching is enabled\n"
"[SUCCESS] Aggressive arp caching is disabled after "
"remediation. Please see the logs for further details\n"))
self.assertTrue(fix_mock.called)
self.assertTrue(isfile_mock.called)
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict")
@mock.patch("moduletests.src.arpcache.detect", side_effect=Exception)
@mock.patch("moduletests.src.arpcache.restore", return_value=True)
def test_run_detect_exception(self, restore_mock, detect_mock, config_mock):
config_mock.return_value = {"BACKUP_DIR": "/var/tmp/ec2rl",
"LOG_DIR": "/var/tmp/ec2rl",
"BACKED_FILES": {self.config_file_path: "/some/path"},
"REMEDIATE": True}
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue(detect_mock.called)
self.assertTrue(config_mock.called)
self.assertTrue(restore_mock.called)
@mock.patch("moduletests.src.arpcache.get_config_dict", side_effect=Exception)
def test_run_config_exception(self, config_mock):
with contextlib.redirect_stdout(self.output):
self.assertFalse(moduletests.src.arpcache.run())
self.assertTrue(config_mock.called)<|fim▁end|>
|
# Python 2.x
from cStringIO import StringIO
except ImportError:
# Python 3.x
|
<|file_name|>support.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016-2019 The University of Manchester
#<|fim▁hole|># (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
from spalloc import (
config, ProtocolClient, ProtocolError, ProtocolTimeoutError,
SpallocServerException)
# The acceptable range of server version numbers
VERSION_RANGE_START = (0, 1, 0)
VERSION_RANGE_STOP = (5, 0, 1)
class Terminate(Exception):
def __init__(self, code, *args):
super().__init__()
self._code = code
args = list(args)
message = args.pop(0) if args else None
if message is None:
self._msg = None
elif args:
self._msg = message.format(*args)
else:
self._msg = message
def exit(self):
if self._msg is not None:
sys.stderr.write(self._msg + "\n")
sys.exit(self._code)
def version_verify(client, timeout):
version = tuple(map(int, client.version(timeout=timeout).split(".")))
if not (VERSION_RANGE_START <= version < VERSION_RANGE_STOP):
raise Terminate(2, "Incompatible server version ({})",
".".join(map(str, version)))
class Script(object):
def __init__(self):
self.client_factory = ProtocolClient
def get_parser(self, cfg):
""" Return a set-up instance of :py:class:`argparse.ArgumentParser`
"""
raise NotImplementedError
def verify_arguments(self, args):
""" Check the arguments for sanity and do any second-stage parsing\
required.
"""
def body(self, client, args):
""" How to do the processing of the script once a client has been\
obtained and verified to be compatible.
"""
def build_server_arg_group(self, server_args, cfg):
server_args.add_argument(
"--hostname", "-H", default=cfg["hostname"],
help="hostname or IP of the spalloc server (default: %(default)s)")
server_args.add_argument(
"--port", "-P", default=cfg["port"], type=int,
help="port number of the spalloc server (default: %(default)s)")
server_args.add_argument(
"--timeout", default=cfg["timeout"], type=float, metavar="SECONDS",
help="seconds to wait for a response from the server (default: "
"%(default)s)")
server_args.add_argument(
"--ignore_version", default=cfg["ignore_version"], type=bool,
help="Ignore the server version (WARNING: could result in errors) "
"default: %(default)s)")
def __call__(self, argv=None):
cfg = config.read_config()
parser = self.get_parser(cfg)
server_args = parser.add_argument_group("spalloc server arguments")
self.build_server_arg_group(server_args, cfg)
args = parser.parse_args(argv)
# Fail if server not specified
if args.hostname is None:
parser.error("--hostname of spalloc server must be specified")
self.verify_arguments(args)
try:
with self.client_factory(args.hostname, args.port) as client:
if not args.ignore_version:
version_verify(client, args.timeout)
self.body(client, args)
return 0
except (IOError, OSError, ProtocolError, ProtocolTimeoutError) as e:
sys.stderr.write("Error communicating with server: {}\n".format(e))
return 1
except SpallocServerException as srv_exn:
sys.stderr.write("Error from server: {}\n".format(srv_exn))
return 1
except Terminate as t:
t.exit()<|fim▁end|>
|
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
|
<|file_name|>mpu_rbar.rs<|end_file_name|><|fim▁begin|>#[doc = "Register `MPU_RBAR` reader"]
pub struct R(crate::R<MPU_RBAR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<MPU_RBAR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<MPU_RBAR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<MPU_RBAR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `MPU_RBAR` writer"]
pub struct W(crate::W<MPU_RBAR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<MPU_RBAR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<MPU_RBAR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<MPU_RBAR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `REGION` reader - MPU region field"]
pub struct REGION_R(crate::FieldReader<u8, u8>);
impl REGION_R {
pub(crate) fn new(bits: u8) -> Self {
REGION_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for REGION_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `REGION` writer - MPU region field"]
pub struct REGION_W<'a> {
w: &'a mut W,
}
impl<'a> REGION_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u32 & 0x0f);
self.w
}
}
#[doc = "MPU Region Number valid bit\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]<|fim▁hole|> VALUE1 = 0,
#[doc = "1: the processor: - updates the value of the MPU_RNR to the value of the REGION field - updates the base address for the region specified in the REGION field."]
VALUE2 = 1,
}
impl From<VALID_A> for bool {
#[inline(always)]
fn from(variant: VALID_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `VALID` reader - MPU Region Number valid bit"]
pub struct VALID_R(crate::FieldReader<bool, VALID_A>);
impl VALID_R {
pub(crate) fn new(bits: bool) -> Self {
VALID_R(crate::FieldReader::new(bits))
}
#[doc = r"Get enumerated values variant"]
#[inline(always)]
pub fn variant(&self) -> VALID_A {
match self.bits {
false => VALID_A::VALUE1,
true => VALID_A::VALUE2,
}
}
#[doc = "Checks if the value of the field is `VALUE1`"]
#[inline(always)]
pub fn is_value1(&self) -> bool {
**self == VALID_A::VALUE1
}
#[doc = "Checks if the value of the field is `VALUE2`"]
#[inline(always)]
pub fn is_value2(&self) -> bool {
**self == VALID_A::VALUE2
}
}
impl core::ops::Deref for VALID_R {
type Target = crate::FieldReader<bool, VALID_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `VALID` writer - MPU Region Number valid bit"]
pub struct VALID_W<'a> {
w: &'a mut W,
}
impl<'a> VALID_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: VALID_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "MPU_RNR not changed, and the processor: - updates the base address for the region specified in the MPU_RNR - ignores the value of the REGION field"]
#[inline(always)]
pub fn value1(self) -> &'a mut W {
self.variant(VALID_A::VALUE1)
}
#[doc = "the processor: - updates the value of the MPU_RNR to the value of the REGION field - updates the base address for the region specified in the REGION field."]
#[inline(always)]
pub fn value2(self) -> &'a mut W {
self.variant(VALID_A::VALUE2)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | ((value as u32 & 0x01) << 4);
self.w
}
}
#[doc = "Field `ADDR` reader - Region base address field"]
pub struct ADDR_R(crate::FieldReader<u32, u32>);
impl ADDR_R {
pub(crate) fn new(bits: u32) -> Self {
ADDR_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ADDR_R {
type Target = crate::FieldReader<u32, u32>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ADDR` writer - Region base address field"]
pub struct ADDR_W<'a> {
w: &'a mut W,
}
impl<'a> ADDR_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x007f_ffff << 9)) | ((value as u32 & 0x007f_ffff) << 9);
self.w
}
}
impl R {
#[doc = "Bits 0:3 - MPU region field"]
#[inline(always)]
pub fn region(&self) -> REGION_R {
REGION_R::new((self.bits & 0x0f) as u8)
}
#[doc = "Bit 4 - MPU Region Number valid bit"]
#[inline(always)]
pub fn valid(&self) -> VALID_R {
VALID_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bits 9:31 - Region base address field"]
#[inline(always)]
pub fn addr(&self) -> ADDR_R {
ADDR_R::new(((self.bits >> 9) & 0x007f_ffff) as u32)
}
}
impl W {
#[doc = "Bits 0:3 - MPU region field"]
#[inline(always)]
pub fn region(&mut self) -> REGION_W {
REGION_W { w: self }
}
#[doc = "Bit 4 - MPU Region Number valid bit"]
#[inline(always)]
pub fn valid(&mut self) -> VALID_W {
VALID_W { w: self }
}
#[doc = "Bits 9:31 - Region base address field"]
#[inline(always)]
pub fn addr(&mut self) -> ADDR_W {
ADDR_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "MPU Region Base Address Register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [mpu_rbar](index.html) module"]
pub struct MPU_RBAR_SPEC;
impl crate::RegisterSpec for MPU_RBAR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [mpu_rbar::R](R) reader structure"]
impl crate::Readable for MPU_RBAR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [mpu_rbar::W](W) writer structure"]
impl crate::Writable for MPU_RBAR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets MPU_RBAR to value 0"]
impl crate::Resettable for MPU_RBAR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}<|fim▁end|>
|
pub enum VALID_A {
#[doc = "0: MPU_RNR not changed, and the processor: - updates the base address for the region specified in the MPU_RNR - ignores the value of the REGION field"]
|
<|file_name|>fill_test.cc<|end_file_name|><|fim▁begin|>/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <stdint.h>
#include <initializer_list>
#include <string>
#include <vector>
#include <gmock/gmock.h>
#include <gtest/gtest.h>
#include "tensorflow/lite/kernels/test_util.h"
#include "tensorflow/lite/schema/schema_generated.h"
#include "tensorflow/lite/string_type.h"
namespace tflite {
namespace {
using ::testing::ElementsAreArray;
using ::testing::IsEmpty;
enum class TestType {
kConst = 0,
kDynamic = 1,
};
template <typename dims_type, typename value_type>
class FillOpModel : public SingleOpModel {
public:
explicit FillOpModel(TensorType dims_tensor_type,
std::initializer_list<int> dims_shape,
std::initializer_list<dims_type> dims_data,
value_type value, TestType input_tensor_types) {
if (input_tensor_types == TestType::kDynamic) {
dims_ = AddInput(dims_tensor_type);
} else {
dims_ = AddConstInput(dims_tensor_type, dims_data, dims_shape);
}
value_ = AddInput(GetTensorType<value_type>());
output_ = AddOutput(GetTensorType<value_type>());
SetBuiltinOp(BuiltinOperator_FILL, BuiltinOptions_FillOptions,
CreateFillOptions(builder_).Union());
BuildInterpreter({dims_shape, {}});
if (input_tensor_types == TestType::kDynamic) {
if (dims_data.size() > 0) {
PopulateTensor<dims_type>(dims_, dims_data);
}
}
PopulateTensor<value_type>(value_, {value});
}
std::vector<value_type> GetOutput() {
return ExtractVector<value_type>(output_);
}
std::vector<int> GetOutputShape() { return GetTensorShape(output_); }
protected:
int dims_;
int value_;
int output_;
};
template <typename dims_type, typename quant_type>
class QuantizedFillOpModel : public SingleOpModel {
public:
explicit QuantizedFillOpModel(TensorType dims_tensor_type,
std::initializer_list<int> dims_shape,
std::initializer_list<dims_type> dims_data,
const TensorData& tensor_data, float value) {
dims_ = AddInput(dims_tensor_type);
value_ = AddInput(tensor_data);
output_ = AddOutput(tensor_data);
SetBuiltinOp(BuiltinOperator_FILL, BuiltinOptions_FillOptions,
CreateFillOptions(builder_).Union());
BuildInterpreter({dims_shape, {}});
if (dims_data.size() > 0) {
PopulateTensor<dims_type>(dims_, dims_data);
}
QuantizeAndPopulate<quant_type>(value_, {value});
}
std::vector<quant_type> GetOutput() {
return ExtractVector<quant_type>(output_);
}
std::vector<float> GetDequantizedOutput() {
TfLiteTensor* t = interpreter_->tensor(output_);
return Dequantize(GetOutput(), t->params.scale, t->params.zero_point);
}
std::vector<int> GetOutputShape() { return GetTensorShape(output_); }
protected:
int dims_;
int value_;
int output_;
};
class FillOpTest : public ::testing::TestWithParam<TestType> {};
TEST_P(FillOpTest, FillInt32) {
FillOpModel<int32_t, int32_t> m(TensorType_INT32, {2}, {2, 3}, -11,
GetParam());
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({-11, -11, -11, -11, -11, -11}));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 3}));
}
TEST_P(FillOpTest, FillInt64) {
FillOpModel<int64_t, int64_t> m(TensorType_INT64, {2}, {2, 4}, 1LL << 45,
GetParam());
m.Invoke();
EXPECT_THAT(m.GetOutput(),
ElementsAreArray({1LL << 45, 1LL << 45, 1LL << 45, 1LL << 45,
1LL << 45, 1LL << 45, 1LL << 45, 1LL << 45}));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 4}));
}
TEST_P(FillOpTest, FillFloat) {
FillOpModel<int64_t, float> m(TensorType_INT64, {3}, {2, 2, 2}, 4.0,
GetParam());
m.Invoke();
EXPECT_THAT(m.GetOutput(),
ElementsAreArray({4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0}));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}
TEST_P(FillOpTest, FillFloatInt32Dims) {
FillOpModel<int32_t, float> m(TensorType_INT32, {3}, {2, 2, 2}, 4.0,
GetParam());
m.Invoke();
EXPECT_THAT(m.GetOutput(),
ElementsAreArray({4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0, 4.0}));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}
TEST_P(FillOpTest, FillOutputScalar) {
FillOpModel<int64_t, float> m(TensorType_INT64, {0}, {}, 4.0, GetParam());
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({4.0}));
EXPECT_THAT(m.GetOutputShape(), IsEmpty());
}
TEST_P(FillOpTest, FillBool) {
FillOpModel<int64_t, bool> m(TensorType_INT64, {3}, {2, 2, 2}, true,
GetParam());
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({true, true, true, true, true,
true, true, true}));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}
TEST(FillOpTest, FillString) {
FillOpModel<int64_t, std::string> m(TensorType_INT64, {3}, {2, 2, 2}, "AB",
TestType::kDynamic);
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({"AB", "AB", "AB", "AB", "AB",
"AB", "AB", "AB"}));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}
TEST_P(FillOpTest, FillInt8) {
FillOpModel<int64_t, int8_t> m(TensorType_INT64, {3}, {2, 2, 2}, 5,
GetParam());
m.Invoke();
EXPECT_THAT(m.GetOutput(), ElementsAreArray({5, 5, 5, 5, 5, 5, 5, 5}));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 2, 2}));
}<|fim▁hole|>template <typename quant_type>
void QuantizedFill(float value) {
// Prepare TensorData for quantization of value
const float kMin = -1;
// Workaround to get a zero-point of 0
const float kMax =
std::numeric_limits<quant_type>::max() /
static_cast<float>(std::numeric_limits<quant_type>::max() + 1);
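  // Informal check (not part of the original test): for quant_type = int8_t,
  // kMax = 127 / 128.0f, so the range below is [-|value|, |value| * 127/128].
  // That gives a scale of |value| / 128 and a zero point of exactly 0.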
const TensorData tensor_data(GetTensorType<quant_type>(), {},
std::abs(value) * kMin, std::abs(value) * kMax);
QuantizedFillOpModel<int32_t, quant_type> m(TensorType_INT32, {2}, {2, 3},
tensor_data, value);
m.Invoke();
constexpr float epsilon = 0.01f;
const float min_value = tensor_data.min - epsilon;
const float max_value = tensor_data.max + epsilon;
const float kQuantizedTolerance =
(max_value - min_value) / (std::numeric_limits<quant_type>::max() -
std::numeric_limits<quant_type>::min());
EXPECT_THAT(
m.GetDequantizedOutput(),
ElementsAreArray(ArrayFloatNear(
{value, value, value, value, value, value}, kQuantizedTolerance)));
EXPECT_THAT(m.GetOutputShape(), ElementsAreArray({2, 3}));
}
TEST(FillOpTest, QuantizedFillInt8) { QuantizedFill<int8_t>(3.14f); }
TEST(FillOpTest, QuantizedFillInt16) { QuantizedFill<int16_t>(3.14f); }
INSTANTIATE_TEST_SUITE_P(FillOpTest, FillOpTest,
::testing::Values(TestType::kConst,
TestType::kDynamic));
} // namespace
} // namespace tflite<|fim▁end|>
| |
<|file_name|>view.py<|end_file_name|><|fim▁begin|># Webhooks for external integrations.
from __future__ import absolute_import
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.models import Client, UserProfile
from django.http import HttpRequest, HttpResponse
from typing import Any, Dict, Text
CRASHLYTICS_SUBJECT_TEMPLATE = '{display_id}: {title}'
CRASHLYTICS_MESSAGE_TEMPLATE = '[Issue]({url}) impacts at least {impacted_devices_count} device(s).'
CRASHLYTICS_SETUP_SUBJECT_TEMPLATE = "Setup"
CRASHLYTICS_SETUP_MESSAGE_TEMPLATE = "Webhook has been successfully configured."
VERIFICATION_EVENT = 'verification'
@api_key_only_webhook_view('Crashlytics')
@has_request_variables
def api_crashlytics_webhook(request, user_profile, client, payload=REQ(argument_type='body'),
stream=REQ(default='crashlytics')):<|fim▁hole|> try:
event = payload['event']
if event == VERIFICATION_EVENT:
subject = CRASHLYTICS_SETUP_SUBJECT_TEMPLATE
body = CRASHLYTICS_SETUP_MESSAGE_TEMPLATE
else:
issue_body = payload['payload']
subject = CRASHLYTICS_SUBJECT_TEMPLATE.format(
display_id=issue_body['display_id'],
title=issue_body['title']
)
body = CRASHLYTICS_MESSAGE_TEMPLATE.format(
impacted_devices_count=issue_body['impacted_devices_count'],
url=issue_body['url']
)
except KeyError as e:
return json_error(_("Missing key {} in JSON".format(str(e))))
check_send_message(user_profile, client, 'stream', [stream],
subject, body)
return json_success()<|fim▁end|>
|
# type: (HttpRequest, UserProfile, Client, Dict[str, Any], Text) -> HttpResponse
|
<|file_name|>mount_id.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="id">
<context>
<name>Button</name>
<message>
<location filename="../button.cpp" line="38"/>
<source>Removable media/devices manager</source>
<translation>Pengelola media/perangkat yang dapat dilepas</translation>
</message>
</context>
<context>
<name>Configuration</name>
<message>
<location filename="../configuration.ui" line="14"/>
<source>Removable Media Settings</source>
<translation>Pengaturan Media yang dapat dilepas</translation>
</message>
<message>
<location filename="../configuration.ui" line="20"/>
<source>Behaviour</source>
<translation>Perilaku</translation>
</message>
<message>
<location filename="../configuration.ui" line="26"/>
<source>When a device is connected:</source>
<translation>Ketika perangkat terhubung:</translation>
</message>
<message>
<location filename="../configuration.cpp" line="40"/>
<source>Popup menu</source>
<translation>Menu popup</translation>
</message>
<message>
<location filename="../configuration.cpp" line="41"/>
<source>Show info</source>
<translation>Lihat info</translation>
</message>
<message>
<location filename="../configuration.cpp" line="42"/>
<source>Do nothing</source>
<translation>Tidak melakukan apapun</translation>
</message>
</context>
<context>
<name>DeviceActionInfo</name>
<message><|fim▁hole|> </message>
<message>
<location filename="../actions/deviceaction_info.cpp" line="45"/>
<source>The device <b><nobr>"%1"</nobr></b> is removed.</source>
<translation>Perangkat <b><nobr>"%1"</nobr></b> telah dicopot.</translation>
</message>
<message>
<location filename="../actions/deviceaction_info.cpp" line="50"/>
<source>Removable media/devices manager</source>
<translation>Pengelola media/perangkat yang dapat dilepas</translation>
</message>
</context>
<context>
<name>MenuDiskItem</name>
<message>
<location filename="../menudiskitem.cpp" line="155"/>
<source>Mounting of <b><nobr>"%1"</nobr></b> failed: %2</source>
<translation>Memuat <b><nobr>"%1"</nobr></b> gagal: %2</translation>
</message>
<message>
<location filename="../menudiskitem.cpp" line="157"/>
<location filename="../menudiskitem.cpp" line="178"/>
<source>Removable media/devices manager</source>
<translation>Pengelola media/perangkat yang dapat dilepas</translation>
</message>
<message>
<location filename="../menudiskitem.cpp" line="176"/>
<source>Unmounting of <strong><nobr>"%1"</nobr></strong> failed: %2</source>
<translation>Melepas kaitan <strong><nobr>"%1"</nobr></strong> gagal: %2</translation>
</message>
</context>
<context>
<name>Popup</name>
<message>
<location filename="../popup.cpp" line="68"/>
<source>No devices are available</source>
<translation>Tidak ada perangkat yang tersedia</translation>
</message>
</context>
</TS><|fim▁end|>
|
<location filename="../actions/deviceaction_info.cpp" line="40"/>
<source>The device <b><nobr>"%1"</nobr></b> is connected.</source>
<translation>Perangkat <b><nobr>"%1"</nobr></b> telah tersambung.</translation>
|
<|file_name|>arts2.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import pandas as pd
arts = pd.DataFrame()
# 1. Clean the dates so you only see numbers by using string manipulations
arts["execution_date"] = arts["execution_date"].str.findall(r"([0-9]+)").str[0]
arts["execution_date"] = arts["execution_date"].astype(float)
arts.head()
# 1. If a year is lower than 100, it refers to the 1900s. For example, 78 is actually 1978, and that needs to be fixed too.
arts["execution_date"] = arts["execution_date"].apply(lambda x: 1900 + x if x < 100 else x)
arts.head()
# 2. Get the average execution year per artist.
arts.groupby("artist_name").mean().head()
# 3. Get the average execution year per category.
arts.groupby("category").mean().head()
# 4. Get the number of artworks per artist. Which artist is the most prolific?
artworks_by_artist = arts.groupby("artist_name")[["title"]].aggregate(np.count_nonzero)
artworks_by_artist.sort("title", ascending=False).head()
# 5. Get the number of artworks per category. Which category has the highest number?
artworks_by_category = arts.groupby("category")[["title"]].aggregate(np.count_nonzero)
artworks_by_category.sort("title", ascending=False).head()
# 6. Get the average length of artworks titles per category and artist.
arts['title_length'] = arts['title'].str.len()
length_by_category = arts.groupby("category")[["title_length"]].aggregate(np.mean)
length_by_category.sort("title_length", ascending=False).head()
# 6. Get the year with the highest production.
artworks_by_year = arts.groupby("execution_date")[["title"]].aggregate(np.count_nonzero)
artworks_by_year.sort("title", ascending=False).head()
# 8. Get the approximate period of production for each artist. If an artist painted from 1970 to 1990, the period is 20.
period_min = arts.groupby("artist_name")[['execution_date']].aggregate(np.min)
period_max = arts.groupby("artist_name")[['execution_date']].aggregate(np.max)
(period_max - period_min).sort("execution_date", ascending=False).head()<|fim▁end|>
|
import numpy as np
|
<|file_name|>tableHeading.component.tsx<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2017 3D Repo Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import React from 'react';
import { SortLabel } from './tableHeading.styles';
export const TableHeading = ({ label, activeSort, sortOrder, onClick, onChange, hideSortIcon }) => {
if (!label) {
return (<></>);
}
if (hideSortIcon) {
return (<>{label}</>);
}
return (
<SortLabel
active={activeSort}
direction={sortOrder}<|fim▁hole|> onClick={onClick}
>
{label}
</SortLabel>
);
};<|fim▁end|>
| |
<|file_name|>stats.py<|end_file_name|><|fim▁begin|>from util.runescape_util import get_users_stats, get_level_at_exp
from util.ascii_table import Table, Column
from util.image_util import text_to_image
from discord.ext import commands
from util.arguments import Arguments
from shlex import split
<|fim▁hole|>
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True, aliases=['stat', 'hiscores'],
description='Shows Runescape stats for a given user.')
async def stats(self, ctx, *, msg: str):
parser = Arguments(allow_abbrev=False, prog='stats')
parser.add_argument('name', nargs='+', help='The name of the character to get stats for.')
parser.add_argument('-v', '--virtual', action='store_true',
help='Shows that your level would be if it wasn\'t capped at 99.')
parser.add_argument('-i', '--image', action='store_true',
help='Displays the table as an image. (Useful for mobile)')
await self.bot.send_typing(ctx.message.channel)
try:
args = parser.parse_args(split(msg))
except SystemExit:
await self.bot.say('```%s```' % parser.format_help())
return
except Exception as e:
await self.bot.say('```%s```' % str(e))
return
args.name = ' '.join(args.name)
await self.execute(args)
async def execute(self, args):
stats = await get_users_stats(args.name)
# Checking if the request was a success
if not stats:
await self.bot.say('Stats for **%s** could not be retrieved.' % args.name)
return
table = Table()
table.set_title('VIEWING RS3 STATS FOR %s' % args.name.upper())
table.set_headings('Skill', 'Level', 'Experience', 'Rank')
# Adding rows
total = 0
for i, key in enumerate(stats.keys()):
stat = stats[key]
# Skipping if overall
if i == 0:
continue
level = stat['level'] if not args.virtual else get_level_at_exp(stat['exp'], key == 'invention')
total += level
table.add_row(
key.capitalize(),
Column(level, 2),
Column('{:,}'.format(stat['exp']), 2),
Column('{:,}'.format(stat['rank']), 2))
# Adding the overall level
table.add_row(
'Overall',
Column(total, 3),
Column('{:,}'.format(stats['overall']['exp']), 2),
Column('{:,}'.format(stats['overall']['rank']), 2),
index=0
)
# Plain text
text = str(table)
if not args.image:
await self.bot.say('```%s```' % text)
else:
link = await text_to_image(text)
# Checking if table was uploaded
if not link:
await self.bot.say('Table could not be uploaded as an image to imgur.')
else:
await self.bot.say(link)
def setup(bot):
bot.add_cog(Stats(bot))<|fim▁end|>
|
class Stats:
|
<|file_name|>a.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h>
using namespace std;
using ull = unsigned long long;
ull arr[int(1e5)+5];
int n, q, x;
int bs(int key) {
int l = 0, r = n-1, mid, ans=-1;
while (l <= r) {
mid = l + (r - l + 1)/2;
if (arr[mid] == key) {<|fim▁hole|> }
else if (arr[mid] < key) l = mid + 1;
else r = mid - 1;
}
return ans;
}
int main() {
ios::sync_with_stdio(0);
cin.tie(0);
cin >> n >> q;
for (int i = 0; i < n; ++i) cin >> x, arr[i] = x + INT_MAX;
for (int i = 0; i < q; ++i) {
cin >> x;
cout << bs(x + INT_MAX) << '\n';
}
return 0;
}<|fim▁end|>
|
r = mid - 1;
ans = mid;
|
<|file_name|>urlbuffer.cpp<|end_file_name|><|fim▁begin|>/* Copyright (C) 2011 uberspot
*
* Compiling: znc-buildmod urlbuffer.cpp
* Dependencies: curl, wget, sed and a unix environment.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 3 as published
* by the Free Software Foundation (http://www.gnu.org/licenses/gpl.txt).
*/
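/*
 * Rough usage sketch (assuming a typical ZNC setup; details may vary by version):
 *   $ znc-buildmod urlbuffer.cpp          # builds urlbuffer.so in the current dir
 *   $ cp urlbuffer.so ~/.znc/modules/
 *   then, from IRC:  /msg *status loadmod urlbuffer
 */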
#include "main.h"
#include "User.h"
#include "Nick.h"
#include "Modules.h"
#include "Chan.h"
#include "FileUtils.h"
#include <pthread.h>
#include <climits>
#define MAX_EXTS 6
#define MAX_CHARS 16
class CUrlBufferModule : public CModule
{
private:
VCString lastUrls, nicks;
unsigned int linkNum;
CString target;
static const string supportedExts[MAX_EXTS];
static const char unSupportedChars[MAX_CHARS];
static inline CString getStdoutFromCommand(const CString& cmd);
inline void LoadDefaults();
inline CString convertTime(const CString& str)
{
time_t curtime;
tm* timeinfo;
char buffer[1024];
time(&curtime);
curtime += (time_t) (m_pUser->GetTimezoneOffset() * 60 * 60);
timeinfo = localtime(&curtime);
if (!strftime(buffer, sizeof(buffer), str.c_str(), timeinfo))
{
return "";
}
return CString(buffer);
}
inline bool isValidExtension(CString ext)
{
ext.MakeLower();
for(int i=0; i< MAX_EXTS; i++)
{
if( ext == supportedExts[i])
return true;
}
return false;
}
inline bool isValidDir(const string& dir)
{
for(int i=0; i< MAX_CHARS; i++)
{
if (dir.find(unSupportedChars[i]) !=string::npos)
return false;
}
return true;
}
static void* sendLinks(void *ptr);
inline void CheckLineForLink(const CString& sMessage, const CString& sOrigin);
inline void CheckLineForTrigger(const CString& sMessage, const CString& sTarget);
public:
MODCONSTRUCTOR(CUrlBufferModule) {}
bool OnLoad(const CString& sArgs, CString& sErrorMsg);
~CUrlBufferModule();
EModRet OnUserMsg(CString& sTarget, CString& sMessage);
EModRet OnPrivMsg(CNick& Nick, CString& sMessage);
EModRet OnChanMsg(CNick& Nick, CChan& Channel, CString& sMessage);
void OnModCommand(const CString& sCommand);
};
const string CUrlBufferModule::supportedExts[MAX_EXTS] =
{"jpg", "png", "gif", "jpeg", "bmp", "tiff"} ;
const char CUrlBufferModule::unSupportedChars[MAX_CHARS] =
{'|', ';', '!', '@', '#', '(', ')', '<', '>', '"', '\'', '`', '~', '=', '&', '^'};
bool CUrlBufferModule::OnLoad(const CString& sArgs, CString& sErrorMsg)
{
LoadDefaults();
return true;
}
CUrlBufferModule::~CUrlBufferModule() {}
CUrlBufferModule::EModRet CUrlBufferModule::OnUserMsg(CString& sTarget, CString& sMessage)
{
CheckLineForLink(sMessage, "");
CheckLineForTrigger(sMessage, m_pUser->GetIRCNick().GetNick());
return CONTINUE;
}
CUrlBufferModule::EModRet CUrlBufferModule::OnPrivMsg(CNick& Nick, CString& sMessage)
{
CheckLineForLink(sMessage, Nick.GetNick());
CheckLineForTrigger(sMessage, Nick.GetNick());
return CONTINUE;
}
CUrlBufferModule::EModRet CUrlBufferModule::OnChanMsg(CNick& Nick, CChan& Channel, CString& sMessage)
{
CheckLineForLink(sMessage, Nick.GetNick());
CheckLineForTrigger(sMessage, Nick.GetNick());
return CONTINUE;
}
void CUrlBufferModule::OnModCommand(const CString& sCommand)
{
CString command = sCommand.Token(0).AsLower().Trim_n();
if (command == "help")
{
CTable CmdTable;
CmdTable.AddColumn("Command");
CmdTable.AddColumn("Description");
CmdTable.AddRow();
CmdTable.SetCell("Command", "ENABLE");
CmdTable.SetCell("Description", "Activates link buffering.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "DISABLE");
CmdTable.SetCell("Description", "Deactivates link buffering.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "ENABLELOCAL");
CmdTable.SetCell("Description", "Enables downloading of each link to local directory.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "DISABLELOCAL");
CmdTable.SetCell("Description", "Disables downloading of each link to local directory.");
CmdTable.AddRow();
CmdTable.SetCell("Command","ENABLEPUBLIC");
CmdTable.SetCell("Description", "Enables the usage of !showlinks publicly, by other users.");
CmdTable.AddRow();
CmdTable.SetCell("Command","DISABLEPUBLIC");
CmdTable.SetCell("Description", "Disables the usage of !showlinks publicly, by other users.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "DIRECTORY <#directory>");
CmdTable.SetCell("Description", "Sets the local directory where the links will be saved.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "CLEARBUFFER");
CmdTable.SetCell("Description", "Empties the link buffer.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "BUFFERSIZE <#size>");
CmdTable.SetCell("Description", "Sets the size of the link buffer. Only integers >=0.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "SHOWSETTINGS");
CmdTable.SetCell("Description", "Prints all the settings.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "BUFFERALLLINKS");
CmdTable.SetCell("Description", "Toggles the buffering of all links or only image links.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "SHOWLINKS <#number>");
CmdTable.SetCell("Description", "Prints <#number> or <#buffersize> number of cached links.");
CmdTable.AddRow();
CmdTable.SetCell("Command", "HELP");
CmdTable.SetCell("Description", "This help.");
PutModule(CmdTable);
return;
}else if (command == "enable")
{
SetNV("enable","true",true);
PutModule("Enabled buffering");
}else if (command == "disable")
{
SetNV("enable","false",true);
PutModule("Disabled buffering");
}else if (command == "enablelocal")
{
if(GetNV("directory") == "")
{
PutModule("Directory is not set. First set a directory and then enable local caching");
return;
}
SetNV("enablelocal","true",true);
PutModule("Enabled local caching");
}else if (command == "disablelocal")
{
SetNV("enablelocal", "false", true);
PutModule("Disabled local caching");
}else if (command == "enablepublic")
{
SetNV("enablepublic", "true", true);
PutModule("Enabled public usage of showlinks.");
}else if (command == "disablepublic")
{
SetNV("enablepublic", "false", true);
PutModule("Disabled public usage of showlinks.");
}else if (command == "directory")
{
CString dir=sCommand.Token(1).Replace_n("//", "/").TrimRight_n("/") + "/";
if (!isValidDir(dir))
{
PutModule("Error in directory name. Avoid using: | ; ! @ # ( ) < > \" ' ` ~ = & ^ <space> <tab>");
return;
}
// Check if file exists and is directory
if (dir.empty() || !CFile::Exists(dir) || !CFile::IsDir(dir, false))
{
PutModule("Invalid path or no write access to ["+ sCommand.Token(1) +"].");
return;
}
SetNV("directory", dir, true);
PutModule("Directory for local caching set to " + GetNV("directory"));
}else if (command == "clearbuffer")
{
lastUrls.clear();
nicks.clear();
}else if (command == "buffersize")
{
unsigned int bufSize = sCommand.Token(1).ToUInt();
if(bufSize==0 || bufSize==UINT_MAX)
{
PutModule("Error in buffer size. Use only integers >= 0.");
return;
}
SetNV("buffersize", CString(bufSize), true);
PutModule("Buffer size set to " + GetNV("buffersize"));
}else if (command == "showsettings")
{
for(MCString::iterator it = BeginNV(); it != EndNV(); it++)
{
PutModule(it->first.AsUpper() + " : " + it->second);
}
}else if(command == "bufferalllinks"){
SetNV("bufferalllinks", CString(!GetNV("bufferalllinks").ToBool()), true);
PutModule( CString(GetNV("bufferalllinks").ToBool()?"Enabled":"Disabled") + " buffering of all links.");
}else if (command == "showlinks")
{
if(lastUrls.empty())
PutModule("No links were found...");
else
{
unsigned int maxLinks = GetNV("buffersize").ToUInt();
unsigned int size = sCommand.Token(1).ToUInt();
if(size!=0 && size<UINT_MAX) //if it was a valid number
maxLinks = size;
unsigned int maxSize = lastUrls.size()-1;
for(unsigned int i=0; i<=maxSize && i< maxLinks; i++)
{
PutModule(nicks[maxSize-i] + ": " + lastUrls[maxSize-i]);
}
}
}else
{
PutModule("Unknown command! Try HELP.");
}
}
<|fim▁hole|> if(GetNV("enable")==""){
SetNV("enable", "true", true);
}
if(GetNV("enablelocal")==""){
SetNV("enablelocal", "false", true);
}
if(GetNV("buffersize")== ""){
SetNV("buffersize", "5", true);
}
if(GetNV("enablepublic")==""){
SetNV("enablepublic", "true", true);
}
if(GetNV("bufferalllinks")==""){
SetNV("bufferalllinks", "false", true);
}
}
void CUrlBufferModule::CheckLineForLink(const CString& sMessage, const CString& sOrigin)
{
if(sOrigin != m_pUser->GetIRCNick().GetNick() && GetNV("enable").ToBool() )
{
VCString words;
CString output;
sMessage.Split(" ", words, false, "", "", true, true);
for (size_t a = 0; a < words.size(); a++)
{
CString& word = words[a];
if(word.Left(4) == "http" || word.Left(4) == "www.")
{
//if you find an image download it, save it in the www directory and keep the new link in buffer
VCString tokens;
word.Split("/", tokens, false, "", "", true, true);
string name = tokens[tokens.size()-1];
word.Split(".", tokens, false, "", "", true, true);
//if it's an image link download/upload it else just keep the link
if(isValidExtension( tokens[tokens.size()-1] ))
{
std::stringstream ss;
if( GetNV("enablelocal").ToBool())
{
CString dir = GetNV("directory") + convertTime("%Y-%m-%d") + "/";
if(!CFile::Exists(dir) && !CFile::IsDir(dir, false))
{
CDir::MakeDir(dir, 0755);
}
ss << "wget -b -O " << dir.c_str() << name <<" -q " << word.c_str() << " 2>&1";
getStdoutFromCommand(ss.str());
}
ss.str("");
if (!word.WildCmp("*imgur*")) {
ss << "curl -d \"image=" << word.c_str() << "\" -d \"key=5ce86e7f95d8e58b18931bf290f387be\" http://api.imgur.com/2/upload.xml | sed -n 's/.*<original>\\(.*\\)<\\/original>.*/\\1/p' 2>&1";
output = getStdoutFromCommand(ss.str());
lastUrls.push_back(output);
}else {
lastUrls.push_back(word);
}
} else if(GetNV("bufferalllinks").ToBool()){
lastUrls.push_back(word);
}
nicks.push_back( (sOrigin.empty())? m_pUser->GetIRCNick().GetNick() : sOrigin );
}
}
}
}
CString CUrlBufferModule::getStdoutFromCommand(const CString& cmd)
{
string data="";
char buffer[128];
FILE* stream = popen(cmd.c_str(), "r");
if (stream == NULL || !stream || ferror(stream))
{
return "Error!";
}
while (!feof(stream))
{
if (fgets(buffer, 128, stream) != NULL)
data.append(buffer);
}
pclose(stream);
return data;
}
void *CUrlBufferModule::sendLinks(void *ptr)
{
CUrlBufferModule *caller = static_cast<CUrlBufferModule*> (ptr);
VCString links = caller->lastUrls;
VCString nicks = caller->nicks;
unsigned int maxSize = links.size()-1;
for(unsigned int i=0; i<=maxSize && i<caller->linkNum; i++)
{
sleep(2);
caller->PutIRC("PRIVMSG " + caller->target + " :" + nicks[maxSize-i] + ": "+ links[maxSize-i]);
}
return NULL;
}
void CUrlBufferModule::CheckLineForTrigger(const CString& sMessage, const CString& sTarget)
{
if(GetNV("enablepublic").ToBool())
{
VCString words;
sMessage.Split(" ", words, false, "", "", true, true);
for (size_t a = 0; a < words.size(); a++)
{
CString& word = words[a];
if(word.AsLower() == "!showlinks")
{
if(lastUrls.empty())
PutIRC("PRIVMSG " + sTarget + " :No links were found...");
else
{
unsigned int maxLinks = GetNV("buffersize").ToUInt();
if (a+1 < words.size())
{
unsigned int size = words[a+1].ToUInt();
if(size!=0 && size<UINT_MAX) //if it was a valid number
maxLinks = size;
}
linkNum = maxLinks;
target = sTarget;
pthread_t thread;
pthread_attr_t attr;
pthread_attr_init(&attr);
pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
pthread_create( &thread, &attr, &sendLinks, this);
}
}
}
}
}
MODULEDEFS(CUrlBufferModule, "Module that caches locally images/links posted on irc channels.")<|fim▁end|>
|
void CUrlBufferModule::LoadDefaults()
{
|
<|file_name|>list.ts<|end_file_name|><|fim▁begin|>class FileModel {
readonly fileMetadataId: string;
readonly width: number;
readonly height: number;
readonly takenAt: Date | null;
readonly uploadedAt: Date;
readonly originalFilename: string;
readonly userName: KnockoutObservable<string>;
readonly category: KnockoutObservable<string>;
readonly size: number;
readonly contentType: string;
readonly url: string;
readonly thumbnailUrl: string;
readonly newFilename: string;
checked: KnockoutObservable<boolean>;
readonly takenOrUploadedAt: Date;
readonly sizeStr: string;
readonly takenAtStr: KnockoutComputed<string | null>;
readonly uploadedAtStr: KnockoutComputed<string>;
constructor(readonly parent: ListViewModel, m: IFileMetadata) {
this.fileMetadataId = m.fileMetadataId;
this.width = m.width;
this.height = m.height;
this.takenAt = m.takenAt instanceof Date ? m.takenAt
: m.takenAt != null ? new Date(m.takenAt)
: null;
this.uploadedAt = m.uploadedAt instanceof Date
? m.uploadedAt
: new Date(m.uploadedAt);
this.takenOrUploadedAt = this.takenAt || this.uploadedAt;
this.originalFilename = m.originalFilename;
this.userName = ko.observable(m.userName || "");
this.category = ko.observable(m.category || "");
this.size = m.size;
this.sizeStr = this.size >= 1048576 ? `${(this.size / 1048576).toFixed(2)} MiB`
: this.size >= 1024 ? `${(this.size / 1024).toFixed(2)} KiB`
: `${this.size} bytes`;
this.contentType = m.contentType;
this.newFilename = m.newFilename;
this.url = m.url;
this.thumbnailUrl = m.thumbnailUrl;
this.checked = ko.observable(false);
this.takenAtStr = ko.pureComputed(() => this.takenAt && this.takenAt.toLocaleString());
this.uploadedAtStr = ko.pureComputed(() => this.uploadedAt && this.uploadedAt.toLocaleString());
}
toggle(x: FileModel, e: JQuery.Event) {
if (e.shiftKey) {
let index1 = Math.max(0, this.parent.files.indexOf(this.parent.lastClicked));
let index2 = Math.max(0, this.parent.files.indexOf(this));
let min = Math.min(index1, index2);
let max = Math.max(index1, index2);
this.parent.files().forEach((f, i) => {
f.checked(i >= min && i <= max);
});
} else if (e.ctrlKey) {
this.checked(!this.checked());
this.parent.lastClicked = this;
} else {
this.parent.files().forEach(f => {
f.checked(f === this);
});
this.parent.lastClicked = this;
}
}
defaultAction(f: FileModel, e: Event) {
return true;
}
}
interface KnockoutObservableArray<T> {
orderField: KnockoutObservable<string>;
orderDirection: KnockoutObservable<"asc" | "desc">;
}
class ListViewModel {
readonly files: KnockoutObservableArray<FileModel>;<|fim▁hole|> readonly endDate: KnockoutObservable<string>;
readonly userName: KnockoutObservable<string>;
readonly category: KnockoutObservable<string>;
readonly viewStyle: KnockoutObservable<string>;
readonly selectAllChecked: KnockoutObservable<boolean>;
readonly selectedFiles: KnockoutComputed<FileModel[]>;
readonly userNames: KnockoutObservable<string[]>;
readonly categories: KnockoutObservable<string[]>;
readonly displayedFiles: KnockoutComputed<FileModel[]>;
readonly undisplayedSelectedFiles: KnockoutComputed<FileModel[]>;
public lastClicked: FileModel;
private password: string | null;
constructor() {
this.files = ko.observableArray();
this.lastClicked = this.files()[0];
try {
this.myTimeZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
} catch (e) {
this.myTimeZone = `UTC + ${new Date().getTimezoneOffset()} minutes`;
}
this.startDate = ko.observable("");
this.endDate = ko.observable("");
this.userName = ko.observable("");
this.category = ko.observable("");
this.resetFilters();
this.viewStyle = ko.observable("table");
this.selectAllChecked = ko.observable(false);
this.selectAllChecked.subscribe(newValue => {
for (const f of this.displayedFiles()) {
f.checked(newValue);
}
});
this.selectedFiles = ko.pureComputed(() => this.files().filter(f => f.checked()));
this.userNames = ko.pureComputed(() => [""].concat(this.files().map(f => f.userName()))
.filter((v, i, a) => a.indexOf(v) === i)
.sort());
this.categories = ko.pureComputed(() => [""].concat(this.files().map(f => f.category()))
.filter((v, i, a) => a.indexOf(v) === i)
.sort());
this.displayedFiles = ko.pureComputed(() => this.files().filter(f => {
if (this.startDate() && new Date(this.startDate()) > (f.takenAt || f.uploadedAt)) return false;
if (this.endDate() && new Date(this.endDate()) < (f.takenAt || f.uploadedAt)) return false;
if (this.userName() && f.userName() != this.userName()) return false;
if (this.category() && f.category() != this.category()) return false;
return true;
}));
this.undisplayedSelectedFiles = ko.pureComputed(() => {
const displayedFiles = this.displayedFiles();
return this.selectedFiles().filter(f => displayedFiles.indexOf(f) < 0);
});
this.password = null;
}
async loadFiles() {
let resp = await this.fetchOrError("/api/files");
let files: IFileMetadata[] = await resp.json();
this.files(files.map(f => new FileModel(this, f)));
this.resetFilters();
}
private async getPassword() {
if (this.password == null) {
const p = await promptAsync("Enter the file management password to make changes.");
if (p != null) {
const response = await fetch("/api/password/check", {
headers: { "X-FileManagementPassword": p },
method: "GET"
});
if (Math.floor(response.status / 100) == 2) {
this.password = p;
} else {
console.error(`${response.status} ${response.statusText}: ${await response.text()}`);
await alertAsync(response.status == 400 ? "The password is not valid." : "An unknown error occurred.");
}
}
}
return this.password;
}
resetFilters() {
let dates = this.files().map(f => f.takenOrUploadedAt).sort((a, b) => +a - +b);
if (dates.length == 0) {
this.startDate("2000-01-01T00:00");
this.endDate("2025-01-01T00:00");
} else {
this.startDate(`${dates[0].getFullYear()}-01-01T00:00`);
this.endDate(`${dates[dates.length - 1].getFullYear() + 1}-01-01T00:00`);
}
this.userName("");
this.category("");
}
private async fetchOrError(input: RequestInfo, init?: RequestInit) {
const response = await fetch(input, {
...init,
headers: {
'X-FileManagementPassword': await this.getPassword() || "",
...(init ? init.headers : {})
}
});
if (Math.floor(response.status / 100) != 2) {
throw new Error(`${response.status} ${response.statusText}: ${await response.text()}`);
}
return response;
}
download() {
const f = $("<form></form>")
.attr("method", "post")
.attr("action", "/api/files/zip")
.appendTo(document.body);
$("<textarea></textarea>")
.attr("name", "ids")
.text(this.selectedFiles().map(f => f.fileMetadataId).join(","))
.appendTo(f);
f.submit().remove();
}
async changeUserName() {
if (await this.getPassword() == null) return;
const newName = await promptAsync("What \"taken by\" name should be listed for these files?", this.selectedFiles()[0].userName());
if (newName != null) {
try {
for (const f of this.selectedFiles()) {
const response = await this.fetchOrError(f.url, {
headers: {
'Content-Type': 'application/json'
},
method: "PATCH",
body: JSON.stringify({ userName: newName })
});
f.userName(newName);
f.checked(false);
}
} catch (e) {
console.error(e);
await alertAsync("An unknown error occurred.");
}
}
}
async changeCategory() {
if (await this.getPassword() == null) return;
const newCategory = await promptAsync("What category should these files be part of?", this.selectedFiles()[0].category());
if (newCategory != null) {
try {
for (const f of this.selectedFiles()) {
const response = await this.fetchOrError(f.url, {
headers: {
'Content-Type': 'application/json',
},
method: "PATCH",
body: JSON.stringify({ category: newCategory })
});
f.category(newCategory);
f.checked(false);
}
} catch (e) {
console.error(e);
await alertAsync("An unknown error occurred.");
}
}
}
async del() {
if (await this.getPassword() == null) return;
if (await confirmAsync(`Are you sure you want to permanently delete ${this.selectedFiles().length} file(s) from the server?`)) {
try {
for (const f of this.selectedFiles()) {
const response = await this.fetchOrError(f.url, {
method: "DELETE"
});
this.files.remove(f);
}
} catch (e) {
console.error(e);
await alertAsync("An unknown error occurred.");
}
}
}
}
var vm: ListViewModel;
document.addEventListener("DOMContentLoaded", async () => {
ko.applyBindings(vm = new ListViewModel(), document.getElementById("ko-area"));
$(document.body).applyDateTimeLocalPolyfill();
vm.files.orderField("takenOrUploadedAt");
vm.loadFiles();
}, false);<|fim▁end|>
|
readonly myTimeZone: string;
readonly startDate: KnockoutObservable<string>;
|
<|file_name|>token.rs<|end_file_name|><|fim▁begin|>use std::fmt::{Formatter, Result, Show};
use syntax::ast::pos::Position;
use syntax::ast::punc::Punctuator;
use syntax::ast::keyword::Keyword;
#[deriving(Clone, PartialEq)]
/// A single token of Javascript code including its position
pub struct Token {
/// The token
pub data : TokenData,
/// The token's position
pub pos : Position
}
impl Token {
/// Create a new detailed token from the token data, line number and column number
pub fn new(data: TokenData, line_number: uint, column_number: uint) -> Token {
Token {
data: data,
pos: Position::new(line_number, column_number)
}
}
}
impl Show for Token {
fn fmt(&self, f: &mut Formatter) -> Result {
write!(f, "{}", self.data)
}
}
#[deriving(Clone, PartialEq)]
/// A single token of Javascript code - a single word, symbol or constant
pub enum TokenData {<|fim▁hole|> /// An identifier
TIdentifier(String),
/// A keyword
TKeyword(Keyword),
/// A `null` literal
TNullLiteral,
/// A numeric literal
TNumericLiteral(f64),
/// A piece of punctuation
TPunctuator(Punctuator),
/// A string literal
TStringLiteral(String),
/// A regular expression
TRegularExpression(String),
/// A comment
TComment(String)
}
impl Show for TokenData {
fn fmt(&self, f: &mut Formatter) -> Result {
match self.clone() {
TBooleanLiteral(val) => write!(f, "{}", val),
TEOF => write!(f, "end of file"),
TIdentifier(ident) => write!(f, "{}", ident),
TKeyword(word) => write!(f, "{}", word),
TNullLiteral => write!(f, "null"),
TNumericLiteral(num) => write!(f, "{}", num),
TPunctuator(punc) => write!(f, "{}", punc),
TStringLiteral(lit) => write!(f, "{}", lit),
TRegularExpression(reg) => write!(f, "{}", reg),
TComment(comm) => write!(f, "/*{}*/", comm)
}
}
}<|fim▁end|>
|
/// A boolean literal, which is either `true` or `false`
TBooleanLiteral(bool),
/// The end of the file
TEOF,
|
<|file_name|>problem11.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Largest product in a grid
Problem 11
Published on 22 February 2002 at 06:00 pm [Server Time]
In the 20x20 grid below, four numbers along a diagonal line have been marked in red.
The product of these numbers is 26 * 63 * 78 * 14 = 1788696.
What is the greatest product of four adjacent numbers in the same direction (up, down, left, right, or diagonally) in the 20x20 grid?
"""
THE_GRID = [[int(column) for column in row.split(' ')] for row in
"""
08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
""".strip().split('\n')]
"""
A few words about the declaration of THE_GRID:
This is not the easiest thing to digest on first look. I think it is "pythonic"
in its implementation and it allows you to copy/paste the grid straight out of the problem
statement without a bunch of mucking around to manually turn it into a 2d array
( or nested lists, actually ). It is arranged as a list of rows. Each row is a
list of numbers for each column in that row. Looking at it, the multi-line string
definition actually converts to a list of strings from the split operation. One
string for each row. The top list comprehension converts each row into a list of
short strings ( the columns ) which are also converted to int.
"""
#------------------------------------------------------------------------------
import operator
#------------------------------------------------------------------------------
def product(iterable):
return reduce(operator.mul, iterable, 1)
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for y_dir in (0, 1):
for x_dir in (0,1):
for i in range(run_length):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for i in range(run_length):
for y_dir in (0, 1):
for x_dir in (0,1):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
highest = 0
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for x_dir, y_dir in [(1, 0), (0, 1), (1, 1)]:
for i in range(run_length):
print THE_GRID[row+(y_dir*i)][column+x_dir*i]
def solve(run_length):
height = len(THE_GRID)
width = len(THE_GRID[0])
highest = 0
for row in range(height-run_length+1):
for column in range(width-run_length+1):
for x_dir, y_dir in [(1, 0), (0, 1), (1, 1)]:
run =[THE_GRID[row+(y_dir*i)][column+x_dir*i] for i in range(run_length)]
result = product(run)
print run, result
#if result > highest:
# highest = result
#return(highest)
#------------------------------------------------------------------------------
def solve():
g = THE_GRID
maxp = 0
rows, cols, path_size = len(g), len(g[0]), 4  # four adjacent numbers, per the problem statement
for i in range(rows):
for j in range(cols - path_size + 1):
<|fim▁hole|> product([g[j+s][i] for s in range(path_size)]))
#phv = max(g[i][j] * g[i][j+1] * g[i][j+2] * g[i][j+3],
# g[j][i] * g[j+1][i] * g[j+2][i] * g[j+3][i])
if i < rows - path_size:
pdd = max(product([g[i+s][j+s] for s in range(path_size)]),
product([g[i+s][j+path_size-s-1] for s in range(path_size)]))
#pdd = max(g[i][j] * g[i+1][j+1] * g[i+2][j+2] * g[i+3][j+3],
# g[i][j+3] * g[i+1][j+2] * g[i+2][j+1] * g[i+3][j])
maxp = max(maxp, phv, pdd)
return maxp
#------------------------------------------------------------------------------
def main():
print "PROBLEM:\n"
for line in __doc__.strip().split('\n'):
print '\t', line
print "\nSOLUTION:"
print "\n\t", solve()
#------------------------------------------------------------------------------
if __name__ == "__main__":
main()<|fim▁end|>
|
phv = max(product([g[i][j+s] for s in range(path_size)]),
|
<|file_name|>message-user-agent-client.ts<|end_file_name|><|fim▁begin|>import { OutgoingMessageRequest, OutgoingRequestDelegate, OutgoingRequestMessage } from "../messages";
import { NonInviteClientTransaction } from "../transactions";
import { UserAgentCore } from "../user-agent-core";
import { UserAgentClient } from "./user-agent-client";
/**
* MESSAGE UAC.
* @public
*/
export class MessageUserAgentClient extends UserAgentClient implements OutgoingMessageRequest {
constructor(core: UserAgentCore, message: OutgoingRequestMessage, delegate?: OutgoingRequestDelegate) {
super(NonInviteClientTransaction, core, message, delegate);
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>relative_paths.py<|end_file_name|><|fim▁begin|># All of the other examples directly embed the Javascript and CSS code for
# Bokeh's client-side runtime into the HTML. This leads to the HTML files
# being rather large. An alternative is to ask Bokeh to produce HTML that
# has a relative link to the Bokeh Javascript and CSS. This is easy to
# do; you just pass in a few extra arguments to the output_file() command.<|fim▁hole|>from bokeh.plotting import *
N = 100
x = np.linspace(0, 4*np.pi, N)
y = np.sin(x)
output_file("relative_paths.html", title="Relative path example", mode="relative")
scatter(x,y, color="#FF00FF", tools="pan,wheel_zoom,box_zoom,reset,previewsave")
show()
# By default, the URLs for the Javascript and CSS will be relative to
# the current directory, i.e. the directory in which the HTML file is
# generated. You can provide a different "root" directory from which
# the relative paths will be computed:
#
# output_file("scatter.html", title="scatter.py example",
# resources="relative", rootdir="some/other/path")<|fim▁end|>
|
import numpy as np
|
<|file_name|>create-admin.js<|end_file_name|><|fim▁begin|>const app = require('../server');
const readline = require('readline');
const {
User,
Role,<|fim▁hole|> RoleMapping,
} = app.models;
const rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
});
Role.findOne({ where: { name: 'admin' } })
.then((role) => {
if (!role) {
console.log('No admin role found. Create fixtures first.');
process.exit();
}
const admin = {};
const fields = ['username', 'password'];
let field = fields.shift();
console.log(`${field}: `);
rl.on('line', (line) => {
admin[field] = line;
field = fields.shift();
if (!field) {
process.stdout.write('Creating the user... ');
User.create(admin)
.then(user =>
RoleMapping.create({
UserId: user.id,
RoleId: role.id,
})
)
.then(() => {
console.log('Done!\n');
process.exit(0);
})
.catch((err) => {
console.error(err);
process.exit(1);
});
return;
}
process.stdout.write(`${field}: \n`);
});
});<|fim▁end|>
| |
<|file_name|>urp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import itertools
import os
import sys
try:
from urllib import quote_plus, urlencode
from urlparse import parse_qsl, urlparse, urlunparse
except ImportError:
from urllib.parse import parse_qsl, quote_plus, urlencode, urlparse, urlunparse
ERR_INVALID_PAIR = 3
def parse(args, data):
url = urlparse(data)
query = url.query
if not args.no_query_params:
query = parse_qsl(url.query)
return url, query
def build_authority(username, password, hostname, port):
netloc = hostname
if username or password:
auth = username + ':' + password
netloc = auth + '@' + netloc
if port:
netloc += ':' + port
return netloc
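# For example (informal note, not in the original script):
#   build_authority("bob", "s3cret", "example.com", "8080")
#   returns "bob:s3cret@example.com:8080", while empty credentials and port
#   leave the bare hostname untouched.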
def process(args, url, query):
scheme = args.scheme or url.scheme
username = args.username or (url.username or '')
password = args.password or (url.password or '')
hostname = args.hostname or (url.hostname or '')
port = str(args.port or (url.port or ''))
params = args.params or url.params
fragment = args.fragment or url.fragment
authority = build_authority(username, password, hostname, port)
path = url.path
if args.path:
if args.path.startswith('/'):
path = args.path
else:
path = os.path.join(url.path, args.path)
path = os.path.normpath(path)
if args.no_query_params:
if args.query:
query = args.query
if args.queries:
query += ''.join(args.queries)
if args.no_url_encoding:
encoded_query = query
else:
encoded_query = quote_plus(query)
else:
if args.query:
query = parse_qsl(args.query)
if args.queries:
query.extend(p.split('=', 2) for p in args.queries)
query = [(q, v) for q, v in query if q not in args.ignored_queries]
if args.sort_query:
query = sorted(query, key=lambda p: p[0])
if args.no_url_encoding:
encoded_query = '&'.join('='.join(p) for p in query)
else:
encoded_query = urlencode(query)
suppress_default = False
if args.print_scheme:
suppress_default = True
yield scheme
if args.print_username:
suppress_default = True
yield username
if args.print_password:
suppress_default = True
yield password
if args.print_hostname:
suppress_default = True
yield hostname
if args.print_port:
suppress_default = True
yield port
if args.print_authority:
suppress_default = True
yield authority
if args.print_path:
suppress_default = True
yield path
if args.print_params:
suppress_default = True
yield params
if args.print_query:
suppress_default = True
yield encoded_query
if args.query_value and not args.no_query_params:
suppress_default = True
# Would be nice to make `query_map` a defaultdict, but that would
# restrict this program to newer Python versions.
query_map = {}
for q, v in query:
if q not in query_map:
query_map[q] = []
query_map[q].append(v)
for q in args.query_value:
for v in query_map.get(q, ['']):
yield v
if args.print_query_names and not args.no_query_params:
suppress_default = True
for q in query:
yield q[0]
if args.print_query_values and not args.no_query_params:
suppress_default = True
for q in query:
yield q[1]
if args.print_fragment:
suppress_default = True
yield fragment
if not suppress_default:
yield urlunparse((scheme, authority, path, params, encoded_query, fragment))
def main():
ap = argparse.ArgumentParser(description='extract and modify URL features')
# URL-printing options
ap.add_argument('-s', '--print-scheme', action='store_true', dest='print_scheme', help="print scheme")
ap.add_argument('-u', '--print-username', action='store_true', dest='print_username', help="print username")
ap.add_argument('-w', '--print-password', action='store_true', dest='print_password', help="print password")
ap.add_argument('-o', '--print-hostname', action='store_true', dest='print_hostname', help="print hostname")
ap.add_argument('-p', '--print-port', action='store_true', dest='print_port', help="print port")
ap.add_argument('-a', '--print-authority', action='store_true', dest='print_authority', help="print authority")
ap.add_argument('-d', '--print-path', action='store_true', dest='print_path', help="print path")
ap.add_argument( '--print-params', action='store_true', dest='print_params', help="print params")
ap.add_argument('-q', '--print-query', action='store_true', dest='print_query', help="print query string")
ap.add_argument( '--print-query-names', action='store_true', dest='print_query_names', help="print only query parameter names")
ap.add_argument( '--print-query-values', action='store_true', dest='print_query_values', help="print only query parameter values")
ap.add_argument('-f', '--print-fragment', action='store_true', dest='print_fragment', help="print fragment")
ap.add_argument('-g', '--print-query-value', action='append', metavar='QUERY', dest='query_value', help="print value of query parameter")
# URL-mutating options
ap.add_argument('-S', '--scheme', action='store', dest='scheme', help="set scheme")
ap.add_argument('-U', '--username', action='store', dest='username', help="set username")
ap.add_argument('-W', '--password', action='store', dest='password', help="set password")
ap.add_argument('-O', '--hostname', action='store', dest='hostname', help="set hostname")
ap.add_argument('-P', '--port', action='store', dest='port', help="set port")
ap.add_argument('-D', '--path', action='store', dest='path', help="set or append path")
ap.add_argument( '--params', action='store', dest='params', help="set params")
ap.add_argument( '--query', action='store', dest='query', help="set query")<|fim▁hole|> # Behavior-modifying options
ap.add_argument( '--no-url-encoding', action='store_true', help="disable URL encoding")
ap.add_argument( '--no-query-params', action='store_true', help="disable query parameter parsing")
ap.add_argument( '--sort-query', action='store_true', help="sort printed query parameters by name")
ap.add_argument('-x', '--ignore-query', action='append', dest='ignored_queries', metavar='QUERY', default=[], help="ignore query parameter")
ap.add_argument( '--version', action='version', version='%(prog)s 0.1.1')
# Positional arguments
ap.add_argument('urls', nargs='*', metavar='URL')
args = ap.parse_args()
for pair in args.queries:
if '=' not in pair:
sys.stderr.write("invalid name=value pair: {}\n".format(pair))
sys.exit(ERR_INVALID_PAIR)
# Use the field and record separators from the environment
ofs = os.environ.get('OFS', ' ')
rs = os.environ.get('RS', '\n')
inputs = []
if not sys.stdin.isatty():
inputs.append(sys.stdin)
inputs.append(args.urls)
for line in itertools.chain(*inputs):
url, query = parse(args, line.strip())
output = process(args, url, query)
sys.stdout.write(ofs.join(output))
sys.stdout.write(rs)
if __name__ == '__main__':
main()<|fim▁end|>
|
ap.add_argument('-Q', '--append-query', metavar='NAME=VALUE', action='append', dest='queries', default=[], help="append query parameter")
ap.add_argument('-F', '--fragment', action='store', dest='fragment', help="set fragment")
|
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Generated by typings
// Source: https://raw.githubusercontent.com/DefinitelyTyped/DefinitelyTyped/de82425735f84a10b43921ae4b1d085b3752a626/mocha/mocha.d.ts
interface MochaSetupOptions {
//milliseconds to wait before considering a test slow
slow?: number;
// timeout in milliseconds
timeout?: number;
// ui name "bdd", "tdd", "exports" etc
ui?: string;
//array of accepted globals
globals?: any[];
// reporter instance (function or string), defaults to `mocha.reporters.Spec`
reporter?: any;
// bail on the first test failure
bail?: boolean;
// ignore global leaks
ignoreLeaks?: boolean;
// grep string or regexp to filter tests with
grep?: any;
}
interface MochaDone {
(error?: Error): void;
}
declare var mocha: Mocha;
declare var describe: Mocha.IContextDefinition;
declare var xdescribe: Mocha.IContextDefinition;
// alias for `describe`
declare var context: Mocha.IContextDefinition;
// alias for `describe`
declare var suite: Mocha.IContextDefinition;
declare var it: Mocha.ITestDefinition;
declare var xit: Mocha.ITestDefinition;
// alias for `it`
declare var test: Mocha.ITestDefinition;
declare var specify: Mocha.ITestDefinition;
declare function before(action: () => void): void;
declare function before(action: (done: MochaDone) => void): void;
declare function before(description: string, action: () => void): void;
declare function before(description: string, action: (done: MochaDone) => void): void;
declare function setup(action: () => void): void;
declare function setup(action: (done: MochaDone) => void): void;
declare function after(action: () => void): void;
declare function after(action: (done: MochaDone) => void): void;
declare function after(description: string, action: () => void): void;
declare function after(description: string, action: (done: MochaDone) => void): void;
declare function teardown(action: () => void): void;
declare function teardown(action: (done: MochaDone) => void): void;
declare function beforeEach(action: () => void): void;<|fim▁hole|>declare function beforeEach(action: (done: MochaDone) => void): void;
declare function beforeEach(description: string, action: () => void): void;
declare function beforeEach(description: string, action: (done: MochaDone) => void): void;
declare function suiteSetup(action: () => void): void;
declare function suiteSetup(action: (done: MochaDone) => void): void;
declare function afterEach(action: () => void): void;
declare function afterEach(action: (done: MochaDone) => void): void;
declare function afterEach(description: string, action: () => void): void;
declare function afterEach(description: string, action: (done: MochaDone) => void): void;
declare function suiteTeardown(action: () => void): void;
declare function suiteTeardown(action: (done: MochaDone) => void): void;
declare class Mocha {
constructor(options?: {
grep?: RegExp;
ui?: string;
reporter?: string;
timeout?: number;
bail?: boolean;
});
/** Setup mocha with the given options. */
setup(options: MochaSetupOptions): Mocha;
bail(value?: boolean): Mocha;
addFile(file: string): Mocha;
/** Sets reporter by name, defaults to "spec". */
reporter(name: string): Mocha;
/** Sets reporter constructor, defaults to mocha.reporters.Spec. */
reporter(reporter: (runner: Mocha.IRunner, options: any) => any): Mocha;
ui(value: string): Mocha;
grep(value: string): Mocha;
grep(value: RegExp): Mocha;
invert(): Mocha;
ignoreLeaks(value: boolean): Mocha;
checkLeaks(): Mocha;
/**
* Function to allow assertion libraries to throw errors directly into mocha.
* This is useful when running tests in a browser because window.onerror will
* only receive the 'message' attribute of the Error.
*/
throwError(error: Error): void;
/** Enables growl support. */
growl(): Mocha;
globals(value: string): Mocha;
globals(values: string[]): Mocha;
useColors(value: boolean): Mocha;
useInlineDiffs(value: boolean): Mocha;
timeout(value: number): Mocha;
slow(value: number): Mocha;
enableTimeouts(value: boolean): Mocha;
asyncOnly(value: boolean): Mocha;
noHighlighting(value: boolean): Mocha;
/** Runs tests and invokes `onComplete()` when finished. */
run(onComplete?: (failures: number) => void): Mocha.IRunner;
}
// merge the Mocha class declaration with a module
declare namespace Mocha {
/** Partial interface for Mocha's `Runnable` class. */
interface IRunnable {
title: string;
fn: Function;
async: boolean;
sync: boolean;
timedOut: boolean;
}
/** Partial interface for Mocha's `Suite` class. */
interface ISuite {
parent: ISuite;
title: string;
fullTitle(): string;
}
/** Partial interface for Mocha's `Test` class. */
interface ITest extends IRunnable {
parent: ISuite;
pending: boolean;
fullTitle(): string;
}
/** Partial interface for Mocha's `Runner` class. */
interface IRunner {}
interface IContextDefinition {
(description: string, spec: () => void): ISuite;
only(description: string, spec: () => void): ISuite;
skip(description: string, spec: () => void): void;
timeout(ms: number): void;
}
interface ITestDefinition {
(expectation: string, assertion?: () => void): ITest;
(expectation: string, assertion?: (done: MochaDone) => void): ITest;
only(expectation: string, assertion?: () => void): ITest;
only(expectation: string, assertion?: (done: MochaDone) => void): ITest;
skip(expectation: string, assertion?: () => void): void;
skip(expectation: string, assertion?: (done: MochaDone) => void): void;
timeout(ms: number): void;
}
export module reporters {
export class Base {
stats: {
suites: number;
tests: number;
passes: number;
pending: number;
failures: number;
};
constructor(runner: IRunner);
}
export class Doc extends Base {}
export class Dot extends Base {}
export class HTML extends Base {}
export class HTMLCov extends Base {}
export class JSON extends Base {}
export class JSONCov extends Base {}
export class JSONStream extends Base {}
export class Landing extends Base {}
export class List extends Base {}
export class Markdown extends Base {}
export class Min extends Base {}
export class Nyan extends Base {}
export class Progress extends Base {
/**
* @param options.open String used to indicate the start of the progress bar.
* @param options.complete String used to indicate a complete test on the progress bar.
* @param options.incomplete String used to indicate an incomplete test on the progress bar.
* @param options.close String used to indicate the end of the progress bar.
*/
constructor(runner: IRunner, options?: {
open?: string;
complete?: string;
incomplete?: string;
close?: string;
});
}
export class Spec extends Base {}
export class TAP extends Base {}
export class XUnit extends Base {
constructor(runner: IRunner, options?: any);
}
}
}
declare module "mocha" {
export = Mocha;
}<|fim▁end|>
| |
<|file_name|>set.rs<|end_file_name|><|fim▁begin|>use num::Num;
use signed::Signed;
<|fim▁hole|>#[inline]
pub fn set<'a, 'b, T: Copy + Num>(out: &'a mut [T; 4], x: T, y: T, z: T, w: T) -> &'a mut [T; 4] {
out[0] = x;
out[1] = y;
out[2] = z;
out[3] = w;
out
}
#[test]
fn test_set() {
let mut v = [0, 0, 0, 0];
set(&mut v, 1, 2, 3, 4);
assert!(v == [1, 2, 3, 4]);
}
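// The helpers below fill a 4-component vector in place. Reading the set() calls
// literally, zero() yields [0, 0, 0, 0], identity() [0, 0, 0, 1] and up() [0, 0, 1, 1];
// the geometric naming (up/down/forward/back/right/left) is taken at face value here.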
#[inline]
pub fn zero<'a, 'b, T: Copy + Num>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, T::zero(), T::zero(), T::zero(), T::zero()) }
#[inline]
pub fn identity<'a, 'b, T: Copy + Num>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, T::zero(), T::zero(), T::zero(), T::one()) }
#[inline]
pub fn up<'a, 'b, T: Copy + Num>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, T::zero(), T::zero(), T::one(), T::one()) }
#[inline]
pub fn down<'a, 'b, T: Copy + Signed>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, T::zero(), T::zero(), -T::one(), T::one()) }
#[inline]
pub fn forward<'a, 'b, T: Copy + Num>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, T::zero(), T::one(), T::one(), T::one()) }
#[inline]
pub fn back<'a, 'b, T: Copy + Signed>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, T::zero(), -T::one(), T::zero(), T::one()) }
#[inline]
pub fn right<'a, 'b, T: Copy + Num>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, T::one(), T::zero(), T::one(), T::one()) }
#[inline]
pub fn left<'a, 'b, T: Copy + Signed>(out: &'a mut [T; 4]) -> &'a mut [T; 4] { set(out, -T::one(), T::zero(), T::one(), T::one()) }<|fim▁end|>
| |
<|file_name|>apps.js<|end_file_name|><|fim▁begin|>import AuthenticatedRoute from 'ghost/routes/authenticated';
import CurrentUserSettings from 'ghost/mixins/current-user-settings';
import styleBody from 'ghost/mixins/style-body';
var AppsRoute = AuthenticatedRoute.extend(styleBody, CurrentUserSettings, {
titleToken: 'Apps',
classNames: ['settings-view-apps'],<|fim▁hole|>
beforeModel: function () {
if (!this.get('config.apps')) {
return this.transitionTo('settings.general');
}
return this.get('session.user')
.then(this.transitionAuthor())
.then(this.transitionEditor());
},
model: function () {
return this.store.find('app');
}
});
export default AppsRoute;<|fim▁end|>
| |
<|file_name|>conn.go<|end_file_name|><|fim▁begin|>// Copyright 2014 beego Author. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package logs
import (
"encoding/json"
"io"
"net"
"time"
)
// connWriter implements LoggerInterface.
// it writes messages over a keep-alive tcp connection.
type connWriter struct {
lg *logWriter
innerWriter io.WriteCloser
ReconnectOnMsg bool `json:"reconnectOnMsg"`
Reconnect bool `json:"reconnect"`
Net string `json:"net"`
Addr string `json:"addr"`
Level int `json:"level"`
}
// NewConn creates a new connWriter and returns it as a LoggerInterface.
func NewConn() Logger {
conn := new(connWriter)
conn.Level = LevelTrace
return conn
}
// Init initializes the connection writer with a JSON config.
// The JSON config only needs the key "level".
func (c *connWriter) Init(jsonConfig string) error {
return json.Unmarshal([]byte(jsonConfig), c)
}
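// A minimal illustrative config for Init, assuming the JSON keys follow the
// struct tags on connWriter above (the values are placeholders, not defaults):
//
//	{"net":"tcp","addr":"127.0.0.1:7070","level":7,"reconnect":true,"reconnectOnMsg":false}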
// WriteMsg writes a message over the connection.
// If the connection is down, it tries to re-connect first.
func (c *connWriter) WriteMsg(when time.Time, msg string, level int) error {
if level > c.Level {
return nil
}
if c.needToConnectOnMsg() {
err := c.connect()
if err != nil {
return err
}
}
if c.ReconnectOnMsg {
defer c.innerWriter.Close()
}
c.lg.println(when, msg)
return nil
}
// Flush implements the LoggerInterface method; it is a no-op for this writer.
func (c *connWriter) Flush() {
}
// Destroy shuts down the connection writer and closes the underlying tcp connection.
func (c *connWriter) Destroy() {
if c.innerWriter != nil {
c.innerWriter.Close()
}
}
func (c *connWriter) connect() error {
if c.innerWriter != nil {
c.innerWriter.Close()
c.innerWriter = nil
}
conn, err := net.Dial(c.Net, c.Addr)
if err != nil {
return err
}
if tcpConn, ok := conn.(*net.TCPConn); ok {
tcpConn.SetKeepAlive(true)
}
c.innerWriter = conn
c.lg = newLogWriter(conn)
return nil
}
func (c *connWriter) needToConnectOnMsg() bool {<|fim▁hole|>
if c.innerWriter == nil {
return true
}
return c.ReconnectOnMsg
}
func init() {
Register(AdapterConn, NewConn)
}<|fim▁end|>
|
if c.Reconnect {
c.Reconnect = false
return true
}
|
<|file_name|>EntityOperationServiceException.java<|end_file_name|><|fim▁begin|><|fim▁hole|>/**
* @author bkompis
*/
public class EntityOperationServiceException extends MushroomHunterServiceDataAccessException {
public <T> EntityOperationServiceException(String what, String operation, T entity, Throwable e) {
super("Could not " + operation + " " + what + " (" + entity + ").", e);
}
public EntityOperationServiceException(String msg) {
super(msg);
}
public EntityOperationServiceException(String msg, Throwable cause) {
super(msg, cause);
}
}<|fim▁end|>
|
package cz.muni.fi.pa165.mushrooms.service.exceptions;
|
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|>import copy
from django.conf import settings
from django.test import override_settings
def override_openstack_settings(**kwargs):
os_settings = copy.deepcopy(settings.WALDUR_OPENSTACK)
os_settings.update(kwargs)<|fim▁hole|><|fim▁end|>
|
return override_settings(WALDUR_OPENSTACK=os_settings)
|
<|file_name|>MapCppEMRSpacePoints.cc<|end_file_name|><|fim▁begin|>/* This file is part of MAUS: http://micewww.pp.rl.ac.uk:8080/projects/maus
*
* MAUS is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* MAUS is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with MAUS. If not, see <http://www.gnu.org/licenses/>.
*
*/
#include "src/map/MapCppEMRSpacePoints/MapCppEMRSpacePoints.hh"
namespace MAUS {
PyMODINIT_FUNC init_MapCppEMRSpacePoints(void) {
PyWrapMapBase<MAUS::MapCppEMRSpacePoints>::PyWrapMapBaseModInit
("MapCppEMRSpacePoints", "", "", "", "");
}
MapCppEMRSpacePoints::MapCppEMRSpacePoints()
: MapBase<MAUS::Data>("MapCppEMRSpacePoints") {
}
void MapCppEMRSpacePoints::_birth(const std::string& argJsonConfigDocument) {
_classname = "MapCppEMRSpacePoints";
char* pMAUS_ROOT_DIR = getenv("MAUS_ROOT_DIR");
if (!pMAUS_ROOT_DIR) {
throw MAUS::Exceptions::Exception(Exceptions::recoverable,
"Could not resolve ${MAUS_ROOT_DIR} environment variable",
"MapCppEMRSpacePoints::birth");
}
// JsonCpp setup
Json::Value configJSON;
Json::Value map_file_name;
Json::Value xEnable_V1731_Unpacking;
Json::Value xEnable_DBB_Unpacking;
configJSON = JsonWrapper::StringToJson(argJsonConfigDocument);
// Fetch variables
_number_of_planes = configJSON["EMRnumberOfPlanes"].asInt();
_number_of_bars = configJSON["EMRnumberOfBars"].asInt();
_tot_func_p1 = configJSON["EMRtotFuncP1"].asDouble();
_tot_func_p2 = configJSON["EMRtotFuncP2"].asDouble();
_tot_func_p3 = configJSON["EMRtotFuncP3"].asDouble();
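  // These time-over-threshold parameters are used below to recover a charge,
  // roughly charge = exp(tot/p1 - log(p2)) - p3/p2 (see the crosstalk rejection
  // and charge correction methods further down).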
// Load the EMR calibration map
bool loaded = _calibMap.InitializeFromCards(configJSON);
if ( !loaded )
throw(Exceptions::Exception(Exceptions::recoverable,
"Could not find EMR calibration map",
"MapCppEMRRecon::birth"));
// Load the EMR attenuation map
loaded = _attenMap.InitializeFromCards(configJSON);
if ( !loaded )
throw(Exceptions::Exception(Exceptions::recoverable,
"Could not find EMR attenuation map",
"MapCppEMRReccon::birth"));
// Load the EMR geometry map
loaded = _geoMap.InitializeFromCards(configJSON);
if ( !loaded )
throw(Exceptions::Exception(Exceptions::recoverable,
"Could not find EMR geometry map",
"MapCppEMRReccon::birth"));
}
void MapCppEMRSpacePoints::_death() {
}
void MapCppEMRSpacePoints::_process(Data *data) const {
// Routine data checks before processing it
if ( !data )
throw Exceptions::Exception(Exceptions::recoverable, "Data was NULL",
"MapCppEMRSpacePoints::_process");
Spill* spill = data->GetSpill();
if ( !spill )
throw Exceptions::Exception(Exceptions::recoverable, "Spill was NULL",
"MapCppEMRSpacePoints::_process");
if ( spill->GetDaqEventType() != "physics_event" )
return;
size_t nPartEvents = spill->GetReconEventSize();
if ( !nPartEvents )
return;
if ( !spill->GetEMRSpillData() )
return;
bool emrData = false;
for (size_t iPe = 0; iPe < nPartEvents; iPe++) {
EMREvent *evt = spill->GetReconEvents()->at(iPe)->GetEMREvent();
if ( evt->GetMotherPtr() ) {
if ( evt->GetMotherPtr()->GetEMRPlaneHitArraySize() ) {
emrData = true;
break;
}
}
}
if ( !emrData )
return;
// Create a temporary array containing n+n' plane hit arrays (1 per trigger + spill data)
size_t nSeconPartEvents = spill->GetEMRSpillData()->GetEMREventTrackArraySize();
EMREventVector emr_events_tmp(nPartEvents+nSeconPartEvents);
// Remove the crosstalk hits from each plane hit
clean_crosstalk(spill, emr_events_tmp);
// Reconstruct the coordinates of the spacepoint for each plane hit
reconstruct_coordinates(emr_events_tmp);
// Correct the SAPMT charge and reconstruct the MAPMT charge for the fibre attenuation
correct_charge(emr_events_tmp, nPartEvents);
// Fill the Recon event array with spill information (1 per trigger + daughter candidates)
fill(spill, emr_events_tmp, nPartEvents);
}
void MapCppEMRSpacePoints::clean_crosstalk(MAUS::Spill* spill,
EMREventVector& emr_events_tmp) const {
size_t nPartEvents = spill->GetReconEvents()->size();
size_t nSeconPartEvents = spill->GetEMRSpillData()->GetEMREventTrackArraySize();
for (size_t iPe = 0; iPe < nPartEvents+nSeconPartEvents; iPe++) {
// Skip events without an EventTrack (mother or candidate)
EMREventTrack* evtTrack;
if ( iPe < nPartEvents ) {
evtTrack = spill->GetReconEvents()->at(iPe)->GetEMREvent()->GetMotherPtr();
} else {
evtTrack = spill->GetEMRSpillData()->GetEMREventTrackArray()[iPe-nPartEvents];
}
if ( !evtTrack )
continue;
// Reject arrays that do not contain both projections (cannot reconstruct a SP)
EMRPlaneHitArray plArray = evtTrack->GetEMRPlaneHitArray();
bool HitXY[2] = {false, false};
for (size_t iPlane = 0; iPlane < plArray.size(); iPlane++) {
int xOri = plArray[iPlane]->GetOrientation();
if ( !HitXY[xOri] && plArray[iPlane]->GetEMRBarHitArraySize() )
HitXY[xOri] = true;
if ( HitXY[0] && HitXY[1] )
break;
}
if ( !HitXY[0] || !HitXY[1] )
continue;
// Reject the crosstalk in each plane
for (size_t iPlane = 0; iPlane < plArray.size(); iPlane++) {
EMRBarHitArray barHitArray = plArray[iPlane]->GetEMRBarHitArray();
std::vector<EMRBarHitArray> barHitGroupVector;
if ( !barHitArray.size() ) { // Skip the plane if there is no bar
continue;
} else if ( barHitArray.size() == 1 ) { // Select automatically if there is only 1 hit
barHitGroupVector.push_back(EMRBarHitArray());
barHitGroupVector[0].push_back(barHitArray[0]);
} else { // Keep only the most energetic bunch if there is >= 2 hits
// Sort the vector in with respect to the bar channel ID
sort(barHitArray.begin(), barHitArray.end(),
[] (const EMRBarHit& a, const EMRBarHit& b) {
return a.GetChannel() < b.GetChannel();
});
// Create groups of adjacent hits
barHitGroupVector.push_back(EMRBarHitArray());
barHitGroupVector[0].push_back(barHitArray[0]);
for (size_t iHit = 1; iHit < barHitArray.size(); iHit++) {
int aBar = barHitArray[iHit-1].GetChannel()%_number_of_bars;
int bBar = barHitArray[iHit].GetChannel()%_number_of_bars;
if ( abs(bBar-aBar) == 1 ) {
barHitGroupVector.back().push_back(barHitArray[iHit]);
} else {
barHitGroupVector.push_back(EMRBarHitArray());
barHitGroupVector.back().push_back(barHitArray[iHit]);
}
}
}
// Only keep the group with the highest energy deposition (gets rid of XT)
size_t mGroup(0);
double mCharge(0.);
for (size_t iGroup = 0; iGroup < barHitGroupVector.size(); iGroup++) {
double aCharge(0);
for (size_t iHit = 0; iHit < barHitGroupVector[iGroup].size(); iHit++) {
double xTot = barHitGroupVector[iGroup][iHit].GetTot();
double xCharge = exp(xTot/_tot_func_p1-log(_tot_func_p2))
- _tot_func_p3/_tot_func_p2;
aCharge += xCharge;
}
if ( aCharge > mCharge ) {
mGroup = iGroup;
mCharge = aCharge;
}
}
// Fill the temporary array with the selected hits (XT cleaned) and plane information
EMRPlaneTmp plane;
for (size_t iHit = 0; iHit < barHitGroupVector[mGroup].size(); iHit++)
plane._barhits.push_back(barHitGroupVector[mGroup][iHit]);
plane._plane = plArray[iPlane]->GetPlane();
plane._charge = plArray[iPlane]->GetCharge();
emr_events_tmp[iPe].push_back(plane);
}
}
}
void MapCppEMRSpacePoints::reconstruct_coordinates(EMREventVector& emr_events_tmp)
const {
for (size_t iPe = 0; iPe < emr_events_tmp.size(); iPe++) {
// Sort the temporary planes from upstreammost to the downstreammost
sort(emr_events_tmp[iPe].begin(), emr_events_tmp[iPe].end(),
[] (const EMRPlaneTmp& a, const EMRPlaneTmp& b) {
return a._plane < b._plane;
});
// Look for hits in the other projection to reconstruct the missing coordinate
for (size_t iPlane = 0; iPlane < emr_events_tmp[iPe].size(); iPlane++) {
if ( !emr_events_tmp[iPe][iPlane]._barhits.size() )
continue;
int xPlane = emr_events_tmp[iPe][iPlane]._plane;
int xOri = xPlane%2;
ThreeVector v0, v1, v2;
bool Hit1(false), Hit2(false);
double a(-1.), b(-1.), xi(-1.);
// Look backwards for hits in the other projection
int aPlane(0);
for (aPlane = iPlane-1; aPlane >= 0; aPlane--) {
if ( emr_events_tmp[iPe][aPlane]._barhits.size() &&
emr_events_tmp[iPe][aPlane]._plane%2 != xOri ) {
v1 = get_weighted_position(emr_events_tmp[iPe][aPlane]._barhits);
Hit1 = true;
break;
}
}
// Look forwards for hits in the other projection
for (size_t bPlane = iPlane+1; bPlane < emr_events_tmp[iPe].size(); bPlane++) {
if ( emr_events_tmp[iPe][bPlane]._barhits.size() &&
emr_events_tmp[iPe][bPlane]._plane%2 != xOri ) {
if ( !Hit2 ) {
v2 = get_weighted_position(emr_events_tmp[iPe][bPlane]._barhits);
Hit2 = true;
if ( Hit1 )
break;
} else if ( Hit2 && !Hit1 ) {
v1 = get_weighted_position(emr_events_tmp[iPe][bPlane]._barhits);
Hit1 = true;
break;
}
}
}
// Look backwards for the second hit if nothing found in the forward direction
if ( Hit1 && !Hit2 ) {
for (int cPlane = aPlane-1; cPlane >= 0; cPlane--) {
if ( emr_events_tmp[iPe][cPlane]._barhits.size() &&
emr_events_tmp[iPe][cPlane]._plane%2 != xOri ) {
v2 = get_weighted_position(emr_events_tmp[iPe][cPlane]._barhits);
Hit2 = true;
break;
}
}
}
// Calculate the parameters of the line between the two complementary planes
if ( Hit1 && Hit2 ) {
if ( !xOri ) {
a = (v2.y() - v1.y())/(v2.z() - v1.z());
b = v1.y() - a*v1.z();
} else {
a = (v2.x() - v1.x())/(v2.z() - v1.z());
b = v1.x() - a*v1.z();
}
} else if ( Hit1 && !Hit2 ) {
if ( !xOri ) {
b = v1.y();
} else {
b = v1.x();
}
a = 0.;
} else if ( !Hit1 && Hit2 ) {
if ( !xOri ) {
b = v2.y();
} else {
b = v2.x();
}
a = 0.;
}
// Set the transverse (x or y) and longitudinal (z) errors
xi = (v0.z() - v1.z())/(v2.z() - v1.z());
ThreeVector dim = _geoMap.Dimensions(); // (w, h, l) of an EMR bar
double etrans = dim.y()/(2*sqrt(6)); // Transverse uncertainty
double elong = dim.z()/(3*sqrt(2)); // Longitudinal uncertainty
for (size_t iHit = 0; iHit < emr_events_tmp[iPe][iPlane]._barhits.size(); iHit++) {
// Find the position of the bar in local coordinates
EMRBarHit barHit = emr_events_tmp[iPe][iPlane]._barhits[iHit];
int xBar = barHit.GetChannel()%_number_of_bars;
EMRChannelKey xKey(xPlane, xOri, xBar, "emr");
v0 = _geoMap.LocalPosition(xKey);
// Set the reconstructed (y or x) errors
if ( Hit1 && Hit2 ) {
xi = (v0.z() - v1.z())/(v2.z() - v1.z());
} else {
xi = 0.;
}
double erecon = sqrt((pow(xi, 2) // Recon uncertainty !!!TODO!!! (FIX)
+ pow(1-xi, 2)) * pow(etrans, 2)
+ pow(a, 2) * (pow(xi, 2) + pow(1-xi, 2)) * pow(elong, 2));
// Add the missing coordinate and set the appropriate errors
// Interpolate or extrapolate y for an x plane and x for a y plane
ThreeVector errors;
if ( !xOri ) {
v0.SetY(a*v0.z() + b);
errors = ThreeVector(etrans, erecon, elong);
} else {
v0.SetX(a*v0.z() + b);
errors = ThreeVector(erecon, etrans, elong);
}
// Add a spacepoint to the array
EMRSpacePointTmp spacePoint;
spacePoint._pos = v0;
spacePoint._errors = errors;
spacePoint._ch = barHit.GetChannel();
spacePoint._time = barHit.GetTime()-_attenMap.fibreDelay(xKey, v0.x(), v0.y(), "MA");
spacePoint._deltat = barHit.GetDeltaT()-_attenMap.fibreDelay(xKey, v0.x(), v0.y(), "MA");
spacePoint._chargema = -1;
spacePoint._chargesa = -1;
emr_events_tmp[iPe][iPlane]._spacepoints.push_back(spacePoint);
}
}
}
}
void MapCppEMRSpacePoints::correct_charge(EMREventVector& emr_events_tmp,
size_t nPartEvents) const {
for (size_t iPe = 0; iPe < emr_events_tmp.size(); iPe++) {
for (size_t iPlane = 0; iPlane < emr_events_tmp[iPe].size(); iPlane++) {
int xPlane = emr_events_tmp[iPe][iPlane]._plane;
EMRSpacePointVector spacePointVector = emr_events_tmp[iPe][iPlane]._spacepoints;
// Reconstruct the MAPMT charge in each bar hit
double xPlaneChargeMA(0.);
for (size_t iSP = 0; iSP < spacePointVector.size(); iSP++) {
int xBar = spacePointVector[iSP]._ch%_number_of_bars;
EMRChannelKey xKey(xPlane, xPlane%2, xBar, "emr");
double x = spacePointVector[iSP]._pos.x(); // [mm]
double y = spacePointVector[iSP]._pos.y(); // [mm]
double alphMA = _attenMap.fibreAtten(xKey, x, y, "MA"); // Fibre attenuation
double epsMA = _calibMap.Eps(xKey, "MA"); // Calibration factor
double xTot = emr_events_tmp[iPe][iPlane]._barhits[iSP].GetTot();
double xCharge = exp(xTot/_tot_func_p1-log(_tot_func_p2))-_tot_func_p3/_tot_func_p2;
xCharge /= alphMA*epsMA;
spacePointVector[iSP]._chargema = xCharge;
emr_events_tmp[iPe][iPlane]._spacepoints[iSP]._chargema = xCharge;
xPlaneChargeMA += xCharge;
}
// Correct and split the SAPMT charge in each bar, reconstruct it for candidates
double xPlaneChargeSA(0.), xFactorSA(0.), xFactorMA(0.);
for (size_t iSP = 0; iSP < spacePointVector.size(); iSP++) {
int xBar = spacePointVector[iSP]._ch%_number_of_bars;
EMRChannelKey xKey(xPlane, xPlane%2, xBar, "emr");
double x = spacePointVector[iSP]._pos.x(); // [mm]<|fim▁hole|> double y = spacePointVector[iSP]._pos.y(); // [mm]
double alphSA = _attenMap.fibreAtten(xKey, x, y, "SA");
double epsSA = _calibMap.Eps(xKey, "SA");
double alphMA = _attenMap.fibreAtten(xKey, x, y, "MA");
double epsMA = _calibMap.Eps(xKey, "MA");
double phi = spacePointVector[iSP]._chargema/xPlaneChargeMA; // Fraction of the total charge
xFactorSA += alphSA*epsSA*phi;
xFactorMA += alphMA*epsMA*phi;
}
if ( iPe < nPartEvents ) {
xPlaneChargeSA = emr_events_tmp[iPe][iPlane]._charge/xFactorSA;
} else {
xPlaneChargeSA = xPlaneChargeMA*xFactorSA/xFactorMA;
}
for ( size_t iSP = 0; iSP < spacePointVector.size(); iSP++)
emr_events_tmp[iPe][iPlane]._spacepoints[iSP]._chargesa =
xPlaneChargeSA*spacePointVector[iSP]._chargema/xPlaneChargeMA;
// Correct the error on the spacepoint for its charge fraction,
// A spacepoint that has comparatively less charge is less definite
for (size_t iSP = 0; iSP < spacePointVector.size(); iSP++) {
double xFactor = spacePointVector[iSP]._chargema/xPlaneChargeMA;
ThreeVector xErrors = spacePointVector[iSP]._errors;
if ( !(xPlane%2) ) {
xErrors.SetX(xErrors.x()/sqrt(fabs(xFactor)));
} else {
xErrors.SetY(xErrors.y()/sqrt(fabs(xFactor)));
}
emr_events_tmp[iPe][iPlane]._spacepoints[iSP]._errors = xErrors;
}
}
}
}
void MapCppEMRSpacePoints::fill(MAUS::Spill* spill,
EMREventVector emr_events_tmp,
size_t nPartEvents) const {
// Get the EMR recon events and the spill data
ReconEventPArray *recEvts = spill->GetReconEvents();
EMRSpillData *emrSpill = spill->GetEMRSpillData();
for (size_t iPe = 0; iPe < emr_events_tmp.size(); iPe++) {
EMRSpacePointArray spacePointArray;
for (size_t iPlane = 0; iPlane < emr_events_tmp[iPe].size(); iPlane++) {
EMRSpacePointVector spacePointVector = emr_events_tmp[iPe][iPlane]._spacepoints;
for (size_t iSP = 0; iSP < spacePointVector.size(); iSP++) {
EMRSpacePoint *spacePoint = new EMRSpacePoint;
spacePoint->SetChannel(spacePointVector[iSP]._ch);
spacePoint->SetPosition(spacePointVector[iSP]._pos);
spacePoint->SetGlobalPosition(_geoMap.MakeGlobal(spacePointVector[iSP]._pos));
spacePoint->SetPositionErrors(spacePointVector[iSP]._errors);
spacePoint->SetTime(spacePointVector[iSP]._time);
spacePoint->SetDeltaT(spacePointVector[iSP]._deltat);
spacePoint->SetChargeMA(spacePointVector[iSP]._chargema);
spacePoint->SetChargeSA(spacePointVector[iSP]._chargesa);
spacePointArray.push_back(spacePoint);
}
}
EMREventTrack *evtTrack;
if ( iPe < nPartEvents ) {
evtTrack = recEvts->at(iPe)->GetEMREvent()->GetMotherPtr();
} else {
evtTrack = emrSpill->GetEMREventTrackArray()[iPe-nPartEvents];
}
if ( evtTrack )
evtTrack->SetEMRSpacePointArray(spacePointArray);
}
}
ThreeVector MapCppEMRSpacePoints::get_weighted_position(EMRBarHitArray barHitArray) const {
double wx(0.), wy(0.), wz(0.), w(0.);
for (size_t iHit = 0; iHit < barHitArray.size(); iHit++) {
int xPlane = barHitArray[iHit].GetChannel()/_number_of_bars;
int xBar = barHitArray[iHit].GetChannel()%_number_of_bars;
EMRChannelKey xKey(xPlane, xPlane%2, xBar, "emr");
ThreeVector xPos = _geoMap.LocalPosition(xKey);
double xTot = barHitArray[iHit].GetTot();
double xCharge= exp(xTot/_tot_func_p1-log(_tot_func_p2))
- _tot_func_p3/_tot_func_p2;
wx += xPos.x()*xCharge;
wy += xPos.y()*xCharge;
wz += xPos.z()*xCharge;
w += xCharge;
}
if (w) {
return ThreeVector(wx/w, wy/w, wz/w);
} else {
return ThreeVector(0., 0., 0.);
}
}
} // namespace MAUS<|fim▁end|>
| |
<|file_name|>test_atom.rs<|end_file_name|><|fim▁begin|>use rustler::{Atom, Binary, Env, NifResult, Term};
mod atoms {
rustler::atoms! { ok }
}
#[rustler::nif]
pub fn atom_to_string(atom: Term) -> NifResult<String> {
atom.atom_to_string()
}<|fim▁hole|>pub fn atom_equals_ok(atom: Atom) -> bool {
atoms::ok() == atom
}
#[rustler::nif]
pub fn binary_to_atom(env: Env, binary: Binary) -> NifResult<Atom> {
let atom = Atom::from_bytes(env, binary.as_slice())?;
Ok(atom)
}
#[rustler::nif]
pub fn binary_to_existing_atom(env: Env, binary: Binary) -> NifResult<Option<Atom>> {
let atom = Atom::try_from_bytes(env, binary.as_slice())?;
Ok(atom)
}<|fim▁end|>
|
#[rustler::nif]
|
<|file_name|>Main.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2012 TrinityCore <http://www.trinitycore.org/>
* Copyright (C) 2005-2009 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* @file main.cpp
* @brief Authentication Server main program
*
* This file contains the main program for the
* authentication server
*/
#include <ace/Dev_Poll_Reactor.h>
#include <ace/TP_Reactor.h>
#include <ace/ACE.h>
#include <ace/Sig_Handler.h>
#include <openssl/opensslv.h><|fim▁hole|>#include <openssl/crypto.h>
#include "Common.h"
#include "Database/DatabaseEnv.h"
#include "Configuration/Config.h"
#include "Log.h"
#include "SystemConfig.h"
#include "Util.h"
#include "SignalHandler.h"
#include "RealmList.h"
#include "RealmAcceptor.h"
#ifndef _TRINITY_REALM_CONFIG
# define _TRINITY_REALM_CONFIG "authserver.conf"
#endif
bool StartDB();
void StopDB();
bool stopEvent = false; // Setting it to true stops the server
LoginDatabaseWorkerPool LoginDatabase; // Accessor to the auth server database
/// Handle authserver's termination signals
class AuthServerSignalHandler : public Trinity::SignalHandler
{
public:
virtual void HandleSignal(int SigNum)
{
switch (SigNum)
{
case SIGINT:
case SIGTERM:
stopEvent = true;
break;
}
}
};
/// Print out the usage string for this program on the console.
void usage(const char *prog)
{
sLog->outInfo(LOG_FILTER_AUTHSERVER, "Usage: \n %s [<options>]\n"
" -c config_file use config_file as configuration file\n\r",
prog);
}
/// Launch the auth server
extern int main(int argc, char **argv)
{
// Command line parsing to get the configuration file name
char const* cfg_file = _TRINITY_REALM_CONFIG;
int c = 1;
while (c < argc)
{
if (strcmp(argv[c], "-c") == 0)
{
if (++c >= argc)
{
printf("Runtime-Error: -c option requires an input argument");
usage(argv[0]);
return 1;
}
else
cfg_file = argv[c];
}
++c;
}
if (!ConfigMgr::Load(cfg_file))
{
printf("Invalid or missing configuration file : %s", cfg_file);
printf("Verify that the file exists and has \'[authserver]\' written in the top of the file!");
return 1;
}
sLog->outInfo(LOG_FILTER_AUTHSERVER, "%s (authserver)", _FULLVERSION);
sLog->outInfo(LOG_FILTER_AUTHSERVER, "<Ctrl-C> to stop.\n");
sLog->outInfo(LOG_FILTER_AUTHSERVER, "Using configuration file %s.", cfg_file);
sLog->outWarn(LOG_FILTER_AUTHSERVER, "%s (Library: %s)", OPENSSL_VERSION_TEXT, SSLeay_version(SSLEAY_VERSION));
#if defined (ACE_HAS_EVENT_POLL) || defined (ACE_HAS_DEV_POLL)
ACE_Reactor::instance(new ACE_Reactor(new ACE_Dev_Poll_Reactor(ACE::max_handles(), 1), 1), true);
#else
ACE_Reactor::instance(new ACE_Reactor(new ACE_TP_Reactor(), true), true);
#endif
sLog->outDebug(LOG_FILTER_AUTHSERVER, "Max allowed open files is %d", ACE::max_handles());
// authserver PID file creation
std::string pidfile = ConfigMgr::GetStringDefault("PidFile", "");
if (!pidfile.empty())
{
uint32 pid = CreatePIDFile(pidfile);
if (!pid)
{
sLog->outError(LOG_FILTER_AUTHSERVER, "Cannot create PID file %s.\n", pidfile.c_str());
return 1;
}
sLog->outInfo(LOG_FILTER_AUTHSERVER, "Daemon PID: %u\n", pid);
}
// Initialize the database connection
if (!StartDB())
return 1;
sLog->SetRealmID(0); // ensure we've set realm to 0 (authserver realmid)
// Get the list of realms for the server
sRealmList->Initialize(ConfigMgr::GetIntDefault("RealmsStateUpdateDelay", 20));
if (sRealmList->size() == 0)
{
sLog->outError(LOG_FILTER_AUTHSERVER, "No valid realms specified.");
return 1;
}
// Launch the listening network socket
RealmAcceptor acceptor;
int32 rmport = ConfigMgr::GetIntDefault("RealmServerPort", 3724);
if (rmport < 0 || rmport > 0xFFFF)
{
sLog->outError(LOG_FILTER_AUTHSERVER, "Specified port out of allowed range (1-65535)");
return 1;
}
std::string bind_ip = ConfigMgr::GetStringDefault("BindIP", "0.0.0.0");
ACE_INET_Addr bind_addr(uint16(rmport), bind_ip.c_str());
if (acceptor.open(bind_addr, ACE_Reactor::instance(), ACE_NONBLOCK) == -1)
{
sLog->outError(LOG_FILTER_AUTHSERVER, "Auth server can not bind to %s:%d", bind_ip.c_str(), rmport);
return 1;
}
// Initialise the signal handlers
AuthServerSignalHandler SignalINT, SignalTERM;
// Register authservers's signal handlers
ACE_Sig_Handler Handler;
Handler.register_handler(SIGINT, &SignalINT);
Handler.register_handler(SIGTERM, &SignalTERM);
///- Handle affinity for multiple processors and process priority on Windows
#ifdef _WIN32
{
HANDLE hProcess = GetCurrentProcess();
uint32 Aff = ConfigMgr::GetIntDefault("UseProcessors", 0);
if (Aff > 0)
{
ULONG_PTR appAff;
ULONG_PTR sysAff;
if (GetProcessAffinityMask(hProcess, &appAff, &sysAff))
{
ULONG_PTR curAff = Aff & appAff; // remove non accessible processors
if (!curAff)
sLog->outError(LOG_FILTER_AUTHSERVER, "Processors marked in UseProcessors bitmask (hex) %x not accessible for authserver. Accessible processors bitmask (hex): %x", Aff, appAff);
else if (SetProcessAffinityMask(hProcess, curAff))
sLog->outInfo(LOG_FILTER_AUTHSERVER, "Using processors (bitmask, hex): %x", curAff);
else
sLog->outError(LOG_FILTER_AUTHSERVER, "Can't set used processors (hex): %x", curAff);
}
}
bool Prio = ConfigMgr::GetBoolDefault("ProcessPriority", false);
if (Prio)
{
if (SetPriorityClass(hProcess, HIGH_PRIORITY_CLASS))
sLog->outInfo(LOG_FILTER_AUTHSERVER, "The auth server process priority class has been set to HIGH");
else
sLog->outError(LOG_FILTER_AUTHSERVER, "Can't set auth server process priority class.");
}
}
#endif
// maximum counter for next ping
uint32 numLoops = (ConfigMgr::GetIntDefault("MaxPingTime", 30) * (MINUTE * 1000000 / 100000));
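    // With the default MaxPingTime of 30 minutes and the 100 ms reactor interval used
    // below, this amounts to 30 * 600 = 18000 iterations between keep-alive pings
    // (assuming MINUTE is expressed in seconds, i.e. 60).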
uint32 loopCounter = 0;
// Wait for termination signal
while (!stopEvent)
{
// dont move this outside the loop, the reactor will modify it
ACE_Time_Value interval(0, 100000);
if (ACE_Reactor::instance()->run_reactor_event_loop(interval) == -1)
break;
if ((++loopCounter) == numLoops)
{
loopCounter = 0;
sLog->outInfo(LOG_FILTER_AUTHSERVER, "Ping MySQL to keep connection alive");
LoginDatabase.KeepAlive();
}
}
// Close the Database Pool and library
StopDB();
sLog->outInfo(LOG_FILTER_AUTHSERVER, "Halting process...");
return 0;
}
/// Initialize connection to the database
bool StartDB()
{
MySQL::Library_Init();
std::string dbstring = ConfigMgr::GetStringDefault("LoginDatabaseInfo", "");
if (dbstring.empty())
{
sLog->outError(LOG_FILTER_AUTHSERVER, "Database not specified");
return false;
}
int32 worker_threads = ConfigMgr::GetIntDefault("LoginDatabase.WorkerThreads", 1);
if (worker_threads < 1 || worker_threads > 32)
{
sLog->outError(LOG_FILTER_AUTHSERVER, "Improper value specified for LoginDatabase.WorkerThreads, defaulting to 1.");
worker_threads = 1;
}
int32 synch_threads = ConfigMgr::GetIntDefault("LoginDatabase.SynchThreads", 1);
if (synch_threads < 1 || synch_threads > 32)
{
sLog->outError(LOG_FILTER_AUTHSERVER, "Improper value specified for LoginDatabase.SynchThreads, defaulting to 1.");
synch_threads = 1;
}
// NOTE: While authserver is singlethreaded you should keep synch_threads == 1. Increasing it is just silly since only 1 will be used ever.
if (!LoginDatabase.Open(dbstring.c_str(), uint8(worker_threads), uint8(synch_threads)))
{
sLog->outError(LOG_FILTER_AUTHSERVER, "Cannot connect to database");
return false;
}
sLog->outInfo(LOG_FILTER_AUTHSERVER, "Started auth database connection pool.");
sLog->EnableDBAppenders();
return true;
}
/// Close the connection to the database
void StopDB()
{
LoginDatabase.Close();
MySQL::Library_End();
}<|fim▁end|>
| |
<|file_name|>t_coupling_tools.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
from openturns import coupling_tools
import os
import time
import sys
wanted_lines = '# ooo\nE=@E\nE1=@E\nFE1=@F#oo\nZ=@Z@Z\n# ooo\n'
semi_parsed = '# ooo\nE=2\nE1=2\nFE1=@F#oo\nZ=@Z@Z\n# ooo\n'
parsed = '# ooo\nE=1.6\nE1=1.6\nFE1=5#oo\nZ=66\n# ooo\n'
# how many Mo for perf file
#howbig = 3024
howbig = 256
max_time = divmod(howbig, 5)[0]
def create_template():
template_name = 'template.in'
template_handle = open(template_name, 'wb')
template_handle.write(wanted_lines.encode())
template_handle.close()
return template_name
def create_big_template():
template_name = 'template_big.in'
template_handle = open(template_name, 'wb')
print('create template file of ' + str(howbig) + 'Mo')
template_handle.write(wanted_lines.encode())
for i in range(howbig):
for i in range(1024):
# line of 1024 octets
template_handle.write(b'u'*1024)
template_handle.write(b'\n')
template_handle.write(b'# ooo\n')
template_handle.close()
return template_name
def remove_file(filename, quiet=False):
if quiet:
try:
os.remove(filename)
except:
pass
else:
os.remove(filename)
def check_outfile(filename, wanted_result):
""" wanted_result: a string """
is_ok = True
handle = open(filename)
for wanted_line, result_line in zip(wanted_result.splitlines(True), handle):
if wanted_line != result_line:
print('Aaaaarg, result is not what we wanted (result:' + \
result_line + ', should be:' + wanted_line.decode() + ')')
is_ok = False
handle.close()
if is_ok:
print('check ' + filename + ': ok')
else:
exit(1)
#return is_ok
def check_replace():
print("=== " + sys._getframe().f_code.co_name)
print("= check replace std")
template = create_template()
template_out = template + ".replaced"
coupling_tools.replace(infile=template, outfile=template_out,
tokens=["@E"], values=[2])
check_outfile(template_out, semi_parsed)
remove_file(template_out)
remove_file(template)
print("= check replace more vars")
template = create_template()
coupling_tools.replace(infile=template, outfile=template_out,
tokens=["@E", "@F", "@Z"],
values=[1.6, 5, 6])
check_outfile(template_out, parsed)
remove_file(template_out)
remove_file(template)
print("= check replace inplace")
template = create_template()
coupling_tools.replace(infile=template, outfile=template,
tokens=["@E", "@F", "@Z"], values=[1.6, 5, 6])
check_outfile(template, parsed)
remove_file(template)
print("= check replace inplace with None")
template = create_template()
coupling_tools.replace(infile=template, outfile=None,
tokens=["@E", "@F", "@Z"], values=[1.6, 5, 6])
check_outfile(template, parsed)
remove_file(template)
print("= check replace big template")
start_time = time.time()
template = create_big_template()
sys.stderr.write( "big template created in : " + str(time.time() - start_time) + "s\n" )
template_out = template + ".replaced"
start_time = time.time()
coupling_tools.replace(infile=template, outfile=template_out,
tokens=["@E"], values=[2])
time_to_parse = str(int(time.time() - start_time))
check_outfile(template_out, semi_parsed)
remove_file(template_out)
remove_file(template)
sys.stderr.write( "parsed template in: " + time_to_parse + "s\n" )
# parsed template=3G -> 25s on bx (ssd, core [email protected])
if int(time_to_parse) > max_time:
print('time to get token took too long (should be ' + str(max_time)+'s max)')
exit(1)
else:
print('check replace big template: ok')
def create_results(tokens, values=None, big=False):
filename = "results.out"
handle = open(filename, "wb")
if big:
print("create file of " + str(howbig) + "Mo")
for i in range(howbig):
for i in range(1024):
# line of 1024 octets
handle.write(b'u'*1024)
handle.write(b'\n')
handle.write(b'# ooo\n')
if values == None:
handle.write(tokens.encode())
else:
n = 0
for t, v in zip(tokens, values):
handle.write((t + str(v)).encode())
# go to next line sometimes
if n%3 == 0:
handle.write(b'\n')
n += 1
handle.close()
return filename
def check_results(ok_values, values):
if ok_values == values:
print("ok")
else:
print("Error: found: " + str(values) + " should be: " + str(ok_values))
exit(1)
def check_get_line_col():
print("=== " + sys._getframe().f_code.co_name)
content = """01 02 03 04 05 06 07 08 09
11 12 13 14 15 16 17 18 19
21 22 23 24 25 26 27 28 29
31 32 33 34 35 36 37 38 39
"""
result_file = create_results(content)
value = 1
result = coupling_tools.get_line_col(result_file)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 3
result = coupling_tools.get_line_col(result_file, skip_col=2)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 11
result = coupling_tools.get_line_col(result_file, 1)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 16
result = coupling_tools.get_line_col(result_file, 1, 5)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 9
result = coupling_tools.get_line_col(result_file, skip_col=-1)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 17
result = coupling_tools.get_line_col(result_file, 1, -3)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 31
result = coupling_tools.get_line_col(result_file, -1)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 14
result = coupling_tools.get_line_col(result_file, -3, -6)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 3
result = coupling_tools.get_line_col(result_file, seek=6)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 23
result = coupling_tools.get_line_col(result_file, skip_line=1, skip_col=2, seek=30)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 31
result = coupling_tools.get_line_col(result_file, skip_line=-1, seek=-100)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
#coupling_tools.debug = True
value = 21
just_before_line_ret = 80
sys.stderr.write( 'char at pos ' + str(just_before_line_ret) + ':->' + \
content[just_before_line_ret] + '<-\n' )
result = coupling_tools.get_line_col(result_file, skip_line=-1,
seek=-just_before_line_ret)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 21
just_after_line_ret = just_before_line_ret + 1
sys.stderr.write( 'char at pos ' + str(just_after_line_ret) + ':->' + \
content[just_after_line_ret] + '<-\n')
result = coupling_tools.get_line_col(result_file, skip_line=-2,
seek=-just_after_line_ret)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
try:
result = coupling_tools.get_line_col(result_file, skip_line=4)
except:<|fim▁hole|> try:
result = coupling_tools.get_line_col(result_file, skip_line=-5)
except:
pass
else:
raise Exception("! should have fail !")
os.remove(result_file)
# test with a last empty line
content = """01 02 03 04 05 06 07 08 09
11 12 13 14 15 16 17 18 19
"""
result_file = create_results(content)
value = 19
result = coupling_tools.get_line_col(result_file, skip_line=-2, skip_col=-1)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
value = 1
result = coupling_tools.get_line_col(result_file, skip_line=-3)
if value != result: raise Exception("! got " + str(result) + ' instead of ' +
str(value))
try:
result = coupling_tools.get_line_col(result_file, skip_line=-1)
except:
pass
else:
raise Exception("! should have fail !")
os.remove(result_file)
print("ok")
def check_get():
print("=== " + sys._getframe().f_code.co_name)
tokens = ["@Y1=", "@Y2="]
values = [5.4, 6.5]
result_file = create_results(tokens, values)
results = coupling_tools.get(filename=result_file, tokens=tokens)
check_results(values, results)
    remove_file(result_file)
#Y = coupling_tools.get(outfile="output.py", tokens=["@Y1=", "@Y2="],
# occurence=[0, 0], col=[0, 3], line=[0, 2])
def check_get_regex():
print("=== " + sys._getframe().f_code.co_name)
tokens = ["@E=", "02=", " 01 = "]
values = [-9.55555E5, 8, 5.4]
result_file = create_results(tokens, values)
results = coupling_tools.get_regex(filename=result_file,
patterns=['@E=(\R)',
'02\s*=\s*(\I)\s*',
'01 =\s*(\R)']
)
check_results(values, results)
remove_file(result_file)
def check_get_regex_perf():
print("=== " + sys._getframe().f_code.co_name)
tokens = ["@E=", "02=", " 01 = "]
values = [-9.55555E5, 8, 5.4]
start_time = time.time()
result_file = create_results(tokens, values, big=True)
sys.stderr.write("big file created in : " + str(time.time() - start_time) + "s\n")
start_time = time.time()
results = coupling_tools.get_regex(filename=result_file,
patterns=['@E=(\R)',
'02\s*=\s*(\I)\s*',
'01 =\s*(\R)']
)
time_to_parse = str(int(time.time() - start_time))
check_results(values, results)
remove_file(result_file)
# get file=3G -> 16s on bx (ssd, core [email protected])
sys.stderr.write("get regex in file in: " + time_to_parse + "s\n")
if int(time_to_parse) > max_time:
print('time to get token took too long (should be ' + str(max_time)+'s max)')
exit(1)
else:
print("get regex in file: ok")
def check_get_tokens():
print("=== " + sys._getframe().f_code.co_name)
tokens = ["@E=", " pp", ",virg", " normal="]
values = [-9.55555E6, 56.666, -12345678912.2, 0]
result_file = create_results(tokens, values)
results = coupling_tools.get(filename=result_file,
tokens=tokens)
check_results(values, results)
remove_file(result_file)
def check_get_tokens_skip():
print("=== " + sys._getframe().f_code.co_name)
content = "@E=99 @E=-9.55555E6 pp88 pp 56.666,virg-12345678912.2 normal=0"
values = [-9.55555E6, 56.666, -12345678912.2, 0]
result_file = create_results(content)
results = coupling_tools.get(filename=result_file,
tokens=["@E=", "pp", ",virg",
"normal="],
skip_tokens=[1, -1, 0, 0]
)
check_results(values, results)
remove_file(result_file)
print("=== " + sys._getframe().f_code.co_name + "2")
tokens = '@E=99 @E=7899 pp88 pp pp\n'\
'pp999 pp56.666E-9pp,virg-12345678912.2 uu88 uuuu\n'\
'uu999uu\n'
values = [99, 56.666E-9, -12345678912.2, 999]
result_file = create_results(tokens)
results = coupling_tools.get(filename=result_file,
tokens=["@E=", "pp", ",virg",
"uu"],
skip_tokens=[0, 4, -1, 3]
)
check_results(values, results)
remove_file(result_file)
def check_get_array():
print("=== " + sys._getframe().f_code.co_name)
tokens = '11.0E-9 22.0crap 33.0 44.0 55.0\n'\
'11.1 22.1 33.1 44.1\n'\
'11.2 22.2 33.2 44.2'
values = [11.0E-9, 22.0, 55.0, 11.1, 33.2, 22.2, 33.2]
result_file = create_results(tokens)
results = coupling_tools.get(filename=result_file,
skip_lines=[0, 0, 0, 1, 2, 2, -1],
skip_cols= [0, 1, -1, 0, 2, 1, -2]
)
check_results(values, results)
remove_file(result_file)
def check_get_tokens_line_col():
print("=== " + sys._getframe().f_code.co_name)
tokens = '11.0E-9 22.0crap 33.0 44.0 55.0\n'\
'11.1 22.1 33.1 44.1 middle\n'\
'11.2 22.2 33.2 44.2\n'\
'@E=1111.1E11 666'
values = [1111.1E11, 22.0, 33.1, 666, 33.2, 44.1, 55.0]
result_file = create_results(tokens)
results = coupling_tools.get(filename=result_file,
tokens=['@E=', None, '@E=', '@E=',
'middle', 'middle', 'middle'],
skip_lines=[0, 0, -2, 0, 1, 0, -1],
skip_cols= [0, 1, 2, 1,-2, -1, -1]
)
check_results(values, results)
remove_file(result_file)
def check_get_tokens_skip_line_col():
print("=== " + sys._getframe().f_code.co_name)
tokens = '11.0E-9 22.0crap 33.0 44.0 55.0\n'\
'11.1 22.1 33.1 44.1 middle\n'\
'11.2 22.2 middle 33.2 44.2\n'\
'@E=1111.1E11 666\n'\
'@E=999 8888 @E=95\n'
values = [1111.1E11, 33.2, 666, 8888, 8888, 666]
result_file = create_results(tokens)
results = coupling_tools.get(filename=result_file,
tokens=['@E=', 'middle', 'middle', '@E=', '@E=', '@E='],
skip_tokens=[0, 1, -1, -2, -1, -2],
skip_lines=[0, 0, 1, 0, 0, -1],
skip_cols= [0, 0, 1, 1, -1, -1]
)
check_results(values, results)
remove_file(result_file)
def check_get_tokens_perf():
print("=== " + sys._getframe().f_code.co_name)
tokens = ["@E=", " pp", ",virg", " normal="]
values = [-9.55555E6, 56.666, -12345678912.2, 0]
tokens = ["@E="]
values = [-9.55555E6]
start_time = time.time()
result_file = create_results(tokens, values, big=True)
sys.stderr.write("big file created in : " + str(time.time() - start_time) + "s\n")
start_time = time.time()
results = coupling_tools.get(filename=result_file,
tokens=tokens)
time_to_parse = str(int(time.time() - start_time))
check_results(values, results)
remove_file(result_file)
# get file=3G -> 18s on bx (ssd, core [email protected])
sys.stderr.write( 'get token in file in: ' + time_to_parse + 's\n' )
if int(time_to_parse) > max_time:
print('time to get token took too long (should be ' + str(max_time)+'s max)')
exit(1)
else:
print('get tokens: ok')
def check_get_tokens_skip_perf():
print("=== " + sys._getframe().f_code.co_name)
content = "@E=99 @E=-9.55555E6 pp88 pp 56.666,virg-12345678912.2 normal=0"
values = [-9.55555E6, 56.666, -12345678912.2, 0]
content = "@E=99 @E=-9.55555E6 pp88 pp 56.666,virg-12345678912.2 normal=0"
values = [-9.55555E6]
start_time = time.time()
result_file = create_results(content, big=True)
sys.stderr.write("big file created in : " + str(time.time() - start_time) + "s\n")
start_time = time.time()
results = coupling_tools.get(filename=result_file,
tokens=["@E="],
skip_tokens=[-1]
)
time_to_parse = str(int(time.time() - start_time))
check_results(values, results)
remove_file(result_file)
# get file=3G -> 21s on bx (ssd, core [email protected])
sys.stderr.write("get token skip in file in: " + time_to_parse + "s\n")
if int(time_to_parse) > max_time:
print('time to get token took too long (should be ' + str(max_time)+'s max)')
exit(1)
else:
print('get tokens skip: ok')
def check_get_line_col_perf():
print("=== " + sys._getframe().f_code.co_name)
tokens = '11.0E-9 22.0 33.0 44.0 55.0\n'\
'11.1 22.1 33.1 44.1\n'\
'11.2 22.2 33.2 44.2'\
'\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'\
'\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'\
'\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'
values = [22.0]
start_time = time.time()
result_file = create_results(tokens, big=True)
sys.stderr.write('big file created in : ' + str(time.time() - start_time) + 's\n')
start_time = time.time()
results = coupling_tools.get(filename=result_file,
skip_lines=[-92],
skip_cols= [1]
)
time_to_parse = str(int(time.time() - start_time))
check_results(values, results)
remove_file(result_file)
# get file=3G -> 15s on bx (ssd, core [email protected])
sys.stderr.write('get token skip line col in file in: ' + time_to_parse + 's\n')
if int(time_to_parse) > max_time:
print('time to get token took too long (should be ' + str(max_time)+'s max)')
exit(1)
else:
print('get line col: ok')
def check_execute():
print("=== " + sys._getframe().f_code.co_name)
# ensure previous print is print before following command output
sys.stdout.flush()
if 'win' not in sys.platform:
coupling_tools.execute('/bin/ls /bin/kill')
coupling_tools.execute('echo "hi"', is_shell=True)
coupling_tools.execute('echo "hi"', is_shell=True,
shell_exe='/bin/bash')
ret, stdout = coupling_tools.execute('/bin/ls /bin/kill',
get_stdout=True)
if stdout != b'/bin/kill\n':
raise Exception("coupling_tools.execute error!")
ret, stdout, stderr = coupling_tools.execute('/bin/ls /bin/kill',
get_stdout=True, get_stderr=True)
if stdout != b'/bin/kill\n' and stderr != b'':
raise Exception("coupling_tools.execute error!")
ret, stderr = coupling_tools.execute('/bin/ls /bin/kill 1>&2',
is_shell=True,
get_stderr=True)
if stderr != b'/bin/kill\n':
raise Exception("coupling_tools.execute error!")
else:
coupling_tools.execute('cmd.exe /c echo /bin/kill')
exec_in_wine = os.path.exists('/boot')
if exec_in_wine:
            # the 'echo' command does not work in Python on Wine for an unknown reason
print('hi')
print('hi')
else:
# native windows
coupling_tools.execute('echo hi', is_shell=True)
coupling_tools.execute('echo hi', is_shell=True, hide_win=False)
ret, stdout = coupling_tools.execute('echo hello', is_shell=True,
get_stdout=True)
if ret != 0 or not str(stdout).startswith('hello'):
raise Exception("coupling_tools.execute error!")
print("execute ok")
check_execute()
check_replace()
check_get_regex()
check_get_regex_perf()
check_get_line_col()
check_get_tokens()
check_get_tokens_skip()
check_get_array()
check_get_tokens_line_col()
check_get_tokens_skip_line_col()
check_get_tokens_perf()
check_get_tokens_skip_perf()
check_get_line_col_perf()
exit(0)<|fim▁end|>
|
pass
else:
raise Exception("! should have fail !")
|
<|file_name|>propsValidators.js<|end_file_name|><|fim▁begin|>import PropTypes from 'prop-types';
export const validAxisType = PropTypes.oneOf([
'category',
'linear',
'logarithmic',
'datetime'
]);
export const validChartType = PropTypes.oneOf([
'area',
'arearange',
'areaspline',
'areasplinerange',
'bar',
'boxplot',
'bubble',
'candlestick',
'column',
'columnrange',
'errorbar',
'flags',
'funnel',
'line',
'ohlc',<|fim▁hole|> 'scatter',
'solidgauge',
'spline',
'waterfall'
]);<|fim▁end|>
|
'pie',
'polygon',
'pyramid',
|
<|file_name|>UserExtTokenAlreadyRegistered.js<|end_file_name|><|fim▁begin|>module.exports = {
'code': 1100,
'type': 'UserExtTokenAlreadyRegistered',
<|fim▁hole|><|fim▁end|>
|
'message': 'The provided external authentification is already used',
'http': 400
};
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>module.exports = function (args, opts) {
if (!opts) opts = {};
var flags = { bools : {}, strings : {}, unknownFn: null };
if (typeof opts['unknown'] === 'function') {
flags.unknownFn = opts['unknown'];
}
if (typeof opts['boolean'] === 'boolean' && opts['boolean']) {
flags.allBools = true;
} else {
[].concat(opts['boolean']).filter(Boolean).forEach(function (key) {
flags.bools[key] = true;
});
}
var aliases = {};
Object.keys(opts.alias || {}).forEach(function (key) {
aliases[key] = [].concat(opts.alias[key]);
aliases[key].forEach(function (x) {
aliases[x] = [key].concat(aliases[key].filter(function (y) {
return x !== y;
}));
});
});
[].concat(opts.string).filter(Boolean).forEach(function (key) {
flags.strings[key] = true;
if (aliases[key]) {
flags.strings[aliases[key]] = true;
}
});
var defaults = opts['default'] || {};
var argv = { _ : [] };
Object.keys(flags.bools).forEach(function (key) {
setArg(key, defaults[key] === undefined ? false : defaults[key]);
});
var notFlags = [];
if (args.indexOf('--') !== -1) {
notFlags = args.slice(args.indexOf('--')+1);
args = args.slice(0, args.indexOf('--'));
}
function argDefined(key, arg) {
return (flags.allBools && /^--[^=]+$/.test(arg)) ||
flags.strings[key] || flags.bools[key] || aliases[key];
}
function setArg (key, val, arg) {
if (arg && flags.unknownFn && !argDefined(key, arg)) {
if (flags.unknownFn(arg) === false) return;
}
var value = !flags.strings[key] && isNumber(val)
? Number(val) : val
;
setKey(argv, key.split('.'), value);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), value);
});
}
for (var i = 0; i < args.length; i++) {
var arg = args[i];
if (/^--.+=/.test(arg)) {
// Using [\s\S] instead of . because js doesn't support the
// 'dotall' regex modifier. See:
// http://stackoverflow.com/a/1068308/13216
var m = arg.match(/^--([^=]+)=([\s\S]*)$/);
setArg(m[1], m[2], arg);
}
else if (/^--no-.+/.test(arg)) {
var key = arg.match(/^--no-(.+)/)[1];
setArg(key, false, arg);
}
else if (/^--.+/.test(arg)) {
var key = arg.match(/^--(.+)/)[1];
var next = args[i + 1];
if (next !== undefined && !/^-/.test(next)
&& !flags.bools[key]
&& !flags.allBools
&& (aliases[key] ? !flags.bools[aliases[key]] : true)) {
setArg(key, next, arg);
i++;
}
else if (/^(true|false)$/.test(next)) {
setArg(key, next === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);<|fim▁hole|> }
}
else if (/^-[^-]+/.test(arg)) {
var letters = arg.slice(1,-1).split('');
var broken = false;
for (var j = 0; j < letters.length; j++) {
var next = arg.slice(j+2);
if (next === '-') {
setArg(letters[j], next, arg)
continue;
}
if (/[A-Za-z]/.test(letters[j])
&& /-?\d+(\.\d*)?(e-?\d+)?$/.test(next)) {
setArg(letters[j], next, arg);
broken = true;
break;
}
if (letters[j+1] && letters[j+1].match(/\W/)) {
setArg(letters[j], arg.slice(j+2), arg);
broken = true;
break;
}
else {
setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg);
}
}
var key = arg.slice(-1)[0];
if (!broken && key !== '-') {
if (args[i+1] && !/^(-|--)[^-]/.test(args[i+1])
&& !flags.bools[key]
&& (aliases[key] ? !flags.bools[aliases[key]] : true)) {
setArg(key, args[i+1], arg);
i++;
}
else if (args[i+1] && /true|false/.test(args[i+1])) {
setArg(key, args[i+1] === 'true', arg);
i++;
}
else {
setArg(key, flags.strings[key] ? '' : true, arg);
}
}
}
else {
if (!flags.unknownFn || flags.unknownFn(arg) !== false) {
argv._.push(
flags.strings['_'] || !isNumber(arg) ? arg : Number(arg)
);
}
if (opts.stopEarly) {
argv._.push.apply(argv._, args.slice(i + 1));
break;
}
}
}
Object.keys(defaults).forEach(function (key) {
if (!hasKey(argv, key.split('.'))) {
setKey(argv, key.split('.'), defaults[key]);
(aliases[key] || []).forEach(function (x) {
setKey(argv, x.split('.'), defaults[key]);
});
}
});
if (opts['--']) {
argv['--'] = new Array();
notFlags.forEach(function(key) {
argv['--'].push(key);
});
}
else {
notFlags.forEach(function(key) {
argv._.push(key);
});
}
return argv;
};
function hasKey (obj, keys) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
o = (o[key] || {});
});
var key = keys[keys.length - 1];
return key in o;
}
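// Walk (and create as needed) the nested objects named by a dotted key, then assign
// the value; repeated assignments to the same key are collected into an array.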
function setKey (obj, keys, value) {
var o = obj;
keys.slice(0,-1).forEach(function (key) {
if (o[key] === undefined) o[key] = {};
o = o[key];
});
var key = keys[keys.length - 1];
if (o[key] === undefined || typeof o[key] === 'boolean') {
o[key] = value;
}
else if (Array.isArray(o[key])) {
o[key].push(value);
}
else {
o[key] = [ o[key], value ];
}
}
function isNumber (x) {
if (typeof x === 'number') return true;
if (/^0x[0-9a-f]+$/i.test(x)) return true;
return /^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/.test(x);
}<|fim▁end|>
| |
<|file_name|>0018_auto_20170721_0611.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
<|fim▁hole|>from django.db import migrations, models
import datetime
class Migration(migrations.Migration):
dependencies = [
('ccx', '0017_auto_20170721_0437'),
]
operations = [
migrations.AlterField(
model_name='customcourseforedx',
name='time',
field=models.DateTimeField(default=datetime.datetime(2017, 7, 21, 6, 10, 51, 471098)),
),
]<|fim▁end|>
| |
<|file_name|>PROC_O_IBK_WSYH_ECCIF.py<|end_file_name|><|fim▁begin|>#coding=UTF-8
from pyspark import SparkContext, SparkConf, SQLContext, Row, HiveContext
from pyspark.sql.types import *
from datetime import date, datetime, timedelta
import sys, re, os
st = datetime.now()
conf = SparkConf().setAppName('PROC_O_IBK_WSYH_ECCIF').setMaster(sys.argv[2])
sc = SparkContext(conf = conf)
sc.setLogLevel('WARN')
if len(sys.argv) > 5:
if sys.argv[5] == "hive":
sqlContext = HiveContext(sc)
else:
sqlContext = SQLContext(sc)
hdfs = sys.argv[3]
dbname = sys.argv[4]
# dates needed for processing
etl_date = sys.argv[1]
# ETL date
V_DT = etl_date
# previous day's date
V_DT_LD = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8])) + timedelta(-1)).strftime("%Y%m%d")
# first day of the month
V_DT_FMD = date(int(etl_date[0:4]), int(etl_date[4:6]), 1).strftime("%Y%m%d")
# last day of the previous month
V_DT_LMD = (date(int(etl_date[0:4]), int(etl_date[4:6]), 1) + timedelta(-1)).strftime("%Y%m%d")
# 10-character date (YYYY-MM-DD)
V_DT10 = (date(int(etl_date[0:4]), int(etl_date[4:6]), int(etl_date[6:8]))).strftime("%Y-%m-%d")
V_STEP = 0
O_TX_WSYH_ECCIF = sqlContext.read.parquet(hdfs+'/O_TX_WSYH_ECCIF/*')
O_TX_WSYH_ECCIF.registerTempTable("O_TX_WSYH_ECCIF")
# Task [12] 001-01::
V_STEP = V_STEP + 1
# first delete all existing data from the target table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_TX_WSYH_ECCIF/*.parquet")
# copy yesterday's full snapshot over from the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_TX_WSYH_ECCIF_BK/"+V_DT_LD+".parquet /"+dbname+"/F_TX_WSYH_ECCIF/"+V_DT+".parquet")
F_TX_WSYH_ECCIF = sqlContext.read.parquet(hdfs+'/F_TX_WSYH_ECCIF/*')
F_TX_WSYH_ECCIF.registerTempTable("F_TX_WSYH_ECCIF")
sql = """
SELECT A.CIFSEQ AS CIFSEQ
,A.MODULEID AS MODULEID
,A.CIFENGNAME AS CIFENGNAME
,A.CIFNAMEPY AS CIFNAMEPY
,A.CIFNAME AS CIFNAME
,A.CIFLEVEL AS CIFLEVEL
,A.CORECIFLEVEL AS CORECIFLEVEL
,A.COREDEPTSEQ AS COREDEPTSEQ
,A.CIFTYPE AS CIFTYPE
,A.CIFCONTROL AS CIFCONTROL
,A.CIFMONITOR AS CIFMONITOR
,A.CIFEXEMPT AS CIFEXEMPT
,A.CIFLOANFLG AS CIFLOANFLG
,A.CIFFINVIPFLG AS CIFFINVIPFLG
,A.CCQUERYPWD AS CCQUERYPWD
,A.TRANSFERTYPE AS TRANSFERTYPE
,A.CIFDEPTSEQ AS CIFDEPTSEQ
,A.CIFSTATE AS CIFSTATE
,A.CREATEUSERSEQ AS CREATEUSERSEQ
,A.CREATEDEPTSEQ AS CREATEDEPTSEQ
,A.CREATETIME AS CREATETIME
,A.UPDATEUSERSEQ AS UPDATEUSERSEQ
,A.UPDATEDEPTSEQ AS UPDATEDEPTSEQ
,A.UPDATETIME AS UPDATETIME
,A.UPDATEMCHANNEL AS UPDATEMCHANNEL
,A.UPDATEJNLNO AS UPDATEJNLNO
,A.UPDATECIFSEQ AS UPDATECIFSEQ
,A.FR_ID AS FR_ID
,'IBK' AS ODS_SYS_ID
,V_DT AS ODS_ST_DATE
   FROM O_TX_WSYH_ECCIF A                               --e-banking participant information table
"""
sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_TX_WSYH_ECCIF_INNTMP1 = sqlContext.sql(sql)
F_TX_WSYH_ECCIF_INNTMP1.registerTempTable("F_TX_WSYH_ECCIF_INNTMP1")
#F_TX_WSYH_ECCIF = sqlContext.read.parquet(hdfs+'/F_TX_WSYH_ECCIF/*')
#F_TX_WSYH_ECCIF.registerTempTable("F_TX_WSYH_ECCIF")
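# keep only the rows of the existing full table whose CIFSEQ is absent from today's
# increment; the increment itself is unioned back in below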
sql = """
 SELECT DST.CIFSEQ                --customer sequence number:src.CIFSEQ
       ,DST.MODULEID              --module code:src.MODULEID
       ,DST.CIFENGNAME            --customer English name:src.CIFENGNAME
       ,DST.CIFNAMEPY             --customer name in pinyin:src.CIFNAMEPY
       ,DST.CIFNAME               --customer name:src.CIFNAME
       ,DST.CIFLEVEL              --e-banking level:src.CIFLEVEL
       ,DST.CORECIFLEVEL          --core-system banking level:src.CORECIFLEVEL
       ,DST.COREDEPTSEQ           --core customer owning-institution sequence number:src.COREDEPTSEQ
       ,DST.CIFTYPE               --customer category:src.CIFTYPE
       ,DST.CIFCONTROL            --controlled-customer flag:src.CIFCONTROL
       ,DST.CIFMONITOR            --monitored-customer flag:src.CIFMONITOR
       ,DST.CIFEXEMPT             --exempt-customer flag:src.CIFEXEMPT
       ,DST.CIFLOANFLG            --loan-customer flag:src.CIFLOANFLG
       ,DST.CIFFINVIPFLG          --wealth-management VIP customer flag:src.CIFFINVIPFLG
       ,DST.CCQUERYPWD            --credit card query password:src.CCQUERYPWD
       ,DST.TRANSFERTYPE          --external transfer attribute:src.TRANSFERTYPE
       ,DST.CIFDEPTSEQ            --customer home institution:src.CIFDEPTSEQ
       ,DST.CIFSTATE              --status:src.CIFSTATE
       ,DST.CREATEUSERSEQ         --creating user sequence number:src.CREATEUSERSEQ
       ,DST.CREATEDEPTSEQ         --creating institution sequence number:src.CREATEDEPTSEQ
       ,DST.CREATETIME            --creation time:src.CREATETIME
       ,DST.UPDATEUSERSEQ         --updating user sequence number:src.UPDATEUSERSEQ
       ,DST.UPDATEDEPTSEQ         --updating institution sequence number:src.UPDATEDEPTSEQ
       ,DST.UPDATETIME            --update time:src.UPDATETIME
       ,DST.UPDATEMCHANNEL        --maintenance module channel:src.UPDATEMCHANNEL
       ,DST.UPDATEJNLNO           --maintenance journal number:src.UPDATEJNLNO
       ,DST.UPDATECIFSEQ          --maintaining customer number:src.UPDATECIFSEQ
       ,DST.FR_ID                 --legal entity code:src.FR_ID
       ,DST.ODS_SYS_ID            --source system code:src.ODS_SYS_ID
       ,DST.ODS_ST_DATE           --system platform date:src.ODS_ST_DATE
FROM F_TX_WSYH_ECCIF DST
LEFT JOIN F_TX_WSYH_ECCIF_INNTMP1 SRC
ON SRC.CIFSEQ = DST.CIFSEQ
WHERE SRC.CIFSEQ IS NULL """<|fim▁hole|>sql = re.sub(r"\bV_DT\b", "'"+V_DT10+"'", sql)
F_TX_WSYH_ECCIF_INNTMP2 = sqlContext.sql(sql)
dfn="F_TX_WSYH_ECCIF/"+V_DT+".parquet"
F_TX_WSYH_ECCIF_INNTMP2=F_TX_WSYH_ECCIF_INNTMP2.unionAll(F_TX_WSYH_ECCIF_INNTMP1)
F_TX_WSYH_ECCIF_INNTMP1.cache()
F_TX_WSYH_ECCIF_INNTMP2.cache()
nrowsi = F_TX_WSYH_ECCIF_INNTMP1.count()
nrowsa = F_TX_WSYH_ECCIF_INNTMP2.count()
F_TX_WSYH_ECCIF_INNTMP2.write.save(path = hdfs + '/' + dfn, mode='overwrite')
F_TX_WSYH_ECCIF_INNTMP1.unpersist()
F_TX_WSYH_ECCIF_INNTMP2.unpersist()
et = datetime.now()
print("Step %d start[%s] end[%s] use %d seconds, insert F_TX_WSYH_ECCIF lines %d, all lines %d") % (V_STEP, st.strftime("%H:%M:%S"), et.strftime("%H:%M:%S"), (et-st).seconds, nrowsi, nrowsa)
ret = os.system("hdfs dfs -mv /"+dbname+"/F_TX_WSYH_ECCIF/"+V_DT_LD+".parquet /"+dbname+"/F_TX_WSYH_ECCIF_BK/")
# first delete today's data from the backup table
ret = os.system("hdfs dfs -rm -r /"+dbname+"/F_TX_WSYH_ECCIF_BK/"+V_DT+".parquet")
# copy today's full snapshot from the main table to the backup table
ret = os.system("hdfs dfs -cp -f /"+dbname+"/F_TX_WSYH_ECCIF/"+V_DT+".parquet /"+dbname+"/F_TX_WSYH_ECCIF_BK/"+V_DT+".parquet")<|fim▁end|>
| |
<|file_name|>calib.py<|end_file_name|><|fim▁begin|>#
# My first attempt at python
# calibrate accelerometer
#
import re
import scipy
from scipy import optimize
from scipy import linalg
from pylab import *
#
# parse the log
#
def read_log(ac_id, filename, sensor):
f = open(filename, 'r')
pattern = re.compile("(\S+) "+ac_id+" IMU_"+sensor+"_RAW (\S+) (\S+) (\S+)")
list_meas = []
while 1:
line = f.readline().strip()
if line == '':
break
m=re.match(pattern, line)
if m:
list_meas.append([float(m.group(2)), float(m.group(3)), float(m.group(4))])
return scipy.array(list_meas)
#
# select only non-noisy data
#
def filter_meas(meas, window_size, noise_threshold):
filtered_meas = []
filtered_idx = []
for i in range(window_size,len(meas)-window_size):
noise = meas[i-window_size:i+window_size,:].std(axis=0)
if linalg.norm(noise) < noise_threshold:
filtered_meas.append(meas[i,:])
filtered_idx.append(i)
return scipy.array(filtered_meas), filtered_idx
#
# initial boundary based calibration
#
def get_min_max_guess(meas, scale):
max_meas = meas[:,:].max(axis=0)
min_meas = meas[:,:].min(axis=0)
n = (max_meas + min_meas) / 2
sf = 2*scale/(max_meas - min_meas)
return scipy.array([n[0], n[1], n[2], sf[0], sf[1], sf[2]])
#
# scale the set of measurements
#
def scale_measurements(meas, p):
l_comp = [];
l_norm = [];
for m in meas[:,]:
sm = (m - p[0:3])*p[3:6]
l_comp.append(sm)
l_norm.append(linalg.norm(sm))
return scipy.array(l_comp), scipy.array(l_norm)
#
# print xml for airframe file
#
def print_xml(p, sensor, res):
print ""
print "<define name=\""+sensor+"_X_NEUTRAL\" value=\""+str(int(round(p[0])))+"\"/>"
print "<define name=\""+sensor+"_Y_NEUTRAL\" value=\""+str(int(round(p[1])))+"\"/>"
print "<define name=\""+sensor+"_Z_NEUTRAL\" value=\""+str(int(round(p[2])))+"\"/>"
print "<define name=\""+sensor+"_X_SENS\" value=\""+str(p[3]*2**res)+"\" integer=\"16\"/>"
print "<define name=\""+sensor+"_Y_SENS\" value=\""+str(p[4]*2**res)+"\" integer=\"16\"/>"
print "<define name=\""+sensor+"_Z_SENS\" value=\""+str(p[5]*2**res)+"\" integer=\"16\"/>"
<|fim▁hole|>ac_id = "151"
if 1:
sensor = "ACCEL"
sensor_ref = 9.81
sensor_res = 10
noise_window = 20;
noise_threshold = 40;
else:
sensor = "MAG"
sensor_ref = 1.
sensor_res = 11
noise_window = 10;
noise_threshold = 1000;
print "reading file "+filename+" for aircraft "+ac_id+" and sensor "+sensor
measurements = read_log(ac_id, filename, sensor)
print "found "+str(len(measurements))+" records"
flt_meas, flt_idx = filter_meas(measurements, noise_window, noise_threshold)
print "remaining "+str(len(flt_meas))+" after low pass"
p0 = get_min_max_guess(flt_meas, sensor_ref)
cp0, np0 = scale_measurements(flt_meas, p0)
print "initial guess : "+str(np0.mean())+" "+str(np0.std())
print p0
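# residuals between the reference norm and the norms of the scaled measurements,
# minimized by optimize.leastsq below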
def err_func(p,meas,y):
cp, np = scale_measurements(meas, p)
err = y*scipy.ones(len(meas)) - np
return err
p1, success = optimize.leastsq(err_func, p0[:], args=(flt_meas, sensor_ref))
cp1, np1 = scale_measurements(flt_meas, p1)
print "optimized guess : "+str(np1.mean())+" "+str(np1.std())
print p1
print_xml(p1, sensor, sensor_res)
subplot(3,1,1)
plot(measurements[:,0])
plot(measurements[:,1])
plot(measurements[:,2])
plot(flt_idx, flt_meas[:,0], 'ro')
plot(flt_idx, flt_meas[:,1], 'ro')
plot(flt_idx, flt_meas[:,2], 'ro')
subplot(3,2,3)
plot(cp0[:,0]);
plot(cp0[:,1]);
plot(cp0[:,2]);
plot(-sensor_ref*scipy.ones(len(flt_meas)));
plot(sensor_ref*scipy.ones(len(flt_meas)));
subplot(3,2,4)
plot(np0);
plot(sensor_ref*scipy.ones(len(flt_meas)));
subplot(3,2,5)
plot(cp1[:,0]);
plot(cp1[:,1]);
plot(cp1[:,2]);
plot(-sensor_ref*scipy.ones(len(flt_meas)));
plot(sensor_ref*scipy.ones(len(flt_meas)));
subplot(3,2,6)
plot(np1);
plot(sensor_ref*scipy.ones(len(flt_meas)));
show();<|fim▁end|>
|
filename = 'log_accel_booz2_a2'
|
<|file_name|>CustomInstanceConfigurer.tsx<|end_file_name|><|fim▁begin|>import React from 'react';
import Select, { Option } from 'react-select';
import { HelpField } from '@spinnaker/core';
export interface ICustomInstanceConfig {
vCpuCount: number;
memory: number;
instanceFamily: string;
}
export interface ICustomInstanceConfigurerProps {
vCpuList: number[];
memoryList: number[];
instanceFamilyList: string[];
selectedVCpuCount: number;
selectedMemory: number;
selectedInstanceFamily: string;
onChange: (config: ICustomInstanceConfig) => void;
}
export class CustomInstanceConfigurer extends React.Component<ICustomInstanceConfigurerProps> {
public render() {
const instanceFamilyOptions: Option[] = (this.props.instanceFamilyList || []).map((instanceFamily) => ({
label: instanceFamily,
value: instanceFamily,
}));
const vCpuOptions: Option[] = (this.props.vCpuList || []).map((vCpu) => ({ label: vCpu + '', value: vCpu }));
const memoryOptions: Option[] = (this.props.memoryList || []).map((memory) => ({
label: memory + '',
value: memory,
}));
const selectedVCpuCountLabel = this.props.selectedVCpuCount ? this.props.selectedVCpuCount + '' : null;
const selectedMemoryLabel = this.props.selectedMemory ? this.props.selectedMemory + '' : null;
const selectedInstanceFamilyLabel = this.props.selectedInstanceFamily ? this.props.selectedInstanceFamily : null;
return (
<div>
<div className="row">
<div className="col-md-5 sm-label-right">
<b>Family </b>
</div>
<div className="col-md-3">
<Select
options={instanceFamilyOptions}
clearable={false}
value={{ label: selectedInstanceFamilyLabel, value: this.props.selectedInstanceFamily }}
onChange={this.handleInstanceFamilyChange}
/>
</div>
</div>
<div className="row">
<div className="col-md-5 sm-label-right">
<b>Cores </b>
<HelpField id="gce.instance.customInstance.cores" />
</div>
<div className="col-md-3">
<Select
options={vCpuOptions}
clearable={false}
value={{ label: selectedVCpuCountLabel, value: this.props.selectedVCpuCount }}
onChange={this.handleVCpuChange}
/>
</div>
</div>
<div className="row" style={{ marginTop: '5px' }}>
<div className="col-md-5 sm-label-right">
<b>Memory (Gb) </b><|fim▁hole|> <Select
options={memoryOptions}
clearable={false}
value={{ label: selectedMemoryLabel, value: this.props.selectedMemory }}
onChange={this.handleMemoryChange}
/>
</div>
</div>
</div>
);
}
private handleVCpuChange = (option: Option) => {
const value = (option ? option.value : null) as number;
this.props.onChange({
instanceFamily: this.props.selectedInstanceFamily,
vCpuCount: value,
memory: this.props.selectedMemory,
});
};
private handleMemoryChange = (option: Option) => {
const value = (option ? option.value : null) as number;
this.props.onChange({
instanceFamily: this.props.selectedInstanceFamily,
vCpuCount: this.props.selectedVCpuCount,
memory: value,
});
};
private handleInstanceFamilyChange = (option: Option) => {
const value = (option ? option.value : null) as string;
this.props.onChange({
instanceFamily: value,
vCpuCount: this.props.selectedVCpuCount,
memory: this.props.selectedMemory,
});
};
}<|fim▁end|>
|
<HelpField id="gce.instance.customInstance.memory" />
</div>
<div className="col-md-3">
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::io::BufReader;
use std::fs::File;
use std::str::FromStr;
use std::io;
use std::io::prelude::*;
use std::collections::BinaryHeap;
use std::cmp::Reverse;
#[derive(Debug)]
struct MaxHeap{
heap:BinaryHeap<u64>,
count: usize
}<|fim▁hole|>impl MaxHeap {
fn new() -> MaxHeap{
let heap = BinaryHeap::new();
let count = 0;
MaxHeap {
heap,
count
}
}
fn push(&mut self, new_value: u64) {
self.count = self.count + 1;
self.heap.push(new_value);
}
fn top(&self) -> u64 {
let v = self.heap.peek().expect("max heap underflow");
v.clone()
}
fn pop(&mut self) -> u64 {
let v = self.heap.pop().expect("max heap underflow");
self.count = self.count - 1;
v
}
}
#[derive(Debug)]
struct MinHeap {
heap:BinaryHeap<Reverse<u64>>,
count: usize
}
impl MinHeap {
fn new() -> MinHeap{
let heap = BinaryHeap::new();
let count = 0;
MinHeap {
heap,
count
}
}
fn top(&self) -> u64 {
let Reverse(v) = self.heap.peek().expect("min heap underflow");
v.clone()
}
fn push(&mut self, new_value: u64) {
self.count = self.count + 1;
self.heap.push(Reverse(new_value));
}
fn pop(&mut self) -> u64 {
let Reverse(v) = self.heap.pop().expect("min heap underflow");
self.count = self.count - 1;
v
}
}
#[test]
fn min_heap_test1() {
let mut h1 = MinHeap::new();
h1.push(21);
h1.push(10);
h1.push(20);
h1.push(30);
h1.push(15);
assert_eq!(5, h1.count);
assert_eq!(h1.pop(), 10);
assert_eq!(h1.pop(), 15);
assert_eq!(h1.pop(), 20);
assert_eq!(h1.pop(), 21);
assert_eq!(h1.count, 1);
}
#[test]
fn max_heap_test1() {
let mut h1 = MaxHeap::new();
h1.push(21);
h1.push(10);
h1.push(20);
h1.push(30);
h1.push(15);
assert_eq!(5, h1.count);
assert_eq!(30, h1.pop());
assert_eq!(21, h1.pop());
assert_eq!(20, h1.pop());
assert_eq!(15, h1.pop());
assert_eq!(h1.count, 1);
}
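// Push `value` onto the heap for the correct side of the current median, then
// rebalance so the two heap sizes never differ by more than one element.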
fn insert_to_heaps(min_heap:&mut MinHeap, max_heap:&mut MaxHeap, value: u64, median: u64){
if value > median {
min_heap.push(value);
}
else {
max_heap.push(value);
}
if min_heap.count > max_heap.count {
let mut diff = min_heap.count - max_heap.count;
while diff > 1 {
let v = min_heap.pop();
max_heap.push(v);
diff = min_heap.count - max_heap.count;
}
}
else {
if max_heap.count > min_heap.count {
let mut diff = max_heap.count - min_heap.count;
while diff > 1 {
let v = max_heap.pop();
min_heap.push(v);
diff = max_heap.count - min_heap.count;
}
}
}
}
/*
#[test]
fn insert_test1()
{
let mut mn = MinHeap::new();
let mut mx = MaxHeap::new();
for i in 0..35 {
insert_to_heaps(&mut mn, &mut mx, i);
if mn.count > mx.count {
assert_eq!(mn.count - 1, mx.count);
}
if mx.count > mn.count {
assert_eq!(mx.count - 1, mn.count);
}
}
println!("min {:?} max = {:?}", mn, mx);
}*/
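// Sum of running medians: stream each value into the two-heap structure, take the
// top of the larger heap as the current median after every insertion, and return
// the accumulated sum modulo 10000.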
fn median_sum(v:&Vec<u64>) -> u64 {
let mut junk = Vec::new();
if v.len() < 1 {
return 0;
}
let mut median = v[0] as u64;
let mut curr_median = median;
let mut mn = MinHeap::new();
let mut mx = MaxHeap::new();
mx.push(v[0]);
junk.push(v[0]);
for i in 1..v.len() {
let value = v[i];
junk.push(value);
insert_to_heaps(&mut mn, &mut mx, value, curr_median);
if mn.count > mx.count {
curr_median = mn.top();
}
else {
curr_median = mx.top();
}
median = median + curr_median;
assert!(i+1 == (mn.count + mx.count));
// println!("array = {:?} min = {:?}, max {:?}", junk, mn, mx);
}
median % 10000
}
#[test]
fn medians_sum_test1() {
let v = vec![1,666,10,667,100,2,3];
let sum = median_sum(&v);
assert_eq!(142, sum);
}
#[test]
fn medians_sum_test2() {
let v = vec![6331,2793,1640,9290,225,625,6195,2303,5685,1354];
let sum = median_sum(&v);
assert_eq!(9335, sum);
}
fn print_medians(file_name: &str) -> io::Result<u64> {
let f = File::open(file_name)?;
let reader = BufReader::new(f);
let mut v = Vec::new();
for line in reader.lines() {
let line = line.unwrap();
let value = u64::from_str(&line).expect("error parsing value");
v.push(value);
}
Ok(median_sum(&v))
}
fn main() {
for arg in std::env::args().skip(1) {
let value = print_medians(&arg).expect("failed to read");
println!("answer = {}", value);
}
}<|fim▁end|>
| |
<|file_name|>highlight.tsx<|end_file_name|><|fim▁begin|>/**
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
import * as React from 'react';
import { OptionsState } from '../state/optionsState';
import { Parser } from './parseText';
function createRegExp(highlights: string[]): RegExp {
let regex: string;
highlights.forEach((h: string) => {
if (!regex) {
regex = '(?:^|\\s)@?' + h + ':?(?:$|\\s)';
} else {
regex += '|(?:^|\\s)@?' + h + ':?(?:$|\\s)';
}
});
return new RegExp(regex, 'g');
}
export function parseHighlight(text: string, opts: OptionsState, next: Parser): [string, JSX.Element] {
if (!opts.parsing.highlight || opts.parsing.highlightKeywords.length <= 0) {
return [text, null];
}
const regex = createRegExp(opts.parsing.highlightKeywords);
const match = regex.exec(text);
if (!match) {
return [text, null];
}<|fim▁hole|> <span key={genID()} className={'chat-room-highlight'}>{next(text, opts, next)}</span>
];
}<|fim▁end|>
|
// game.trigger('chat-play-sound-highlight');
return [
null,
|
<|file_name|>ip.py<|end_file_name|><|fim▁begin|>from netfields import InetAddressField, CidrAddressField
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from nodeshot.core.base.models import BaseAccessLevel
from ..managers import NetAccessLevelManager
from choices import IP_PROTOCOLS
class Ip(BaseAccessLevel):
""" IP Address Model """
interface = models.ForeignKey('net.Interface', verbose_name=_('interface'))
address = InetAddressField(verbose_name=_('ip address'), unique=True, db_index=True)
protocol = models.CharField(_('IP Protocol Version'), max_length=4, choices=IP_PROTOCOLS, default=IP_PROTOCOLS[0][0], blank=True)
netmask = CidrAddressField(_('netmask (CIDR, eg: 10.40.0.0/24)'), blank=True, null=True)
objects = NetAccessLevelManager()
class Meta:
app_label = 'net'
permissions = (('can_view_ip', 'Can view ip'),)
verbose_name = _('ip address')
verbose_name_plural = _('ip addresses')
def __unicode__(self):
return '%s: %s' % (self.protocol, self.address)
def clean(self, *args, **kwargs):
""" TODO """
# netaddr.IPAddress('10.40.2.1') in netaddr.IPNetwork('10.40.0.0/24')
pass
def save(self, *args, **kwargs):
"""
Determines ip protocol version automatically.
Stores address in interface shortcuts for convenience.
"""<|fim▁hole|>
# TODO: do we really need this?
# save shortcut on interfaces
#ip_cached_list = self.interface.ip_addresses
## if not present in interface shorctus add it to the list
#if str(self.address) not in ip_cached_list:
# # recalculate cached_ip_list
# recalculated_ip_cached_list = []
# for ip in self.interface.ip_set.all():
# recalculated_ip_cached_list.append(str(ip.address))
# # rebuild string in format "<ip_1>, <ip_2>"
# self.interface.data['ip_addresses'] = recalculated_ip_cached_list
# self.interface.save()
@property
def owner(self):
return self.interface.owner
if 'grappelli' in settings.INSTALLED_APPS:
@staticmethod
def autocomplete_search_fields():
return ('address__icontains',)<|fim▁end|>
|
self.protocol = 'ipv%d' % self.address.version
# save
super(Ip, self).save(*args, **kwargs)
|
<|file_name|>csv_input.py<|end_file_name|><|fim▁begin|>import numpy as np
from assorted.GraphInput import GraphInput
from model.component.component_model import ComponentModel
class CsvInput(ComponentModel):
name = "CsvInput"
default_out_sockets = [{'position': [0, -20],
'name': 'Output'}]
default_attributes = {'path': '<argument>',
'n_columns': '3',
'separator': ','}
graph_input = None
def __init__(self, manifest=None, identifier=None):
ComponentModel.__init__(self, manifest=manifest, identifier=identifier)
self.graph_input = GraphInput(self.get_name()+"_input", [3])
def parse_column_string(self, string):
return np.fromstring(string, sep=' ', dtype=np.int32)
<|fim▁hole|> return [self.graph_input]
def compile_theano(self):
self.graph_input.compile_theano()
self.push_by_index(0, self.graph_input.variable)<|fim▁end|>
|
def theano_inputs(self):
|
<|file_name|>auto_config.py<|end_file_name|><|fim▁begin|>"""
Auto Configuration Helper
"""
import logging
import os
import requests
from urlparse import urlparse
from constants import InsightsConstants as constants
from cert_auth import rhsmCertificate
from connection import InsightsConnection
from config import CONFIG as config
logger = logging.getLogger(__name__)
APP_NAME = constants.app_name
def verify_connectivity():
"""
Verify connectivity to satellite server
"""
logger.debug("Verifying Connectivity")
ic = InsightsConnection()
try:
branch_info = ic.branch_info()
except requests.ConnectionError as e:
logger.debug(e)
logger.debug("Failed to connect to satellite")
return False
except LookupError as e:
logger.debug(e)
logger.debug("Failed to parse response from satellite")
return False
try:
remote_leaf = branch_info['remote_leaf']
return remote_leaf
except LookupError as e:
logger.debug(e)
logger.debug("Failed to find accurate branch_info")
return False
def set_auto_configuration(hostname, ca_cert, proxy):
"""
Set config based on discovered data
"""
logger.debug("Attempting to auto configure!")
logger.debug("Attempting to auto configure hostname: %s", hostname)
logger.debug("Attempting to auto configure CA cert: %s", ca_cert)
logger.debug("Attempting to auto configure proxy: %s", proxy)
saved_base_url = config['base_url']
if ca_cert is not None:
saved_cert_verify = config['cert_verify']
config['cert_verify'] = ca_cert
if proxy is not None:
saved_proxy = config['proxy']
config['proxy'] = proxy
config['base_url'] = hostname + '/r/insights'
if not verify_connectivity():
logger.warn("Could not auto configure, falling back to static config")
logger.warn("See %s for additional information",
constants.default_log_file)
config['base_url'] = saved_base_url
if proxy is not None:
if saved_proxy is not None and saved_proxy.lower() == 'none':
saved_proxy = None
config['proxy'] = saved_proxy
if ca_cert is not None:
config['cert_verify'] = saved_cert_verify
def _try_satellite6_configuration():
"""
Try to autoconfigure for Satellite 6
"""
try:
from rhsm.config import initConfig
rhsm_config = initConfig()
logger.debug('Trying to autoconf Satellite 6')
cert = file(rhsmCertificate.certpath(), 'r').read()
key = file(rhsmCertificate.keypath(), 'r').read()
rhsm = rhsmCertificate(key, cert)
# This will throw an exception if we are not registered
logger.debug('Checking if system is subscription-manager registered')
rhsm.getConsumerId()
logger.debug('System is subscription-manager registered')
rhsm_hostname = rhsm_config.get('server', 'hostname')
rhsm_hostport = rhsm_config.get('server', 'port')
rhsm_proxy_hostname = rhsm_config.get('server', 'proxy_hostname').strip()
rhsm_proxy_port = rhsm_config.get('server', 'proxy_port').strip()
rhsm_proxy_user = rhsm_config.get('server', 'proxy_user').strip()
rhsm_proxy_pass = rhsm_config.get('server', 'proxy_password').strip()
proxy = None
if rhsm_proxy_hostname != "":
logger.debug("Found rhsm_proxy_hostname %s", rhsm_proxy_hostname)
proxy = "http://"
if rhsm_proxy_user != "" and rhsm_proxy_pass != "":
logger.debug("Found user and password for rhsm_proxy")
proxy = proxy + rhsm_proxy_user + ":" + rhsm_proxy_pass + "@"
proxy = proxy + rhsm_proxy_hostname + ':' + rhsm_proxy_port
logger.debug("RHSM Proxy: %s", proxy)
logger.debug("Found Satellite Server Host: %s, Port: %s",
rhsm_hostname, rhsm_hostport)
rhsm_ca = rhsm_config.get('rhsm', 'repo_ca_cert')
logger.debug("Found CA: %s", rhsm_ca)
logger.debug("Setting authmethod to CERT")
config['authmethod'] = 'CERT'
# Directly connected to Red Hat, use cert auth directly with the api<|fim▁hole|> if (rhsm_hostname == 'subscription.rhn.redhat.com' or
rhsm_hostname == 'subscription.rhsm.redhat.com'):
logger.debug("Connected to Red Hat Directly, using cert-api")
rhsm_hostname = 'cert-api.access.redhat.com'
rhsm_ca = None
else:
# Set the host path
# 'rhsm_hostname' should really be named ~ 'rhsm_host_base_url'
rhsm_hostname = rhsm_hostname + ':' + rhsm_hostport + '/redhat_access'
logger.debug("Trying to set auto_configuration")
set_auto_configuration(rhsm_hostname, rhsm_ca, proxy)
return True
except Exception as e:
logger.debug(e)
logger.debug('System is NOT subscription-manager registered')
return False
def _read_systemid_file(path):
with open(path, "r") as systemid:
data = systemid.read().replace('\n', '')
return data
def _try_satellite5_configuration():
"""
Attempt to determine Satellite 5 Configuration
"""
logger.debug("Trying Satellite 5 auto_config")
rhn_config = '/etc/sysconfig/rhn/up2date'
systemid = '/etc/sysconfig/rhn/systemid'
if os.path.isfile(rhn_config):
if os.path.isfile(systemid):
config['systemid'] = _read_systemid_file(systemid)
else:
logger.debug("Could not find Satellite 5 systemid file.")
return False
logger.debug("Found Satellite 5 Config")
rhn_conf_file = file(rhn_config, 'r')
hostname = None
for line in rhn_conf_file:
if line.startswith('serverURL='):
url = urlparse(line.split('=')[1])
hostname = url.netloc + '/redhat_access'
logger.debug("Found hostname %s", hostname)
if line.startswith('sslCACert='):
rhn_ca = line.strip().split('=')[1]
# Auto discover proxy stuff
if line.startswith('enableProxy='):
proxy_enabled = line.strip().split('=')[1]
if line.startswith('httpProxy='):
proxy_host_port = line.strip().split('=')[1]
if line.startswith('proxyUser='):
proxy_user = line.strip().split('=')[1]
if line.startswith('proxyPassword='):
proxy_password = line.strip().split('=')[1]
if hostname:
proxy = None
if proxy_enabled == "1":
proxy = "http://"
if proxy_user != "" and proxy_password != "":
logger.debug("Found user and password for rhn_proxy")
proxy = proxy + proxy_user + ':' + proxy_password
proxy = proxy + "@" + proxy_host_port
else:
proxy = proxy + proxy_host_port
logger.debug("RHN Proxy: %s", proxy)
set_auto_configuration(hostname, rhn_ca, proxy)
else:
logger.debug("Could not find hostname")
return False
return True
else:
logger.debug("Could not find rhn config")
return False
def try_auto_configuration():
"""
Try to auto-configure if we are attached to a sat5/6
"""
if config['auto_config'] and not config['offline']:
if not _try_satellite6_configuration():
_try_satellite5_configuration()<|fim▁end|>
| |
<|file_name|>IotMinerApplicationTests.java<|end_file_name|><|fim▁begin|>package de.tum.in.i17.iotminer;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;<|fim▁hole|>@RunWith(SpringRunner.class)
@SpringBootTest
public class IotMinerApplicationTests {
@Test
public void contextLoads() {
}
}<|fim▁end|>
|