prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k)
---|---|
<|file_name|>ipython.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright © 2011 Pierre Raybaut
# Licensed under the terms of the MIT License
# (see spyderlib/__init__.py for details)
"""IPython v0.11+ Plugin"""
from spyderlib.qt.QtGui import QHBoxLayout
# Local imports
from spyderlib.widgets.ipython import create_widget
from spyderlib.plugins import SpyderPluginWidget
class IPythonPlugin(SpyderPluginWidget):
"""Find in files DockWidget"""
CONF_SECTION = 'ipython'
def __init__(self, parent, args, kernel_widget, kernel_name):
super(IPythonPlugin, self).__init__(parent)
self.kernel_widget = kernel_widget
self.kernel_name = kernel_name
self.ipython_widget = create_widget(argv=args.split())
layout = QHBoxLayout()
layout.addWidget(self.ipython_widget)
self.setLayout(layout)
# Initialize plugin
self.initialize_plugin()
def toggle(self, state):
"""Toggle widget visibility"""
if self.dockwidget:
self.dockwidget.setVisible(state)
#------ SpyderPluginWidget API ---------------------------------------------
def get_plugin_title(self):
"""Return widget title"""
return "IPython (%s) - Experimental!" % self.kernel_name
def get_focus_widget(self):
"""
Return the widget to give focus to when
this plugin's dockwidget is raised on top-level
"""
return self.ipython_widget._control
def get_plugin_actions(self):
"""Return a list of actions related to plugin"""
return []
def register_plugin(self):
<|fim▁hole|> self.main.add_dockwidget(self)
def refresh_plugin(self):
"""Refresh widget"""
pass
def closing_plugin(self, cancelable=False):
"""Perform actions before parent main window is closed"""
return True<|fim▁end|> | """Register plugin in Spyder's main window"""
|
<|file_name|>IOMDataService.py<|end_file_name|><|fim▁begin|>import shelve
"""
Currently unused. All MySQL queries are now done via IomDataModels.
May be resurrected to help with shelve and pickles
"""
from USCProjectDAOs import IOMProjectDAO
class IOMService(IOMProjectDAO):
"""
This handles interactions with the IOM data database and storage files.
All user applications should work off of this
"""
def __init__(self):
"""
Will hold the identifiers for records
"""
self.names = []
"""
Will hold the positive sentiment scores
"""
self.posSent = []
"""
Will hold the negative sentiment scores
"""
self.negSent = []
"""
Will hold the net sentiment scores
"""
self.netSent = []
"""
Will hold the sums of the absolute values of the sentiment scores
"""
self.absumSent = []
def connect_to_mysql(self, test):
"""
Test should be boolean
"""
IOMProjectDAO.__init__(self, test, 'true')
def get_sentiment_data_from_file(self, datafile):
"""
This is the generic file data loader.
datafile should be a path to a file
"""
# Open data file and push into lists
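# Each shelve record is expected to be a dict with keys
# 'quote_id', 'avgPos', 'avgNeg', and 'netSent'.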
db = shelve.open(datafile)
self.keys = list(db.keys())
for k in self.keys:
s = db[k]
self.names.append(s['quote_id'])
self.posSent.append(s['avgPos'])
self.negSent.append(s['avgNeg'])
self.netSent.append(s['netSent'])
self.absumSent.append(abs(s['avgPos']) + abs(s['avgNeg']))
db.close()
def save_sentiment_data_to_file(self, datafile, label):
"""
This is a generic file data saver.
datafile should be a path to file
@param datafile: The path to the datafile
@type datafile: C{string}
"""
# try:
db = shelve.open(datafile)
db[label] = self.to_save
db.close()
print(self.to_save)
return self.to_save
# Check whether the problem was there not being a dictionary available to save
#except:
# try:
# self.to_save
# print ('Problem saving')
# except:
# print ('No variable self.to_save set')
<|fim▁hole|># """
# This executes a parameterized query of the mysql database, stores the results in a list of dictionaries called self.dbdata.
#
# @return Also returns dbdata
#
# @param query A mysql query with %s in place of all substitution variables
# @type query string
# @param val A list containing all substitution parameters or empty if no substitutions are needed
# @type val list
#
# TODO Should have something to check whether a connection exists
# """
# self.connect_to_mysql('false')
# self.query = query
# self.val = val
# self.returnAll()
# self.dbdata = list(self.results)
#
#
# class QueryShell(IOMService):
# """
# This is just a shell to easily run queries on the database and get the results as a list of dictionaries
#
# @return Returns list of dictionaries
# """
#
# def __init__(self):
# IOMService.__init__(self)
#
# def query(self, query, val):
# self.get_data_from_database(query, val)
# return self.dbdata
#
#
# class DHShell(IOMService):
# """
# This is a shell for use in public events to avoid cluttering up the page with each step of the query
# It resets all its values after returning an array of dictionaries and thus need not be reinvoked.
# Note that these queries are not parameterized
#
# @return Returns list of dictionaries
# """
#
# def __init__(self, query_string):
# """
# @param query_string The query string
# @type string
# """
# IOMService.__init__(self)
# self.q(query_string)
#
# def q(self, query_string):
# # Get rid of previous queries
# # self.results = []
# # self.dbdata = None
# #These queries are not parameterized
# val = []
# self.get_data_from_database(query_string, val)
# return self.dbdata
class ShelveDataHandler(IOMService):
def __init__(self):
self.datafolder = 'storedData/'
def openData(self, file_name):
"""
Opens shelve file and returns the list
"""
db = shelve.open(self.datafolder + file_name)
list_to_populate = list(db.values())
db.close()
return list_to_populate[0]
def bagSaver(self, list_to_save, file_name):
"""
Saves a list of raw data into a shelve file.
@param list_to_save A list of items to be saved into shelf file
@type list_to_save list
@param file_name The name of the file into which the items should be saved
@type string
"""
try:
label = file_name
to_save = list_to_save
db = shelve.open(self.datafolder + file_name)
db[label] = to_save
db.close()
except Exception:
print('Error saving to shelve file %s' % file_name)
else:
print('Successfully saved to shelve file %s ' % file_name)<|fim▁end|> | # def get_data_from_database(self, query, val): |
<|file_name|>console.js<|end_file_name|><|fim▁begin|>'use strict';
const util = require('util');
const colors = require('colors/safe');
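// Wrap console.info/warn/error so a leading string argument is colorized
// before delegating to the original method.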
Object.entries({
info: colors.blue,
warn: colors.yellow,
error: colors.red
}).map(([method, color]) => {
const _ = global.console[method];
global.console[method] = (...args) => {
if (args.length) {
let msg = args.shift();
if ('string' == typeof msg) {
msg = color(msg);
}<|fim▁hole|> args.unshift(msg);
}
_(...args);
};
});<|fim▁end|> | |
<|file_name|>e1138_unsupported_delete_operation.py<|end_file_name|><|fim▁begin|>from typing import List
class NamedList:
"""A contaner class for storing a list of named integers."""
def __init__(self, names: List[str], values: List[int]) -> None:
self._names = names
self._values = values
def __getitem__(self, name: str) -> int:<|fim▁hole|>
def __contains__(self, name: str) -> bool:
return name in self._names
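# NamedList defines __getitem__ and __contains__ but no __delitem__, so the
# del statement below fails (pylint E1138: unsupported-delete-operation).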
named_list = NamedList(['a', 'b', 'c'], [1, 2, 3])
print('c' in named_list) # Prints True
del named_list['c'] # Error on this line
print('c' in named_list)<|fim▁end|> | idx = self._names.index(name)
return self._values[idx] |
<|file_name|>fetchbot.py<|end_file_name|><|fim▁begin|>from praw import Reddit
import json
import os
import time
import datetime
from .tools import storage
from .tools import display
class FetchBot:
"""Bot to fetch the subreddit data."""
def __init__(self, user_agent, subreddit, data_file):
"""Basic constructor"""
self._user_agent = user_agent
self._subreddit = subreddit
self._data_file = data_file
try:
with open(self._data_file) as df:
self._data = json.load(df)
except (FileNotFoundError, json.decoder.JSONDecodeError):
self._data = json.loads('{"comments":{},"posts":{}}')
try:
if self._data['subreddit'] != self._subreddit:
raise ValueError('The data file does not correspond to the subreddit r/'+self._subreddit+' (found "'+self._data['subreddit']+'")')
except KeyError:
self._data['subreddit'] = self._subreddit
self._praw = Reddit(self._user_agent)
def __del__(self):
"""Destructor"""
if not os.path.exists(os.path.dirname(self._data_file)):
os.makedirs(os.path.dirname(self._data_file))
with open(self._data_file, 'w') as df:
json.dump(self._data, df)
def fetch(self):
"""Fetching function"""
self._fetch(self._praw.get_comments(self._subreddit, limit=500), 'comments')
self._fetch(self._praw.get_subreddit(self._subreddit).get_new(limit=500), 'posts')
def _fetch(self, submissions, key):
"""Generic fetching function"""
is_first = True
storage.dict_check_key(self._data[key], 'first', float(round(time.time())))
try:
new_creation_limit = self._data[key]['last']
except KeyError:
self._data[key]['last'] = new_creation_limit = 0
for it in submissions:
if is_first:
is_first = False
new_creation_limit = it.created
if it.created <= self._data[key]['last']:
break
storage.dictvar(self._data[key], 'count', 1, 1)
try:
if str(it.author) not in self._data['unique-users']:
self._data['unique-users'][str(it.author)] = {'flair': it.author_flair_text, key: 1}
else:
try:
self._data['unique-users'][str(it.author)][key] += 1
except KeyError:
self._data['unique-users'][str(it.author)][key] = 1
except KeyError:
self._data['unique-users'] = dict()
self._data['unique-users'][str(it.author)] = {'flair': it.author_flair_text, key: 1}
if it.author_flair_text:
storage.dict_check_key(self._data[key], 'flair-presence', dict())
storage.dictvar(self._data[key]['flair-presence'], str(it.author_flair_text), 1, 1)
if key == 'posts':
storage.dict_check_key(self._data[key], 'subject-presence', dict())
storage.dictvar(self._data[key]['subject-presence'], str(it.link_flair_text), 1, 1)
storage.dict_check_key(self._data['unique-users'][str(it.author)], 'subject-presence', dict())
storage.dictvar(self._data['unique-users'][str(it.author)]['subject-presence'], str(it.link_flair_text), 1, 1)
storage.dict_check_key(self._data[key], 'domain-presence', dict())
storage.dictvar(self._data[key]['domain-presence'], str(it.domain), 1, 1)
<|fim▁hole|> storage.dict_check_key(self._data[key]['time'], '1', dict())
storage.dict_check_key(self._data[key]['time'], '2', dict())
storage.dict_check_key(self._data[key]['time'], '3', dict())
storage.dict_check_key(self._data[key]['time'], '4', dict())
storage.dict_check_key(self._data[key]['time'], '5', dict())
storage.dict_check_key(self._data[key]['time'], '6', dict())
time_datetime = datetime.datetime.fromtimestamp(float(it.created_utc))
time_str = str(time_datetime.hour).zfill(2)+str(time_datetime.minute).zfill(2)
storage.dictvar(self._data[key]['time']['all'], time_str, 1, 1)
storage.dictvar(self._data[key]['time'][str(time_datetime.weekday())], time_str, 1, 1)
self._data[key]['last'] = new_creation_limit
def FetchBotGenerator(config_file):
"""Generate a list-like container of FetchBot objects"""
with open(config_file) as cf:
json_config = json.load(cf)
user_agent = json_config['user-agent']
for i in json_config['bots']:
yield FetchBot(user_agent, i['subreddit'], i['data-file'])
def autorun():
"""Autorun function of this module"""
home = os.getenv('HOME')
config_file = os.path.join(home, '.config/flairstats/config.json')
if not os.path.exists(config_file):
raise FileNotFoundError(config_file)
fetchbots = FetchBotGenerator(config_file)
for bot in fetchbots:
bot.fetch()
if __name__ == "__main__":
autorun()<|fim▁end|> | storage.dict_check_key(self._data[key], 'time', dict())
storage.dict_check_key(self._data[key]['time'], 'all', dict())
storage.dict_check_key(self._data[key]['time'], '0', dict()) |
<|file_name|>cookies.js<|end_file_name|><|fim▁begin|>// eslint-disable-next-line
function getCookie(cname) {
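// document.cookie is a single "k1=v1; k2=v2" string: split on ';', trim
// leading spaces, and return the value whose name matches cname.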
const name = `${cname}=`;
const ca = document.cookie.split(';');
for (let i = 0; i < ca.length; i += 1) {
let c = ca[i];
while (c.charAt(0) === ' ') c = c.substring(1);
if (c.indexOf(name) !== -1) return c.substring(name.length, c.length);<|fim▁hole|><|fim▁end|> | }
return '';
} |
<|file_name|>table-list.controller.ts<|end_file_name|><|fim▁begin|>interface ITableListBindings {
addButton: any;
config: any;
data: any;
extra: any;
filters: any;
onAction: Function;
onSort: Function;
openFiltersPanel: Function;
rowClickable: boolean;
sort: any;
title: string;
}
interface ITableListController extends ITableListBindings {
isFunction(field: any): boolean;
isString(field: any): boolean;
parseField(model: any, field: string): ng.ICompiledExpression;
}
export class TableListController implements ITableListController {
/**
* Bindings
*/
addButton: any;
config: any;
data: any;
extra: any;
filters: any;
onAction: Function;
onSort: Function;
openFiltersPanel: Function;
rowClickable: boolean;
sort: any;
title: string;
/**
* Properties
*/
constructor(
private $log: ng.ILogService,
private $parse: ng.IParseService
) { }
$onChanges(changes) {
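// Re-copy each changed binding so the controller works on its own copy
// instead of mutating the parent component's object.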
if (changes.config) {
this.config = angular.copy(this.config);
}
<|fim▁hole|> }
if (changes.extra) {
this.extra = angular.copy(this.extra);
}
if (changes.filters) {
this.filters = angular.copy(this.filters);
}
if (changes.rowClickable) {
this.rowClickable = angular.copy(this.rowClickable);
}
if (changes.sort) {
this.sort = angular.copy(this.sort);
}
}
handleTdClick(td, $event) {
if (td.disableRowClick) {
$event.stopPropagation();
}
}
isFunction(field) {
return angular.isFunction(field);
}
isString(field) {
return angular.isString(field);
}
parseField(model, field): ng.ICompiledExpression {
const getter = this.$parse(field);
return getter(model);
}
}
TableListController.$inject = [
'$log',
'$parse',
];<|fim▁end|> | if (changes.data) {
this.data = angular.copy(this.data); |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>'use strict';
import Component from './component';
import VolumeAttachment from './volume-attachment';
import Port from './port';
import {isString} from './util';
const Server = function (properties) {
if (!(this instanceof Server)) {
return new Server(properties);
}
Component.call(this, {
ports: [],
...properties
});
};<|fim▁hole|>Server.prototype.getDependencies = function () {
return [
...this.dependencies,
...this.properties.ports
]
};
Server.prototype.attachVolume = function (volume, mountPoint) {
const attachment = new VolumeAttachment({
id: `${isString(volume) ? volume : volume.properties.id}-attachment`,
server: this,
volume,
mountPoint
});
this.dependencies.push(attachment);
return this;
};
Server.prototype.attachPort = function (port) {
this.properties.ports.push(port);
return this;
};
Server.prototype.getSchema = function () {
return {
zone: {
type: String
},
name: {
type: String
},
image: {
type: String,
required: true
},
flavor: {
type: String,
required: true
},
keyPair: {
type: String
},
ports: {
type: Array,
items: [String, Port]
}
};
};
Server.prototype.getResources = function () {
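// Build the OS::Nova::Server resource for a Heat template: each attached
// port becomes a networks entry, and optional properties (name, zone,
// key pair, networks) are included only when they were supplied.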
const {
id, zone, name, flavor,
keyPair, image, ports
} = this.properties;
const networks = ports.map(port => ({
port: Component.resolve(port)
}));
const properties = {
flavor,
image
};
Object.assign(
properties,
name ? {name} : {},
zone ? {zone} : {},
keyPair ? {key_name: keyPair} : {},
networks.length ? {networks} : {}
);
return {
[id]: {
type: 'OS::Nova::Server',
properties
}
};
};
export default Server;<|fim▁end|> |
Server.prototype = Object.create(Component.prototype);
Server.prototype.constructor = Server;
|
<|file_name|>helptopics.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .cql3handling import simple_cql_types
class CQLHelpTopics(object):
def get_help_topics(self):
return [ t[5:] for t in dir(self) if t.startswith('help_') ]
def print_help_topic(self, topic):
getattr(self, 'help_' + topic.lower())()
def help_types(self):
print "\n CQL types recognized by this version of cqlsh:\n"
for t in simple_cql_types:
print ' ' + t
print """
For information on the various recognizable input formats for these
types, or on controlling the formatting of cqlsh query output, see
one of the following topics:
HELP TIMESTAMP_INPUT
HELP BLOB_INPUT
HELP UUID_INPUT
HELP BOOLEAN_INPUT
HELP TEXT_OUTPUT
HELP TIMESTAMP_OUTPUT
"""
def help_timestamp_input(self):
print """
Timestamp input
CQL supports any of the following ISO 8601 formats for timestamp
specification:
yyyy-mm-dd HH:mm
yyyy-mm-dd HH:mm:ss
yyyy-mm-dd HH:mmZ
yyyy-mm-dd HH:mm:ssZ
yyyy-mm-dd'T'HH:mm
yyyy-mm-dd'T'HH:mmZ
yyyy-mm-dd'T'HH:mm:ss
yyyy-mm-dd'T'HH:mm:ssZ
yyyy-mm-dd
yyyy-mm-ddZ
The Z in these formats refers to an RFC-822 4-digit time zone,
expressing the time zone's difference from UTC. For example, a
timestamp in Pacific Standard Time might be given thus:
2012-01-20 16:14:12-0800
If no time zone is supplied, the current time zone for the Cassandra
server node will be used.
"""
def help_blob_input(self):
print """
Blob input
CQL blob data must be specified in a string literal as hexadecimal
data. Example: to store the ASCII values for the characters in the
string "CQL", use '43514c'.
"""
def help_uuid_input(self):
print """
UUID input
UUIDs may be specified in CQL using 32 hexadecimal characters,
split up using dashes in the standard UUID format:
XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
"""
def help_boolean_input(self):
print """
Boolean input
CQL accepts the strings 'true' and 'false' (case insensitive)
as input for boolean types.
"""
def help_timestamp_output(self):
print """
Timestamp output
Cqlsh will display timestamps in the following format by default:
yyyy-mm-dd HH:mm:ssZ
which is a format acceptable as CQL timestamp input as well.
The output format can be changed by setting 'time_format' property
in the [ui] section of .cqlshrc file.
"""
def help_text_output(self):
print """
Textual output
When control characters, or other characters which can't be encoded
in your current locale, are found in values of 'text' or 'ascii'
types, it will be shown as a backslash escape. If color is enabled,
any such backslash escapes will be shown in a different color from
the surrounding text.
Unicode code points in your data will be output intact, if the
encoding for your locale is capable of decoding them. If you prefer
that non-ascii characters be shown with Python-style "\\uABCD"
escape sequences, invoke cqlsh with an ASCII locale (for example,
by setting the $LANG environment variable to "C").
"""
help_ascii_output = help_text_output
def help_create_index(self):
print """
CREATE INDEX [<indexname>] ON <cfname> ( <colname> );
A CREATE INDEX statement is used to create a new, automatic secondary
index on the given CQL table, for the named column. A name for the
index itself can be specified before the ON keyword, if desired. A
single column name must be specified inside the parentheses. It is not
necessary for the column to exist on any current rows (Cassandra is
schema-optional), but the column must already have a type (specified
during the CREATE TABLE, or added afterwards with ALTER TABLE).
"""
def help_drop(self):
print """
There are different variants of DROP. For more information, see
one of the following:
HELP DROP_KEYSPACE;
HELP DROP_TABLE;
HELP DROP_INDEX;
"""
def help_drop_keyspace(self):
print """
DROP KEYSPACE <keyspacename>;
A DROP KEYSPACE statement results in the immediate, irreversible
removal of a keyspace, including all column families in it, and all
data contained in those column families.
"""
def help_drop_table(self):
print """
DROP TABLE <tablename>;
A DROP TABLE statement results in the immediate, irreversible
removal of a CQL table and the underlying column family, including all
data contained in it.
"""
help_drop_columnfamily = help_drop_table
def help_drop_index(self):
print """<|fim▁hole|> A DROP INDEX statement is used to drop an existing secondary index.
"""
def help_truncate(self):
print """
TRUNCATE <tablename>;
TRUNCATE accepts a single argument for the table name, and permanently
removes all data from it.
"""
def help_create(self):
print """
There are different variants of CREATE. For more information, see
one of the following:
HELP CREATE_KEYSPACE;
HELP CREATE_TABLE;
HELP CREATE_INDEX;
"""
def help_use(self):
print """
USE <keyspacename>;
Tells cqlsh and the connected Cassandra instance that you will be
working in the given keyspace. All subsequent operations on tables
or indexes will be in the context of this keyspace, unless otherwise
specified, until another USE command is issued or the connection
terminates.
As always, when a keyspace name does not work as a normal identifier or
number, it can be quoted using single quotes (CQL 2) or double quotes
(CQL 3).
"""
def help_create_table(self):
print """
CREATE TABLE <cfname> ( <colname> <type> PRIMARY KEY [,
<colname> <type> [, ...]] )
[WITH <optionname> = <val> [AND <optionname> = <val> [...]]];
CREATE TABLE statements create a new CQL table under the current
keyspace. Valid table names are strings of alphanumeric characters and
underscores, which begin with a letter.
Each table requires a primary key, which will correspond to the
underlying columnfamily key and key validator. It's important to
note that the key type you use must be compatible with the partitioner
in use. For example, OrderPreservingPartitioner and
CollatingOrderPreservingPartitioner both require UTF-8 keys.
In cql3 mode, a table can have multiple columns composing the primary
key (see HELP COMPOUND_PRIMARY_KEYS).
For more information, see one of the following:
HELP CREATE_TABLE_TYPES;
HELP CREATE_TABLE_OPTIONS;
"""
help_create_columnfamily = help_create_table
def help_compound_primary_keys(self):
print """
CREATE TABLE <cfname> ( <partition_key> <type>, <clustering_key1> type, <clustering_key2> type,
[, ...]], PRIMARY KEY (<partition_key>, <clustering_key1>, <clustering_key2>);
CREATE TABLE allows a primary key composed of multiple columns. When this is the case, specify
the columns that take part in the compound key after all columns have been specified.
, PRIMARY KEY( <key1>, <key2>, ... )
The partitioning key itself can be a compound key, in which case the first element of the PRIMARY KEY
phrase should be parenthesized, as
PRIMARY KEY ((<partition_key_part1>, <partition_key_part2>), <clustering_key>)
"""
def help_create_table_types(self):
print """
CREATE TABLE: Specifying column types
CREATE ... (KEY <type> PRIMARY KEY,
othercol <type>) ...
It is possible to assign columns a type during table creation. Columns
configured with a type are validated accordingly when a write occurs,
and intelligent CQL drivers and interfaces will be able to decode the
column values correctly when receiving them. Column types are specified
as a parenthesized, comma-separated list of column term and type pairs.
See HELP TYPES; for the list of recognized types.
"""
help_create_columnfamily_types = help_create_table_types
def help_create_table_options(self):
print """
CREATE TABLE: Specifying columnfamily options
CREATE TABLE blah (...)
WITH optionname = val AND otheroption = val2;
A number of optional keyword arguments can be supplied to control the
configuration of a new CQL table, such as the size of the associated
row and key caches for the underlying Cassandra columnfamily. Consult
your CQL reference for the complete list of options and possible
values.
"""
help_create_columnfamily_options = help_create_table_options
def help_alter_alter(self):
print """
ALTER TABLE: altering existing typed columns
ALTER TABLE addamsFamily ALTER lastKnownLocation TYPE uuid;
ALTER TABLE ... ALTER changes the expected storage type for a column.
The column must already have a type in the column family metadata. The
column may or may not already exist in current rows-- but be aware that
no validation of existing data is done. The bytes stored in values for
that column will remain unchanged, and if existing data is not
deserializable according to the new type, this may cause your CQL
driver or interface to report errors.
"""
def help_alter_add(self):
print """
ALTER TABLE: adding a typed column
ALTER TABLE addamsFamily ADD gravesite varchar;
The ALTER TABLE ... ADD variant adds a typed column to a column
family. The column must not already have a type in the column family
metadata. See the warnings on HELP ALTER_ALTER regarding the lack of
validation of existing data; they apply here as well.
"""
def help_alter_drop(self):
print """
ALTER TABLE: dropping a typed column
ALTER TABLE addamsFamily DROP gender;
An ALTER TABLE ... DROP statement removes the type of a column
from the column family metadata. Note that this does _not_ remove the
column from current rows; it just removes the metadata saying that the
bytes stored under that column are expected to be deserializable
according to a certain type.
"""
def help_alter_with(self):
print """
ALTER TABLE: changing column family properties
ALTER TABLE addamsFamily WITH comment = 'Glad to be here!'
AND read_repair_chance = 0.2;
An ALTER TABLE ... WITH statement makes adjustments to the
table properties, as defined when the table was created (see
HELP CREATE_TABLE_OPTIONS and your Cassandra documentation for
information about the supported parameter names and values).
"""
def help_delete_columns(self):
print """
DELETE: specifying columns
DELETE col1, col2, col3 FROM ...
Following the DELETE keyword is an optional comma-delimited list of
column name terms. When no column names are given, the remove applies
to the entire row(s) matched by the WHERE clause.
When column names do not parse as valid CQL identifiers, they can be
quoted in single quotes (CQL 2) or double quotes (CQL 3).
"""
def help_delete_where(self):
print """
DELETE: specifying rows
DELETE ... WHERE keycol = 'some_key_value';
DELETE ... WHERE keycol1 = 'val1' AND keycol2 = 'val2';
DELETE ... WHERE keycol IN (key1, key2);
The WHERE clause is used to determine to which row(s) a DELETE
applies. The first form allows the specification of a precise row
by specifying a particular primary key value (if the primary key has
multiple columns, values for each must be given). The second form
allows a list of key values to be specified using the IN operator
and a parenthesized list of comma-delimited key values.
"""
def help_update_set(self):
print """
UPDATE: Specifying Columns and Row
UPDATE ... SET name1 = value1, name2 = value2
WHERE <key> = keyname;
UPDATE ... SET name1 = value1, name2 = value2
WHERE <key> IN ('<key1>', '<key2>', ...)
Rows are created or updated by supplying column names and values in
term assignment format. Multiple columns can be set by separating the
name/value pairs using commas.
"""
def help_update_counters(self):
print """
UPDATE: Updating Counter Columns
UPDATE ... SET name1 = name1 + <value> ...
UPDATE ... SET name1 = name1 - <value> ...
Counter columns can be incremented or decremented by an arbitrary
numeric value though the assignment of an expression that adds or
subtracts the value.
"""
def help_update_where(self):
print """
UPDATE: Selecting rows to update
UPDATE ... WHERE <keyname> = <keyval>;
UPDATE ... WHERE <keyname> IN (<keyval1>, <keyval2>, ...);
UPDATE ... WHERE <keycol1> = <keyval1> AND <keycol2> = <keyval2>;
Each update statement requires a precise set of keys to be specified
using a WHERE clause.
If the table's primary key consists of multiple columns, an explicit
value must be given for each for the UPDATE statement to make sense.
"""
def help_select_table(self):
print """
SELECT: Specifying Table
SELECT ... FROM [<keyspace>.]<tablename> ...
The FROM clause is used to specify the CQL table applicable to a SELECT
query. The keyspace in which the table exists can optionally be
specified along with the table name, separated by a dot (.). This will
not change the current keyspace of the session (see HELP USE).
"""
help_select_columnfamily = help_select_table
def help_select_where(self):
print """
SELECT: Filtering rows
SELECT ... WHERE <key> = keyname AND name1 = value1
SELECT ... WHERE <key> >= startkey and <key> <= endkey AND name1 = value1
SELECT ... WHERE <key> IN ('<key>', '<key>', '<key>', ...)
The WHERE clause provides for filtering the rows that appear in
results. The clause can filter on a key name, or range of keys, and in
the case of indexed columns, on column values. Key filters are
specified using the KEY keyword or key alias name, a relational
operator (one of =, >, >=, <, and <=), and a term value. When terms
appear on both sides of a relational operator it is assumed the filter
applies to an indexed column. With column index filters, the term on
the left of the operator is the name, the term on the right is the
value to filter _on_.
Note: The greater-than and less-than operators (> and <) result in key
ranges that are inclusive of the terms. There is no supported notion of
"strictly" greater-than or less-than; these operators are merely
supported as aliases to >= and <=.
"""
def help_select_limit(self):
print """
SELECT: Limiting results
SELECT ... WHERE <clause> [LIMIT n] ...
Limiting the number of rows returned can be achieved by adding the
LIMIT option to a SELECT expression. LIMIT defaults to 10,000 when left
unset.
"""
class CQL3HelpTopics(CQLHelpTopics):
def help_create_keyspace(self):
print """
CREATE KEYSPACE <ksname>
WITH replication = {'class':'<strategy>' [,'<option>':<val>]};
The CREATE KEYSPACE statement creates a new top-level namespace (aka
"keyspace"). Valid names are any string constructed of alphanumeric
characters and underscores. Names which do not work as valid
identifiers or integers should be quoted as string literals. Properties
such as replication strategy and count are specified during creation
as key-value pairs in the 'replication' map:
class [required]: The name of the replication strategy class
which should be used for the new keyspace. Some often-used classes
are SimpleStrategy and NetworkTopologyStrategy.
other options [optional]: Most strategies require additional arguments
which can be supplied as key-value pairs in the 'replication' map.
Examples:
To create a keyspace with NetworkTopologyStrategy and strategy option of "DC1"
with a value of "1" and "DC2" with a value of "2" you would use
the following statement:
CREATE KEYSPACE <ksname>
WITH replication = {'class':'NetworkTopologyStrategy', 'DC1':1, 'DC2':2};
To create a keyspace with SimpleStrategy and "replication_factor" option
with a value of "3" you would use this statement:
CREATE KEYSPACE <ksname>
WITH replication = {'class':'SimpleStrategy', 'replication_factor':3};
"""
def help_begin(self):
print """
BEGIN [UNLOGGED|COUNTER] BATCH [USING TIMESTAMP <timestamp>]
<insert or update or delete statement> ;
[ <another insert or update or delete statement ;
[...]]
APPLY BATCH;
BATCH supports setting a client-supplied optional global timestamp
which will be used for each of the operations included in the batch.
Only data modification statements (specifically, UPDATE, INSERT,
and DELETE) are allowed in a BATCH statement. BATCH is _not_ an
analogue for SQL transactions.
_NOTE: Counter mutations are allowed only within COUNTER batches._
_NOTE: While there are no isolation guarantees, UPDATE queries are
atomic within a given record._
"""
help_apply = help_begin
def help_select(self):
print """
SELECT <selectExpr>
FROM [<keyspace>.]<table>
[WHERE <clause>]
[ORDER BY <colname> [DESC]]
[LIMIT m];
SELECT is used to read one or more records from a CQL table. It returns
a set of rows matching the selection criteria specified.
For more information, see one of the following:
HELP SELECT_EXPR
HELP SELECT_TABLE
HELP SELECT_WHERE
HELP SELECT_LIMIT
"""
def help_delete(self):
print """
DELETE [<col1> [, <col2>, ...] FROM [<keyspace>.]<tablename>
[USING TIMESTAMP <timestamp>]
WHERE <keyname> = <keyvalue>;
A DELETE is used to perform the removal of one or more columns from one
or more rows. Each DELETE statement requires a precise set of row keys
to be specified using a WHERE clause and the KEY keyword or key alias.
For more information, see one of the following:
HELP DELETE_USING
HELP DELETE_COLUMNS
HELP DELETE_WHERE
"""
def help_delete_using(self):
print """
DELETE: the USING clause
DELETE ... USING TIMESTAMP <timestamp>;
<timestamp> defines the optional timestamp for the new tombstone
record. It must be an integer. Cassandra timestamps are generally
specified using milliseconds since the Unix epoch (1970-01-01 00:00:00
UTC).
"""
def help_update(self):
print """
UPDATE [<keyspace>.]<columnFamily>
[USING [TIMESTAMP <timestamp>]
[AND TTL <timeToLive>]]
SET name1 = value1, name2 = value2 WHERE <keycol> = keyval;
An UPDATE is used to write one or more columns to a record in a table.
No results are returned. The record's primary key must be completely
and uniquely specified; that is, if the primary key includes multiple
columns, all must be explicitly given in the WHERE clause.
Statements begin with the UPDATE keyword followed by the name of the
table to be updated.
For more information, see one of the following:
HELP UPDATE_USING
HELP UPDATE_SET
HELP UPDATE_COUNTERS
HELP UPDATE_WHERE
"""
def help_update_using(self):
print """
UPDATE: the USING clause
UPDATE ... USING TIMESTAMP <timestamp>;
UPDATE ... USING TTL <timeToLive>;
The USING clause allows setting of certain query and data parameters.
If multiple parameters need to be set, these may be joined using AND.
Example:
UPDATE ... USING TTL 43200 AND TIMESTAMP 1351620509603
<timestamp> defines the optional timestamp for the new column value(s).
It must be an integer. Cassandra timestamps are generally specified
using milliseconds since the Unix epoch (1970-01-01 00:00:00 UTC).
<timeToLive> defines the optional time to live (TTL) in seconds for the
new column value(s). It must be an integer.
"""
def help_insert(self):
print """
INSERT INTO [<keyspace>.]<tablename>
( <colname1>, <colname2> [, <colname3> [, ...]] )
VALUES ( <colval1>, <colval2> [, <colval3> [, ...]] )
[USING TIMESTAMP <timestamp>]
[AND TTL <timeToLive>];
An INSERT is used to write one or more columns to a record in a
CQL table. No results are returned.
Values for all component columns in the table's primary key must
be given. Also, there must be at least one non-primary-key column
specified (Cassandra rows are not considered to exist with only
a key and no associated columns).
Unlike in SQL, the semantics of INSERT and UPDATE are identical.
In either case a record is created if none existed before, and
updated when it does. For more information, see one of the
following:
HELP UPDATE
HELP UPDATE_USING
"""
def help_select_expr(self):
print """
SELECT: Specifying Columns
SELECT name1, name2, name3 FROM ...
SELECT COUNT(*) FROM ...
The SELECT expression determines which columns will appear in the
results and takes the form of a comma separated list of names.
It is worth noting that unlike the projection in a SQL SELECT, there is
no guarantee that the results will contain all of the columns
specified. This is because Cassandra is schema-less and there are no
guarantees that a given column exists.
When the COUNT aggregate function is specified as a column to fetch, a
single row will be returned, with a single column named "count" whose
value is the number of rows from the pre-aggregation resultset.
Currently, COUNT is the only function supported by CQL.
"""
def help_alter_drop(self):
print """
ALTER TABLE: dropping a typed column
ALTER TABLE addamsFamily DROP gender;
An ALTER TABLE ... DROP statement removes the type of a column
from the column family metadata. Dropped columns will immediately
become unavailable in the queries and will not be included in
compacted sstables in the future. If a column is readded, queries
won't return values written before the column was last dropped.
It is assumed that timestamps represent actual time, so if this
is not your case, you should NOT readd previously dropped columns.
Columns can't be dropped from tables defined with COMPACT STORAGE.
"""
def help_create(self):
super(CQL3HelpTopics, self).help_create()
print " HELP CREATE_USER;\n"
def help_alter(self):
print """
ALTER TABLE <tablename> ALTER <columnname> TYPE <type>;
ALTER TABLE <tablename> ADD <columnname> <type>;
ALTER TABLE <tablename> RENAME <columnname> TO <columnname>
[AND <columnname> TO <columnname>]
ALTER TABLE <tablename> WITH <optionname> = <val> [AND <optionname> = <val> [...]];
An ALTER statement is used to manipulate table metadata. It allows you
to add new typed columns, drop existing columns, change the data
storage type of existing columns, or change table properties.
No results are returned.
See one of the following for more information:
HELP ALTER_ALTER;
HELP ALTER_ADD;
HELP ALTER_DROP;
HELP ALTER_RENAME;
HELP ALTER_WITH;
"""
def help_alter_rename(self):
print """
ALTER TABLE: renaming a column
ALTER TABLE <tablename> RENAME <columnname> TO <columnname>
[AND <columnname> TO <columnname>]
The ALTER TABLE ... RENAME variant renames a typed column in a column
family.
"""
def help_drop(self):
super(CQL3HelpTopics, self).help_drop()
print " HELP DROP_USER;\n"
def help_list(self):
print """
There are different variants of LIST. For more information, see
one of the following:
HELP LIST_USERS;
HELP LIST_PERMISSIONS;
"""
def help_create_user(self):
print """
CREATE USER <username> [WITH PASSWORD 'password'] [NOSUPERUSER | SUPERUSER];
CREATE USER creates a new Cassandra user account.
Only superusers can issue CREATE USER requests.
To create a superuser account use SUPERUSER option (NOSUPERUSER is the default).
WITH PASSWORD clause should only be used with password-based authenticators,
e.g. PasswordAuthenticator, SimpleAuthenticator.
"""
def help_alter_user(self):
print """
ALTER USER <username> [WITH PASSWORD 'password'] [NOSUPERUSER | SUPERUSER];
Use ALTER USER to change a user's superuser status and/or password (only
with password-based authenticators).
Superusers can change a user's password or superuser status (except their own).
Users cannot change their own superuser status. Ordinary users can only change their
password (if the configured authenticator is password-based).
"""
def help_drop_user(self):
print """
DROP USER <username>;
DROP USER removes an existing user. You have to be logged in as a superuser
to issue a DROP USER statement. A user cannot drop themselves.
"""
def help_list_users(self):
print """
LIST USERS;
List existing users and their superuser status.
"""
def help_grant(self):
print """
GRANT (<permission> [PERMISSION] | ALL [PERMISSIONS])
ON ALL KEYSPACES
| KEYSPACE <keyspace>
| [TABLE] [<keyspace>.]<table>
TO <username>
Grant the specified permission (or all permissions) on a resource
to a user.
To be able to grant a permission on some resource you have to
have that permission yourself and also AUTHORIZE permission on it,
or on one of its parent resources.
See HELP PERMISSIONS for more info on the available permissions.
"""
def help_revoke(self):
print """
REVOKE (<permission> [PERMISSION] | ALL [PERMISSIONS])
ON ALL KEYSPACES
| KEYSPACE <keyspace>
| [TABLE] [<keyspace>.]<table>
FROM <username>
Revokes the specified permission (or all permissions) on a resource
from a user.
To be able to revoke a permission on some resource you have to
have that permission yourself and also AUTHORIZE permission on it,
or on one of its parent resources.
See HELP PERMISSIONS for more info on the available permissions.
"""
def help_list_permissions(self):
print """
LIST (<permission> [PERMISSION] | ALL [PERMISSIONS])
[ON ALL KEYSPACES
| KEYSPACE <keyspace>
| [TABLE] [<keyspace>.]<table>]
[OF <username>]
[NORECURSIVE]
Omitting ON <resource> part will list permissions on ALL KEYSPACES,
every keyspace and table.
Omitting OF <username> part will list permissions of all users.
Omitting NORECURSIVE specifier will list permissions of the resource
and all its parents (table, table's keyspace and ALL KEYSPACES).
See HELP PERMISSIONS for more info on the available permissions.
"""
def help_permissions(self):
print """
PERMISSIONS
Cassandra has 6 permissions:
ALTER: required for ALTER KEYSPACE, ALTER TABLE, CREATE INDEX, DROP INDEX
AUTHORIZE: required for GRANT, REVOKE
CREATE: required for CREATE KEYSPACE, CREATE TABLE
DROP: required for DROP KEYSPACE, DROP TABLE
MODIFY: required for INSERT, DELETE, UPDATE, TRUNCATE
SELECT: required for SELECT
"""<|fim▁end|> | DROP INDEX <indexname>;
|
<|file_name|>eval.py<|end_file_name|><|fim▁begin|>"""
Author: RedFantom
Contributors: Daethyra (Naiii) and Sprigellania (Zarainia)
License: GNU GPLv3 as in LICENSE
Copyright (C) 2016-2018 RedFantom<|fim▁hole|>
def config_eval(value):
"""
Safely evaluate a string that can appear in a configuration file to a
valid Python value. Performs error handling and checks special
cases.
"""
try:
literal = literal_eval(value)
except (ValueError, SyntaxError):
return value
if literal == 1:
return True
elif literal == 0:
return False
else:
return literal<|fim▁end|> | """
from ast import literal_eval |
<|file_name|>ci_server_poller.py<|end_file_name|><|fim▁begin|>import logging
import time
import threading
try:
import ConfigParser as config
except ImportError:
import configparser as config
from pydispatch import dispatcher
import requests
import ci_screen.service.ci_server_loader as ci_loader
logger = logging.getLogger(__name__)
class CIServerPoller(object):
def __init__(self):
self._stop = threading.Event()
self._update = threading.Event()
self._poll_rate = self.get_poll_rate()
self.polling_thread = None
self.ci_servers = ci_loader.get_ci_servers()
<|fim▁hole|> self.stop_polling()
def start_polling_async(self):
self._stop.clear()
self._update.clear()
self.polling_thread = threading.Thread(target=self.poll_for_changes)
self.polling_thread.daemon = True
self.polling_thread.start()
def stop_polling(self):
self._stop.set()
self.polling_thread = None
def poll_for_changes(self):
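# Poll each configured CI server's CCTray feed (cc.xml), collect successful
# responses and errors separately, then broadcast both via pydispatch
# before sleeping until the next cycle.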
while not self._stop.isSet():
errors = {}
responses = {}
for ci_server in self.ci_servers:
name = ci_server['name']
url = ci_server['url']
username = ci_server.get('username')
token = ci_server.get('token')
auth = None
if username is not None and token is not None:
auth = requests.auth.HTTPBasicAuth(username, token)
try:
response = requests.get('{}/cc.xml'.format(url), auth=auth)
if response.status_code == 200:
responses[name] = response
else:
raise Exception('ci server {} returned {}: {}'.format(url, response, response.text))
except Exception as ex:
logger.warning(ex)
errors[name] = ex
dispatcher.send(signal="CI_UPDATE", sender=self, responses=responses, errors=errors)
time.sleep(self._poll_rate)
def get_poll_rate(self):
config_parser = config.SafeConfigParser(allow_no_value=False)
with open('ci_screen.cfg') as config_file:
config_parser.readfp(config_file)
return int(config_parser.get('general', 'poll_rate_seconds'))<|fim▁end|> | def __del__(self): |
<|file_name|>abstractRule.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as ts from "typescript";
import {IOptions} from "../../lint";
import {RuleWalker} from "../walker/ruleWalker";
import {IRule, IDisabledInterval, RuleFailure} from "./rule";
export abstract class AbstractRule implements IRule {
private value: any;
private options: IOptions;
constructor(ruleName: string, value: any, disabledIntervals: IDisabledInterval[]) {
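// By convention, options passed as an array use element 0 as the enabled
// flag; the remaining elements are the rule's arguments.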
let ruleArguments: any[] = [];<|fim▁hole|> ruleArguments = value.slice(1);
}
this.value = value;
this.options = {
disabledIntervals: disabledIntervals,
ruleArguments: ruleArguments,
ruleName: ruleName
};
}
public getOptions(): IOptions {
return this.options;
}
public abstract apply(sourceFile: ts.SourceFile): RuleFailure[];
public applyWithWalker(walker: RuleWalker): RuleFailure[] {
walker.walk(walker.getSourceFile());
return walker.getFailures();
}
public isEnabled(): boolean {
const value = this.value;
if (typeof value === "boolean") {
return value;
}
if (Array.isArray(value) && value.length > 0) {
return value[0];
}
return false;
}
}<|fim▁end|> |
if (Array.isArray(value) && value.length > 1) { |
<|file_name|>constants.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.9.0
exports.MSGBYPAGE = 30;
exports.LIMIT_DESTROY = 200;
exports.LIMIT_UPDATE = 30;
exports.CONCURRENT_DESTROY = 1;
<|fim▁hole|>exports.FETCH_AT_ONCE = 1000;<|fim▁end|> | |
<|file_name|>test_tag_woopra.py<|end_file_name|><|fim▁begin|>"""
Tests for the Woopra template tags and filters.
"""
import pytest
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
from django.template import Context
from django.test.utils import override_settings
from utils import TagTestCase
from analytical.templatetags.woopra import WoopraNode
from analytical.utils import AnalyticalException
@override_settings(WOOPRA_DOMAIN='example.com')
class WoopraTagTestCase(TagTestCase):
"""
Tests for the ``woopra`` template tag.
"""
def test_tag(self):<|fim▁hole|> def test_node(self):
r = WoopraNode().render(Context({}))
assert 'var woo_settings = {"domain": "example.com"};' in r
@override_settings(WOOPRA_DOMAIN=None)
def test_no_domain(self):
with pytest.raises(AnalyticalException):
WoopraNode()
@override_settings(WOOPRA_DOMAIN='this is not a domain')
def test_wrong_domain(self):
with pytest.raises(AnalyticalException):
WoopraNode()
@override_settings(WOOPRA_IDLE_TIMEOUT=1234)
def test_idle_timeout(self):
r = WoopraNode().render(Context({}))
assert 'var woo_settings = {"domain": "example.com", "idle_timeout": "1234"};' in r
def test_custom(self):
r = WoopraNode().render(Context({
'woopra_var1': 'val1',
'woopra_var2': 'val2',
}))
assert 'var woo_visitor = {"var1": "val1", "var2": "val2"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_name_and_email(self):
r = WoopraNode().render(Context({
'user': User(username='test',
first_name='Firstname',
last_name='Lastname',
email="[email protected]"),
}))
assert ('var woo_visitor = '
'{"email": "[email protected]", "name": "Firstname Lastname"};') in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_username_no_email(self):
r = WoopraNode().render(Context({'user': User(username='test')}))
assert 'var woo_visitor = {"name": "test"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_no_identify_when_explicit_name(self):
r = WoopraNode().render(Context({
'woopra_name': 'explicit',
'user': User(username='implicit'),
}))
assert 'var woo_visitor = {"name": "explicit"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_no_identify_when_explicit_email(self):
r = WoopraNode().render(Context({
'woopra_email': 'explicit',
'user': User(username='implicit'),
}))
assert 'var woo_visitor = {"email": "explicit"};' in r
@override_settings(ANALYTICAL_AUTO_IDENTIFY=True)
def test_identify_anonymous_user(self):
r = WoopraNode().render(Context({'user': AnonymousUser()}))
assert 'var woo_visitor = {};' in r
@override_settings(ANALYTICAL_INTERNAL_IPS=['1.1.1.1'])
def test_render_internal_ip(self):
req = HttpRequest()
req.META['REMOTE_ADDR'] = '1.1.1.1'
context = Context({'request': req})
r = WoopraNode().render(context)
assert r.startswith('<!-- Woopra disabled on internal IP address')
assert r.endswith('-->')<|fim▁end|> | r = self.render_tag('woopra', 'woopra')
assert 'var woo_settings = {"domain": "example.com"};' in r
|
<|file_name|>s_213.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | search_result['213']=["topic_0000000000000068_events--.html","ChatTokenResponseDto Events",""]; |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from importlib import import_module
def import_object(object_path):<|fim▁hole|> :param object_path: path to the object for import
:return: imported object
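Example: import_object('json.JSONDecoder') returns the JSONDecoder class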
"""
module_path, class_name = object_path.rsplit('.', 1)
module = import_module(module_path)
return getattr(module, class_name)<|fim▁end|> | """
Import class or function by path |
<|file_name|>share_snapshots.py<|end_file_name|><|fim▁begin|># Copyright 2012 NetApp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Interface for shares extension."""
try:
from urllib import urlencode # noqa
except ImportError:
from urllib.parse import urlencode # noqa
from manilaclient import api_versions
from manilaclient import base
from manilaclient.common import constants
from manilaclient.openstack.common.apiclient import base as common_base
class ShareSnapshot(common_base.Resource):
"""Represent a snapshot of a share."""
def __repr__(self):
return "<ShareSnapshot: %s>" % self.id
def update(self, **kwargs):
"""Update this snapshot."""
self.manager.update(self, **kwargs)
def reset_state(self, state):
"""Update the snapshot with the privided state."""
self.manager.reset_state(self, state)
def delete(self):
"""Delete this snapshot."""
self.manager.delete(self)
def force_delete(self):
"""Delete the specified snapshot ignoring its current state."""
self.manager.force_delete(self)
class ShareSnapshotManager(base.ManagerWithFind):
"""Manage :class:`ShareSnapshot` resources."""
resource_class = ShareSnapshot
def create(self, share, force=False, name=None, description=None):
"""Create a snapshot of the given share.
:param share_id: The ID of the share to snapshot.
:param force: If force is True, create a snapshot even if the
share is busy. Default is False.
:param name: Name of the snapshot
:param description: Description of the snapshot
:rtype: :class:`ShareSnapshot`
"""
body = {'snapshot': {'share_id': common_base.getid(share),
'force': force,
'name': name,
'description': description}}
return self._create('/snapshots', body, 'snapshot')
def get(self, snapshot):
"""Get a snapshot.
:param snapshot: The :class:`ShareSnapshot` instance or string with ID
of the snapshot to retrieve.
:rtype: :class:`ShareSnapshot`
"""
snapshot_id = common_base.getid(snapshot)
return self._get('/snapshots/%s' % snapshot_id, 'snapshot')
def list(self, detailed=True, search_opts=None, sort_key=None,
sort_dir=None):
"""Get a list of snapshots of shares.
:param search_opts: Search options to filter out shares.
:param sort_key: Key to be sorted.
:param sort_dir: Sort direction, should be 'desc' or 'asc'.
:rtype: list of :class:`ShareSnapshot`
"""
if search_opts is None:
search_opts = {}
if sort_key is not None:
if sort_key in constants.SNAPSHOT_SORT_KEY_VALUES:
search_opts['sort_key'] = sort_key
else:
raise ValueError(
'sort_key must be one of the following: %s.'
% ', '.join(constants.SNAPSHOT_SORT_KEY_VALUES))
if sort_dir is not None:
if sort_dir in constants.SORT_DIR_VALUES:
search_opts['sort_dir'] = sort_dir
else:
raise ValueError(
'sort_dir must be one of the following: %s.'
% ', '.join(constants.SORT_DIR_VALUES))
if search_opts:
query_string = urlencode(
sorted([(k, v) for (k, v) in list(search_opts.items()) if v]))
if query_string:
query_string = "?%s" % (query_string,)
else:
query_string = ''
if detailed:
path = "/snapshots/detail%s" % (query_string,)
else:
path = "/snapshots%s" % (query_string,)
return self._list(path, 'snapshots')
def delete(self, snapshot):
"""Delete a snapshot of a share.
:param snapshot: The :class:`ShareSnapshot` to delete.
"""
self._delete("/snapshots/%s" % common_base.getid(snapshot))
def _do_force_delete(self, snapshot, action_name="force_delete"):
"""Delete the specified snapshot ignoring its current state."""
return self._action(action_name, common_base.getid(snapshot))
<|fim▁hole|>
@api_versions.wraps("2.7") # noqa
def force_delete(self, snapshot):
return self._do_force_delete(snapshot, "force_delete")
def update(self, snapshot, **kwargs):
"""Update a snapshot.
:param snapshot: The :class:`ShareSnapshot` instance or string with ID
of the snapshot to update.
:rtype: :class:`ShareSnapshot`
"""
if not kwargs:
return
body = {'snapshot': kwargs, }
snapshot_id = common_base.getid(snapshot)
return self._update("/snapshots/%s" % snapshot_id, body)
def _do_reset_state(self, snapshot, state, action_name="reset_status"):
"""Update the specified share snapshot with the provided state."""
return self._action(action_name, snapshot, {"status": state})
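# api_versions.wraps dispatches on the negotiated API microversion:
# 1.0-2.6 use the legacy "os-"-prefixed action names, 2.7+ the plain ones.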
@api_versions.wraps("1.0", "2.6")
def reset_state(self, snapshot, state):
return self._do_reset_state(snapshot, state, "os-reset_status")
@api_versions.wraps("2.7") # noqa
def reset_state(self, snapshot, state):
return self._do_reset_state(snapshot, state, "reset_status")
def _action(self, action, snapshot, info=None, **kwargs):
"""Perform a snapshot 'action'."""
body = {action: info}
self.run_hooks('modify_body_for_action', body, **kwargs)
url = '/snapshots/%s/action' % common_base.getid(snapshot)
return self.api.client.post(url, body=body)<|fim▁end|> | @api_versions.wraps("1.0", "2.6")
def force_delete(self, snapshot):
return self._do_force_delete(snapshot, "os-force_delete") |
<|file_name|>stats_gopher.presence_monitor.js<|end_file_name|><|fim▁begin|>StatsGopher.PresenceMonitor = function PresenceMonitor (opts) {
opts = opts || {};
this.statsGopher = opts.statsGopher;
this.key = opts.key;
this.send = this.executeNextSend;
this.paused = false;
}
StatsGopher.PresenceMonitor.prototype = {
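// this.send is swapped between three states: executeNextSend fires a
// request immediately, queueNextSend chains exactly one follow-up send
// after the in-flight request settles, and ignoreNextSend drops calls
// while that follow-up is already queued.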
ignoreNextSend: function () {
},
queueNextSend: function () {
this.request.done(function () {
this.send()
}.bind(this))
this.send = this.ignoreNextSend
},
executeNextSend: function () {
var executeNextSend = function () {
this.send = this.executeNextSend
}.bind(this);
if (this.paused) return;
this.request = this.statsGopher.send({
code: this.code,
key: this.key
}).done(executeNextSend).fail(executeNextSend);
this.send = this.queueNextSend
},
pause: function () {
this.paused = true
},
resume: function () {
this.paused = false
}
}
StatsGopher.Heartbeat = function (opts) {
StatsGopher.PresenceMonitor.apply(this, arguments)
this.timeout = (typeof opts.timeout) === 'number' ? opts.timeout : 10000;
}
StatsGopher.Heartbeat.prototype = new StatsGopher.PresenceMonitor()
StatsGopher.Heartbeat.prototype.code = 'heartbeat'
StatsGopher.Heartbeat.prototype.start = function () {
this.send()
setTimeout(this.start.bind(this), this.timeout)
}
<|fim▁hole|> StatsGopher.PresenceMonitor.apply(this, arguments)
}
StatsGopher.UserActivity.prototype = new StatsGopher.PresenceMonitor()
StatsGopher.UserActivity.prototype.code = 'user-activity'
StatsGopher.UserActivity.prototype.listen = function () {
var events = [
'resize',
'click',
'mousedown',
'scroll',
'mousemove',
'keydown'
];
events.forEach(function (eventName) {
window.addEventListener(eventName, function () {
this.send();
}.bind(this))
}.bind(this));
}<|fim▁end|> | StatsGopher.UserActivity = function () { |
<|file_name|>test_csv.py<|end_file_name|><|fim▁begin|>from ddt import ddt, data
from django.core.urlresolvers import reverse
from django.test import TestCase
import mock
from analyticsclient.exceptions import NotFoundError
from courses.tests import SwitchMixin
from courses.tests.test_views import ViewTestMixin, DEMO_COURSE_ID, DEPRECATED_DEMO_COURSE_ID
from courses.tests.utils import convert_list_of_dicts_to_csv, get_mock_api_enrollment_geography_data, \
get_mock_api_enrollment_data, get_mock_api_course_activity, get_mock_api_enrollment_age_data, \
get_mock_api_enrollment_education_data, get_mock_api_enrollment_gender_data
@ddt
# pylint: disable=abstract-method
class CourseCSVTestMixin(ViewTestMixin):
client = None
column_headings = None
base_file_name = None
def assertIsValidCSV(self, course_id, csv_data):
response = self.client.get(self.path(course_id=course_id))
# Check content type
self.assertResponseContentType(response, 'text/csv')
# Check filename
csv_prefix = u'edX-DemoX-Demo_2014' if course_id == DEMO_COURSE_ID else u'edX-DemoX-Demo_Course'
filename = '{0}--{1}.csv'.format(csv_prefix, self.base_file_name)
self.assertResponseFilename(response, filename)
# Check data
self.assertEqual(response.content, csv_data)
def assertResponseContentType(self, response, content_type):
self.assertEqual(response['Content-Type'], content_type)
def assertResponseFilename(self, response, filename):
self.assertEqual(response['Content-Disposition'], 'attachment; filename="{0}"'.format(filename))
def _test_csv(self, course_id, csv_data):
with mock.patch(self.api_method, return_value=csv_data):
self.assertIsValidCSV(course_id, csv_data)
@data(DEMO_COURSE_ID, DEPRECATED_DEMO_COURSE_ID)
def test_response_no_data(self, course_id):
# Create an "empty" CSV that only has headers
csv_data = convert_list_of_dicts_to_csv([], self.column_headings)
self._test_csv(course_id, csv_data)
@data(DEMO_COURSE_ID, DEPRECATED_DEMO_COURSE_ID)
def test_response(self, course_id):
csv_data = self.get_mock_data(course_id)
csv_data = convert_list_of_dicts_to_csv(csv_data)
self._test_csv(course_id, csv_data)
def test_404(self):
course_id = 'fakeOrg/soFake/Fake_Course'
self.grant_permission(self.user, course_id)
path = reverse(self.viewname, kwargs={'course_id': course_id})
with mock.patch(self.api_method, side_effect=NotFoundError):
response = self.client.get(path, follow=True)
self.assertEqual(response.status_code, 404)
class CourseEnrollmentByCountryCSVViewTests(CourseCSVTestMixin, TestCase):
viewname = 'courses:csv:enrollment_geography'
column_headings = ['count', 'country', 'course_id', 'date']
base_file_name = 'enrollment-location'
api_method = 'analyticsclient.course.Course.enrollment'
def get_mock_data(self, course_id):
return get_mock_api_enrollment_geography_data(course_id)
class CourseEnrollmentCSVViewTests(CourseCSVTestMixin, TestCase):
viewname = 'courses:csv:enrollment'
column_headings = ['count', 'course_id', 'date']
base_file_name = 'enrollment'
api_method = 'analyticsclient.course.Course.enrollment'
def get_mock_data(self, course_id):
return get_mock_api_enrollment_data(course_id)
class CourseEnrollmentModeCSVViewTests(SwitchMixin, CourseCSVTestMixin, TestCase):
viewname = 'courses:csv:enrollment'
column_headings = ['count', 'course_id', 'date', 'audit', 'honor', 'professional', 'verified']
base_file_name = 'enrollment'
api_method = 'analyticsclient.course.Course.enrollment'
@classmethod
def setUpClass(cls):
cls.toggle_switch('display_verified_enrollment', True)
def get_mock_data(self, course_id):
return get_mock_api_enrollment_data(course_id)
class CourseEnrollmentDemographicsByAgeCSVViewTests(CourseCSVTestMixin, TestCase):
viewname = 'courses:csv:enrollment_demographics_age'
column_headings = ['birth_year', 'count', 'course_id', 'created', 'date']
base_file_name = 'enrollment-by-birth-year'
api_method = 'analyticsclient.course.Course.enrollment'
def get_mock_data(self, course_id):
return get_mock_api_enrollment_age_data(course_id)
class CourseEnrollmentDemographicsByEducationCSVViewTests(CourseCSVTestMixin, TestCase):
viewname = 'courses:csv:enrollment_demographics_education'
column_headings = ['count', 'course_id', 'created', 'date', 'education_level.name', 'education_level.short_name']
base_file_name = 'enrollment-by-education'
api_method = 'analyticsclient.course.Course.enrollment'
def get_mock_data(self, course_id):
return get_mock_api_enrollment_education_data(course_id)<|fim▁hole|>class CourseEnrollmentByDemographicsGenderCSVViewTests(CourseCSVTestMixin, TestCase):
viewname = 'courses:csv:enrollment_demographics_gender'
column_headings = ['count', 'course_id', 'created', 'date', 'gender']
base_file_name = 'enrollment-by-gender'
api_method = 'analyticsclient.course.Course.enrollment'
def get_mock_data(self, course_id):
return get_mock_api_enrollment_gender_data(course_id)
class CourseEngagementActivityTrendCSVViewTests(CourseCSVTestMixin, TestCase):
viewname = 'courses:csv:engagement_activity_trend'
column_headings = ['any', 'attempted_problem', 'course_id', 'interval_end', 'interval_start',
'played_video', 'posted_forum']
base_file_name = 'engagement-activity'
api_method = 'analyticsclient.course.Course.activity'
def get_mock_data(self, course_id):
return get_mock_api_course_activity(course_id)<|fim▁end|> | |
<|file_name|>UserAccountDefinition.java<|end_file_name|><|fim▁begin|>package com.tkmdpa.taf.definitions.pantheon;
import com.tkmdpa.taf.steps.pantheon.UserAccountSteps;
import net.thucydides.core.annotations.Steps;
import org.jbehave.core.annotations.Given;
import org.jbehave.core.annotations.Then;
import org.jbehave.core.annotations.When;
<|fim▁hole|> @Steps
UserAccountSteps userAccountPage;
@When("navigate to Pantheon Edit Profile page from User Account page")
public void navigateToEditProfile(){
userAccountPage.navigateToEditProfilePage();
}
@Given("navigate to Pantheon Add New App page from User Account page")
@When("navigate to Pantheon Add New App page from User Account page")
public void navigateToAddNewApp(){
userAccountPage.navigateToAddNewAppPage();
}
@Given("all the applications were deleted")
public void allAppsWereDeleted(){
userAccountPage.deleteAllApps();
}
@Then("check general page elements for Pantheon User Account page")
public void checkGeneralPageElements(){
userAccountPage.checkIfTitleIsCorrect();
userAccountPage.checkGeneralPageElements();
}
}<|fim▁end|> | public class UserAccountDefinition {
|
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>// Copyright © 2014, 2015, Travis Snoozy
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
#include "cuisyntax.hpp"
#include <boost/spirit/include/qi_parse.hpp>
#include <cstdlib>
#include <iostream>
#include <iomanip>
// Boost modules
// Asio
// Program Options
namespace aha {
namespace pawn {
namespace testtool {
std::ostream& operator<<(std::ostream& os, const aha::pawn::testtool::hex128_t& hex)
{
std::ios save(NULL);
save.copyfmt(os);
os << std::hex << std::setfill('0') << std::setw(8) << hex.byte1
<< std::setw(8) << hex.byte2
<< std::setw(8) << hex.byte3
<< std::setw(8) << hex.byte4;
os.copyfmt(save);
return os;
}
std::ostream& operator<<(std::ostream& os, const aha::pawn::testtool::node_data_t& data)
{
std::ios save(NULL);
save.copyfmt(os);
os << std::setfill(' ') << std::left
<< std::setw(22) << "Key" << data.key << "\n"
<< std::setw(22) << "TX IV" << data.txiv << "\n"<|fim▁hole|> save2.copyfmt(os);
os << std::setw(22) << "Timeslot"
<< std::hex << std::setfill('0') << std::setw(4) << data.timeslot << "\n";
os.copyfmt(save2);
os << std::setw(22) << "Timeslot enable" << data.enable_timeslot << "\n"
<< std::setw(22) << "TX encryption enable" << data.enable_tx_encryption << "\n"
<< std::setw(22) << "RX decryption enable" << data.enable_rx_decryption << "\n";
os.copyfmt(save);
return os;
}
enum class option_visitor_result {
SUCCESS,
DOES_NOT_EXIST,
ALREADY_EXISTS,
INVALID_COMMAND,
UNEXPECTED_DATA
};
class option_visitor
: public boost::static_visitor<option_visitor_result>
{
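    // Dispatched via boost::apply_visitor on the parsed command variant;
    // each operator() overload handles one concrete command type.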
private:
boost::spirit::qi::symbols<char, node_data_t> nodes;
public:
option_visitor_result operator()(const option_display_command_t& cmd)
{
nodes.for_each([](std::string& key, node_data_t& value){
std::cout << "Node " << key << "\n" << value << std::endl;
});
return option_visitor_result::SUCCESS;
}
option_visitor_result operator()(const option_display_node_command_t& cmd)
{
node_data_t* node = nodes.find(cmd.node);
if(node == NULL)
{
return option_visitor_result::DOES_NOT_EXIST;
}
std::cout << "Node " << cmd.node << "\n" << *node << std::endl;
return option_visitor_result::SUCCESS;
}
option_visitor_result operator()(const option_set_node_command_t& cmd)
{
node_data_t* node = nodes.find(cmd.node);
if(node == NULL)
{
return option_visitor_result::DOES_NOT_EXIST;
}
switch(cmd.param)
{
case option_set_node_param::KEY:
node->key = boost::get<hex128_t>(cmd.data);
break;
case option_set_node_param::RXDECRYPT:
node->enable_rx_decryption = boost::get<bool>(cmd.data);
break;
case option_set_node_param::RXIV:
node->rxiv = boost::get<hex128_t>(cmd.data);
break;
case option_set_node_param::TIMESLOT:
if(cmd.data.type() == typeid(bool)) {
node->enable_timeslot = boost::get<bool>(cmd.data);
}
else if(cmd.data.type() == typeid(hex128_t)) {
node->timeslot = (uint16_t)boost::get<hex128_t>(cmd.data).byte4;
}
else {
return option_visitor_result::UNEXPECTED_DATA;
}
break;
case option_set_node_param::TXENCRYPT:
node->enable_tx_encryption = boost::get<bool>(cmd.data);
break;
case option_set_node_param::TXIV:
node->txiv = boost::get<hex128_t>(cmd.data);
break;
default:
return option_visitor_result::INVALID_COMMAND;
}
return option_visitor_result::SUCCESS;
}
option_visitor_result operator()(const option_create_node_command_t& cmd)
{
if(nodes.find(cmd.node) != NULL)
{
return option_visitor_result::ALREADY_EXISTS;
}
nodes.at(cmd.node);
return option_visitor_result::SUCCESS;
}
option_visitor_result operator()(const transmit_command_t& cmd)
{
node_data_t* node = nodes.find(cmd.node);
if(node == NULL)
{
return option_visitor_result::DOES_NOT_EXIST;
}
// TODO: Write this out to the designated serial port.
return option_visitor_result::SUCCESS;
}
};
}}}
using aha::pawn::testtool::option_visitor_result;
int main(int argc, char** argv)
{
std::string input;
aha::pawn::testtool::command_t* command;
std::string::const_iterator begin, end;
aha::pawn::testtool::grammar::cui_grammar<std::string::const_iterator> grammar;
std::getline(std::cin, input);
aha::pawn::testtool::option_visitor option_visitor;
while(input != "quit")
{
command = new aha::pawn::testtool::command_t();
begin = input.begin();
end = input.end();
bool result = boost::spirit::qi::phrase_parse(
begin,
end,
grammar,
boost::spirit::ascii::space,
(*command));
if(result && begin == end)
{
switch(boost::apply_visitor(option_visitor, *command))
{
case option_visitor_result::SUCCESS:
std::cout << "OK.";
break;
case option_visitor_result::DOES_NOT_EXIST:
std::cout << "Node does not exist.";
break;
case option_visitor_result::ALREADY_EXISTS:
std::cout << "Node already exists.";
break;
case option_visitor_result::INVALID_COMMAND:
case option_visitor_result::UNEXPECTED_DATA:
std::cout << "An unexpected error occurred.";
break;
}
std::cout << std::endl;
}
else
{
std::cout << "Could not parse that." << std::endl;
}
delete command;
std::getline(std::cin, input);
}
return EXIT_SUCCESS;
}
// vim: set expandtab ts=4 sts=4 sw=4 fileencoding=utf-8:<|fim▁end|> | << std::setw(22) << "RX IV" << data.rxiv << "\n";
std::ios save2(NULL); |
<|file_name|>test_services_status.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from urllib import request
from tests.integrated import base
class StatusTestCase(base.IntegrationTest):
def _get_config(self):
port = base.get_free_port()
self.url = "http://localhost:%s" % port
conf = {
"service": {
"name": "status",
"module": "rallyci.services.status",
"listen": ["localhost", port],<|fim▁hole|> }
}
return [[conf], [port]]
def test_index(self):
r = request.urlopen(self.url)
self.assertIsNotNone(r)<|fim▁end|> | |
<|file_name|>GwtDeviceConfig.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2011, 2020 Eurotech and/or its affiliates and others
*
* This program and the accompanying materials are made
* available under the terms of the Eclipse Public License 2.0
* which is available at https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Eurotech
*******************************************************************************/
package org.eclipse.kura.web.shared.model;
import java.io.Serializable;
import java.util.Date;
import org.eclipse.kura.web.shared.DateUtils;
public class GwtDeviceConfig extends GwtBaseModel implements Serializable {
private static final long serialVersionUID = 1708831984640005284L;
public GwtDeviceConfig() {
}
@Override
@SuppressWarnings({ "unchecked" })
public <X> X get(String property) {
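        // "...Formatted" properties are virtual: they are computed from the
        // stored values on each lookup instead of being kept in the model.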
if ("lastEventOnFormatted".equals(property)) {
return (X) DateUtils.formatDateTime((Date) get("lastEventOn"));
} else if ("uptimeFormatted".equals(property)) {
if (getUptime() == -1) {
return (X) "Unknown";
} else {
return (X) String.valueOf(getUptime());
}
} else {
return super.get(property);
}
}
public String getAccountName() {
return get("accountName");
}
public void setAccountName(String accountName) {
set("accountName", accountName);
}
public String getClientId() {
return (String) get("clientId");
}
public void setClientId(String clientId) {
set("clientId", clientId);
}
public Long getUptime() {
return (Long) get("uptime");
}
public String getUptimeFormatted() {
return (String) get("uptimeFormatted");
}
public void setUptime(Long uptime) {
set("uptime", uptime);
}
public String getGwtDeviceStatus() {
return (String) get("gwtDeviceStatus");
}
public void setGwtDeviceStatus(String gwtDeviceStatus) {
set("gwtDeviceStatus", gwtDeviceStatus);
}
public String getDisplayName() {
return (String) get("displayName");
}
public void setDisplayName(String displayName) {
set("displayName", displayName);
}
public String getModelName() {
return (String) get("modelName");
}
public void setModelName(String modelName) {
set("modelName", modelName);
}
public String getModelId() {
return (String) get("modelId");
}
public void setModelId(String modelId) {
set("modelId", modelId);
}
public String getPartNumber() {
return (String) get("partNumber");
}
public void setPartNumber(String partNumber) {
set("partNumber", partNumber);
}
public String getSerialNumber() {
return (String) get("serialNumber");
}
public void setSerialNumber(String serialNumber) {
set("serialNumber", serialNumber);
}
public String getAvailableProcessors() {
return (String) get("availableProcessors");
}
public void setAvailableProcessors(String availableProcessors) {
set("availableProcessors", availableProcessors);
}
public String getTotalMemory() {
return (String) get("totalMemory");
}
public void setTotalMemory(String totalMemory) {
set("totalMemory", totalMemory);
}
public String getFirmwareVersion() {
return (String) get("firmwareVersion");
}
public void setFirmwareVersion(String firmwareVersion) {
set("firmwareVersion", firmwareVersion);
}
public String getBiosVersion() {
return (String) get("biosVersion");
}
public void setBiosVersion(String biosVersion) {
set("biosVersion", biosVersion);
}
public String getOs() {
return (String) get("os");
}
public void setOs(String os) {
set("os", os);
}
public String getOsVersion() {
return (String) get("osVersion");
}
public void setOsVersion(String osVersion) {
set("osVersion", osVersion);
}
public String getOsArch() {
return (String) get("osArch");
}
public void setOsArch(String osArch) {
set("osArch", osArch);
}
public String getJvmName() {
return (String) get("jvmName");
}
public void setJvmName(String jvmName) {
set("jvmName", jvmName);
}
public String getJvmVersion() {
return (String) get("jvmVersion");
}
public void setJvmVersion(String jvmVersion) {
set("jvmVersion", jvmVersion);
}
public String getJvmProfile() {
return (String) get("jvmProfile");
}
public void setJvmProfile(String jvmProfile) {
set("jvmProfile", jvmProfile);
}
public String getOsgiFramework() {
return (String) get("osgiFramework");
}
public void setOsgiFramework(String osgiFramework) {
set("osgiFramework", osgiFramework);
}
public String getOsgiFrameworkVersion() {
return (String) get("osgiFrameworkVersion");
}
public void setOsgiFrameworkVersion(String osgiFrameworkVersion) {
set("osgiFrameworkVersion", osgiFrameworkVersion);
}
public String getConnectionInterface() {
return (String) get("connectionInterface");
}
public void setConnectionInterface(String connectionInterface) {
set("connectionInterface", connectionInterface);
}
public String getConnectionIp() {
return (String) get("connectionIp");
}
public void setConnectionIp(String connectionIp) {
set("connectionIp", connectionIp);
}
public String getAcceptEncoding() {
return (String) get("acceptEncoding");
}
public void setAcceptEncoding(String acceptEncoding) {
set("acceptEncoding", acceptEncoding);
}
public String getApplicationIdentifiers() {
return (String) get("applicationIdentifiers");
}
public void setApplicationIdentifiers(String applicationIdentifiers) {
set("applicationIdentifiers", applicationIdentifiers);
}
public Double getGpsLatitude() {
return (Double) get("gpsLatitude");
}
public void setGpsLatitude(Double gpsLatitude) {
set("gpsLatitude", gpsLatitude);
}
public Double getGpsLongitude() {
return (Double) get("gpsLongitude");
}
public void setGpsLongitude(Double gpsLongitude) {
set("gpsLongitude", gpsLongitude);
}
public Double getGpsAltitude() {
return (Double) get("gpsAltitude");
}
public void setGpsAltitude(Double gpsAltitude) {
set("gpsAltitude", gpsAltitude);
}
public String getGpsAddress() {
return (String) get("gpsAddress");
}
public void setGpsAddress(String gpsAddress) {
set("gpsAddress", gpsAddress);
}
public Date getLastEventOn() {
return (Date) get("lastEventOn");
}
public String getLastEventOnFormatted() {
return (String) get("lastEventOnFormatted");
}
public void setLastEventOn(Date lastEventDate) {
set("lastEventOn", lastEventDate);
}<|fim▁hole|> }
public void setLastEventType(String lastEventType) {
set("lastEventType", lastEventType);
}
public boolean isOnline() {
return getGwtDeviceStatus().compareTo("CONNECTED") == 0;
}
}<|fim▁end|> |
public String getLastEventType() {
return (String) get("lastEventType"); |
<|file_name|>2d.composite.solid.xor.worker.js<|end_file_name|><|fim▁begin|>// DO NOT EDIT! This test has been generated by /html/canvas/tools/gentest.py.
// OffscreenCanvas test in a worker:2d.composite.solid.xor
// Description:
// Note:
importScripts("/resources/testharness.js");
importScripts("/html/canvas/resources/canvas-tests.js");
<|fim▁hole|>var t_pass = t.done.bind(t);
var t_fail = t.step_func(function(reason) {
throw reason;
});
t.step(function() {
var offscreenCanvas = new OffscreenCanvas(100, 50);
var ctx = offscreenCanvas.getContext('2d');
ctx.fillStyle = 'rgba(0, 255, 255, 1.0)';
ctx.fillRect(0, 0, 100, 50);
ctx.globalCompositeOperation = 'xor';
ctx.fillStyle = 'rgba(255, 255, 0, 1.0)';
ctx.fillRect(0, 0, 100, 50);
_assertPixelApprox(offscreenCanvas, 50,25, 0,0,0,0, "50,25", "0,0,0,0", 5);
t.done();
});
done();<|fim▁end|> | var t = async_test(""); |
<|file_name|>RegisterAgentCommand.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.<|fim▁hole|>
import com.thoughtworks.go.config.Agent;
import com.thoughtworks.go.server.service.AgentRuntimeInfo;
import com.thoughtworks.go.server.service.AgentService;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Optional;
import java.util.UUID;
public class RegisterAgentCommand extends AgentPerformanceCommand {
public RegisterAgentCommand(AgentService agentService) {
this.agentService = agentService;
}
@Override
Optional<String> execute() {
return registerAgent();
}
private Optional<String> registerAgent() {
InetAddress localHost = getInetAddress();
Agent agent = new Agent("Perf-Test-Agent-" + UUID.randomUUID(), localHost.getHostName(), localHost.getHostAddress(), UUID.randomUUID().toString());
AgentRuntimeInfo agentRuntimeInfo = AgentRuntimeInfo.fromServer(agent, false, "location", 233232L, "osx");
agentService.requestRegistration(agentRuntimeInfo);
return Optional.ofNullable(agent.getUuid());
}
private InetAddress getInetAddress() {
InetAddress localHost;
try {
localHost = InetAddress.getLocalHost();
} catch (UnknownHostException e) {
throw new RuntimeException(e);
}
return localHost;
}
}<|fim▁end|> | */
package com.thoughtworks.go.server.perf.commands; |
<|file_name|>response.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::fmt::{Display, Formatter};
use std::hash::{Hash, Hasher};
use std::str::from_utf8;
use base64::encode;
use log::warn;
use maplit::hashmap;
use serde_json::{json, Value};
use crate::{DifferenceType, PactSpecification};
use crate::bodies::OptionalBody;
use crate::generators::{Generators, generators_from_json, generators_to_json};
use crate::http_parts::HttpPart;
use crate::json_utils::{body_from_json, headers_from_json, headers_to_json};
use crate::matchingrules::{matchers_from_json, matchers_to_json, MatchingRules};
use crate::v4::http_parts::HttpResponse;
/// Struct that defines the response.
#[derive(Debug, Clone, Eq)]
pub struct Response {
/// Response status
pub status: u16,
/// Response headers
pub headers: Option<HashMap<String, Vec<String>>>,
/// Response body
pub body: OptionalBody,
/// Response matching rules
pub matching_rules: MatchingRules,
/// Response generators
pub generators: Generators
}
impl Response {
/// Build a `Response` from a `Value` struct.
  pub fn from_json(response: &Value, _: &PactSpecification) -> anyhow::Result<Response> {
let status_val = match response.get("status") {
Some(v) => v.as_u64().unwrap() as u16,
None => 200
};
let headers = headers_from_json(response);<|fim▁hole|> Ok(Response {
status: status_val,
headers: headers.clone(),
body: body_from_json(response, "body", &headers),
matching_rules: matchers_from_json(response, &Some("responseMatchingRules".to_string()))?,
generators: generators_from_json(response)?,
})
}
/// Returns a default response: Status 200
#[deprecated(since="0.5.4", note="please use `default()` from the standard Default trait instead")]
pub fn default_response() -> Response {
Response::default()
}
/// Converts this response to a `Value` struct.
#[allow(unused_variables)]
pub fn to_json(&self, spec_version: &PactSpecification) -> Value {
let mut json = json!({
"status" : json!(self.status)
});
{
let map = json.as_object_mut().unwrap();
if self.headers.is_some() {
map.insert("headers".to_string(), headers_to_json(&self.headers.clone().unwrap()));
}
match self.body {
OptionalBody::Present(ref body, _, _) => {
if self.content_type().unwrap_or_default().is_json() {
match serde_json::from_slice(body) {
Ok(json_body) => { map.insert("body".to_string(), json_body); },
Err(err) => {
warn!("Failed to parse json body: {}", err);
map.insert("body".to_string(), Value::String(encode(body)));
}
}
} else {
match from_utf8(body) {
Ok(s) => map.insert("body".to_string(), Value::String(s.to_string())),
Err(_) => map.insert("body".to_string(), Value::String(encode(body)))
};
}
},
OptionalBody::Empty => { map.insert("body".to_string(), Value::String("".to_string())); },
OptionalBody::Missing => (),
OptionalBody::Null => { map.insert("body".to_string(), Value::Null); }
}
if self.matching_rules.is_not_empty() {
map.insert("matchingRules".to_string(), matchers_to_json(
&self.matching_rules.clone(), spec_version));
}
if self.generators.is_not_empty() {
map.insert("generators".to_string(), generators_to_json(
&self.generators.clone(), spec_version));
}
}
json
}
/// Return a description of all the differences from the other response
pub fn differences_from(&self, other: &Response) -> Vec<(DifferenceType, String)> {
let mut differences = vec![];
if self.status != other.status {
differences.push((DifferenceType::Status, format!("Response status {} != {}", self.status, other.status)));
}
if self.headers != other.headers {
differences.push((DifferenceType::Headers, format!("Response headers {:?} != {:?}", self.headers, other.headers)));
}
if self.body != other.body {
differences.push((DifferenceType::Body, format!("Response body '{:?}' != '{:?}'", self.body, other.body)));
}
if self.matching_rules != other.matching_rules {
differences.push((DifferenceType::MatchingRules, format!("Response matching rules {:?} != {:?}", self.matching_rules, other.matching_rules)));
}
differences
}
/// Convert this response to V4 format
pub fn as_v4_response(&self) -> HttpResponse {
HttpResponse {
status: self.status,
headers: self.headers.clone(),
body: self.body.clone(),
matching_rules: self.matching_rules.clone(),
generators: self.generators.clone()
}
}
}
impl HttpPart for Response {
fn headers(&self) -> &Option<HashMap<String, Vec<String>>> {
&self.headers
}
fn headers_mut(&mut self) -> &mut HashMap<String, Vec<String>> {
if self.headers.is_none() {
self.headers = Some(hashmap!{});
}
self.headers.as_mut().unwrap()
}
fn body(&self) -> &OptionalBody {
&self.body
}
fn body_mut(&mut self) -> &mut OptionalBody {
&mut self.body
}
fn matching_rules(&self) -> &MatchingRules {
&self.matching_rules
}
fn matching_rules_mut(&mut self) -> &mut MatchingRules {
&mut self.matching_rules
}
fn generators(&self) -> &Generators {
&self.generators
}
fn generators_mut(&mut self) -> &mut Generators {
&mut self.generators
}
fn lookup_content_type(&self) -> Option<String> {
self.lookup_header_value(&"content-type".to_string())
}
}
impl Hash for Response {
fn hash<H: Hasher>(&self, state: &mut H) {
self.status.hash(state);
if self.headers.is_some() {
for (k, v) in self.headers.clone().unwrap() {
k.hash(state);
v.hash(state);
}
}
self.body.hash(state);
self.matching_rules.hash(state);
self.generators.hash(state);
}
}
impl PartialEq for Response {
fn eq(&self, other: &Self) -> bool {
self.status == other.status && self.headers == other.headers && self.body == other.body &&
self.matching_rules == other.matching_rules && self.generators == other.generators
}
fn ne(&self, other: &Self) -> bool {
self.status != other.status || self.headers != other.headers || self.body != other.body ||
self.matching_rules != other.matching_rules || self.generators != other.generators
}
}
impl Display for Response {
fn fmt(&self, f: &mut Formatter) -> std::fmt::Result {
write!(f, "Response ( status: {}, headers: {:?}, body: {} )", self.status, self.headers,
self.body)
}
}
impl Default for Response {
fn default() -> Self {
Response {
status: 200,
headers: None,
body: OptionalBody::Missing,
matching_rules: MatchingRules::default(),
generators: Generators::default()
}
}
}
#[cfg(test)]
mod tests {
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use expectest::prelude::*;
use maplit::hashmap;
use crate::bodies::OptionalBody;
use crate::PactSpecification;
use crate::response::Response;
#[test]
fn response_from_json_defaults_to_status_200() {
let response_json : serde_json::Value = serde_json::from_str(r#"
{
"headers": {}
}
"#).unwrap();
let response = Response::from_json(&response_json, &PactSpecification::V1_1);
assert_eq!(response.unwrap().status, 200);
}
#[test]
fn response_to_json_with_defaults() {
let response = Response::default();
expect!(response.to_json(&PactSpecification::V3).to_string()).to(be_equal_to("{\"status\":200}"));
}
#[test]
fn response_to_json_with_headers() {
let response = Response { headers: Some(hashmap!{
"HEADERA".to_string() => vec!["VALUEA".to_string()],
"HEADERB".to_string() => vec!["VALUEB1, VALUEB2".to_string()]
}), .. Response::default() };
expect!(response.to_json(&PactSpecification::V3).to_string()).to(
be_equal_to(r#"{"headers":{"HEADERA":"VALUEA","HEADERB":"VALUEB1, VALUEB2"},"status":200}"#)
);
}
#[test]
fn response_to_json_with_json_body() {
let response = Response { headers: Some(hashmap!{
"Content-Type".to_string() => vec!["application/json".to_string()]
}), body: OptionalBody::Present(r#"{"key": "value"}"#.into(), None, None), .. Response::default() };
expect!(response.to_json(&PactSpecification::V3).to_string()).to(
be_equal_to(r#"{"body":{"key":"value"},"headers":{"Content-Type":"application/json"},"status":200}"#)
);
}
#[test]
fn response_to_json_with_non_json_body() {
let response = Response { headers: Some(hashmap!{ "Content-Type".to_string() => vec!["text/plain".to_string()] }),
body: OptionalBody::Present("This is some text".into(), None, None), .. Response::default() };
expect!(response.to_json(&PactSpecification::V3).to_string()).to(
be_equal_to(r#"{"body":"This is some text","headers":{"Content-Type":"text/plain"},"status":200}"#)
);
}
#[test]
fn response_to_json_with_empty_body() {
let response = Response { body: OptionalBody::Empty, .. Response::default() };
expect!(response.to_json(&PactSpecification::V3).to_string()).to(
be_equal_to(r#"{"body":"","status":200}"#)
);
}
#[test]
fn response_to_json_with_null_body() {
let response = Response { body: OptionalBody::Null, .. Response::default() };
expect!(response.to_json(&PactSpecification::V3).to_string()).to(
be_equal_to(r#"{"body":null,"status":200}"#)
);
}
fn hash<T: Hash>(t: &T) -> u64 {
let mut s = DefaultHasher::new();
t.hash(&mut s);
s.finish()
}
#[test]
fn hash_for_response() {
let response1 = Response::default();
let response2 = Response { status: 400, .. Response::default() };
let response3 = Response { headers: Some(hashmap!{
"H1".to_string() => vec!["A".to_string()]
}), .. Response::default() };
let response4 = Response { headers: Some(hashmap!{
"H1".to_string() => vec!["B".to_string()]
}), .. Response::default() };
expect!(hash(&response1)).to(be_equal_to(hash(&response1)));
expect!(hash(&response3)).to(be_equal_to(hash(&response3)));
expect!(hash(&response1)).to_not(be_equal_to(hash(&response2)));
expect!(hash(&response3)).to_not(be_equal_to(hash(&response4)));
}
}<|fim▁end|> | |
<|file_name|>parser.js<|end_file_name|><|fim▁begin|>const pug = require("pug");
const pugRuntimeWrap = require("pug-runtime/wrap");
const path = require("path");
const YAML = require("js-yaml");
const getCodeBlock = require("pug-code-block");
const detectIndent = require("detect-indent");
const rebaseIndent = require("rebase-indent");
const pugdocArguments = require("./arguments");
const MIXIN_NAME_REGEX = /^mixin +([-\w]+)?/;
const DOC_REGEX = /^\s*\/\/-\s+?\@pugdoc\s*$/;
const DOC_STRING = "//- @pugdoc";
const CAPTURE_ALL = "all";
const CAPTURE_SECTION = "section";
const EXAMPLE_BLOCK = "block";
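// The @pugdoc `capture` option controls how many code blocks following the
// comment are attached to it: a number, "all" (to the end of the file) or
// "section" (everything up to the next @pugdoc comment).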
/**
* Returns all pugdoc comment and code blocks for the given code
*
* @param templateSrc {string}
* @return {{lineNumber: number, comment: string, code: string}[]}
*/
function extractPugdocBlocks(templateSrc) {
return (
templateSrc
.split("\n")
// Walk through every line and look for a pugdoc comment
.map(function (line, lineIndex) {
// If the line does not contain a pugdoc comment skip it
if (!line.match(DOC_REGEX)) {
return undefined;
}
// If the line contains a pugdoc comment return
// the comment block and the next code block
const comment = getCodeBlock.byLine(templateSrc, lineIndex + 1);
const meta = parsePugdocComment(comment);
// add number of captured blocks
if (meta.capture <= 0) {
return undefined;
}
let capture = 2;
if (meta.capture) {
if (meta.capture === CAPTURE_ALL) {
capture = Infinity;
} else if (meta.capture === CAPTURE_SECTION) {
capture = Infinity;
} else {
capture = meta.capture + 1;
}
}
// get all code blocks
let code = getCodeBlock.byLine(templateSrc, lineIndex + 1, capture);
// make string
if (Array.isArray(code)) {
// remove comment
code.shift();
// join all code
code = code.join("\n");
} else {
return undefined;
}
// filter out all but current pugdoc section
if (meta.capture === CAPTURE_SECTION) {
const nextPugDocIndex = code.indexOf(DOC_STRING);
if (nextPugDocIndex > -1) {
code = code.substr(0, nextPugDocIndex);
}
}
// if no code and no comment, skip
if (comment.match(DOC_REGEX) && code === "") {
return undefined;
}
return {
lineNumber: lineIndex + 1,
comment: comment,
code: code,
};
})
      // Remove skipped lines
.filter(function (result) {
return result !== undefined;
})
);
}
/**
* Returns all pugdocDocuments for the given code
*
* @param templateSrc {string}
* @param filename {string}
*/
function getPugdocDocuments(templateSrc, filename, locals) {
return extractPugdocBlocks(templateSrc).map(function (pugdocBlock) {
const meta = parsePugdocComment(pugdocBlock.comment);
const fragments = [];
// parse jsdoc style arguments list
if (meta.arguments) {
meta.arguments = meta.arguments.map(function (arg) {
return pugdocArguments.parse(arg, true);
});
}
// parse jsdoc style attributes list
if (meta.attributes) {
meta.attributes = meta.attributes.map(function (arg) {
return pugdocArguments.parse(arg, true);
});
}
let source = pugdocBlock.code;
source = source.replace(/\u2028|\u200B/g, "");
if (meta.example && meta.example !== false) {
if (meta.beforeEach) {
meta.example = `${meta.beforeEach}\n${meta.example}`;
}
if (meta.afterEach) {
meta.example = `${meta.example}\n${meta.afterEach}`;
}
}
// get example objects and add them to parent example
// also return them as separate pugdoc blocks
if (meta.examples) {
for (let i = 0, l = meta.examples.length; i < l; ++i) {
let x = meta.examples[i];
// do nothing for simple examples
if (typeof x === "string") {
if (meta.beforeEach) {
meta.examples[i] = `${meta.beforeEach}\n${x}`;
}
if (meta.afterEach) {
meta.examples[i] = `${x}\n${meta.afterEach}`;
}
continue;
}
if (meta.beforeEach && typeof x.beforeEach === "undefined") {
x.example = `${meta.beforeEach}\n${x.example}`;
}
if (meta.afterEach && typeof x.afterEach === "undefined") {
x.example = `${x.example}\n${meta.afterEach}`;
}
// merge example/examples with parent examples
meta.examples[i] = getExamples(x).reduce(
(acc, val) => acc.concat(val),
[]
);
// add fragments
fragments.push(x);
}
meta.examples = meta.examples.reduce((acc, val) => acc.concat(val), []);
}
// fix pug compilation for boolean use of example
const exampleClone = meta.example;
if (typeof meta.example === "boolean") {
meta.example = "";
}
const obj = {
// get meta
meta: meta,
// add file path
file: path.relative(".", filename),
// get pug code block matching the comments indent
source: source,
// get html output
output: compilePug(source, meta, filename, locals),
};
// remove output if example = false
if (exampleClone === false) {
obj.output = null;
}
// add fragments
if (fragments && fragments.length) {
obj.fragments = fragments.map((subexample) => {<|fim▁hole|> output: compilePug(source, subexample, filename, locals),
};
});
}
if (obj.output || obj.fragments) {
return obj;
}
return null;
});
}
/**
* Extract pug attributes from comment block
*/
function parsePugdocComment(comment) {
// remove first line (@pugdoc)
if (comment.indexOf("\n") === -1) {
return {};
}
comment = comment.substr(comment.indexOf("\n"));
comment = pugdocArguments.escapeArgumentsYAML(comment, "arguments");
comment = pugdocArguments.escapeArgumentsYAML(comment, "attributes");
// parse YAML
return YAML.safeLoad(comment) || {};
}
/**
* get all examples from the meta object
* either one or both of meta.example and meta.examples can be given
*/
function getExamples(meta) {
let examples = [];
if (meta.example) {
examples = examples.concat(meta.example);
}
if (meta.examples) {
examples = examples.concat(meta.examples);
}
return examples;
}
/**
* Compile Pug
*/
function compilePug(src, meta, filename, locals) {
let newSrc = [src];
// add example calls
getExamples(meta).forEach(function (example, i) {
// append to pug if it's a mixin example
if (MIXIN_NAME_REGEX.test(src)) {
newSrc.push(example);
// replace example block with src
} else {
if (i === 0) {
newSrc = [];
}
const lines = example.split("\n");
lines.forEach(function (line) {
if (line.trim() === EXAMPLE_BLOCK) {
const indent = detectIndent(line).indent.length;
line = rebaseIndent(src.split("\n"), indent).join("\n");
}
newSrc.push(line);
});
}
});
newSrc = newSrc.join("\n");
locals = Object.assign({}, locals, meta.locals);
// compile pug
const compiled = pug.compileClient(newSrc, {
name: "tmp",
externalRuntime: true,
filename: filename,
});
try {
const templateFunc = pugRuntimeWrap(compiled, "tmp");
return templateFunc(locals || {});
} catch (err) {
try {
const compiledDebug = pug.compileClient(newSrc, {
name: "tmp",
externalRuntime: true,
filename: filename,
compileDebug: true,
});
const templateFuncDebug = pugRuntimeWrap(compiledDebug, "tmp");
templateFuncDebug(locals || {});
} catch (debugErr) {
process.stderr.write(
`\n\nPug-doc error: ${JSON.stringify(meta, null, 2)}`
);
process.stderr.write(`\n\n${debugErr.toString()}`);
return null;
}
}
}
// Exports
module.exports = {
extractPugdocBlocks: extractPugdocBlocks,
getPugdocDocuments: getPugdocDocuments,
parsePugdocComment: parsePugdocComment,
getExamples: getExamples,
};<|fim▁end|> | return {
// get meta
meta: subexample,
// get html output |
<|file_name|>EntityIronGolem.java<|end_file_name|><|fim▁begin|>package net.minecraft.server;
import org.bukkit.craftbukkit.inventory.CraftItemStack; // CraftBukkit
public class EntityIronGolem extends EntityGolem {
private int e = 0;
Village d = null;
private int f;
private int g;
public EntityIronGolem(World world) {
super(world);
this.texture = "/mob/villager_golem.png";
this.a(1.4F, 2.9F);
this.getNavigation().a(true);
this.goalSelector.a(1, new PathfinderGoalMeleeAttack(this, 0.25F, true));
this.goalSelector.a(2, new PathfinderGoalMoveTowardsTarget(this, 0.22F, 32.0F));
this.goalSelector.a(3, new PathfinderGoalMoveThroughVillage(this, 0.16F, true));
this.goalSelector.a(4, new PathfinderGoalMoveTowardsRestriction(this, 0.16F));
this.goalSelector.a(5, new PathfinderGoalOfferFlower(this));
this.goalSelector.a(6, new PathfinderGoalRandomStroll(this, 0.16F));
this.goalSelector.a(7, new PathfinderGoalLookAtPlayer(this, EntityHuman.class, 6.0F));
this.goalSelector.a(8, new PathfinderGoalRandomLookaround(this));
this.targetSelector.a(1, new PathfinderGoalDefendVillage(this));
this.targetSelector.a(2, new PathfinderGoalHurtByTarget(this, false));
this.targetSelector.a(3, new PathfinderGoalNearestAttackableTarget(this, EntityMonster.class, 16.0F, 0, false, true));
}
protected void a() {
super.a();
this.datawatcher.a(16, Byte.valueOf((byte) 0));
}
public boolean aV() {
return true;
}
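    // Obfuscated update hook: every 70-119 ticks, re-resolve the closest
    // village within 32 blocks and, when one is found, restrict wandering
    // to the village's center and radius.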
protected void bd() {
if (--this.e <= 0) {
this.e = 70 + this.random.nextInt(50);
this.d = this.world.villages.getClosestVillage(MathHelper.floor(this.locX), MathHelper.floor(this.locY), MathHelper.floor(this.locZ), 32);<|fim▁hole|> ChunkCoordinates chunkcoordinates = this.d.getCenter();
this.b(chunkcoordinates.x, chunkcoordinates.y, chunkcoordinates.z, this.d.getSize());
}
}
super.bd();
}
public int getMaxHealth() {
return 100;
}
protected int h(int i) {
return i;
}
public void d() {
super.d();
if (this.f > 0) {
--this.f;
}
if (this.g > 0) {
--this.g;
}
if (this.motX * this.motX + this.motZ * this.motZ > 2.500000277905201E-7D && this.random.nextInt(5) == 0) {
int i = MathHelper.floor(this.locX);
int j = MathHelper.floor(this.locY - 0.20000000298023224D - (double) this.height);
int k = MathHelper.floor(this.locZ);
int l = this.world.getTypeId(i, j, k);
if (l > 0) {
this.world.a("tilecrack_" + l, this.locX + ((double) this.random.nextFloat() - 0.5D) * (double) this.width, this.boundingBox.b + 0.1D, this.locZ + ((double) this.random.nextFloat() - 0.5D) * (double) this.width, 4.0D * ((double) this.random.nextFloat() - 0.5D), 0.5D, ((double) this.random.nextFloat() - 0.5D) * 4.0D);
}
}
}
public boolean a(Class oclass) {
return this.q() && EntityHuman.class.isAssignableFrom(oclass) ? false : super.a(oclass);
}
public boolean k(Entity entity) {
this.f = 10;
this.world.broadcastEntityEffect(this, (byte) 4);
boolean flag = entity.damageEntity(DamageSource.mobAttack(this), 7 + this.random.nextInt(15));
if (flag) {
entity.motY += 0.4000000059604645D;
}
this.world.makeSound(this, "mob.irongolem.throw", 1.0F, 1.0F);
return flag;
}
public Village n() {
return this.d;
}
public void e(boolean flag) {
this.g = flag ? 400 : 0;
this.world.broadcastEntityEffect(this, (byte) 11);
}
protected String aQ() {
return "none";
}
protected String aR() {
return "mob.irongolem.hit";
}
protected String aS() {
return "mob.irongolem.death";
}
protected void a(int i, int j, int k, int l) {
this.world.makeSound(this, "mob.irongolem.walk", 1.0F, 1.0F);
}
protected void dropDeathLoot(boolean flag, int i) {
// CraftBukkit start
java.util.List<org.bukkit.inventory.ItemStack> loot = new java.util.ArrayList<org.bukkit.inventory.ItemStack>();
int j = this.random.nextInt(3);
int k;
if (j > 0) {
loot.add(new CraftItemStack(Block.RED_ROSE.id, j));
}
k = 3 + this.random.nextInt(3);
if (k > 0) {
loot.add(new CraftItemStack(Item.IRON_INGOT.id, k));
}
org.bukkit.craftbukkit.event.CraftEventFactory.callEntityDeathEvent(this, loot);
// CraftBukkit end
}
public int p() {
return this.g;
}
public boolean q() {
return (this.datawatcher.getByte(16) & 1) != 0;
}
public void f(boolean flag) {
byte b0 = this.datawatcher.getByte(16);
if (flag) {
this.datawatcher.watch(16, Byte.valueOf((byte) (b0 | 1)));
} else {
this.datawatcher.watch(16, Byte.valueOf((byte) (b0 & -2)));
}
}
}<|fim▁end|> | if (this.d == null) {
this.aE();
} else { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .auto_rest_url_test_service import AutoRestUrlTestService, AutoRestUrlTestServiceConfiguration<|fim▁hole|> 'AutoRestUrlTestServiceConfiguration'
]
__version__ = VERSION<|fim▁end|> | from .version import VERSION
__all__ = [
'AutoRestUrlTestService', |
<|file_name|>easyjson.go<|end_file_name|><|fim▁begin|>// Code generated by easyjson for marshaling/unmarshaling. DO NOT EDIT.
package deviceorientation
import (
json "encoding/json"
easyjson "github.com/mailru/easyjson"
jlexer "github.com/mailru/easyjson/jlexer"
jwriter "github.com/mailru/easyjson/jwriter"
)
// suppress unused package warning
var (
_ *json.RawMessage
_ *jlexer.Lexer
_ *jwriter.Writer
_ easyjson.Marshaler
)
func easyjsonC5a4559bDecodeGithubComKnqChromedpCdpDeviceorientation(in *jlexer.Lexer, out *SetDeviceOrientationOverrideParams) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
case "alpha":
out.Alpha = float64(in.Float64())
case "beta":
out.Beta = float64(in.Float64())
case "gamma":
out.Gamma = float64(in.Float64())
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
}
func easyjsonC5a4559bEncodeGithubComKnqChromedpCdpDeviceorientation(out *jwriter.Writer, in SetDeviceOrientationOverrideParams) {
out.RawByte('{')
first := true
_ = first
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"alpha\":")
out.Float64(float64(in.Alpha))
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"beta\":")
out.Float64(float64(in.Beta))
if !first {
out.RawByte(',')
}
first = false
out.RawString("\"gamma\":")
out.Float64(float64(in.Gamma))
out.RawByte('}')
}
// MarshalJSON supports json.Marshaler interface
func (v SetDeviceOrientationOverrideParams) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{}
easyjsonC5a4559bEncodeGithubComKnqChromedpCdpDeviceorientation(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v SetDeviceOrientationOverrideParams) MarshalEasyJSON(w *jwriter.Writer) {
easyjsonC5a4559bEncodeGithubComKnqChromedpCdpDeviceorientation(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *SetDeviceOrientationOverrideParams) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjsonC5a4559bDecodeGithubComKnqChromedpCdpDeviceorientation(&r, v)
return r.Error()
}
<|fim▁hole|>}
func easyjsonC5a4559bDecodeGithubComKnqChromedpCdpDeviceorientation1(in *jlexer.Lexer, out *ClearDeviceOrientationOverrideParams) {
isTopLevel := in.IsStart()
if in.IsNull() {
if isTopLevel {
in.Consumed()
}
in.Skip()
return
}
in.Delim('{')
for !in.IsDelim('}') {
key := in.UnsafeString()
in.WantColon()
if in.IsNull() {
in.Skip()
in.WantComma()
continue
}
switch key {
default:
in.SkipRecursive()
}
in.WantComma()
}
in.Delim('}')
if isTopLevel {
in.Consumed()
}
}
func easyjsonC5a4559bEncodeGithubComKnqChromedpCdpDeviceorientation1(out *jwriter.Writer, in ClearDeviceOrientationOverrideParams) {
out.RawByte('{')
first := true
_ = first
out.RawByte('}')
}
// MarshalJSON supports json.Marshaler interface
func (v ClearDeviceOrientationOverrideParams) MarshalJSON() ([]byte, error) {
w := jwriter.Writer{}
easyjsonC5a4559bEncodeGithubComKnqChromedpCdpDeviceorientation1(&w, v)
return w.Buffer.BuildBytes(), w.Error
}
// MarshalEasyJSON supports easyjson.Marshaler interface
func (v ClearDeviceOrientationOverrideParams) MarshalEasyJSON(w *jwriter.Writer) {
easyjsonC5a4559bEncodeGithubComKnqChromedpCdpDeviceorientation1(w, v)
}
// UnmarshalJSON supports json.Unmarshaler interface
func (v *ClearDeviceOrientationOverrideParams) UnmarshalJSON(data []byte) error {
r := jlexer.Lexer{Data: data}
easyjsonC5a4559bDecodeGithubComKnqChromedpCdpDeviceorientation1(&r, v)
return r.Error()
}
// UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *ClearDeviceOrientationOverrideParams) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjsonC5a4559bDecodeGithubComKnqChromedpCdpDeviceorientation1(l, v)
}<|fim▁end|> | // UnmarshalEasyJSON supports easyjson.Unmarshaler interface
func (v *SetDeviceOrientationOverrideParams) UnmarshalEasyJSON(l *jlexer.Lexer) {
easyjsonC5a4559bDecodeGithubComKnqChromedpCdpDeviceorientation(l, v) |
<|file_name|>unittest_geocode.py<|end_file_name|><|fim▁begin|>from testscenarios import TestWithScenarios
import unittest
from geocode.geocode import GeoCodeAccessAPI
class GeoCodeTests(TestWithScenarios, unittest.TestCase):
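    # testscenarios runs each test method once per scenario; the keys of the
    # scenario dict become attributes (self.address, self.latlng, self.method)
    # on the test instance.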
scenarios = [
(
"Scenario - 1: Get latlng from address",
{
'address': "Sydney NSW",
'latlng': (-33.8674869, 151.2069902),
'method': "geocode",
}
),
(
"Scenario - 2: Get address from latlng",
{
'address': "Sydney NSW",
'latlng': (-33.8674869, 151.2069902),
'method': "address",
}
),
]
def setUp(self):
self.api = GeoCodeAccessAPI()
def test_geocode(self):
if self.method == 'geocode':
expected_address = self.address
expected_lat = self.latlng[0]
expected_lng = self.latlng[1]
geocode = self.api.get_geocode(expected_address)
self.assertAlmostEqual(geocode.lat, expected_lat, delta=5)
self.assertAlmostEqual(geocode.lng, expected_lng, delta=5)
self.assertIn(expected_address, geocode.address)
else:
expected_address = self.address
expected_lat = self.latlng[0]
expected_lng = self.latlng[1]
address = self.api.get_address(lat=expected_lat, lng=expected_lng)
self.assertIn(expected_address, address)
def tearDown(self):<|fim▁hole|>
if __name__ == "__main__":
unittest.main()<|fim▁end|> | pass |
<|file_name|>test_randomcolor_visual.py<|end_file_name|><|fim▁begin|>import randomcolor
import random
def main():
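    # Builds randomcolors.html: a table of labelled swatch rows covering a
    # single random color, per-hue and per-luminosity batches, and 50 random
    # hue/luminosity/format combinations.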
hues = ['red', 'orange', 'yellow', 'green', 'blue', 'purple', 'pink',
'monochrome', 'random']
luminosities = ['bright', 'light', 'dark', 'random']
formats = ['rgb', 'hex']
colors = []
rand_color = randomcolor.RandomColor(42)
rand = random.Random(42)
rand_int = lambda: rand.randint(4, 10)
colors.append(('one random color', rand_color.generate()))
i = rand_int()
colors.append((
"%d random colors" % i,
rand_color.generate(count=i)
))
# test all hues
for hue in hues:
i = rand_int()
colors.append((
"%d random colors with %s hue" % (i, hue),
rand_color.generate(hue=hue, count=i)
))
# test all luminosities
for luminosity in luminosities:
i = rand_int()
colors.append((
"%d random colors with %s luminosity" % (i, luminosity),
rand_color.generate(luminosity=luminosity, count=i)
))
# test random combinations
for _ in range(50):
i = rand_int()
        hue = rand.choice(hues)
        luminosity = rand.choice(luminosities)
        format_ = rand.choice(formats)
colors.append((
"%d random colors with %s hue, %s luminosity, and %s format"
% (i, hue, luminosity, format_),
rand_color.generate(hue=hue, luminosity=luminosity,
format_=format_, count=i)
))
color_rows = colors_to_rows(colors)
html = generate_html(color_rows)
with open('randomcolors.html', 'w') as f:
f.write(html)
def colors_to_rows(colors):
s = ""
for color_name, colors in colors:
s += "<tr>"
s += "<td>%s</td>" % (color_name)
s += "<td>"
for color in colors:
s += "<div class='color' style='background-color:%s'></div>" % color
s += "</td>"
s += "</tr>"
return s
def generate_html(table_rows):
return """
<!DOCTYPE html>
<html lang="en">
<head>
<title>randomcolor test</title>
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css" rel="stylesheet">
<style>
.color {
height: 30px;
width: 30px;
border-radius: 30px;
display: inline-block;
}
</style>
</head><|fim▁hole|> <body>
<div class="container">
<div class="row col-md-10 col-md-offset-1">
<h1>Random Color Test</h1>
<table class="table">
%s
</table>
</div>
</body>
</html>
""" % table_rows
if __name__ == "__main__":
main()<|fim▁end|> | |
<|file_name|>Trip.java<|end_file_name|><|fim▁begin|>package foodtruck.linxup;
import java.util.List;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.joda.time.DateTime;
import foodtruck.model.Location;
/**
* @author aviolette
* @since 11/1/16
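 * <p>Illustrative usage (assuming pre-built {@code Location} and
 * {@code DateTime} values):
 * <pre>{@code
 * Trip trip = Trip.builder()
 *     .start(origin).end(destination)
 *     .startTime(departed).endTime(arrived)
 *     .build();
 * }</pre>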
*/
public class Trip {
private Location start;
private Location end;<|fim▁hole|> private Trip(Builder builder) {
this.start = builder.start;
this.end = builder.end;
this.startTime = builder.startTime;
this.endTime = builder.endTime;
this.positions = ImmutableList.copyOf(builder.positions);
}
public static Builder builder() {
return new Builder();
}
public static Builder builder(Trip instance) {
return new Builder(instance);
}
public String getName() {
return start.getShortenedName() + " to " + end.getShortenedName();
}
public Location getStart() {
return start;
}
public Location getEnd() {
return end;
}
public DateTime getStartTime() {
return startTime;
}
public DateTime getEndTime() {
return endTime;
}
public List<Position> getPositions() {
return positions;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
// .add("start", start)
// .add("end", end)
.add("startTime", startTime)
.add("endTime", endTime)
.toString();
}
public static class Builder {
private Location start;
private Location end;
private DateTime startTime;
private DateTime endTime;
private List<Position> positions = Lists.newLinkedList();
public Builder() {
}
public Builder(Trip instance) {
this.start = instance.start;
this.end = instance.end;
this.startTime = instance.startTime;
this.endTime = instance.endTime;
this.positions = instance.positions;
}
public Builder start(Location start) {
this.start = start;
return this;
}
public Builder end(Location end) {
this.end = end;
return this;
}
public Builder startTime(DateTime startTime) {
this.startTime = startTime;
return this;
}
public Builder endTime(DateTime endTime) {
this.endTime = endTime;
return this;
}
public Trip build() {
return new Trip(this);
}
public DateTime getStartTime() {
return startTime;
}
public DateTime getEndTime() {
return endTime;
}
public Builder addPosition(Position position) {
positions.add(position);
return this;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("start", start.getShortenedName())
.add("end", end.getShortenedName())
// .add("start", start)
// .add("end", end)
.add("startTime", startTime)
.add("endTime", endTime)
.toString();
}
}
}<|fim▁end|> | private DateTime startTime;
private DateTime endTime;
private List<Position> positions;
|
<|file_name|>test_migrations.py<|end_file_name|><|fim▁begin|># Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from tempest.api.compute import base
from tempest.common import waiters
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions
CONF = config.CONF
class MigrationsAdminTest(base.BaseV2ComputeAdminTest):
"""Test migration operations supported by admin user"""
@classmethod
def setup_clients(cls):
super(MigrationsAdminTest, cls).setup_clients()
cls.client = cls.os_admin.migrations_client
@decorators.idempotent_id('75c0b83d-72a0-4cf8-a153-631e83e7d53f')
def test_list_migrations(self):
"""Test admin user can get the migrations list"""
self.client.list_migrations()
@decorators.idempotent_id('1b512062-8093-438e-b47a-37d2f597cd64')
@testtools.skipUnless(CONF.compute_feature_enabled.resize,
'Resize not available.')
def test_list_migrations_in_flavor_resize_situation(self):
"""Admin can get the migrations list containing the resized server"""
server = self.create_test_server(wait_until="ACTIVE")
server_id = server['id']
self.resize_server(server_id, self.flavor_ref_alt)
body = self.client.list_migrations()['migrations']
instance_uuids = [x['instance_uuid'] for x in body]
self.assertIn(server_id, instance_uuids)
def _flavor_clean_up(self, flavor_id):
try:
self.admin_flavors_client.delete_flavor(flavor_id)
self.admin_flavors_client.wait_for_resource_deletion(flavor_id)
except exceptions.NotFound:
pass
@decorators.idempotent_id('33f1fec3-ba18-4470-8e4e-1d888e7c3593')
@testtools.skipUnless(CONF.compute_feature_enabled.resize,
'Resize not available.')
def test_resize_server_revert_deleted_flavor(self):
"""Test reverting resized server with original flavor deleted
Tests that we can revert the resize on an instance whose original
flavor has been deleted.
"""
# First we have to create a flavor that we can delete so make a copy
# of the normal flavor from which we'd create a server.
flavor = self.admin_flavors_client.show_flavor(
self.flavor_ref)['flavor']
flavor = self.admin_flavors_client.create_flavor(
name=data_utils.rand_name('test_resize_flavor_'),
ram=flavor['ram'],
disk=flavor['disk'],
vcpus=flavor['vcpus']
)['flavor']
self.addCleanup(self._flavor_clean_up, flavor['id'])
# Set extra specs same as self.flavor_ref for the created flavor,
# because the environment may need some special extra specs to
# create server which should have been contained in
# self.flavor_ref.
extra_spec_keys = self.admin_flavors_client.list_flavor_extra_specs(
self.flavor_ref)['extra_specs']
if extra_spec_keys:
self.admin_flavors_client.set_flavor_extra_spec(
flavor['id'], **extra_spec_keys)
# Now boot a server with the copied flavor.
server = self.create_test_server(
wait_until='ACTIVE', flavor=flavor['id'])
server = self.servers_client.show_server(server['id'])['server']
# If 'id' not in server['flavor'], we can only compare the flavor
# details, so here we should save the to-be-deleted flavor's details,
# for the flavor comparison after the server resizing.
if not server['flavor'].get('id'):
pre_flavor = {}
body = self.flavors_client.show_flavor(flavor['id'])['flavor']
for key in ['name', 'ram', 'vcpus', 'disk']:
pre_flavor[key] = body[key]
# Delete the flavor we used to boot the instance.
self._flavor_clean_up(flavor['id'])
# Now resize the server and wait for it to go into verify state.
self.servers_client.resize_server(server['id'], self.flavor_ref_alt)
waiters.wait_for_server_status(self.servers_client, server['id'],
'VERIFY_RESIZE')
# Now revert the resize, it should be OK even though the original
# flavor used to boot the server was deleted.
self.servers_client.revert_resize_server(server['id'])
waiters.wait_for_server_status(self.servers_client, server['id'],
'ACTIVE')
server = self.servers_client.show_server(server['id'])['server']
if server['flavor'].get('id'):
msg = ('server flavor is not same as flavor!')
self.assertEqual(flavor['id'], server['flavor']['id'], msg)
else:
self.assertEqual(pre_flavor['name'],
server['flavor']['original_name'],
"original_name in server flavor is not same as "
"flavor name!")
for key in ['ram', 'vcpus', 'disk']:
msg = ('attribute %s in server flavor is not same as '
'flavor!' % key)
self.assertEqual(pre_flavor[key], server['flavor'][key], msg)
def _test_cold_migrate_server(self, revert=False):
if CONF.compute.min_compute_nodes < 2:
msg = "Less than 2 compute nodes, skipping multinode tests."<|fim▁hole|>
self.admin_servers_client.migrate_server(server['id'])
waiters.wait_for_server_status(self.servers_client,
server['id'], 'VERIFY_RESIZE')
if revert:
self.servers_client.revert_resize_server(server['id'])
assert_func = self.assertEqual
else:
self.servers_client.confirm_resize_server(server['id'])
assert_func = self.assertNotEqual
waiters.wait_for_server_status(self.servers_client,
server['id'], 'ACTIVE')
dst_host = self.get_host_for_server(server['id'])
assert_func(src_host, dst_host)
@decorators.idempotent_id('4bf0be52-3b6f-4746-9a27-3143636fe30d')
@testtools.skipUnless(CONF.compute_feature_enabled.cold_migration,
'Cold migration not available.')
def test_cold_migration(self):
"""Test cold migrating server and then confirm the migration"""
self._test_cold_migrate_server(revert=False)
@decorators.idempotent_id('caa1aa8b-f4ef-4374-be0d-95f001c2ac2d')
@testtools.skipUnless(CONF.compute_feature_enabled.cold_migration,
'Cold migration not available.')
def test_revert_cold_migration(self):
"""Test cold migrating server and then revert the migration"""
self._test_cold_migrate_server(revert=True)<|fim▁end|> | raise self.skipException(msg)
server = self.create_test_server(wait_until="ACTIVE")
src_host = self.get_host_for_server(server['id']) |
<|file_name|>script_wp_before_3.5.js<|end_file_name|><|fim▁begin|>(function($) {
$(document).ready( function() {
if ( cntctfrm_ajax.cntctfrm_delete_multi_link ) {
$( '#cntctfrm_show_multi_notice' ).removeAttr('href title').css('cursor', 'pointer');
};
$( '#cntctfrm_change_label' ).change( function() {
if ( $( this ).is( ':checked' ) ) {
$( '.cntctfrm_change_label_block' ).show();
} else {
$( '.cntctfrm_change_label_block' ).hide();
}
});
$( '#cntctfrm_display_add_info' ).change( function() {
if ( $( this ).is( ':checked' ) ) {
$( '.cntctfrm_display_add_info_block' ).show();
} else {
$( '.cntctfrm_display_add_info_block' ).hide();
}
});
$( '#cntctfrm_add_language_button' ).click( function() {
$.ajax({
url: '../wp-admin/admin-ajax.php',/* update_url, */
type: "POST",
data: "action=cntctfrm_add_language&lang=" + $( '#cntctfrm_languages' ).val() + '&cntctfrm_ajax_nonce_field=' + cntctfrm_ajax.cntctfrm_nonce,
success: function( result ) {
var text = $.parseJSON( result );
var lang_val = $( '#cntctfrm_languages' ).val();
$( '.cntctfrm_change_label_block .cntctfrm_language_tab, .cntctfrm_action_after_send_block .cntctfrm_language_tab' ).each( function() {
$( this ).addClass( 'hidden' );
});
$( '.cntctfrm_change_label_block .cntctfrm_language_tab' ).first().clone().appendTo( '.cntctfrm_change_label_block' ).removeClass( 'hidden' ).removeClass( 'cntctfrm_tab_en' ).addClass( 'cntctfrm_tab_' + lang_val );
$( '.cntctfrm_action_after_send_block .cntctfrm_language_tab' ).first().clone().insertBefore( '#cntctfrm_before' ).removeClass( 'hidden' ).removeClass( 'cntctfrm_tab_en' ).addClass( 'cntctfrm_tab_' + lang_val );
$( '.cntctfrm_change_label_block .cntctfrm_language_tab' ).last().find( 'input' ).each( function() {
$( this ).val( '' );
$( this ).attr( 'name', $( this ).attr( 'name' ).replace( '[en]', '[' + lang_val + ']' ) );
});
$( '.cntctfrm_change_label_block .cntctfrm_language_tab' ).last().find( '.cntctfrm_info' ).last().html( text );
$( '.cntctfrm_action_after_send_block .cntctfrm_language_tab' ).last().find( 'input' ).val( '' ).attr( 'name', $( '.cntctfrm_action_after_send_block .cntctfrm_language_tab' ).last().find( 'input' ).attr( 'name' ).replace( '[en]', '[' + lang_val + ']' ) );
$( '.cntctfrm_action_after_send_block .cntctfrm_language_tab' ).last().find( '.cntctfrm_info' ).last().html( text );
$( '.cntctfrm_change_label_block .cntctfrm_label_language_tab, .cntctfrm_action_after_send_block .cntctfrm_label_language_tab' ).each( function() {
$( this ).removeClass( 'cntctfrm_active' );
});
$( '.cntctfrm_change_label_block .clear' ).prev().clone().attr( 'id', 'cntctfrm_label_' + lang_val ).addClass( 'cntctfrm_active' ).html( $( '#cntctfrm_languages option:selected' ).text() + ' <span class="cntctfrm_delete" rel="' + lang_val + '">X</span>').insertBefore( '.cntctfrm_change_label_block .clear' );
$( '.cntctfrm_action_after_send_block .clear' ).prev().clone().attr( 'id', 'cntctfrm_text_' + lang_val ).addClass( 'cntctfrm_active' ).html( $( '#cntctfrm_languages option:selected' ).text() + ' <span class="cntctfrm_delete" rel="' + lang_val + '">X</span>').insertBefore( '.cntctfrm_action_after_send_block .clear' );
$( '#cntctfrm_languages option:selected' ).remove();
},
error: function( request, status, error ) {
alert( error + request.status );
}
});
});
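    // A hedged sketch (not in the original plugin) of the same admin-ajax
    // contract exercised directly; 'de' is a placeholder language code:
    //   jQuery.post( '../wp-admin/admin-ajax.php', {
    //       action: 'cntctfrm_add_language',
    //       lang: 'de',
    //       cntctfrm_ajax_nonce_field: cntctfrm_ajax.cntctfrm_nonce
    //   }, function( result ) {
    //       var text = jQuery.parseJSON( result );
    //   } );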
$( '.cntctfrm_change_label_block .cntctfrm_label_language_tab' ).live( 'click', function(){
$( '.cntctfrm_label_language_tab' ).each( function() {
$( this ).removeClass( 'cntctfrm_active' );
});
var index = $( '.cntctfrm_change_label_block .cntctfrm_label_language_tab' ).index( $( this ) );
$( this ).addClass( 'cntctfrm_active' );
var blocks = $( '.cntctfrm_action_after_send_block .cntctfrm_label_language_tab' );
$( blocks[ index ] ).addClass( 'cntctfrm_active' );
$( '.cntctfrm_language_tab' ).each( function() {
$( this ).addClass( 'hidden' );
});
$( '.' + this.id.replace( 'label', 'tab' ) ).removeClass( 'hidden' );
});
$( '.cntctfrm_action_after_send_block .cntctfrm_label_language_tab' ).live( 'click', function() {
$( '.cntctfrm_label_language_tab' ).each( function() {
$( this ).removeClass( 'cntctfrm_active' );
});
var index = $( '.cntctfrm_action_after_send_block .cntctfrm_label_language_tab' ).index( $( this ) );
$( this ).addClass( 'cntctfrm_active' );
var blocks = $( '.cntctfrm_change_label_block .cntctfrm_label_language_tab' );
$( blocks[ index ] ).addClass( 'cntctfrm_active' );
$( '.cntctfrm_language_tab' ).each( function() {
$( this ).addClass( 'hidden' );
});
console.log( this.id.replace( 'text', 'tab' ), index );
$( '.' + this.id.replace( 'text', 'tab' ) ).removeClass( 'hidden' );
});
$( '.cntctfrm_delete' ).live( 'click', function( event ) {
event.stopPropagation();
if ( confirm( cntctfrm_ajax.cntctfrm_confirm_text ) ) {
var lang = $( this ).attr('rel');
$.ajax({
url: '../wp-admin/admin-ajax.php',/* update_url, */
type: "POST",
data: "action=cntctfrm_remove_language&lang=" + lang + '&cntctfrm_ajax_nonce_field=' + cntctfrm_ajax.cntctfrm_nonce,
success: function(result) {
$( '#cntctfrm_label_' + lang + ', #cntctfrm_text_' + lang + ', .cntctfrm_tab_' + lang ).each( function() {
$( this ).remove();
});
$( '.cntctfrm_change_label_block .cntctfrm_label_language_tab' ).removeClass( 'cntctfrm_active' ).first().addClass( 'cntctfrm_active' );
$( '.cntctfrm_action_after_send_block .cntctfrm_label_language_tab' ).removeClass( 'cntctfrm_active' ).first().addClass( 'cntctfrm_active' );
$( '.cntctfrm_change_label_block .cntctfrm_language_tab' ).addClass( 'hidden' ).first().removeClass( 'hidden' );
$( '.cntctfrm_action_after_send_block .cntctfrm_language_tab' ).addClass( 'hidden' ).first().removeClass( 'hidden' );
},
error: function( request, status, error ) {
alert( error + request.status );
}
});
}
});
$( '.cntctfrm_language_tab_block' ).css( 'display', 'none' );
$( '.cntctfrm_language_tab_block_mini' ).css( 'display', 'block' );
$( '.cntctfrm_language_tab_block_mini' ).live( 'click touchstart', function() {<|fim▁hole|> } else {
$( '.cntctfrm_language_tab_block' ).css( 'display', 'none' );
$( '.cntctfrm_language_tab_block_mini' ).css( 'background-position', '' );
}
});
/* add notice about changing in the settings page */
$( '#cntctfrm_settings_form input' ).bind( "change click select", function() {
if ( $( this ).attr( 'type' ) != 'submit' ) {
$( '.updated.fade' ).css( 'display', 'none' );
$( '#cntctfrm_settings_notice' ).css( 'display', 'block' );
};
});
$( 'select[name="cntctfrm_user_email"]').focus( function() {
$('#cntctfrm_select_email_user').attr( 'checked', 'checked' );
$( '.updated.fade' ).css( 'display', 'none' );
$( '#cntctfrm_settings_notice' ).css( 'display', 'block' );
});
});
})(jQuery);<|fim▁end|> | if ( $( '.cntctfrm_language_tab_block' ).css( 'display' ) == 'none' ) {
$( '.cntctfrm_language_tab_block' ).css( 'display', 'block' );
$( '.cntctfrm_language_tab_block_mini' ).css( 'background-position', '1px -3px' ); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The init2winit Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.<|fim▁hole|># You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.<|fim▁end|> | |
<|file_name|>F57.py<|end_file_name|><|fim▁begin|>import fechbase
class Records(fechbase.RecordsBase):
def __init__(self):
fechbase.RecordsBase.__init__(self)
self.fields = [
{'name': 'FORM TYPE', 'number': '1'},
{'name': 'FILER FEC CMTE ID', 'number': '2'},
{'name': 'ENTITY TYPE', 'number': '3'},
{'name': 'NAME (Payee)', 'number': '4'},
{'name': 'STREET 1', 'number': '5'},
{'name': 'STREET 2', 'number': '6'},
{'name': 'CITY', 'number': '7'},
{'name': 'STATE', 'number': '8'},
{'name': 'ZIP', 'number': '9'},
{'name': 'TRANSDESC', 'number': '10'},
{'name': 'Of Expenditure', 'number': '11-'},
{'name': 'AMOUNT', 'number': '12'},
{'name': 'SUPPORT/OPPOSE', 'number': '13'},
{'name': 'S/O FEC CAN ID NUMBER', 'number': '14'},
{'name': 'S/O CAN/NAME', 'number': '15'},
{'name': 'S/O CAN/OFFICE', 'number': '16'},
{'name': 'S/O CAN/STATE', 'number': '17'},
{'name': 'S/O CAN/DIST', 'number': '18'},
{'name': 'FEC COMMITTEE ID NUMBER', 'number': '19'},
{'name': 'Unused field', 'number': '20'},
{'name': 'Unused field', 'number': '21'},
{'name': 'Unused field', 'number': '22'},
{'name': 'Unused field', 'number': '23'},
{'name': 'Unused field', 'number': '24'},
{'name': 'CONDUIT NAME', 'number': '25'},
{'name': 'CONDUIT STREET 1', 'number': '26'},
{'name': 'CONDUIT STREET 2', 'number': '27'},
{'name': 'CONDUIT CITY', 'number': '28'},
{'name': 'CONDUIT STATE', 'number': '29'},
{'name': 'CONDUIT ZIP', 'number': '30'},
{'name': 'AMENDED CD', 'number': '31'},
{'name': 'TRAN ID', 'number': '32'},
]<|fim▁hole|> self.fields_names = self.hash_names(self.fields)<|fim▁end|> | |
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|># coding: utf8
from functools import wraps
from logging import getLogger
logger = getLogger(__name__)
__author__ = 'marcos.costa'
class request_logger(object):
def __init__(self, method=None):
self.method = method
<|fim▁hole|> def __call__(self, func):
method = self.method
if method is None:
method = func.func_name
@wraps(func)
def wrapper(instance, request, *args, **kwargs):
response = func(instance, request, *args, **kwargs)
msg = ("\nCalled method: {method}\nrequest: {request}"
"\nresponse: {response}").format(method=method,
request=request,
response=response)
logger.info(msg)
return response
return wrapper<|fim▁end|> | |
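# A minimal usage sketch for request_logger, assuming a hypothetical Service
# class (the names below are illustrative, not from this project):
#   class Service(object):
#       @request_logger("fetch")
#       def fetch(self, request):
#           return {"ok": True}
#   Service().fetch({"id": 1})  # logs the method name, request and response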
<|file_name|>LineString.cpp<|end_file_name|><|fim▁begin|>/**********************************************************************
*
* GEOS - Geometry Engine Open Source
* http://geos.osgeo.org
*
* Copyright (C) 2011 Sandro Santilli <[email protected]>
* Copyright (C) 2005-2006 Refractions Research Inc.
* Copyright (C) 2001-2002 Vivid Solutions Inc.
*
* This is free software; you can redistribute and/or modify it under
* the terms of the GNU Lesser General Public Licence as published
* by the Free Software Foundation.
* See the COPYING file for more information.
*
**********************************************************************
*
* Last port: geom/LineString.java r320 (JTS-1.12)
*
**********************************************************************/
#include <geos/util/IllegalArgumentException.h>
#include <geos/algorithm/CGAlgorithms.h>
#include <geos/geom/Coordinate.h>
#include <geos/geom/CoordinateSequenceFactory.h>
#include <geos/geom/CoordinateSequence.h>
#include <geos/geom/CoordinateSequenceFilter.h>
#include <geos/geom/CoordinateFilter.h>
#include <geos/geom/Dimension.h>
#include <geos/geom/GeometryFilter.h>
#include <geos/geom/GeometryComponentFilter.h>
#include <geos/geom/GeometryFactory.h>
#include <geos/geom/LineString.h>
#include <geos/geom/Point.h>
#include <geos/geom/MultiPoint.h> // for getBoundary
#include <geos/geom/Envelope.h>
#include <algorithm>
#include <typeinfo>
#include <memory>
#include <cassert>
using namespace std;
using namespace geos::algorithm;
namespace geos {
namespace geom { // geos::geom
/*protected*/
LineString::LineString(const LineString &ls)
:
Geometry(ls),
points(ls.points->clone())
{
//points=ls.points->clone();
}
Geometry*
LineString::reverse() const
{
assert(points.get());
CoordinateSequence* seq = points->clone();
CoordinateSequence::reverse(seq);
assert(getFactory());
return getFactory()->createLineString(seq);
}
/*private*/
void
LineString::validateConstruction()
{
if (points.get()==NULL)
{
points.reset(getFactory()->getCoordinateSequenceFactory()->create());
return;
}
if (points->size()==1)
{
throw util::IllegalArgumentException("point array must contain 0 or >1 elements\n");
}
}
/*protected*/
LineString::LineString(CoordinateSequence *newCoords,
const GeometryFactory *factory)
:
Geometry(factory),
points(newCoords)
{
validateConstruction();
}
/*public*/
LineString::LineString(CoordinateSequence::AutoPtr newCoords,
const GeometryFactory *factory)
:
Geometry(factory),
points(newCoords)
{
validateConstruction();
}
LineString::~LineString()
{
//delete points;
}
CoordinateSequence*
LineString::getCoordinates() const
{
assert(points.get());
return points->clone();<|fim▁hole|> //return points;
}
const CoordinateSequence*
LineString::getCoordinatesRO() const
{
assert(0 != points.get());
return points.get();
}
const Coordinate&
LineString::getCoordinateN(int n) const
{
assert(points.get());
return points->getAt(n);
}
Dimension::DimensionType
LineString::getDimension() const
{
return Dimension::L; // line
}
int
LineString::getCoordinateDimension() const
{
return (int) points->getDimension();
}
int
LineString::getBoundaryDimension() const
{
if (isClosed()) {
return Dimension::False;
}
return 0;
}
bool
LineString::isEmpty() const
{
assert(points.get());
return points->isEmpty();
}
size_t
LineString::getNumPoints() const
{
assert(points.get());
return points->getSize();
}
Point*
LineString::getPointN(size_t n) const
{
assert(getFactory());
assert(points.get());
return getFactory()->createPoint(points->getAt(n));
}
Point*
LineString::getStartPoint() const
{
if (isEmpty()) {
return NULL;
//return new Point(NULL,NULL);
}
return getPointN(0);
}
Point*
LineString::getEndPoint() const
{
if (isEmpty()) {
return NULL;
//return new Point(NULL,NULL);
}
return getPointN(getNumPoints() - 1);
}
bool
LineString::isClosed() const
{
if (isEmpty()) {
return false;
}
return getCoordinateN(0).equals2D(getCoordinateN(getNumPoints()-1));
}
bool
LineString::isRing() const
{
return isClosed() && isSimple();
}
string
LineString::getGeometryType() const
{
return "LineString";
}
Geometry*
LineString::getBoundary() const
{
if (isEmpty()) {
return getFactory()->createMultiPoint();
}
// using the default OGC_SFS MOD2 rule, the boundary of a
// closed LineString is empty
if (isClosed()) {
return getFactory()->createMultiPoint();
}
vector<Geometry*> *pts=new vector<Geometry*>();
pts->push_back(getStartPoint());
pts->push_back(getEndPoint());
MultiPoint *mp = getFactory()->createMultiPoint(pts);
return mp;
}
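// An illustration of the MOD2 rule above (values assumed, not from a test):
// an open line (0 0, 1 1) yields the MultiPoint {(0 0), (1 1)}, while a
// closed ring (0 0, 1 0, 1 1, 0 0) yields an empty MultiPoint.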
bool
LineString::isCoordinate(Coordinate& pt) const
{
assert(points.get());
std::size_t npts=points->getSize();
for (std::size_t i = 0; i<npts; i++) {
if (points->getAt(i)==pt) {
return true;
}
}
return false;
}
/*protected*/
Envelope::AutoPtr
LineString::computeEnvelopeInternal() const
{
if (isEmpty()) {
// We don't return NULL here
// as it would indicate "unknown"
// envelope. In this case we
// *know* the envelope is EMPTY.
return Envelope::AutoPtr(new Envelope());
}
assert(points.get());
const Coordinate& c=points->getAt(0);
double minx = c.x;
double miny = c.y;
double maxx = c.x;
double maxy = c.y;
std::size_t npts=points->getSize();
for (std::size_t i=1; i<npts; i++) {
const Coordinate &c=points->getAt(i);
minx = minx < c.x ? minx : c.x;
maxx = maxx > c.x ? maxx : c.x;
miny = miny < c.y ? miny : c.y;
maxy = maxy > c.y ? maxy : c.y;
}
// caller expects a newly allocated Envelope.
// this function won't be called twice, unless
// cached Envelope is invalidated (set to NULL)
return Envelope::AutoPtr(new Envelope(minx, maxx, miny, maxy));
}
bool
LineString::equalsExact(const Geometry *other, double tolerance) const
{
if (!isEquivalentClass(other)) {
return false;
}
const LineString *otherLineString=dynamic_cast<const LineString*>(other);
assert(otherLineString);
size_t npts=points->getSize();
if (npts!=otherLineString->points->getSize()) {
return false;
}
for (size_t i=0; i<npts; ++i) {
if (!equal(points->getAt(i),otherLineString->points->getAt(i),tolerance)) {
return false;
}
}
return true;
}
void
LineString::apply_rw(const CoordinateFilter *filter)
{
assert(points.get());
points->apply_rw(filter);
}
void
LineString::apply_ro(CoordinateFilter *filter) const
{
assert(points.get());
points->apply_ro(filter);
}
void LineString::apply_rw(GeometryFilter *filter)
{
assert(filter);
filter->filter_rw(this);
}
void LineString::apply_ro(GeometryFilter *filter) const
{
assert(filter);
filter->filter_ro(this);
}
/*public*/
void
LineString::normalize()
{
assert(points.get());
std::size_t npts=points->getSize();
std::size_t n=npts/2;
for (std::size_t i=0; i<n; i++) {
std::size_t j = npts - 1 - i;
if (!(points->getAt(i)==points->getAt(j))) {
if (points->getAt(i).compareTo(points->getAt(j)) > 0) {
CoordinateSequence::reverse(points.get());
}
return;
}
}
}
int
LineString::compareToSameClass(const Geometry *ls) const
{
const LineString *line=dynamic_cast<const LineString*>(ls);
assert(line);
// MD - optimized implementation
std::size_t mynpts=points->getSize();
std::size_t othnpts=line->points->getSize();
if ( mynpts > othnpts ) return 1;
if ( mynpts < othnpts ) return -1;
for (std::size_t i=0; i<mynpts; i++)
{
int cmp=points->getAt(i).compareTo(line->points->getAt(i));
if (cmp) return cmp;
}
return 0;
}
const Coordinate*
LineString::getCoordinate() const
{
if (isEmpty()) return NULL;
return &(points->getAt(0));
}
double
LineString::getLength() const
{
return CGAlgorithms::length(points.get());
}
void
LineString::apply_rw(GeometryComponentFilter *filter)
{
assert(filter);
filter->filter_rw(this);
}
void
LineString::apply_ro(GeometryComponentFilter *filter) const
{
assert(filter);
filter->filter_ro(this);
}
void
LineString::apply_rw(CoordinateSequenceFilter& filter)
{
size_t npts=points->size();
if (!npts) return;
for (size_t i = 0; i<npts; ++i)
{
filter.filter_rw(*points, i);
if (filter.isDone()) break;
}
if (filter.isGeometryChanged()) geometryChanged();
}
void
LineString::apply_ro(CoordinateSequenceFilter& filter) const
{
size_t npts=points->size();
if (!npts) return;
for (size_t i = 0; i<npts; ++i)
{
filter.filter_ro(*points, i);
if (filter.isDone()) break;
}
//if (filter.isGeometryChanged()) geometryChanged();
}
GeometryTypeId
LineString::getGeometryTypeId() const
{
return GEOS_LINESTRING;
}
} // namespace geos::geom
} // namespace geos<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod message_set;
use std;
use std::fmt;
use linked_hash_map::{Iter, LinkedHashMap};
use uuid::Uuid;
pub mod message;
#[derive(Debug, PartialEq)]
pub struct Message {
properties: Map,
body: Option<Value>,
}
impl Message {
pub fn new() -> MessageBuilder {
MessageBuilder::new()
}
pub fn with_property<K, V>(key: K, value: V) -> MessageBuilder
where
K: Into<String>,
V: Into<Value>,
{
MessageBuilder::new().with_property(key.into(), value.into())
}
pub fn with_body<V>(value: V) -> MessageBuilder
where
V: Into<Value>,
{
MessageBuilder::new().with_body(value.into())
}
pub fn properties(&self) -> &Map {
&self.properties
}
pub fn body(&self) -> Option<&Value> {
match self.body {
Some(ref value) => Some(value),
None => None,
}
}
}
pub struct MessageBuilder {
map: LinkedHashMap<String, Value>,
body: Option<Value>,
}
impl MessageBuilder {
pub fn new() -> MessageBuilder {
MessageBuilder {
map: LinkedHashMap::new(),
body: None,
}
}
pub fn with_property<K, V>(mut self, key: K, value: V) -> MessageBuilder
where
K: Into<String>,
V: Into<Value>,
{
self.map.insert(key.into(), value.into());
self
}
pub fn with_body<V>(mut self, value: V) -> MessageBuilder
where
V: Into<Value>,
{
self.body = Some(value.into());
self
}
pub fn build(self) -> Message {
Message {
properties: Map { map: self.map },
body: self.body,
}
}
}
#[derive(PartialEq, Clone)]
pub struct Map {
map: LinkedHashMap<String, Value>,
}
impl Map {
pub fn new() -> MapBuilder {
MapBuilder {
map: LinkedHashMap::new(),
}
}
pub fn get(&self, key: &str) -> Option<&Value> {
self.map.get(key)
}
pub fn iter(&self) -> Iter<String, Value> {
self.map.iter()
}
pub fn len(&self) -> usize {
self.map.len()
}
}
impl fmt::Debug for Map {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.map.fmt(f)
}
}
pub struct MapBuilder {
map: LinkedHashMap<String, Value>,
}
impl MapBuilder {
pub fn insert<K, V>(mut self, key: K, value: V) -> MapBuilder
where
K: Into<String>,
V: Into<Value>,
{
self.map.insert(key.into(), value.into());
self
}
pub fn build(self) -> Map {
Map { map: self.map }
}
}
#[derive(Clone, PartialEq)]
pub struct List {
list: Vec<Value>,
}
impl List {
pub fn new() -> ListBuilder {
ListBuilder { list: Vec::new() }
}
pub fn iter(&self) -> std::slice::Iter<Value> {
self.list.iter()
}
pub fn len(&self) -> usize {
self.list.len()
}
}
impl std::ops::Index<usize> for List {
type Output = Value;
fn index(&self, index: usize) -> &Self::Output {
&self.list[index]
}
}
impl fmt::Debug for List {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.list.fmt(f)
}
}
pub struct ListBuilder {
list: Vec<Value>,
}
impl ListBuilder {
pub fn append<V>(mut self, value: V) -> ListBuilder
where
V: Into<Value>,
{
self.list.push(value.into());
self
}
pub fn build(self) -> List {
List { list: self.list }
}
}
#[derive(Debug, PartialEq, Clone)]
pub enum Value {
Null,
String(String),
Int64(i64),
Int32(i32),
Float32(f32),
Float64(f64),
Boolean(bool),
Bytes(Vec<u8>),
List(List),
Map(Map),
Uuid(Uuid),
}
impl From<String> for Value {
fn from(value: String) -> Self {
Value::String(value)
}
}
impl<'a> From<&'a str> for Value {
fn from(value: &'a str) -> Self {
Value::String(value.to_string())
}
}
impl From<i64> for Value {
fn from(value: i64) -> Self {
Value::Int64(value)
}
}
impl From<i32> for Value {
fn from(value: i32) -> Self {
Value::Int32(value)
}
}
impl From<f64> for Value {
fn from(value: f64) -> Self {
Value::Float64(value)
}
}
impl From<bool> for Value {
fn from(value: bool) -> Self {
Value::Boolean(value)
}
}
impl From<Vec<u8>> for Value {
fn from(value: Vec<u8>) -> Self {
Value::Bytes(value)
}
}
impl From<List> for Value {
fn from(value: List) -> Self {
Value::List(value)
}
}
impl From<Map> for Value {
fn from(value: Map) -> Self {
Value::Map(value)
}
}
impl From<Uuid> for Value {
fn from(value: Uuid) -> Self {
Value::Uuid(value)
}
}
pub trait MessageVisitor {
type Output;
fn visit_message(&self, value: &Message, buffer: &mut Self::Output);
fn visit_map(&self, value: &Map, buffer: &mut Self::Output);
fn visit_list(&self, value: &List, buffer: &mut Self::Output);
fn visit_value(&self, value: &Value, buffer: &mut Self::Output);
fn visit_bytes(&self, value: &Vec<u8>, buffer: &mut Self::Output);
fn visit_int32(&self, value: i32, buffer: &mut Self::Output);
fn visit_int64(&self, value: i64, buffer: &mut Self::Output);
fn visit_float32(&self, value: f32, buffer: &mut Self::Output);
fn visit_float64(&self, value: f64, buffer: &mut Self::Output);
fn visit_boolean(&self, _value: bool, _buffer: &mut Self::Output);
fn visit_string(&self, _value: &String, _buffer: &mut Self::Output);
fn visit_uuid(&self, value: &Uuid, buffer: &mut Self::Output);
fn visit_null(&self, _buffer: &mut Self::Output);
}
pub struct BinaryFormatSizeCalculator {}
impl MessageVisitor for BinaryFormatSizeCalculator {
type Output = usize;
fn visit_message(&self, message: &Message, buffer: &mut Self::Output) {
*buffer += 4;
for (key, value) in message.properties().iter() {
self.visit_string(key, buffer);
self.visit_value(value, buffer);
}
if let Some(value) = message.body() {
self.visit_value(value, buffer);
}
}
fn visit_map(&self, map: &Map, buffer: &mut Self::Output) {
*buffer += map.len();
for (key, value) in map.iter() {
self.visit_string(key, buffer);
self.visit_value(value, buffer);
}
}
fn visit_list(&self, list: &List, buffer: &mut Self::Output) {
*buffer += list.len();
for value in list.iter() {
self.visit_value(value, buffer);
}
}
fn visit_value(&self, value: &Value, buffer: &mut Self::Output) {
*buffer += 1;
match value {
&Value::Null => self.visit_null(buffer),
&Value::String(ref value) => {
self.visit_string(value, buffer);
}
&Value::Int32(value) => {
self.visit_int32(value, buffer);
}
&Value::Int64(value) => {
self.visit_int64(value, buffer);
}
&Value::Float32(value) => {
self.visit_float32(value, buffer);
}
&Value::Float64(value) => {
self.visit_float64(value, buffer);
}
&Value::Boolean(value) => {
self.visit_boolean(value, buffer);
}
&Value::Bytes(ref value) => {
self.visit_bytes(value, buffer);
}
&Value::Map(ref value) => {
self.visit_map(value, buffer);
}
&Value::List(ref value) => {
self.visit_list(value, buffer);
}
&Value::Uuid(ref value) => {
self.visit_uuid(value, buffer);
}
}
}
fn visit_bytes(&self, value: &Vec<u8>, buffer: &mut Self::Output) {
*buffer += 4 + value.len()
}
fn visit_int32(&self, _value: i32, buffer: &mut Self::Output) {
*buffer += 4;
}
fn visit_int64(&self, _value: i64, buffer: &mut Self::Output) {
*buffer += 8;
}
fn visit_float32(&self, _value: f32, buffer: &mut Self::Output) {
*buffer += 4;
}
fn visit_float64(&self, _value: f64, buffer: &mut Self::Output) {
*buffer += 8;
}
fn visit_boolean(&self, _value: bool, buffer: &mut Self::Output) {
*buffer += 1;
}
fn visit_string(&self, value: &String, buffer: &mut Self::Output) {
*buffer += 4 + value.len()
}
fn visit_uuid(&self, _value: &Uuid, buffer: &mut Self::Output) {
*buffer += 16
}
fn visit_null(&self, _buffer: &mut Self::Output) {}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn it_works() {
let message = Message::new()
.with_body("Hello")
.with_property(
"vehicles",
List::new().append("Aprilia").append("Infiniti").build(),
)
.with_property(
"address",
Map::new()
.insert("street", "400 Beale ST")
.insert("city", "San Francisco")
.insert("state", "CA")
.insert("zip", "94105")
.build(),
)
.build();
println!("message = {:?}", message);
let message = Message::new()
.with_body("Wicked!!")
.with_property("Hello", "World!")
.with_property("age", 42)
.with_property("weight", 175.5)
.with_property("address", "400 Beale ST APT 1403")
.with_property("city", "San Francisco")
.with_property("state", "CA")
.with_property("zip", "94105")
.with_property("married", false)
.build();
println!("message = {:?}", &message);
}
#[test]
fn get_property_value() {
let m = Message::with_property("msg", "World!").build();
assert_eq!(m.properties().get("msg"), Some(&Value::from("World!")));
assert_eq!(m.properties().get("missing"), None);
if let Some(&Value::String(ref value)) = m.properties().get("msg") {
println!("value = {:?}", value);
}
assert_eq!(m.body(), None);
}
#[test]
fn map_as_body() {
let m = Message::with_body(
Map::new()
.insert("fname", "Jimmie")
.insert("lname", "Fulton")
.build(),
).build();
println!("message = {:?}", &m);
match m.body() {
Some(&Value::Map(ref map)) => {
assert_eq!(map.get("fname"), Some(&Value::from("Jimmie")));
assert_eq!(map.get("lname"), Some(&Value::from("Fulton")));
}
_ => panic!("Map expected!"),
}
}
#[test]
fn list_index() {
let l = List::new()
.append("one")
.append("two")
.append("three")
.build();
assert_eq!(l[0], Value::from("one"));
}
#[test]
fn map_iterator() {
let map = Map::new()
.insert("key1", "value1")
.insert("key2", "value2")
.build();
let mut counter = 0;
for (_key, _value) in map.iter() {
counter += 1;
}
assert_eq!(counter, 2);
eprintln!("message = {:?}", map);
}
#[test]
pub fn examples() {}
#[test]
fn binary_size_calculator() {
let calculator = BinaryFormatSizeCalculator {};
let message = Message::with_body("Hello").build();
let mut size = 0;<|fim▁hole|> }
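    // A worked check of the costs encoded by BinaryFormatSizeCalculator
    // (an assumption spelled out here, not upstream documentation): the
    // 4-byte message header + 1 value tag + 4-byte string length prefix
    // + 5 bytes for "Hello" gives 4 + 1 + 4 + 5 = 14, the size asserted
    // by this test.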
#[test]
fn binary_size_calculator_2() {
let calculator = BinaryFormatSizeCalculator {};
let message = example();
let mut size = 0;
calculator.visit_message(&message, &mut size);
eprintln!("size = {:?}", size);
}
fn example() -> Message {
Message::new()
.with_property("fname", "Jimmie")
.with_property("lname", "Fulton")
.with_property("age", 42)
.with_property("temp", 98.6)
.with_property(
"vehicles",
List::new().append("Aprilia").append("Infiniti").build(),
)
.with_property(
"siblings",
Map::new()
.insert("brothers", List::new().append("Jason").build())
.insert(
"sisters",
List::new().append("Laura").append("Sariah").build(),
)
.build(),
)
.build()
}
}<|fim▁end|> | calculator.visit_message(&message, &mut size);
assert_eq!(size, 14); |
<|file_name|>List Comprehensions.py<|end_file_name|><|fim▁begin|>if __name__ == '__main__':
x = int(raw_input())
y = int(raw_input())
z = int(raw_input())<|fim▁hole|> n = int(raw_input())
print ( [ [i,j,k] for i in range(x+1) for j in range(y+1) for k in range(z+1) if i+j+k != n] )<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Initialization of All Modules of UnivMathSys
# Copyright (C) 2016 Zhang Chang-kai #
# Contact via: [email protected] #<|fim▁hole|>from Foundation import *
from Elementary import *
from Structure import *
# End of Initialization of All Modules<|fim▁end|> | # General Public License version 3.0 #
'''Initialization of All Modules'''
|
<|file_name|>input.py<|end_file_name|><|fim▁begin|>import sys
def inputText():
input = sys.stdin.readline()
return input.strip()
def inputChoices(list, backcmd = "b", backtext = "back"):
repeat = True
while repeat:
repeat = False<|fim▁hole|> print count, "-", item
count += 1
print backcmd, "-", backtext
input = inputText()
if input == backcmd:
return None
action = int(input)
if action >= len(list):
repeat = True
return action<|fim▁end|> | count = 0
for item in list: |
<|file_name|>test_handlers.py<|end_file_name|><|fim▁begin|># Copyright 2009-2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__metaclass__ = type
from lp.services.mail.handlers import MailHandlers
from lp.testing import TestCase
class TestMailHandlers(TestCase):
"""Tests for the `MailHandlers` class."""
def test_get(self):
# MailHandlers.get() should return the registered handler for the
# given domain.
handlers = MailHandlers()
self.assertIsNot(None, handlers.get("bugs.launchpad.net"))
self.assertIs(None, handlers.get("no.such.domain"))
<|fim▁hole|> handlers.add("some.domain", handler)
self.assertIs(handler, handlers.get("some.domain"))
self.assertIs(handler, handlers.get("SOME.DOMAIN"))
self.assertIs(handler, handlers.get("Some.Domain"))
def test_add_for_new_domain(self):
# MailHandlers.add() registers a handler for the given domain.
handlers = MailHandlers()
self.assertIs(None, handlers.get("some.domain"))
handler = object()
handlers.add("some.domain", handler)
self.assertIs(handler, handlers.get("some.domain"))
def test_add_for_existing_domain(self):
# When adding a new handler for an already configured domain, the
# existing handler is overwritten.
handlers = MailHandlers()
handler1 = object()
handlers.add("some.domain", handler1)
handler2 = object()
handlers.add("some.domain", handler2)
self.assertIs(handler2, handlers.get("some.domain"))<|fim▁end|> | def test_get_is_case_insensitive(self):
# The domain passed to get() is treated case-insensitively.
handlers = MailHandlers()
handler = object() |
<|file_name|>install_local_soup.py<|end_file_name|><|fim▁begin|>import subprocess
# Hey this is embarrassing I'll remove it soon I promise.<|fim▁hole|>
def apply(params, state):
subprocess.check_output(["cp",
state['sqlite_file'],
"/srv/git/datacommons_manitoba/production.sqlite3"])
ok = False
for i in range(0, 4):
try:
subprocess.check_output(["/srv/git/datacommons_manitoba/rebuild.sh"])
ok = True
break
except subprocess.CalledProcessError:
pass
if not ok:
raise subprocess.CalledProcessError(1, "/srv/git/datacommons_manitoba/rebuild.sh")
return True<|fim▁end|> | # I mean, maybe. Or I'll leave it malingering for years.
|
<|file_name|>entry_watcher_service_factory.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/extensions/api/file_system/entry_watcher_service_factory.h"
#include "chrome/browser/extensions/api/file_system/entry_watcher_service.h"
#include "chrome/browser/profiles/profile.h"
#include "components/keyed_service/content/browser_context_dependency_manager.h"
#include "content/public/browser/browser_context.h"
namespace extensions {
EntryWatcherServiceFactory* EntryWatcherServiceFactory::GetInstance() {
return Singleton<EntryWatcherServiceFactory>::get();
}
EntryWatcherServiceFactory::EntryWatcherServiceFactory()
: BrowserContextKeyedServiceFactory(
"EntryWatcherService",
BrowserContextDependencyManager::GetInstance()) {
}
EntryWatcherServiceFactory::~EntryWatcherServiceFactory() {
}
KeyedService* EntryWatcherServiceFactory::BuildServiceInstanceFor(
content::BrowserContext* context) const {
return new EntryWatcherService(Profile::FromBrowserContext(context));
}
bool EntryWatcherServiceFactory::ServiceIsCreatedWithBrowserContext() const {
// Required to restore persistent watchers as soon as the profile is loaded.
return true;<|fim▁hole|>}
} // namespace extensions<|fim▁end|> | |
<|file_name|>recurrent.py<|end_file_name|><|fim▁begin|># time series prediction of stock data
# using recurrent neural network with LSTM layer
from pybrain.datasets import SequentialDataSet
from itertools import cycle
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure.modules import LSTMLayer
from pybrain.supervised import RPropMinusTrainer
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.tools.customxml.networkreader import NetworkReader
import matplotlib.pyplot as plt
import os.path
import sys
#sys.path.insert(0, '../../smap_nepse')
from smap_nepse.prediction import prepareInput as pi
from sklearn import preprocessing
import numpy as np
__author__ = "Semanta Bhandari"
__copyright__ = ""
__credits__ = ["Sameer Rai","Sumit Shrestha","Sankalpa Timilsina"]
__license__ = ""
__version__ = "0.1"
__email__ = "[email protected]"
<|fim▁hole|> # column name to match with indicator calculating modules
# TODO: resolve issue with column name
df.columns = [
'Transactions',
'Traded_Shares',
'Traded_Amount',
'High',
'Low',
'Close']
data = df.Close.values
# TODO: write min_max normalization
# normalization
# cp = dataframe.pop(' Close Price')
# x = cp.values
temp = np.array(data).reshape(len(data),1)
min_max_scaler = preprocessing.MinMaxScaler()
data = min_max_scaler.fit_transform(temp)
# dataframe[' Close Price'] = x_scaled
# prepare sequential dataset for pyBrain rnn network
ds = SequentialDataSet(1, 1)
for sample, next_sample in zip(data, cycle(data[1:])):
ds.addSample(sample, next_sample)
# build rnn network with LSTM layer
# if saved network is available
if os.path.isfile('network.xml'):
net = NetworkReader.readFrom('network.xml')
else:
net = buildNetwork(1, 20, 1,
hiddenclass=LSTMLayer, outputbias=False, recurrent=True)
# build trainer
trainer = RPropMinusTrainer(net, dataset=ds, verbose = True)
train_errors = [] # save errors for plotting later
EPOCHS_PER_CYCLE = 5
CYCLES = 5
EPOCHS = EPOCHS_PER_CYCLE * CYCLES
for i in range(CYCLES):
trainer.trainEpochs(EPOCHS_PER_CYCLE)
train_errors.append(trainer.testOnData())
epoch = (i+1) * EPOCHS_PER_CYCLE
print("\r epoch {}/{}".format(epoch, EPOCHS), end="")
sys.stdout.flush()
# save the network
NetworkWriter.writeToFile(net,'network.xml')
print()
print("final error =", train_errors[-1])
predicted = []
for dat in data:
predicted.append(net.activate(dat)[0])
# data = min_max_scaler.inverse_transform(data)
# predicted = min_max_scaler.inverse_transform(predicted)
predicted_array = min_max_scaler.inverse_transform(np.array(predicted).reshape(-1,1))
print(predicted_array[-1])
plt.figure()
legend_actual, = plt.plot(range(0, len(data)),temp, label = 'actual', linestyle = '--', linewidth = 2, c = 'blue')
legend_predicted, = plt.plot(range(0, len(data)), predicted_array, label = 'predicted', linewidth = 1.5, c='red')
plt.legend(handles=[legend_actual, legend_predicted])
plt.savefig('error.png')
plt.show()
# plt.plot(range(0,len(train_errors)),train_errors)
# plt.xlabel('epoch')
# plt.ylabel('error')
# plt.show()
# for sample, target in ds.getSequenceIterator(0):
# print(" sample = %4.2f" % sample)
# print("predicted next sample = %4.2f" % net.activate(sample))
# print(" actual next sample = %4.2f" % target)
# print()
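# A minimal sketch of the MinMaxScaler round-trip used above, with
# illustrative values rather than NEPSE prices:
#   scaler = preprocessing.MinMaxScaler()
#   scaled = scaler.fit_transform(np.array([10.0, 20.0, 30.0]).reshape(-1, 1))
#   scaler.inverse_transform(scaled)  # recovers [[10.], [20.], [30.]]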
rnn()<|fim▁end|> | def rnn():
# load dataframe from csv file
df = pi.load_data_frame('../../data/NABIL.csv') |
<|file_name|>DomapInfo_dlg.cpp<|end_file_name|><|fim▁begin|>// WinProm Copyright 2015 Edward Earl
// All rights reserved.
//
// This software is distributed under a license that is described in
// the LICENSE file that accompanies it.
//
// DomapInfo_dlg.cpp : implementation file
//
#include "stdafx.h"
#include "winprom.h"
#include "DomapInfo_dlg.h"
#ifdef _DEBUG
#define new DEBUG_NEW
#undef THIS_FILE
static char THIS_FILE[] = __FILE__;
#endif
/////////////////////////////////////////////////////////////////////////////
// CDomapInfo_dlg dialog
CDomapInfo_dlg::CDomapInfo_dlg(const Area_info& a, const Area_info& d,
const CString& n, bool m)
: CMapInfo_dlg(a,d,n,m,CDomapInfo_dlg::IDD)<|fim▁hole|> m_ndom_peaks = 0;
m_ndom_total = 0;
m_ndom_area = 0;
//}}AFX_DATA_INIT
}
void CDomapInfo_dlg::DoDataExchange(CDataExchange* pDX)
{
CMapInfo_dlg::DoDataExchange(pDX);
//{{AFX_DATA_MAP(CDomapInfo_dlg)
DDX_Text(pDX, IDC_NDOM_0PEAK, m_ndom_0peak);
DDX_Text(pDX, IDC_NDOM_1PEAK, m_ndom_1peak);
DDX_Text(pDX, IDC_NDOM_PEAKS, m_ndom_peaks);
DDX_Text(pDX, IDC_NDOM, m_ndom_total);
DDX_Text(pDX, IDC_NDOM_AREA, m_ndom_area);
//}}AFX_DATA_MAP
}
BEGIN_MESSAGE_MAP(CDomapInfo_dlg, CMapInfo_dlg)
//{{AFX_MSG_MAP(CDomapInfo_dlg)
// NOTE: the ClassWizard will add message map macros here
//}}AFX_MSG_MAP
END_MESSAGE_MAP()
/////////////////////////////////////////////////////////////////////////////
// CDomapInfo_dlg message handlers<|fim▁end|> | {
//{{AFX_DATA_INIT(CDomapInfo_dlg)
m_ndom_0peak = 0;
m_ndom_1peak = 0; |
<|file_name|>htmltextareaelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding;
use dom::bindings::codegen::Bindings::HTMLTextAreaElementBinding::HTMLTextAreaElementMethods;
use dom::bindings::codegen::InheritTypes::{HTMLElementCast, NodeCast};
use dom::bindings::codegen::InheritTypes::{HTMLTextAreaElementDerived, HTMLFieldSetElementDerived};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector};
use dom::document::Document;
use dom::element::{AttributeHandlers, HTMLTextAreaElementTypeId};
use dom::eventtarget::{EventTarget, NodeTargetTypeId};
use dom::htmlelement::HTMLElement;
use dom::node::{DisabledStateHelpers, Node, NodeHelpers, ElementNodeTypeId};
use dom::virtualmethods::VirtualMethods;
use servo_util::str::DOMString;
use string_cache::Atom;
#[dom_struct]
pub struct HTMLTextAreaElement {
htmlelement: HTMLElement,
}
impl HTMLTextAreaElementDerived for EventTarget {
fn is_htmltextareaelement(&self) -> bool {
*self.type_id() == NodeTargetTypeId(ElementNodeTypeId(HTMLTextAreaElementTypeId))
}
}<|fim▁hole|>
impl HTMLTextAreaElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLTextAreaElement {
HTMLTextAreaElement {
htmlelement: HTMLElement::new_inherited(HTMLTextAreaElementTypeId, localName, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLTextAreaElement> {
let element = HTMLTextAreaElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLTextAreaElementBinding::Wrap)
}
}
impl<'a> HTMLTextAreaElementMethods for JSRef<'a, HTMLTextAreaElement> {
// http://www.whatwg.org/html/#dom-fe-disabled
make_bool_getter!(Disabled)
// http://www.whatwg.org/html/#dom-fe-disabled
make_bool_setter!(SetDisabled, "disabled")
// https://html.spec.whatwg.org/multipage/forms.html#dom-textarea-type
fn Type(self) -> DOMString {
"textarea".to_string()
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLTextAreaElement> {
fn super_type<'a>(&'a self) -> Option<&'a VirtualMethods> {
let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
Some(htmlelement as &VirtualMethods)
}
fn after_set_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.after_set_attr(name, value.clone()),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"disabled" => {
node.set_disabled_state(true);
node.set_enabled_state(false);
},
_ => ()
}
}
fn before_remove_attr(&self, name: &Atom, value: DOMString) {
match self.super_type() {
Some(ref s) => s.before_remove_attr(name, value),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
match name.as_slice() {
"disabled" => {
node.set_disabled_state(false);
node.set_enabled_state(true);
node.check_ancestors_disabled_state_for_form_control();
},
_ => ()
}
}
fn bind_to_tree(&self, tree_in_doc: bool) {
match self.super_type() {
Some(ref s) => s.bind_to_tree(tree_in_doc),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
node.check_ancestors_disabled_state_for_form_control();
}
fn unbind_from_tree(&self, tree_in_doc: bool) {
match self.super_type() {
Some(ref s) => s.unbind_from_tree(tree_in_doc),
_ => (),
}
let node: JSRef<Node> = NodeCast::from_ref(*self);
if node.ancestors().any(|ancestor| ancestor.is_htmlfieldsetelement()) {
node.check_ancestors_disabled_state_for_form_control();
} else {
node.check_disabled_attribute();
}
}
}
impl Reflectable for HTMLTextAreaElement {
fn reflector<'a>(&'a self) -> &'a Reflector {
self.htmlelement.reflector()
}
}<|fim▁end|> | |
<|file_name|>user-webuploader.js<|end_file_name|><|fim▁begin|>// File upload
jQuery(function() {<|fim▁hole|> $btn = $('#ctlBtn'),
state = 'pending',
// Optimized for retina; on retina displays this value is 2
ratio = window.devicePixelRatio || 1,
// Thumbnail size
thumbnailWidth = 100 * ratio,
thumbnailHeight = 100 * ratio,
uploader;
uploader = WebUploader.create({
// Single-file upload
multiple: false,
// Do not compress images
resize: false,
// Path to the swf file
swf: '/webuploader/Uploader.swf',
// Server endpoint that receives the uploaded files.
server: '/upload',
// Button used to pick files. Optional.
// Created internally at runtime; it may be an input element or flash.
pick: '#picker',
// Only allow image files to be selected.
accept: {
title: 'Images',
extensions: 'gif,jpg,jpeg,bmp,png',
mimeTypes: 'image/*'
}
});
// When a file is added to the queue
uploader.on( 'fileQueued', function( file ) {
var $li = $(
'<div id="' + file.id + '" class="file-item thumbnail">' +
'<img>' +
'<div class="info">' + file.name + '</div>' +
'</div>'
),
$img = $li.find('img');
$list.append( $li );
// Create a thumbnail
uploader.makeThumb( file, function( error, src ) {
if ( error ) {
$img.replaceWith('<span>不能预览</span>');
return;
}
$img.attr( 'src', src );
}, thumbnailWidth, thumbnailHeight );
});
// Create a progress bar that shows upload progress in real time.
uploader.on( 'uploadProgress', function( file, percentage ) {
var $li = $( '#'+file.id ),
$percent = $li.find('.progress .progress-bar');
// Avoid creating it more than once
if ( !$percent.length ) {
$percent = $('<div class="progress progress-striped active">' +
'<div class="progress-bar" role="progressbar" style="width: 0%">' +
'</div>' +
'</div>').appendTo( $li ).find('.progress-bar');
}
$li.find('p.state').text('上传中');
$percent.css( 'width', percentage * 100 + '%' );
});
uploader.on( 'uploadSuccess', function( file , data) {
$( '#'+file.id ).find('p.state').text('已上传');
$('#user_avatar').val(data.imageurl);
$('#avatar').attr("src", data.imageurl);
});
uploader.on( 'uploadError', function( file ) {
$( '#'+file.id ).find('p.state').text('上传出错');
});
uploader.on( 'uploadComplete', function( file ) {
$( '#'+file.id ).find('.progress').fadeOut();
});
uploader.on( 'all', function( type ) {
if ( type === 'startUpload' ) {
state = 'uploading';
} else if ( type === 'stopUpload' ) {
state = 'paused';
} else if ( type === 'uploadFinished' ) {
state = 'done';
}
if ( state === 'uploading' ) {
$btn.text('暂停上传');
} else {
$btn.text('开始上传');
}
});
$btn.on( 'click', function() {
if ( state === 'uploading' ) {
uploader.stop();
} else {
uploader.upload();
}
});
});<|fim▁end|> | var $ = jQuery,
$list = $('#thelist'), |
<|file_name|>routes.config.js<|end_file_name|><|fim▁begin|>;(function() {
angular.module('app.core')
.config(config);
/* @ngInject */
function config($stateProvider, $locationProvider, $urlRouterProvider) {
$stateProvider
/**
* @name landing
* @type {route}
* @description First page for incoming users, and for default routing
* for all failed routes.
*/
.state('landing', {
url: '/',
templateUrl: '/html/modules/landing/landing.html',
controller: 'LandingController',
controllerAs: 'vm'
})
/**
* @name home
* @type {route}
* @description User landing page, the main display.
*/
.state('home', {
url: '',
abstract: true,
views: {
'': {
templateUrl: 'html/modules/home/template.html'
},
'current-routes@home': {
templateUrl: 'html/modules/layout/current-routes.html',
controller: 'CurrentRoutesController',
controllerAs: 'vm'
},<|fim▁hole|> controllerAs: 'vm'
}
}
})
.state('home.home', {
url: '/home',
authenticate: true,
views: {
'container@home': {
templateUrl: 'html/modules/home/welcome.html'
}
}
})
.state('home.new-route', {
url: '/new-route/:route',
authenticate: true,
views: {
'container@home': {
templateUrl: 'html/modules/routes/new-route.html',
controller: 'NewRouteController',
controllerAs: 'vm'
}
}
})
/**
* @name editRoute
* @type {route}
* @description View for editing a specific route. Provides options
* to edit or delete the route.
*/
.state('home.edit-route', {
url: '/routes/{route:.*}',
authenticate: true,
views: {
'container@home': {
templateUrl: '/html/modules/routes/edit-routes.html',
controller: 'EditRoutesController',
controllerAs: 'vm',
}
}
})
/**
* @name Docs
* @type {route}
* @description View for the project documentation
*
*/
.state('docs',{
url:'',
abstract: true,
views: {
'': {
templateUrl: '/html/modules/docs/docs.html'
},
'doc-list@docs': {
templateUrl: '/html/modules/docs/docs-list.html',
controller: 'DocsController',
controllerAs: 'vm'
}
}
})
.state('docs.docs', {
url: '/docs',
views: {
'container@docs': {
templateUrl: '/html/modules/docs/current-doc.html'
}
}
})
.state('docs.current-doc', {
url: '/docs/:doc',
views: {
'container@docs': {
templateUrl: function($stateParams) {
return '/html/modules/docs/pages/' + $stateParams.doc + '.html';
}
}
}
})
.state('home.analytics', {
url: '/analytics/{route:.*}',
authenticate: true,
views: {
'container@home': {
templateUrl: '/html/modules/analytics/analytics.html',
controller: 'AnalyticsController',
controllerAs: 'vm'
}
}
});
// default uncaught routes to landing page
$urlRouterProvider.otherwise('/');
// enable HTML5 mode
$locationProvider.html5Mode(true);
}
}).call(this);<|fim▁end|> |
'add-routes@home': {
templateUrl: 'html/modules/layout/add-routes.html',
controller: 'AddRoutesController', |
<|file_name|>gui.py<|end_file_name|><|fim▁begin|>"""Graphical user interface."""
import collections
import ctypes
import sdl2
import hienoi.renderer
from hienoi._common import GLProfile, GraphicsAPI, ParticleDisplay, UserData
from hienoi._vectors import Vector2i, Vector2f, Vector4f
class NavigationAction(object):
"""Enumerator for the current nagivation action.
Attributes
----------
NONE
MOVE
ZOOM
"""
NONE = 0
MOVE = 1
ZOOM = 2
_Handles = collections.namedtuple(
'_Handles', (
'window',
'renderer',
))
_GLHandles = collections.namedtuple(
'_GLHandles', (
'context',
))
_RGBMasks = collections.namedtuple(
'_RGBMasks', (
'red',
'green',
'blue',
))
_FIT_VIEW_REL_PADDING = 2.0
if sdl2.SDL_BYTEORDER == sdl2.SDL_LIL_ENDIAN:
_RGB_MASKS = _RGBMasks(red=0x000000FF, green=0x0000FF00, blue=0x00FF0000)
else:
_RGB_MASKS = _RGBMasks(red=0x00FF0000, green=0x0000FF00, blue=0x000000FF)
class GUI(object):
"""GUI.
Parameters
----------
window_title : str
Title for the window.
window_position : hienoi.Vector2i
Initial window position.
window_size : hienoi.Vector2i
Initial window size.
window_flags : int
SDL2 window flags.
view_aperture_x : float
Initial length in world units to be shown on the X axis.
view_zoom_range : hienoi.Vector2f
Zoom value range for the view.
mouse_wheel_step : float
Coefficient value for each mouse wheel step.
grid_density : float
See :attr:`GUI.grid_density`.
grid_adaptive_threshold : float
See :attr:`GUI.grid_adaptive_threshold`.
show_grid : bool
See :attr:`GUI.show_grid`.
background_color : hienoi.Vector4f
See :attr:`GUI.background_color`.
grid_color : hienoi.Vector4f
See :attr:`GUI.grid_color`.
grid_origin_color : hienoi.Vector4f
See :attr:`GUI.grid_origin_color`.
particle_display : int
See :attr:`GUI.particle_display`.
point_size : int
See :attr:`GUI.point_size`.
edge_feather : float
See :attr:`GUI.edge_feather`.
stroke_width : float
See :attr:`GUI.stroke_width`.
initialize_callback : function
Callback function to initialize any GUI state.
It takes a single argument ``gui``, an instance of this class.
on_event_callback : function
Callback function ran during the event polling.
It takes 3 arguments: ``gui``, an instance of this class,
``data``, some data to pass back and forth between the caller and this
callback function, and ``event``, the event fired.
renderer : dict
Keyword arguments for the configuration of the renderer. See the
parameters for the class :class:`hienoi.renderer.Renderer`.
Attributes
----------
view_position : hienoi.Vector2f
Position of the view (camera).
view_zoom : float
Current zoom value for the view.
grid_density : float
Density of the grid.
A density of 10.0 means that there are around 10 grid divisions
displayed on the X axis. A grid division unit represents a fixed length
in world units, meaning that the actual grid density changes depending
on the view's zoom.
show_grid : bool
True to show the grid.
background_color : hienoi.Vector4f
Color for the background.
grid_color : hienoi.Vector4f
Color for the grid.
grid_origin_color : hienoi.Vector4f
Color for the origin axis of the grid.
particle_display : int
Display mode for the particles. Available values are enumerated in the
:class:`~hienoi.ParticleDisplay` class.
point_size : int
Size of the particles in pixels when the display mode is set to
:attr:`~hienoi.ParticleDisplay.POINT`.
edge_feather : float
Feather fall-off in pixels to apply to objects drawn with displays such
as :attr:`~hienoi.ParticleDisplay.CIRCLE` or
:attr:`~hienoi.ParticleDisplay.DISC`.
stroke_width : float
Width of the stroke in pixels to apply to objects drawn with displays
such as :attr:`~hienoi.ParticleDisplay.CIRCLE`.
quit : bool
``True`` to signal to the application that it should quit.
has_view_changed : bool
``True`` if the view state has just been changed following an event. It
is reset to ``False`` whenever :meth:`poll_events` is called.
user_data : object
Attribute reserved for any user data.
"""
def __init__(self,
window_title='hienoi',
window_position=Vector2i(sdl2.SDL_WINDOWPOS_CENTERED,
sdl2.SDL_WINDOWPOS_CENTERED),
window_size=Vector2i(800, 600),
window_flags=sdl2.SDL_WINDOW_RESIZABLE,
view_aperture_x=100.0,
view_zoom_range=Vector2f(1e-6, 1e+6),
mouse_wheel_step=0.01,
grid_density=10.0,
grid_adaptive_threshold=3.0,
show_grid=True,
background_color=Vector4f(0.15, 0.15, 0.15, 1.0),
grid_color=Vector4f(0.85, 0.85, 0.85, 0.05),
grid_origin_color=Vector4f(0.85, 0.25, 0.25, 0.25),
particle_display=ParticleDisplay.DISC,
point_size=4,
edge_feather=2.0,
stroke_width=0.0,
initialize_callback=None,
on_event_callback=None,
renderer=None):
renderer = {} if renderer is None else renderer
if sdl2.SDL_Init(sdl2.SDL_INIT_VIDEO) != 0:
raise RuntimeError(sdl2.SDL_GetError().decode())
renderer_info = hienoi.renderer.get_info()
if renderer_info.api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_MAJOR_VERSION,
renderer_info.major_version)
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_MINOR_VERSION,<|fim▁hole|>
self._handles = _create_handles(window_title, window_position,
window_size, window_flags,
renderer_info)
self._renderer = hienoi.renderer.Renderer(**renderer)
self._initial_view_aperture_x = view_aperture_x
self._view_zoom_range = view_zoom_range
self._mouse_wheel_step = mouse_wheel_step
self._grid_adaptive_threshold = grid_adaptive_threshold
self._on_event_callback = on_event_callback
self._listen_for_navigation = False
self._is_view_manipulated = False
self.view_position = Vector2f(0.0, 0.0)
self._view_zoom = 1.0
self.grid_density = grid_density
self.show_grid = show_grid
self.background_color = background_color
self.grid_color = grid_color
self.grid_origin_color = grid_origin_color
self.particle_display = particle_display
self.point_size = point_size
self.edge_feather = edge_feather
self.stroke_width = stroke_width
self._navigation_action = NavigationAction.NONE
self.quit = False
self.user_data = UserData()
if initialize_callback:
initialize_callback(self)
@property
def view_zoom(self):
return self._view_zoom
@view_zoom.setter
def view_zoom(self, value):
self._view_zoom = max(self._view_zoom_range[0],
min(self._view_zoom_range[1], value))
@property
def navigation_action(self):
return self._navigation_action
@property
def has_view_changed(self):
return self._has_view_changed
def poll_events(self, scene_state, data=None):
"""Process each event in the queue.
Parameters
----------
scene_state : hienoi.renderer.SceneState
Scene state.
data : object
Data to pass back and forth between the caller and the function set
for the 'on event' callback.
"""
self._has_view_changed = False
event = sdl2.SDL_Event()
while sdl2.SDL_PollEvent(ctypes.byref(event)) != 0:
event_type = event.type
if event_type == sdl2.SDL_QUIT:
self._on_quit_event(event.quit)
elif event_type == sdl2.SDL_WINDOWEVENT:
self._on_window_event(event.window)
elif event_type == sdl2.SDL_KEYDOWN:
self._on_key_down_event(event.key, scene_state)
elif event_type == sdl2.SDL_KEYUP:
self._on_key_up_event(event.key)
elif event_type == sdl2.SDL_MOUSEBUTTONDOWN:
self._on_mouse_button_down_event(event.button)
elif event_type == sdl2.SDL_MOUSEBUTTONUP:
self._on_mouse_button_up_event(event.button)
elif event_type == sdl2.SDL_MOUSEWHEEL:
self._on_mouse_wheel_event(event.wheel)
elif event_type == sdl2.SDL_MOUSEMOTION:
self._on_mouse_motion_event(event.motion)
if self._on_event_callback:
self._on_event_callback(self, data, event)
if self.quit:
break
def render(self, scene_state):
"""Render a new frame.
Parameters
----------
scene_state : hienoi.renderer.SceneState
Scene state.
"""
renderer_state = hienoi.renderer.State(
window_size=self.get_window_size(),
view_position=self.view_position,
view_zoom=self._view_zoom,
origin=self.world_to_screen(Vector2f(0.0, 0.0)),
initial_view_aperture_x=self._initial_view_aperture_x,
view_aperture=self.get_view_aperture(),
grid_density=self.grid_density,
grid_adaptive_threshold=self._grid_adaptive_threshold,
background_color=self.background_color,
grid_color=self.grid_color,
grid_origin_color=self.grid_origin_color,
show_grid=self.show_grid,
particle_display=self.particle_display,
point_size=self.point_size,
edge_feather=self.edge_feather,
stroke_width=self.stroke_width,
)
self._renderer.render(renderer_state, scene_state)
if hienoi.renderer.get_info().api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_SwapWindow(self._handles.window)
def terminate(self):
"""Cleanup the GUI resources."""
self._renderer.cleanup()
if hienoi.renderer.get_info().api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_DeleteContext(self._handles.renderer.context)
sdl2.SDL_DestroyWindow(self._handles.window)
sdl2.SDL_Quit()
def get_window_size(self):
"""Retrieve the window size.
Returns
-------
hienoi.Vector2i
The window size.
"""
window_size_x = ctypes.c_int()
window_size_y = ctypes.c_int()
sdl2.SDL_GetWindowSize(self._handles.window,
ctypes.byref(window_size_x),
ctypes.byref(window_size_y))
return Vector2i(window_size_x.value, window_size_y.value)
def get_view_aperture(self):
"""Retrieve the view aperture.
It represents the area in world units covered by the view.
Returns
-------
hienoi.Vector2f
The view aperture.
"""
window_size = self.get_window_size()
aperture_x = self._initial_view_aperture_x / self._view_zoom
return Vector2f(aperture_x, aperture_x * window_size.y / window_size.x)
def get_mouse_position(self):
"""Retrieve the mouse position in screen space.
Returns
-------
hienoi.Vector2i
The mouse position.
"""
position_x = ctypes.c_int()
position_y = ctypes.c_int()
sdl2.SDL_GetMouseState(ctypes.byref(position_x),
ctypes.byref(position_y))
return Vector2i(position_x.value, position_y.value)
def get_screen_to_world_ratio(self):
"""Retrieve the ratio to convert a sreen unit into a world unit.
Returns
-------
float
The screen to world ratio.
"""
window_size = self.get_window_size()
aperture_x = self._initial_view_aperture_x / self._view_zoom
return aperture_x / window_size.x
def screen_to_world(self, point):
"""Convert a point from screen space to world space coordinates.
Parameters
----------
point : hienoi.Vector2i
Point in screen space coordinates.
Returns
-------
hienoi.Vector2f
The point in world space coordinates.
"""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
return Vector2f(
(self.view_position.x
+ (point.x - window_size.x / 2.0)
* view_aperture.x / window_size.x),
(self.view_position.y
- (point.y - window_size.y / 2.0)
* view_aperture.y / window_size.y))
def world_to_screen(self, point):
"""Convert a point from world space to screen space coordinates.
Parameters
----------
point : hienoi.Vector2f
Point in world space coordinates.
Returns
-------
hienoi.Vector2i
The point in screen space coordinates.
"""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
return Vector2i(
int(round(
(window_size.x / view_aperture.x)
* (-self.view_position.x + point.x + view_aperture.x / 2.0))),
int(round(
(window_size.y / view_aperture.y)
* (self.view_position.y - point.y + view_aperture.y / 2.0))))
def write_snapshot(self, filename):
"""Take a snapshot of the view and write it as a BMP image.
Parameters
----------
filename : str
Destination filename.
"""
pixel_size = 4
pixels = self._renderer.read_pixels()
surface = sdl2.SDL_CreateRGBSurfaceFrom(
pixels.data, pixels.width, pixels.height,
8 * pixel_size, pixels.width * pixel_size,
_RGB_MASKS.red, _RGB_MASKS.green, _RGB_MASKS.blue, 0)
sdl2.SDL_SaveBMP(surface, filename)
sdl2.SDL_FreeSurface(surface)
def _reset_view(self):
"""Reset the view position and zoom."""
self.view_position = Vector2f(0.0, 0.0)
self.view_zoom = 1.0
self._has_view_changed = True
def _fit_view(self, scene_state):
"""Fit the view to the scene."""
if len(scene_state.particles) > 1:
window_size = self.get_window_size()
initial_size = Vector2f(
self._initial_view_aperture_x,
self._initial_view_aperture_x * window_size.y / window_size.x)
lower_bounds = scene_state.lower_bounds
upper_bounds = scene_state.upper_bounds
required_size = (upper_bounds - lower_bounds).iscale(
_FIT_VIEW_REL_PADDING)
required_size = Vector2f(
max(required_size.x,
initial_size.x * self._view_zoom_range[0]),
max(required_size.y,
initial_size.y * self._view_zoom_range[0]))
self.view_position = (lower_bounds + upper_bounds).iscale(0.5)
self.view_zoom = min(initial_size.x / required_size.x,
initial_size.y / required_size.y)
elif len(scene_state.particles) == 1:
self.view_position = Vector2f(
*scene_state.particles['position'][0])
self.view_zoom = 1.0
else:
self._reset_view()
self._has_view_changed = True
def _on_quit_event(self, event):
"""Event 'on quit'."""
self.quit = True
def _on_window_event(self, event):
"""Event 'on window'."""
if event.event == sdl2.SDL_WINDOWEVENT_SIZE_CHANGED:
self._renderer.resize(event.data1, event.data2)
def _on_key_down_event(self, event, scene_state):
"""Event 'on key down'."""
code = event.keysym.sym
modifier = event.keysym.mod
if modifier == sdl2.KMOD_NONE:
if code == sdl2.SDLK_SPACE:
self._listen_for_navigation = True
elif code == sdl2.SDLK_d:
self.particle_display = (
(self.particle_display + 1) % (ParticleDisplay._LAST + 1))
elif code == sdl2.SDLK_f:
self._fit_view(scene_state)
elif code == sdl2.SDLK_g:
self.show_grid = not self.show_grid
elif code == sdl2.SDLK_r:
self._reset_view()
def _on_key_up_event(self, event):
"""Event 'on key up'."""
code = event.keysym.sym
if code == sdl2.SDLK_SPACE:
self._listen_for_navigation = False
def _on_mouse_button_down_event(self, event):
"""Event 'on mouse button down'."""
if self._listen_for_navigation:
if event.button == sdl2.SDL_BUTTON_LEFT:
self._navigation_action = NavigationAction.MOVE
elif event.button == sdl2.SDL_BUTTON_RIGHT:
self._navigation_action = NavigationAction.ZOOM
def _on_mouse_button_up_event(self, event):
"""Event 'on mouse button up'."""
if (event.button == sdl2.SDL_BUTTON_LEFT
or event.button == sdl2.SDL_BUTTON_RIGHT):
self._navigation_action = NavigationAction.NONE
def _on_mouse_wheel_event(self, event):
"""Event 'on mouse wheel'."""
scale = 1.0 + self._mouse_wheel_step * event.y
self.view_zoom *= scale
self._has_view_changed = True
def _on_mouse_motion_event(self, event):
"""Event 'on mouse motion'."""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
if self._navigation_action == NavigationAction.MOVE:
self.view_position.set(
(self.view_position.x
- event.xrel * view_aperture.x / window_size.x),
(self.view_position.y
+ event.yrel * view_aperture.y / window_size.y))
self._has_view_changed = True
elif self._navigation_action == NavigationAction.ZOOM:
scale = (1.0
+ float(event.xrel) / window_size.x
- float(event.yrel) / window_size.y)
self.view_zoom *= scale
self._has_view_changed = True
def _create_handles(window_title, window_position, window_size, window_flags,
renderer_info):
"""Create the SDL2 handles."""
window_flags = sdl2.SDL_WINDOW_SHOWN | window_flags
if renderer_info.api == GraphicsAPI.OPENGL:
window_flags |= sdl2.SDL_WINDOW_OPENGL
window = sdl2.SDL_CreateWindow(
window_title.encode(),
window_position.x, window_position.y,
window_size.x, window_size.y,
window_flags)
if not window:
raise RuntimeError(sdl2.SDL_GetError().decode())
context = sdl2.SDL_GL_CreateContext(window)
if not context:
raise RuntimeError(sdl2.SDL_GetError().decode())
# Try to disable the vertical synchronization. It applies to the active
# context and thus needs to be called after `SDL_GL_CreateContext`.
sdl2.SDL_GL_SetSwapInterval(0)
return _Handles(
window=window,
renderer=_GLHandles(context=context))<|fim▁end|> | renderer_info.minor_version)
if renderer_info.profile == GLProfile.CORE:
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_PROFILE_MASK,
sdl2.SDL_GL_CONTEXT_PROFILE_CORE) |
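The screen/world conversions above are one affine mapping: x scales by aperture/window and y flips sign, since screen coordinates grow downward while world coordinates grow upward. A minimal standalone sketch of the same math (the function and variable names here are illustrative, not part of hienoi):

def screen_to_world(px, py, win_w, win_h, view_pos, aperture_x):
    # view_pos is the world-space point shown at the window center.
    aperture_y = aperture_x * win_h / win_w
    wx = view_pos[0] + (px - win_w / 2.0) * aperture_x / win_w
    wy = view_pos[1] - (py - win_h / 2.0) * aperture_y / win_h
    return (wx, wy)

def world_to_screen(wx, wy, win_w, win_h, view_pos, aperture_x):
    aperture_y = aperture_x * win_h / win_w
    px = (win_w / aperture_x) * (wx - view_pos[0] + aperture_x / 2.0)
    py = (win_h / aperture_y) * (view_pos[1] - wy + aperture_y / 2.0)
    return (int(round(px)), int(round(py)))

# Round trip: the window center maps back to the view position.
assert screen_to_world(400, 300, 800, 600, (2.0, 1.0), 20.0) == (2.0, 1.0)
assert world_to_screen(2.0, 1.0, 800, 600, (2.0, 1.0), 20.0) == (400, 300)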
<|file_name|>code.py<|end_file_name|><|fim▁begin|>import math
import re
from collections import defaultdict
def matches(t1, t2):
t1r = "".join([t[-1] for t in t1])
t2r = "".join([t[-1] for t in t2])
t1l = "".join([t[0] for t in t1])
t2l = "".join([t[0] for t in t2])
t1_edges = [t1[0], t1[-1], t1r, t1l]
t2_edges = [t2[0], t2[-1], t2[0][::-1], t2[-1][::-1], t2l, t2l[::-1], t2r, t2r[::-1]]
for et1 in t1_edges:
for et2 in t2_edges:
if et1 == et2:
return True
return False
def flip(t):
return [l[::-1] for l in t]
# https://stackoverflow.com/a/34347121
def rotate(t):
return [*map("".join, zip(*reversed(t)))]
def set_corner(cor, right, down):
rr = "".join([t[-1] for t in right])
dr = "".join([t[-1] for t in down])
rl = "".join([t[0] for t in right])
dl = "".join([t[0] for t in down])
r_edges = [right[0], right[-1], right[0][::-1], right[-1][::-1], rr, rr[::-1], rl, rl[::-1]]
d_edges = [down[0], down[-1], down[0][::-1], down[-1][::-1], dr, dr[::-1], dl, dl[::-1]]
for _ in range(2):
cor = flip(cor)
for _ in range(4):
cor = rotate(cor)
if cor[-1] in d_edges and "".join([t[-1] for t in cor]) in r_edges:
return cor
return None
def remove_border(t):
return [x[1:-1] for x in t[1:-1]]
def set_left_edge(t1, t2):
ref = "".join([t[-1] for t in t1])<|fim▁hole|>
for _ in range(2):
t2 = flip(t2)
for _ in range(4):
t2 = rotate(t2)
if "".join([t[0] for t in t2]) == ref:
return t2
return None
def set_upper_edge(t1, t2):
ref = t1[-1]
for _ in range(2):
t2 = flip(t2)
for _ in range(4):
t2 = rotate(t2)
if t2[0] == ref:
return t2
return None
def assemble_image(img, tiles):
whole_image = []
for l in img:
slice = [""] * len(tiles[l[0]])
for t in l:
for i, s in enumerate(tiles[t]):
slice[i] += s
for s in slice:
whole_image.append(s)
return whole_image
def part1():
tiles = defaultdict(list)
for l in open("input.txt"):
if "Tile" in l:
tile = int(re.findall(r"\d+", l)[0])
elif "." in l or "#" in l:
tiles[tile].append(l.strip())
connected = defaultdict(set)
for i in tiles:
for t in tiles:
if i == t:
continue
if matches(tiles[i], tiles[t]):
connected[i].add(t)
connected[t].add(i)
prod = 1
for i in connected:
if len(connected[i]) == 2:
prod *= i
print(prod)
def part2():
tiles = defaultdict(list)
for l in open("input.txt"):
if "Tile" in l:
tile = int(re.findall(r"\d+", l)[0])
elif "." in l or "#" in l:
tiles[tile].append(l.strip())
connected = defaultdict(set)
for i in tiles:
for t in tiles:
if i == t:
continue
if matches(tiles[i], tiles[t]):
connected[i].add(t)
connected[t].add(i)
sz = int(math.sqrt(len(connected)))
image = [[0 for _ in range(sz)] for _ in range(sz)]
for i in connected:
if len(connected[i]) == 2:
corner = i
break
image[0][0] = corner
added = {corner}
for y in range(1, sz):
pos = connected[image[0][y - 1]]
for cand in pos:
if cand not in added and len(connected[cand]) < 4:
image[0][y] = cand
added.add(cand)
break
for x in range(1, sz):
for y in range(sz):
pos = connected[image[x - 1][y]]
for cand in pos:
if cand not in added:
image[x][y] = cand
added.add(cand)
break
tiles[image[0][0]] = set_corner(tiles[image[0][0]], tiles[image[0][1]], tiles[image[1][0]])
for y, l in enumerate(image):
if y != 0:
prv = image[y - 1][0]
tiles[l[0]] = set_upper_edge(tiles[prv], tiles[l[0]])
for x, tile in enumerate(l):
if x != 0:
prv = image[y][x - 1]
tiles[tile] = set_left_edge(tiles[prv], tiles[tile])
for t in tiles:
tiles[t] = remove_border(tiles[t])
image = assemble_image(image, tiles)
ky = 0
monster = set()
for l in open("monster.txt").read().split("\n"):
kx = len(l)
for i, ch in enumerate(l):
if ch == "#":
monster.add((i, ky))
ky += 1
for _ in range(2):
image = flip(image)
for _ in range(4):
image = rotate(image)
for x in range(0, len(image) - kx):
for y in range(0, len(image) - ky):
parts = []
for i, p in enumerate(monster):
dx = x + p[0]
dy = y + p[1]
parts.append(image[dy][dx] == "#")
if all(parts):
for p in monster:
dx = x + p[0]
dy = y + p[1]
image[dy] = image[dy][:dx] + "O" + image[dy][dx + 1 :]
with open("output.txt", "w+") as f:
for l in rotate(rotate(rotate(image))):
f.write(l + "\n")
print(sum([l.count("#") for l in image]))
if __name__ == "__main__":
part1()
part2()<|fim▁end|> | |
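The flip/rotate loops above re-derive tile orientations at every call site. A compact equivalent (a hypothetical helper, not part of the original solution) generates the eight orientations of a square grid in one place:

def orientations(tile):
    # 4 rotations of the tile plus 4 rotations of its mirror image.
    for grid in (tile, [row[::-1] for row in tile]):
        for _ in range(4):
            yield grid
            grid = ["".join(col) for col in zip(*reversed(grid))]

assert len({tuple(g) for g in orientations(["ab", "cd"])}) == 8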
<|file_name|>diff_html.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
"""
MoinMoin - Side by side diffs
<|fim▁hole|> @license: GNU GPL, see COPYING for details.
"""
from MoinMoin.support import difflib
from MoinMoin.wikiutil import escape
def indent(line):
eol = ''
while line and line[0] == '\n':
eol += '\n'
line = line[1:]
stripped = line.lstrip()
if len(line) - len(stripped):
line = " " * (len(line) - len(stripped)) + stripped
#return "%d / %d / %s" % (len(line), len(stripped), line)
return eol + line
# This code originally by Scott Moonen, used with permission.
def diff(request, old, new, old_top='', new_top='', old_bottom='', new_bottom='', old_top_class='', new_top_class='', old_bottom_class='', new_bottom_class=''):
""" Find changes between old and new and return
HTML markup visualising them.
@param old: old text [unicode]
@param new: new text [unicode]
@param old_top: Custom html for adding on top of old revision column (optional)
@param old_bottom: Custom html for adding at bottom of old revision column (optional)
@param new_top: Custom html for adding on top of new revision column (optional)
@param new_bottom: Custom html for adding at bottom of new revision column (optional)
@param old_top_class: Custom class for <td> with old_top content (optional)
@param new_top_class: Custom class for <td> with new_top content (optional)
@param old_bottom_class: Custom class for <td> with old_bottom content (optional)
@param new_bottom_class: Custom class for <td> with new_bottom content (optional)
"""
_ = request.getText
t_line = _("Line") + " %d"
seq1 = old.splitlines()
seq2 = new.splitlines()
seqobj = difflib.SequenceMatcher(None, seq1, seq2)
linematch = seqobj.get_matching_blocks()
result = """
<table class="diff">
"""
if old_top or new_top:
result += '<tr><td class="%s">%s</td><td class="%s">%s</td></tr>' % (old_top_class, old_top, new_top_class, new_top)
if len(seq1) == len(seq2) and linematch[0] == (0, 0, len(seq1)):
# No differences.
result += '<tr><td class="diff-same" colspan="2">' + _("No differences found!") + '</td></tr>'
else:
result += """
<tr>
<td class="diff-removed"><span>%s</span></td>
<td class="diff-added"><span>%s</span></td>
</tr>
""" % (_('Deletions are marked like this.'), _('Additions are marked like this.'), )
lastmatch = (0, 0)
# Print all differences
for match in linematch:
# Starts of pages identical?
if lastmatch == match[0:2]:
lastmatch = (match[0] + match[2], match[1] + match[2])
continue
llineno, rlineno = lastmatch[0]+1, lastmatch[1]+1
result += """
<tr class="diff-title">
<td>%s:</td>
<td>%s:</td>
</tr>
""" % (request.formatter.line_anchorlink(1, llineno) + request.formatter.text(t_line % llineno) + request.formatter.line_anchorlink(0),
request.formatter.line_anchorlink(1, rlineno) + request.formatter.text(t_line % rlineno) + request.formatter.line_anchorlink(0))
leftpane = ''
rightpane = ''
linecount = max(match[0] - lastmatch[0], match[1] - lastmatch[1])
for line in range(linecount):
if line < match[0] - lastmatch[0]:
if line > 0:
leftpane += '\n'
leftpane += seq1[lastmatch[0] + line]
if line < match[1] - lastmatch[1]:
if line > 0:
rightpane += '\n'
rightpane += seq2[lastmatch[1] + line]
charobj = difflib.SequenceMatcher(None, leftpane, rightpane)
charmatch = charobj.get_matching_blocks()
if charobj.ratio() < 0.5:
# Insufficient similarity.
if leftpane:
leftresult = """<span>%s</span>""" % indent(escape(leftpane))
else:
leftresult = ''
if rightpane:
rightresult = """<span>%s</span>""" % indent(escape(rightpane))
else:
rightresult = ''
else:
# Some similarities; markup changes.
charlast = (0, 0)
leftresult = ''
rightresult = ''
for thismatch in charmatch:
if thismatch[0] - charlast[0] != 0:
leftresult += """<span>%s</span>""" % indent(
escape(leftpane[charlast[0]:thismatch[0]]))
if thismatch[1] - charlast[1] != 0:
rightresult += """<span>%s</span>""" % indent(
escape(rightpane[charlast[1]:thismatch[1]]))
leftresult += escape(leftpane[thismatch[0]:thismatch[0] + thismatch[2]])
rightresult += escape(rightpane[thismatch[1]:thismatch[1] + thismatch[2]])
charlast = (thismatch[0] + thismatch[2], thismatch[1] + thismatch[2])
leftpane = '<br>'.join([indent(x) for x in leftresult.splitlines()])
rightpane = '<br>'.join([indent(x) for x in rightresult.splitlines()])
# removed width="50%%"
result += """
<tr>
<td class="diff-removed">%s</td>
<td class="diff-added">%s</td>
</tr>
""" % (leftpane, rightpane)
lastmatch = (match[0] + match[2], match[1] + match[2])
if old_bottom or new_bottom:
result += '<tr><td class="%s">%s</td><td class="%s">%s</td></tr>' % (old_bottom_class, old_bottom, new_bottom_class, new_bottom)
result += '</table>\n'
return result<|fim▁end|> |
@copyright: 2002 Juergen Hermann <[email protected]>,
2002 Scott Moonen <[email protected]>
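Both the line-level and character-level passes above lean on difflib.SequenceMatcher. For reference, this is what get_matching_blocks yields on a small input (standard-library behavior):

import difflib

old = ["one", "two", "three"]
new = ["one", "2", "three"]
matcher = difflib.SequenceMatcher(None, old, new)
# Each block is an (a, b, size) triple: old[a:a+size] == new[b:b+size].
# The last block is always a zero-length sentinel at the end of both inputs.
print(matcher.get_matching_blocks())
# [Match(a=0, b=0, size=1), Match(a=2, b=2, size=1), Match(a=3, b=3, size=0)]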
|
<|file_name|>makevcf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys,os
import textwrap
def print_header():
print textwrap.dedent("""\
##fileformat=VCFv4.1
##phasing=none
##INDIVIDUAL=TRUTH
##SAMPLE=<ID=TRUTH,Individual="TRUTH",Description="bamsurgeon spike-in">
##INFO=<ID=CIPOS,Number=2,Type=Integer,Description="Confidence interval around POS for imprecise variants">
##INFO=<ID=IMPRECISE,Number=0,Type=Flag,Description="Imprecise structural variation">
##INFO=<ID=SVTYPE,Number=1,Type=String,Description="Type of structural variant">
##INFO=<ID=SVLEN,Number=.,Type=Integer,Description="Difference in length between REF and ALT alleles">
##INFO=<ID=SOMATIC,Number=0,Type=Flag,Description="Somatic mutation in primary">
##INFO=<ID=VAF,Number=1,Type=Float,Description="Variant Allele Frequency">
##INFO=<ID=DPR,Number=1,Type=Float,Description="Avg Depth in Region (+/- 1bp)">
##INFO=<ID=MATEID,Number=1,Type=String,Description="Breakend mate">
##ALT=<ID=INV,Description="Inversion">
##ALT=<ID=DUP,Description="Duplication">
##ALT=<ID=DEL,Description="Deletion">
##ALT=<ID=INS,Description="Insertion">
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">
#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tSPIKEIN""")
if len(sys.argv) == 2:
print_header()<|fim▁hole|> if filename.endswith('.log'):
with open(sys.argv[1] + '/' + filename, 'r') as infile:
for line in infile:
if line.startswith('snv'):
#chrom, pos, mut = line.strip().split()
c = line.strip().split()
chrom = c[1].split(':')[0]
pos = c[3]
mut = c[4]
dpr = c[6]
vaf = c[7]
ref,alt = mut.split('-->')
print "\t".join((chrom,pos,'.',ref,alt,'100','PASS','SOMATIC;VAF=' + vaf + ';DPR=' + dpr,'GT','0/1'))
else:
print "usage:", sys.argv[0], "<log directory>"<|fim▁end|> | logdir_files = os.listdir(sys.argv[1])
for filename in logdir_files: |
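The script is Python 2 (print statements). A hedged Python 3 rendition of just the snv-line handling, assuming the same whitespace-separated log layout implied by the indexing above:

def parse_snv_line(line):
    # Assumed layout: snv <chrom:span> _ <pos> <ref-->alt> _ <dpr> <vaf> ...
    c = line.strip().split()
    chrom = c[1].split(':')[0]
    pos, mut, dpr, vaf = c[3], c[4], c[6], c[7]
    ref, alt = mut.split('-->')
    info = 'SOMATIC;VAF={};DPR={}'.format(vaf, dpr)
    return '\t'.join((chrom, pos, '.', ref, alt, '100', 'PASS', info, 'GT', '0/1'))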
<|file_name|>test_volume_utils.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#<|fim▁hole|># WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests For miscellaneous util methods used with volume."""
from cinder import context
from cinder import db
from cinder import flags
from cinder.openstack.common import importutils
from cinder.openstack.common import log as logging
from cinder.openstack.common.notifier import api as notifier_api
from cinder.openstack.common.notifier import test_notifier
from cinder import test
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
FLAGS = flags.FLAGS
class UsageInfoTestCase(test.TestCase):
QUEUE_NAME = 'cinder-volume'
HOSTNAME = 'my-host.com'
HOSTIP = '10.0.0.1'
BACKEND = 'test_backend'
MULTI_AT_BACKEND = 'test_b@ckend'
def setUp(self):
super(UsageInfoTestCase, self).setUp()
self.flags(connection_type='fake',
host='fake',
notification_driver=[test_notifier.__name__])
self.volume = importutils.import_object(FLAGS.volume_manager)
self.user_id = 'fake'
self.project_id = 'fake'
self.snapshot_id = 'fake'
self.volume_size = 0
self.context = context.RequestContext(self.user_id, self.project_id)
test_notifier.NOTIFICATIONS = []
def tearDown(self):
notifier_api._reset_drivers()
super(UsageInfoTestCase, self).tearDown()
def _create_volume(self, params={}):
"""Create a test volume."""
vol = {}
vol['snapshot_id'] = self.snapshot_id
vol['user_id'] = self.user_id
vol['project_id'] = self.project_id
vol['host'] = FLAGS.host
vol['availability_zone'] = FLAGS.storage_availability_zone
vol['status'] = "creating"
vol['attach_status'] = "detached"
vol['size'] = self.volume_size
vol.update(params)
return db.volume_create(self.context, vol)['id']
def test_notify_usage_exists(self):
"""Ensure 'exists' notification generates appropriate usage data."""
volume_id = self._create_volume()
volume = db.volume_get(self.context, volume_id)
volume_utils.notify_usage_exists(self.context, volume)
LOG.info("%r" % test_notifier.NOTIFICATIONS)
self.assertEquals(len(test_notifier.NOTIFICATIONS), 1)
msg = test_notifier.NOTIFICATIONS[0]
self.assertEquals(msg['priority'], 'INFO')
self.assertEquals(msg['event_type'], 'volume.exists')
payload = msg['payload']
self.assertEquals(payload['tenant_id'], self.project_id)
self.assertEquals(payload['user_id'], self.user_id)
self.assertEquals(payload['snapshot_id'], self.snapshot_id)
self.assertEquals(payload['volume_id'], volume.id)
self.assertEquals(payload['size'], self.volume_size)
for attr in ('display_name', 'created_at', 'launched_at',
'status', 'audit_period_beginning',
'audit_period_ending'):
self.assertTrue(attr in payload,
msg="Key %s not in payload" % attr)
db.volume_destroy(context.get_admin_context(), volume['id'])
def test_get_host_from_queue_simple(self):
fullname = "%s.%s@%s" % (self.QUEUE_NAME, self.HOSTNAME, self.BACKEND)
self.assertEquals(volume_utils.get_host_from_queue(fullname),
self.HOSTNAME)
def test_get_host_from_queue_ip(self):
fullname = "%s.%s@%s" % (self.QUEUE_NAME, self.HOSTIP, self.BACKEND)
self.assertEquals(volume_utils.get_host_from_queue(fullname),
self.HOSTIP)
def test_get_host_from_queue_multi_at_symbol(self):
fullname = "%s.%s@%s" % (self.QUEUE_NAME, self.HOSTNAME,
self.MULTI_AT_BACKEND)
self.assertEquals(volume_utils.get_host_from_queue(fullname),
self.HOSTNAME)
def test_get_host_from_queue_ip_multi_at_symbol(self):
fullname = "%s.%s@%s" % (self.QUEUE_NAME, self.HOSTIP,
self.MULTI_AT_BACKEND)
self.assertEquals(volume_utils.get_host_from_queue(fullname),
self.HOSTIP)<|fim▁end|> | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
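The queue-name tests pin down a contract: get_host_from_queue takes '<queue>.<host>@<backend>' and returns the host, even when the backend itself contains '@'. A sketch of logic that satisfies all four cases (an assumption; the real implementation lives in cinder.volume.utils):

def get_host_from_queue(queuename):
    # Drop the queue prefix at the first '.'; only the leftmost '@' in the
    # remainder separates host from backend.
    _, _, tail = queuename.partition('.')
    return tail.split('@')[0]

assert get_host_from_queue('cinder-volume.my-host.com@test_b@ckend') == 'my-host.com'
assert get_host_from_queue('cinder-volume.10.0.0.1@test_backend') == '10.0.0.1'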
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![cfg_attr(feature = "unstable", feature(core_intrinsics))]
#![cfg_attr(feature = "unstable", feature(on_unimplemented))]
#![feature(const_fn)]
#![feature(mpsc_select)]
#![feature(plugin)]
#![feature(string_retain)]
#![feature(use_extern_macros)]
#![deny(unsafe_code)]
#![allow(non_snake_case)]
#![doc = "The script crate contains all matters DOM."]
#![plugin(script_plugins)]
#![cfg_attr(not(feature = "unrooted_must_root_lint"), allow(unknown_lints))]
extern crate app_units;
extern crate audio_video_metadata;
extern crate base64;
#[macro_use]
extern crate bitflags;
extern crate bluetooth_traits;
extern crate byteorder;
extern crate canvas_traits;
extern crate caseless;
extern crate chrono;
extern crate cookie as cookie_rs;
#[macro_use] extern crate cssparser;
#[macro_use] extern crate deny_public_fields;
extern crate devtools_traits;
extern crate dom_struct;
#[macro_use]
extern crate domobject_derive;
extern crate embedder_traits;
extern crate encoding_rs;
#[macro_use] extern crate enum_iterator;
extern crate euclid;
extern crate fnv;
extern crate gleam;
extern crate half;
#[macro_use] extern crate html5ever;
#[macro_use]
extern crate hyper;
extern crate hyper_serde;
extern crate image;
extern crate ipc_channel;
#[macro_use]
extern crate jstraceable_derive;
#[macro_use]
extern crate lazy_static;
extern crate libc;
#[macro_use]
extern crate log;
#[macro_use] extern crate malloc_size_of;
#[macro_use] extern crate malloc_size_of_derive;
extern crate metrics;
#[macro_use]
extern crate mime;
extern crate mime_guess;
extern crate mitochondria;
extern crate mozangle;
#[macro_use]
extern crate mozjs as js;
extern crate msg;
extern crate net_traits;
extern crate num_traits;
extern crate offscreen_gl_context;
extern crate parking_lot;
extern crate phf;
#[macro_use]
extern crate profile_traits;
extern crate ref_filter_map;
extern crate ref_slice;
extern crate regex;
extern crate script_layout_interface;
extern crate script_traits;
extern crate selectors;
extern crate serde;
extern crate serde_bytes;
extern crate servo_allocator;
extern crate servo_arc;
#[macro_use] extern crate servo_atoms;
extern crate servo_config;
extern crate servo_geometry;
extern crate servo_media;
extern crate servo_rand;
extern crate servo_url;
extern crate smallvec;
#[macro_use]
extern crate style;
extern crate style_traits;
extern crate swapper;
extern crate time;
#[cfg(target_os = "linux")]
extern crate tinyfiledialogs;
extern crate unicode_segmentation;
extern crate url;
extern crate utf8;
extern crate uuid;
extern crate webrender_api;
extern crate webvr_traits;
extern crate xml5ever;
#[macro_use]
mod task;
mod body;
pub mod clipboard_provider;
mod devtools;
pub mod document_loader;
#[macro_use]
mod dom;
pub mod fetch;
mod layout_image;
mod mem;
mod microtask;
mod network_listener;
pub mod script_runtime;
#[allow(unsafe_code)]
pub mod script_thread;
mod serviceworker_manager;
mod serviceworkerjob;
mod stylesheet_loader;
mod task_source;
pub mod test;
pub mod textinput;
mod timers;
mod unpremultiplytable;
mod webdriver_handlers;
/// A module with everything layout can use from script.
///
/// Try to keep this small!
///
/// TODO(emilio): A few of the FooHelpers can go away, presumably...
pub mod layout_exports {
pub use dom::bindings::inheritance::{CharacterDataTypeId, ElementTypeId};
pub use dom::bindings::inheritance::{HTMLElementTypeId, NodeTypeId};
pub use dom::bindings::root::LayoutDom;
pub use dom::characterdata::LayoutCharacterDataHelpers;
pub use dom::document::{Document, LayoutDocumentHelpers, PendingRestyle};
pub use dom::element::{Element, LayoutElementHelpers, RawLayoutElementHelpers};
pub use dom::node::NodeFlags;
pub use dom::node::{LayoutNodeHelpers, Node};
pub use dom::text::Text;
}
use dom::bindings::codegen::RegisterBindings;
use dom::bindings::conversions::is_dom_proxy;
use dom::bindings::proxyhandler;
use dom::bindings::utils::is_platform_object;
use js::jsapi::JSObject;
use script_traits::SWManagerSenders;
use serviceworker_manager::ServiceWorkerManager;
#[cfg(target_os = "linux")]
#[allow(unsafe_code)]
fn perform_platform_specific_initialization() {
use std::mem;
// 4096 is default max on many linux systems
const MAX_FILE_LIMIT: libc::rlim_t = 4096;
// Bump up our number of file descriptors to save us from impending doom caused by an onslaught
// of iframes.
unsafe {
let mut rlim: libc::rlimit = mem::uninitialized();
match libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim) {
0 => {
if rlim.rlim_cur >= MAX_FILE_LIMIT {
// we have more than enough
return;
}
rlim.rlim_cur = match rlim.rlim_max {
libc::RLIM_INFINITY => MAX_FILE_LIMIT,
_ => {
if rlim.rlim_max < MAX_FILE_LIMIT {
rlim.rlim_max
} else {
MAX_FILE_LIMIT<|fim▁hole|> 0 => (),
_ => warn!("Failed to set file count limit"),
};
},
_ => warn!("Failed to get file count limit"),
};
}
}
#[cfg(not(target_os = "linux"))]
fn perform_platform_specific_initialization() {}
pub fn init_service_workers(sw_senders: SWManagerSenders) {
// Spawn the service worker manager passing the constellation sender
ServiceWorkerManager::spawn_manager(sw_senders);
}
#[allow(unsafe_code)]
unsafe extern "C" fn is_dom_object(obj: *mut JSObject) -> bool {
!obj.is_null() && (is_platform_object(obj) || is_dom_proxy(obj))
}
#[allow(unsafe_code)]
pub fn init() {
unsafe {
proxyhandler::init();
// Create the global vtables used by the (generated) DOM
// bindings to implement JS proxies.
RegisterBindings::RegisterProxyHandlers();
js::glue::InitializeMemoryReporter(Some(is_dom_object));
}
perform_platform_specific_initialization();
}<|fim▁end|> | }
}
};
match libc::setrlimit(libc::RLIMIT_NOFILE, &rlim) { |
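The same soft-limit bump has a direct Python counterpart in the standard resource module (Unix only); a rough sketch of the logic above:

import resource

MAX_FILE_LIMIT = 4096  # default hard cap on many Linux systems

def bump_file_limit():
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    if soft >= MAX_FILE_LIMIT:
        return  # already have more than enough descriptors
    if hard == resource.RLIM_INFINITY:
        soft = MAX_FILE_LIMIT
    else:
        # Never raise the soft limit past the hard limit.
        soft = min(hard, MAX_FILE_LIMIT)
    resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))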
<|file_name|>test_keypairs.py<|end_file_name|><|fim▁begin|># Copyright 2011 Eldar Nugaev
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_policy import policy as oslo_policy
import webob
from nova.api.openstack.compute import keypairs as keypairs_v21
from nova.api.openstack import wsgi as os_wsgi
from nova.compute import api as compute_api
from nova import context as nova_context
from nova import exception
from nova import objects
from nova import policy
from nova import quota
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit.objects import test_keypair
QUOTAS = quota.QUOTAS
keypair_data = {
'public_key': 'FAKE_KEY',
'fingerprint': 'FAKE_FINGERPRINT',
}
FAKE_UUID = 'b48316c5-71e8-45e4-9884-6c78055b9b13'
def fake_keypair(name):
return dict(test_keypair.fake_keypair,
name=name, **keypair_data)
def db_key_pair_get_all_by_user(self, user_id, limit, marker):
return [fake_keypair('FAKE')]
def db_key_pair_create(self, keypair):
return fake_keypair(name=keypair['name'])
def db_key_pair_destroy(context, user_id, name):
if not (user_id and name):
raise Exception()
def db_key_pair_create_duplicate(context):
raise exception.KeyPairExists(key_name='create_duplicate')
class KeypairsTestV21(test.TestCase):
base_url = '/v2/%s' % fakes.FAKE_PROJECT_ID
validation_error = exception.ValidationError
wsgi_api_version = os_wsgi.DEFAULT_API_VERSION
def _setup_app_and_controller(self):
self.app_server = fakes.wsgi_app_v21()
self.controller = keypairs_v21.KeypairController()
def setUp(self):
super(KeypairsTestV21, self).setUp()
fakes.stub_out_networking(self)
fakes.stub_out_secgroup_api(self)
self.stub_out("nova.db.api.key_pair_get_all_by_user",
db_key_pair_get_all_by_user)
self.stub_out("nova.db.api.key_pair_create",
db_key_pair_create)
self.stub_out("nova.db.api.key_pair_destroy",
db_key_pair_destroy)
self._setup_app_and_controller()
self.req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
def test_keypair_list(self):
res_dict = self.controller.index(self.req)
response = {'keypairs': [{'keypair': dict(keypair_data, name='FAKE')}]}
self.assertEqual(res_dict, response)
def test_keypair_create(self):
body = {'keypair': {'name': 'create_test'}}
res_dict = self.controller.create(self.req, body=body)
self.assertGreater(len(res_dict['keypair']['fingerprint']), 0)
self.assertGreater(len(res_dict['keypair']['private_key']), 0)
self._assert_keypair_type(res_dict)
def _test_keypair_create_bad_request_case(self,
body,
exception):
self.assertRaises(exception,
self.controller.create, self.req, body=body)
def test_keypair_create_with_empty_name(self):
body = {'keypair': {'name': ''}}
self._test_keypair_create_bad_request_case(body,
self.validation_error)
def test_keypair_create_with_name_too_long(self):
body = {
'keypair': {
'name': 'a' * 256
}
}
self._test_keypair_create_bad_request_case(body,
self.validation_error)
def test_keypair_create_with_name_leading_trailing_spaces(self):
body = {
'keypair': {
'name': ' test '
}
}
self._test_keypair_create_bad_request_case(body,
self.validation_error)
def test_keypair_create_with_name_leading_trailing_spaces_compat_mode(
self):
body = {'keypair': {'name': ' test '}}
self.req.set_legacy_v2()
res_dict = self.controller.create(self.req, body=body)
self.assertEqual('test', res_dict['keypair']['name'])
def test_keypair_create_with_non_alphanumeric_name(self):
body = {
'keypair': {
'name': 'test/keypair'
}
}
self._test_keypair_create_bad_request_case(body,
webob.exc.HTTPBadRequest)
def test_keypair_import_bad_key(self):
body = {
'keypair': {
'name': 'create_test',
'public_key': 'ssh-what negative',
},
}
self._test_keypair_create_bad_request_case(body,
webob.exc.HTTPBadRequest)
def test_keypair_create_with_invalid_keypair_body(self):
body = {'alpha': {'name': 'create_test'}}
self._test_keypair_create_bad_request_case(body,
self.validation_error)
def test_keypair_import(self):
body = {
'keypair': {
'name': 'create_test',
'public_key': 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDBYIznA'
'x9D7118Q1VKGpXy2HDiKyUTM8XcUuhQpo0srqb9rboUp4'
'a9NmCwpWpeElDLuva707GOUnfaBAvHBwsRXyxHJjRaI6Y'
'Qj2oLJwqvaSaWUbyT1vtryRqy6J3TecN0WINY71f4uymi'
'MZP0wby4bKBcYnac8KiCIlvkEl0ETjkOGUq8OyWRmn7lj'
'j5SESEUdBP0JnuTFKddWTU/wD6wydeJaUhBTqOlHn0kX1'
'GyqoNTE1UEhcM5ZRWgfUZfTjVyDF2kGj3vJLCJtJ8LoGc'
'j7YaN4uPg1rBle+izwE/tLonRrds+cev8p6krSSrxWOwB'
'bHkXa6OciiJDvkRzJXzf',
},
}
res_dict = self.controller.create(self.req, body=body)
# FIXME(ja): Should we check that public_key was sent to create?
self.assertGreater(len(res_dict['keypair']['fingerprint']), 0)
self.assertNotIn('private_key', res_dict['keypair'])
self._assert_keypair_type(res_dict)
@mock.patch('nova.objects.Quotas.check_deltas')
def test_keypair_import_quota_limit(self, mock_check):
mock_check.side_effect = exception.OverQuota(overs='key_pairs',
usages={'key_pairs': 100})
body = {
'keypair': {
'name': 'create_test',
'public_key': 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDBYIznA'
'x9D7118Q1VKGpXy2HDiKyUTM8XcUuhQpo0srqb9rboUp4'
'a9NmCwpWpeElDLuva707GOUnfaBAvHBwsRXyxHJjRaI6Y'
'Qj2oLJwqvaSaWUbyT1vtryRqy6J3TecN0WINY71f4uymi'
'MZP0wby4bKBcYnac8KiCIlvkEl0ETjkOGUq8OyWRmn7lj'
'j5SESEUdBP0JnuTFKddWTU/wD6wydeJaUhBTqOlHn0kX1'
'GyqoNTE1UEhcM5ZRWgfUZfTjVyDF2kGj3vJLCJtJ8LoGc'
'j7YaN4uPg1rBle+izwE/tLonRrds+cev8p6krSSrxWOwB'
'bHkXa6OciiJDvkRzJXzf',
},
}
ex = self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, self.req, body=body)
self.assertIn('Quota exceeded, too many key pairs.', ex.explanation)
@mock.patch('nova.objects.Quotas.check_deltas')
def test_keypair_create_quota_limit(self, mock_check):
mock_check.side_effect = exception.OverQuota(overs='key_pairs',
usages={'key_pairs': 100})
body = {
'keypair': {
'name': 'create_test',
},
}
ex = self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, self.req, body=body)
self.assertIn('Quota exceeded, too many key pairs.', ex.explanation)
@mock.patch('nova.objects.Quotas.check_deltas')
def test_keypair_create_over_quota_during_recheck(self, mock_check):
# Simulate a race where the first check passes and the recheck fails.
# First check occurs in compute/api.
exc = exception.OverQuota(overs='key_pairs', usages={'key_pairs': 100})
mock_check.side_effect = [None, exc]
body = {
'keypair': {
'name': 'create_test',
},
}
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.create, self.req, body=body)
ctxt = self.req.environ['nova.context']
self.assertEqual(2, mock_check.call_count)
call1 = mock.call(ctxt, {'key_pairs': 1}, ctxt.user_id)
call2 = mock.call(ctxt, {'key_pairs': 0}, ctxt.user_id)
mock_check.assert_has_calls([call1, call2])
# Verify we removed the key pair that was added after the first
# quota check passed.
key_pairs = objects.KeyPairList.get_by_user(ctxt, ctxt.user_id)
names = [key_pair.name for key_pair in key_pairs]
self.assertNotIn('create_test', names)
@mock.patch('nova.objects.Quotas.check_deltas')
def test_keypair_create_no_quota_recheck(self, mock_check):
# Disable recheck_quota.
self.flags(recheck_quota=False, group='quota')
body = {
'keypair': {
'name': 'create_test',
},
}
self.controller.create(self.req, body=body)
ctxt = self.req.environ['nova.context']
# check_deltas should have been called only once.
mock_check.assert_called_once_with(ctxt, {'key_pairs': 1},
ctxt.user_id)
def test_keypair_create_duplicate(self):
self.stub_out("nova.objects.KeyPair.create",
db_key_pair_create_duplicate)
body = {'keypair': {'name': 'create_duplicate'}}
ex = self.assertRaises(webob.exc.HTTPConflict,
self.controller.create, self.req, body=body)
self.assertIn("Key pair 'create_duplicate' already exists.",
ex.explanation)
@mock.patch('nova.objects.KeyPair.get_by_name')
def test_keypair_delete(self, mock_get_by_name):
mock_get_by_name.return_value = objects.KeyPair(
nova_context.get_admin_context(), **fake_keypair('FAKE'))
self.controller.delete(self.req, 'FAKE')
def test_keypair_get_keypair_not_found(self):
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, self.req, 'DOESNOTEXIST')
def test_keypair_delete_not_found(self):
def db_key_pair_get_not_found(context, user_id, name):
raise exception.KeypairNotFound(user_id=user_id, name=name)
self.stub_out("nova.db.api.key_pair_destroy",
db_key_pair_get_not_found)
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete, self.req, 'FAKE')
def test_keypair_show(self):
def _db_key_pair_get(context, user_id, name):
return dict(test_keypair.fake_keypair,
name='foo', public_key='XXX', fingerprint='YYY',
type='ssh')
self.stub_out("nova.db.api.key_pair_get", _db_key_pair_get)
res_dict = self.controller.show(self.req, 'FAKE')
self.assertEqual('foo', res_dict['keypair']['name'])
self.assertEqual('XXX', res_dict['keypair']['public_key'])
self.assertEqual('YYY', res_dict['keypair']['fingerprint'])
self._assert_keypair_type(res_dict)
def test_keypair_show_not_found(self):
def _db_key_pair_get(context, user_id, name):
raise exception.KeypairNotFound(user_id=user_id, name=name)
self.stub_out("nova.db.api.key_pair_get", _db_key_pair_get)
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show, self.req, 'FAKE')
def _assert_keypair_type(self, res_dict):
self.assertNotIn('type', res_dict['keypair'])
class KeypairPolicyTestV21(test.NoDBTestCase):
KeyPairController = keypairs_v21.KeypairController()
policy_path = 'os_compute_api:os-keypairs'
def setUp(self):
super(KeypairPolicyTestV21, self).setUp()
@staticmethod
def _db_key_pair_get(context, user_id, name=None):
if name is not None:
return dict(test_keypair.fake_keypair,
name='foo', public_key='XXX', fingerprint='YYY',
type='ssh')
else:
return db_key_pair_get_all_by_user(context, user_id)
self.stub_out("nova.objects.keypair.KeyPair._get_from_db",
_db_key_pair_get)
self.req = fakes.HTTPRequest.blank('')
def test_keypair_list_fail_policy(self):
rules = {self.policy_path + ':index': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
self.assertRaises(exception.Forbidden,
self.KeyPairController.index,
self.req)
@mock.patch('nova.objects.KeyPairList.get_by_user')
def test_keypair_list_pass_policy(self, mock_get):
rules = {self.policy_path + ':index': ''}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
res = self.KeyPairController.index(self.req)
self.assertIn('keypairs', res)
def test_keypair_show_fail_policy(self):
rules = {self.policy_path + ':show': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
self.assertRaises(exception.Forbidden,
self.KeyPairController.show,
self.req, 'FAKE')
def test_keypair_show_pass_policy(self):
rules = {self.policy_path + ':show': ''}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
res = self.KeyPairController.show(self.req, 'FAKE')
self.assertIn('keypair', res)
def test_keypair_create_fail_policy(self):
body = {'keypair': {'name': 'create_test'}}
rules = {self.policy_path + ':create': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
self.assertRaises(exception.Forbidden,
self.KeyPairController.create,
self.req, body=body)
def _assert_keypair_create(self, mock_create, req):
mock_create.assert_called_with(req, 'fake_user', 'create_test', 'ssh')
@mock.patch.object(compute_api.KeypairAPI, 'create_key_pair')
def test_keypair_create_pass_policy(self, mock_create):
keypair_obj = objects.KeyPair(name='', public_key='',
fingerprint='', user_id='')
mock_create.return_value = (keypair_obj, 'dummy')
body = {'keypair': {'name': 'create_test'}}
rules = {self.policy_path + ':create': ''}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
res = self.KeyPairController.create(self.req, body=body)
self.assertIn('keypair', res)
req = self.req.environ['nova.context']
self._assert_keypair_create(mock_create, req)
def test_keypair_delete_fail_policy(self):
rules = {self.policy_path + ':delete': 'role:admin'}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
self.assertRaises(exception.Forbidden,
self.KeyPairController.delete,
self.req, 'FAKE')
@mock.patch('nova.objects.KeyPair.destroy_by_name')
def test_keypair_delete_pass_policy(self, mock_destroy):
rules = {self.policy_path + ':delete': ''}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
self.KeyPairController.delete(self.req, 'FAKE')
class KeypairsTestV22(KeypairsTestV21):
wsgi_api_version = '2.2'
def test_keypair_list(self):
res_dict = self.controller.index(self.req)
expected = {'keypairs': [{'keypair': dict(keypair_data, name='FAKE',
type='ssh')}]}
self.assertEqual(expected, res_dict)
def _assert_keypair_type(self, res_dict):
self.assertEqual('ssh', res_dict['keypair']['type'])
def test_keypair_create_with_name_leading_trailing_spaces_compat_mode(
self):
pass
def test_create_server_keypair_name_with_leading_trailing_compat_mode(
self):
pass
class KeypairsTestV210(KeypairsTestV22):
wsgi_api_version = '2.10'
def test_keypair_create_with_name_leading_trailing_spaces_compat_mode(
self):
pass
def test_create_server_keypair_name_with_leading_trailing_compat_mode(
self):
pass
def test_keypair_list_other_user(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs?user_id=foo',
version=self.wsgi_api_version,
use_admin_context=True)
with mock.patch.object(self.controller.api, 'get_key_pairs') as mock_g:
self.controller.index(req)
userid = mock_g.call_args_list[0][0][1]
self.assertEqual('foo', userid)
def test_keypair_list_other_user_not_admin(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs?user_id=foo',
version=self.wsgi_api_version)
with mock.patch.object(self.controller.api, 'get_key_pairs'):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.index, req)
def test_keypair_show_other_user(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs/FAKE?user_id=foo',
version=self.wsgi_api_version,
use_admin_context=True)
with mock.patch.object(self.controller.api, 'get_key_pair') as mock_g:
self.controller.show(req, 'FAKE')
userid = mock_g.call_args_list[0][0][1]
self.assertEqual('foo', userid)
def test_keypair_show_other_user_not_admin(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs/FAKE?user_id=foo',
version=self.wsgi_api_version)
with mock.patch.object(self.controller.api, 'get_key_pair'):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.show, req, 'FAKE')
def test_keypair_delete_other_user(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs/FAKE?user_id=foo',
version=self.wsgi_api_version,
use_admin_context=True)
with mock.patch.object(self.controller.api,
'delete_key_pair') as mock_g:
self.controller.delete(req, 'FAKE')
userid = mock_g.call_args_list[0][0][1]
self.assertEqual('foo', userid)
def test_keypair_delete_other_user_not_admin(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs/FAKE?user_id=foo',
version=self.wsgi_api_version)
with mock.patch.object(self.controller.api, 'delete_key_pair'):
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.delete, req, 'FAKE')
def test_keypair_create_other_user(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs',
version=self.wsgi_api_version,
use_admin_context=True)
body = {'keypair': {'name': 'create_test',
'user_id': '8861f37f-034e-4ca8-8abe-6d13c074574a'}}
with mock.patch.object(self.controller.api,
'create_key_pair',
return_value=(mock.MagicMock(), 1)) as mock_g:
res = self.controller.create(req, body=body)
userid = mock_g.call_args_list[0][0][1]
self.assertEqual('8861f37f-034e-4ca8-8abe-6d13c074574a', userid)
self.assertIn('keypair', res)
def test_keypair_import_other_user(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs',
version=self.wsgi_api_version,
use_admin_context=True)
body = {'keypair': {'name': 'create_test',
'user_id': '8861f37f-034e-4ca8-8abe-6d13c074574a',
'public_key': 'public_key'}}
with mock.patch.object(self.controller.api,
'import_key_pair') as mock_g:
res = self.controller.create(req, body=body)
userid = mock_g.call_args_list[0][0][1]
self.assertEqual('8861f37f-034e-4ca8-8abe-6d13c074574a', userid)
self.assertIn('keypair', res)
def test_keypair_create_other_user_not_admin(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs',
version=self.wsgi_api_version)
body = {'keypair': {'name': 'create_test',
'user_id': '8861f37f-034e-4ca8-8abe-6d13c074574a'}}
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.create,
req, body=body)
def test_keypair_list_other_user_invalid_in_old_microversion(self):
req = fakes.HTTPRequest.blank(self.base_url +
'/os-keypairs?user_id=foo',
version="2.9",
use_admin_context=True)
with mock.patch.object(self.controller.api, 'get_key_pairs') as mock_g:
self.controller.index(req)
userid = mock_g.call_args_list[0][0][1]
self.assertEqual('fake_user', userid)
class KeypairsTestV235(test.TestCase):
base_url = '/v2/%s' % fakes.FAKE_PROJECT_ID
wsgi_api_version = '2.35'
def _setup_app_and_controller(self):
self.app_server = fakes.wsgi_app_v21()
self.controller = keypairs_v21.KeypairController()
def setUp(self):
super(KeypairsTestV235, self).setUp()
self._setup_app_and_controller()
@mock.patch("nova.db.api.key_pair_get_all_by_user")
def test_keypair_list_limit_and_marker(self, mock_kp_get):
mock_kp_get.side_effect = db_key_pair_get_all_by_user
req = fakes.HTTPRequest.blank(
self.base_url + '/os-keypairs?limit=3&marker=fake_marker',
version=self.wsgi_api_version, use_admin_context=True)
res_dict = self.controller.index(req)
mock_kp_get.assert_called_once_with(
req.environ['nova.context'], 'fake_user',
limit=3, marker='fake_marker')
response = {'keypairs': [{'keypair': dict(keypair_data, name='FAKE',
type='ssh')}]}
self.assertEqual(res_dict, response)
@mock.patch('nova.compute.api.KeypairAPI.get_key_pairs')
def test_keypair_list_limit_and_marker_invalid_marker(self, mock_kp_get):
mock_kp_get.side_effect = exception.MarkerNotFound(marker='unknown_kp')
req = fakes.HTTPRequest.blank(
self.base_url + '/os-keypairs?limit=3&marker=unknown_kp',
version=self.wsgi_api_version, use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
def test_keypair_list_limit_and_marker_invalid_limit(self):
req = fakes.HTTPRequest.blank(
self.base_url + '/os-keypairs?limit=abc&marker=fake_marker',
version=self.wsgi_api_version, use_admin_context=True)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
@mock.patch("nova.db.api.key_pair_get_all_by_user")
def test_keypair_list_limit_and_marker_invalid_in_old_microversion(
self, mock_kp_get):
mock_kp_get.side_effect = db_key_pair_get_all_by_user
req = fakes.HTTPRequest.blank(
self.base_url + '/os-keypairs?limit=3&marker=fake_marker',
version="2.30", use_admin_context=True)
self.controller.index(req)
mock_kp_get.assert_called_once_with(
req.environ['nova.context'], 'fake_user',
limit=None, marker=None)
class KeypairsTestV275(test.TestCase):
def setUp(self):
super(KeypairsTestV275, self).setUp()
self.controller = keypairs_v21.KeypairController()<|fim▁hole|> @mock.patch('nova.objects.KeyPair.get_by_name')
def test_keypair_list_additional_param_old_version(self, mock_get_by_name,
mock_kp_get):
req = fakes.HTTPRequest.blank(
'/os-keypairs?unknown=3',
version='2.74', use_admin_context=True)
self.controller.index(req)
self.controller.show(req, 1)
with mock.patch.object(self.controller.api,
'delete_key_pair'):
self.controller.delete(req, 1)
def test_keypair_list_additional_param(self):
req = fakes.HTTPRequest.blank(
'/os-keypairs?unknown=3',
version='2.75', use_admin_context=True)
self.assertRaises(exception.ValidationError, self.controller.index,
req)
def test_keypair_show_additional_param(self):
req = fakes.HTTPRequest.blank(
'/os-keypairs?unknown=3',
version='2.75', use_admin_context=True)
self.assertRaises(exception.ValidationError, self.controller.show,
req, 1)
def test_keypair_delete_additional_param(self):
req = fakes.HTTPRequest.blank(
'/os-keypairs?unknown=3',
version='2.75', use_admin_context=True)
self.assertRaises(exception.ValidationError, self.controller.delete,
req, 1)<|fim▁end|> |
@mock.patch("nova.db.api.key_pair_get_all_by_user") |
<|file_name|>display.rs<|end_file_name|><|fim▁begin|>use hal;
use hal::spi::Spi;<|fim▁hole|>use hal::gpio::{gpiob, gpioa, Output, PushPull, AF5};
use ls010b7dh01::Ls010b7dh01;
// Type aliases for these gross types
pub type Extcomin = gpiob::PB1<Output<PushPull>>;
pub type Display = Ls010b7dh01<
Spi<
hal::stm32f30x::SPI1,
(gpioa::PA5<AF5>, gpioa::PA6<AF5>, gpioa::PA7<AF5>),
>,
gpiob::PB0<Output<PushPull>>,
gpiob::PB2<Output<PushPull>>,
>;<|fim▁end|> | |
<|file_name|>visitor.rs<|end_file_name|><|fim▁begin|>use rustc::middle::{ty, def};
use rustc::middle::ty::MethodCall;
use syntax::{ast, ast_util, ast_map};
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::visit;
use syntax::visit::Visitor;
use std::fmt;
use std::mem::replace;
use std::collections::BTreeMap;
fn type_is_unsafe_function(ty: ty::Ty) -> bool {
match ty.sty {
ty::ty_bare_fn(_, ref f) => f.unsafety == ast::Unsafety::Unsafe,
_ => false,
}
}
pub struct NodeInfo {
pub span: Span,
pub is_fn: bool,
pub compiler: bool,
pub ffi: Vec<Span>,
pub raw_deref: Vec<Span>,
pub static_mut: Vec<Span>,
pub unsafe_call: Vec<Span>,
pub transmute: Vec<Span>,
pub transmute_imm_to_mut: Vec<Span>,
// these are only picked up when written in unsafe blocks, but *const
// as *mut is legal anywhere.
pub cast_raw_ptr_const_to_mut: Vec<Span>,
pub asm: Vec<Span>,
}
impl NodeInfo {
fn new(span: Span, is_fn: bool, compiler: bool) -> NodeInfo {
NodeInfo {
span: span,
is_fn: is_fn,
compiler: compiler,
ffi: Vec::new(),
raw_deref: Vec::new(),
static_mut: Vec::new(),
unsafe_call: Vec::new(),
transmute: Vec::new(),
transmute_imm_to_mut: Vec::new(),
cast_raw_ptr_const_to_mut: Vec::new(),
asm: Vec::new()
}
}
}
impl fmt::Debug for NodeInfo {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let mut first = true;
macro_rules! p ( ($fmt: tt, $name: ident) => {
if !self.$name.is_empty() {
if !first {
try!(write!(fmt, ", "));
} else {
first = false
}
try!(write!(fmt, concat!("{} ", $fmt), self.$name.len()))
}
});
p!("asm", asm);
p!("deref", raw_deref);
p!("ffi", ffi);
p!("static mut", static_mut);
p!("transmute", transmute);
p!("transmute & to &mut", transmute_imm_to_mut);
p!("cast *const to *mut", cast_raw_ptr_const_to_mut);
p!("unsafe call", unsafe_call);
// silence dead assign warning
if first {}
Ok(())
}
}
pub struct UnsafeVisitor<'tcx, 'a: 'tcx> {
tcx: &'tcx ty::ctxt<'a>,
/// Whether we're in an unsafe context.
node_info: Option<(ast::NodeId, NodeInfo)>,
pub unsafes: BTreeMap<ast::NodeId, NodeInfo>,
}
impl<'tcx, 'a> UnsafeVisitor<'tcx, 'a> {
pub fn new(tcx: &'tcx ty::ctxt<'a>) -> UnsafeVisitor<'tcx, 'a> {
UnsafeVisitor {
tcx: tcx,
node_info: None,
unsafes: BTreeMap::new(),
}
}
pub fn check_crate(&mut self, krate: &ast::Crate) {
visit::walk_crate(self, krate)
}
fn info<'b>(&'b mut self) -> &'b mut NodeInfo {
&mut self.node_info.as_mut().unwrap().1
}
fn check_ptr_cast(&mut self, span: Span, from: &ast::Expr, to: &ast::Expr) -> bool {
let from_ty = ty::expr_ty(self.tcx, from);
let to_ty = ty::expr_ty(self.tcx, to);
match (&from_ty.sty, &to_ty.sty) {
(&ty::ty_rptr(_, ty::mt { mutbl: ast::MutImmutable, .. }),
&ty::ty_rptr(_, ty::mt { mutbl: ast::MutMutable, .. })) => {
self.info().transmute_imm_to_mut.push(span);
true
}
(&ty::ty_ptr(ty::mt { mutbl: ast::MutImmutable, .. }),
&ty::ty_ptr(ty::mt { mutbl: ast::MutMutable, .. })) => {
self.info().cast_raw_ptr_const_to_mut.push(span);
true
}
_ => {
false
}
}
}
}
impl<'tcx,'a,'b> Visitor<'a> for UnsafeVisitor<'tcx,'b> {
fn visit_fn(&mut self, fn_kind: visit::FnKind<'a>, fn_decl: &'a ast::FnDecl,
block: &ast::Block, span: Span, node_id: ast::NodeId) {
let (is_item_fn, is_unsafe_fn) = match fn_kind {
visit::FkItemFn(_, _, fn_style, _, _) =>
(true, fn_style == ast::Unsafety::Unsafe),
visit::FkMethod(_, sig, _) =>
(true, sig.unsafety == ast::Unsafety::Unsafe),
_ => (false, false),
};
let old_node_info = if is_unsafe_fn {
replace(&mut self.node_info, Some((node_id, NodeInfo::new(span, true, false))))
} else if is_item_fn {
replace(&mut self.node_info, None)
} else {
None
};
visit::walk_fn(self, fn_kind, fn_decl, block, span);
match replace(&mut self.node_info, old_node_info) {
Some((id, info)) => assert!(self.unsafes.insert(id, info).is_none()),
//Some((id, info)) => { self.unsafes.insert(id, info); }
None => {}
}
}
fn visit_block(&mut self, block: &'a ast::Block) {
let (old_node_info, inserted) = match block.rules {
ast::DefaultBlock => (None, false),
ast::UnsafeBlock(source) => {
let compiler = source == ast::CompilerGenerated;
if self.node_info.is_none() || compiler {
(replace(&mut self.node_info,
Some((block.id, NodeInfo::new(block.span, false, compiler)))),
true)
} else {
(None, false)
}
}
};
visit::walk_block(self, block);
if inserted {
match replace(&mut self.node_info, old_node_info) {
Some((id, info)) => assert!(self.unsafes.insert(id, info).is_none()),
//Some((id, info)) => { self.unsafes.insert(id, info); }
None => {}
}
}
}
fn visit_expr(&mut self, expr: &'a ast::Expr) {
if self.node_info.is_some() {
match expr.node {
ast::ExprMethodCall(_, _, _) => {
let method_call = MethodCall::expr(expr.id);
let base_type = self.tcx.method_map.borrow()[&method_call].ty;
if type_is_unsafe_function(base_type) {
self.info().unsafe_call.push(expr.span)
}
}
ast::ExprCall(ref base, ref args) => {
match (&base.node, &**args) {
(&ast::ExprPath(_, ref p), [ref arg])
// ew, but whatever.
if p.segments.last().unwrap().identifier.name ==
token::intern("transmute") => {
if !self.check_ptr_cast(expr.span, &**arg, expr) {
// not a */& -> *mut/&mut cast.
self.info().transmute.push(expr.span)
}
}
_ => {
let is_ffi = match self.tcx.def_map.borrow().get(&base.id) {
Some(&def::PathResolution { base_def: def::DefFn(did, _), .. }) => {
// cross-crate calls are always
// just unsafe calls.
ast_util::is_local(did) &&
match self.tcx.map.get(did.node) {
ast_map::NodeForeignItem(_) => true,
_ => false
}
}
_ => false
};
if is_ffi {
self.info().ffi.push(expr.span)
} else {
let base_type = ty::node_id_to_type(self.tcx, base.id);
if type_is_unsafe_function(base_type) {<|fim▁hole|> }
}
}
ast::ExprUnary(ast::UnDeref, ref base) => {
let base_type = ty::node_id_to_type(self.tcx, base.id);
match base_type.sty {
ty::ty_ptr(_) => {
self.info().raw_deref.push(expr.span)
}
_ => {}
}
}
ast::ExprInlineAsm(..) => {
self.info().asm.push(expr.span)
}
ast::ExprPath(..) => {
match ty::resolve_expr(self.tcx, expr) {
def::DefStatic(_, true) => {
self.info().static_mut.push(expr.span)
}
_ => {}
}
}
ast::ExprCast(ref from, _) => {
self.check_ptr_cast(expr.span, &**from, expr);
}
_ => {}
}
}
visit::walk_expr(self, expr);
}
}<|fim▁end|> | self.info().unsafe_call.push(expr.span)
}
} |
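The visitor pattern above has a close analog in Python's ast module. A toy NodeVisitor in the same spirit, recording the location of every call to a given name instead of unsafe Rust constructs:

import ast

class CallCounter(ast.NodeVisitor):
    def __init__(self, name):
        self.name = name
        self.spans = []  # (line, column) of each matching call

    def visit_Call(self, node):
        if isinstance(node.func, ast.Name) and node.func.id == self.name:
            self.spans.append((node.lineno, node.col_offset))
        self.generic_visit(node)  # keep walking nested expressions

counter = CallCounter('transmute')
counter.visit(ast.parse("x = transmute(y); z = f(transmute(w))"))
assert len(counter.spans) == 2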
<|file_name|>test_patch.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2017 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import codecs
import json
import os
from unittest.case import expectedFailure
from commoncode.testcase import FileBasedTesting
from commoncode.text import as_unicode
from extractcode import patch
class TestIsPatch(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
def test_is_not_patch(self):
test_dir = self.get_test_loc('patch/not_patches', copy=True)
for r, _, files in os.walk(test_dir):
for f in files:
test_file = os.path.join(r, f)
assert not patch.is_patch(test_file)
def test_is_patch(self):
test_dir = self.get_test_loc('patch/patches', copy=True)
for r, _, files in os.walk(test_dir):
for f in files:
if not f.endswith('expected'):
test_file = os.path.join(r, f)
assert patch.is_patch(test_file)
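# Shared helper for the TestPatchInfo cases below: parse `test_file` with
# patch.patch_info(), normalize each (source, target, lines) triple to
# unicode, and assert the result matches the JSON stored in `expected_file`.
# Passing regen=True rewrites the .expected fixture from the current output
# before comparing.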
def check_patch(test_file, expected_file, regen=False):
result = [list(pi) for pi in patch.patch_info(test_file)]
    result = [[as_unicode(s), as_unicode(t), [as_unicode(l) for l in lines]]
              for s, t, lines in result]
if regen:
with codecs.open(expected_file, 'wb', encoding='utf-8') as regened:
json.dump(result, regened, indent=2)
with codecs.open(expected_file, 'rb', encoding='utf-8') as expect:
expected = json.load(expect)
assert expected == result
class TestPatchInfoFailing(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
# FIXME: these tests need love and eventually a bug report upstream
@expectedFailure
def test_patch_info_patch_patches_misc_webkit_opensource_patches_sync_xhr_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/sync_xhr.patch')
        # fails with an "Unable to parse patch file" exception
list(patch.patch_info(test_file))
@expectedFailure
def test_patch_info_patch_patches_problematic_opensso_patch(self):
test_file = self.get_test_loc(u'patch/patches/problematic/OpenSSO.patch')
        # fails with an "Unable to parse patch file" exception
list(patch.patch_info(test_file))
class TestPatchInfo(FileBasedTesting):
test_data_dir = os.path.join(os.path.dirname(__file__), 'data')
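    # One test per fixture: each case below parses a .patch/.diff file from
    # the patch/patches/ test data and checks it against its .expected JSON.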
def test_patch_info_patch_patches_dnsmasq_2_63_1_diff(self):
test_file = self.get_test_loc(u'patch/patches/dnsmasq_2.63-1.diff')
expected_file = self.get_test_loc('patch/patches/dnsmasq_2.63-1.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_dropbear_2012_55_1_diff(self):
test_file = self.get_test_loc(u'patch/patches/dropbear_2012.55-1.diff')
expected_file = self.get_test_loc('patch/patches/dropbear_2012.55-1.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_0_5_longjmp_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.0.5-longjmp.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.0.5-longjmp.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_1_vaarg_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.1-vaarg.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.1-vaarg.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_2_2_madvise_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.2.2-madvise.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.2.2-madvise.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_electricfence_2_2_2_pthread_patch(self):
test_file = self.get_test_loc(u'patch/patches/ElectricFence-2.2.2-pthread.patch')
expected_file = self.get_test_loc('patch/patches/ElectricFence-2.2.2-pthread.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_libmediainfo_0_7_43_diff(self):
test_file = self.get_test_loc(u'patch/patches/libmediainfo-0.7.43.diff')
expected_file = self.get_test_loc('patch/patches/libmediainfo-0.7.43.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_avahi_0_6_25_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/avahi-0.6.25/patches/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/avahi-0.6.25/patches/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_avahi_0_6_25_patches_main_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/avahi-0.6.25/patches/main.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/avahi-0.6.25/patches/main.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_busybox_patches_fix_subarch_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/fix-subarch.patch')
expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/fix-subarch.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_busybox_patches_gtrick_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/gtrick.patch')
expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/gtrick.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_busybox_patches_workaround_old_uclibc_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/busybox/patches/workaround_old_uclibc.patch')
expected_file = self.get_test_loc('patch/patches/misc/busybox/patches/workaround_old_uclibc.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_curl_patches_ekioh_cookie_fix_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/curl/patches/ekioh_cookie_fix.patch')
expected_file = self.get_test_loc('patch/patches/misc/curl/patches/ekioh_cookie_fix.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_e2fsprogs_1_37_uuidlibs_blkidlibs_only_target_makefile_in_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch')
expected_file = self.get_test_loc('patch/patches/misc/e2fsprogs-1.37/uuidlibs_blkidlibs_only_target_Makefile.in.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_ekioh_svg_opensource_patches_patch_ekioh_config_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch')
expected_file = self.get_test_loc('patch/patches/misc/ekioh-svg/opensource/patches/patch_ekioh_config.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webcore_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/accelerated_blit_webcore.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_accelerated_blit_webkit_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/accelerated_blit_webkit.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_animated_gif_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/animated_gif.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/animated_gif.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_computed_style_for_transform_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/computed_style_for_transform.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_cookies_fixes_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/cookies_fixes.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_dlna_image_security_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/dlna_image_security.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_draw_pattern_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/draw_pattern.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/draw_pattern.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_enable_logs_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/enable_logs.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/enable_logs.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_enable_proxy_setup_log_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/enable_proxy_setup_log.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_file_secure_mode_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/file_secure_mode.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_http_secure_mode_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/http_secure_mode.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_javascript_screen_resolution_fix_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/javascript_screen_resolution_fix.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_keycode_webkit_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/keycode_webkit.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_local_file_access_whitelist_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/local_file_access_whitelist.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_lower_case_css_attributes_for_transform_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/lower_case_css_attributes_for_transform.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_moving_empty_image_leaves_garbage_on_screen_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/moving_empty_image_leaves_garbage_on_screen.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_open_in_new_window_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/open_in_new_window.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_plugin_thread_async_call_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/plugin_thread_async_call.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_ram_cache_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/ram_cache.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/ram_cache.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_ram_cache_meta_expires_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/ram_cache_meta_expires.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_speedup_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/speedup.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/speedup.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_sync_xhr_https_access_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/sync_xhr_https_access.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_useragent_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/useragent.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/useragent.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webcore_keyevent_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webcore_keyevent.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webcore_videoplane_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webcore_videoplane.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_cssparser_parsetransitionshorthand_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_CSSParser_parseTransitionShorthand.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_database_support_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_database_support.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_dlna_images_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_dlna_images.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_finish_animations_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_finish_animations.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_patches_webkit_xmlhttprequest_cross_domain_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/patches/webkit_xmlhttprequest_cross_domain.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_createobject_null_check_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-createobject-null-check.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_dump_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-dump.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_getopensourcenotice_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-getopensourcenotice.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_jsvalue_equal_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-jsvalue-equal.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_moto_timer_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/moto-timer.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_support_parallel_idl_gen_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/support_parallel_idl_gen.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_accept_click_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/webcore_accept_click.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_webkit_opensource_prepatches_webcore_videoplane_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch')
expected_file = self.get_test_loc('patch/patches/misc/webkit/opensource/prepatches/webcore_videoplane.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_icu_patches_ekioh_config_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/icu/patches/ekioh-config.patch')
expected_file = self.get_test_loc('patch/patches/misc/icu/patches/ekioh-config.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_jfsutils_patches_largefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/jfsutils/patches/largefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/jfsutils/patches/largefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libasyncns_asyncns_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libasyncns/asyncns.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/libasyncns/asyncns.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libasyncns_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libasyncns/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/libasyncns/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libdaemon_0_13_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libdaemon-0.13/patches/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/libdaemon-0.13/patches/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libiconv_patches_cp932_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libiconv/patches/cp932.patch')
expected_file = self.get_test_loc('patch/patches/misc/libiconv/patches/cp932.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libiconv_patches_make_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libiconv/patches/make.patch')
expected_file = self.get_test_loc('patch/patches/misc/libiconv/patches/make.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_config_sub_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/config.sub.patch')
expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/config.sub.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libjpeg_v6b_patches_makefile_cfg_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch')
expected_file = self.get_test_loc('patch/patches/misc/libjpeg-v6b/patches/makefile.cfg.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libpng_1_2_8_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libpng_1_2_8_pngconf_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/pngconf.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/pngconf.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libpng_1_2_8_pngrutil_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libpng-1.2.8/pngrutil.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/libpng-1.2.8/pngrutil.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_libxml2_patches_iconv_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/libxml2/patches/iconv.patch')
expected_file = self.get_test_loc('patch/patches/misc/libxml2/patches/iconv.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_0001_stmmac_updated_the_driver_and_added_several_fixes_a_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/0001-stmmac-updated-the-driver-and-added-several-fixes-a.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_addrspace_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/addrspace.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/addrspace.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_kernel_cpu_init_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_kernel_cpu_init.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_Makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_arch_sh_mm_init_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/arch_sh_mm_init.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_bigphysarea_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/bigphysarea.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/bigphysarea.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_bugs_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/bugs.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/bugs.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cache_sh4_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cache-sh4.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cfi_cmdset_0001_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cfi_cmdset_0001.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cfi_util_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cfi_util.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cfi_util.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_char_build_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/char_build.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/char_build.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_cmdlinepart_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/cmdlinepart.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_console_printk_loglevel_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/console_printk_loglevel.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_delayed_i2c_read_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/delayed_i2c_read.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_devinet_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/devinet.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/devinet.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_disable_carrier_sense_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/disable_carrier_sense.patch.expected')
check_patch(test_file, expected_file)
<|fim▁hole|> check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_dma_api_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/dma-api.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/dma-api.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_do_mounts_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/do_mounts.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/do_mounts.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_drivers_net_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/drivers_net_Makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_fan_ctrl_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/fan_ctrl.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/fan_ctrl.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_hcd_stm_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/hcd_stm.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_head_s_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/head.S.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/head.S.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_stm_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_stm_c_patch2(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c-stm.c.patch2.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_nostop_for_bitbanging_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_nostop_for_bitbanging.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_rate_normal_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_rate_normal.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_i2c_revert_to_117_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/i2c_revert_to_117.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_if_ppp_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/if_ppp.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/if_ppp.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_inittmpfs_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/inittmpfs.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/inittmpfs.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_init_kconfig_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/init_Kconfig.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/init_Kconfig.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_init_main_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/init_main.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/init_main.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_ioremap_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ioremap.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ioremap.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_ipconfig_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ipconfig.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ipconfig.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_kernel_extable_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kernel_extable.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_kernel_resource_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kernel_resource.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kernel_resource.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_kexec_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/kexec.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/kexec.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_ksymhash_elflib_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/ksymhash_elflib.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_libata_sense_data_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/libata_sense_data.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/libata_sense_data.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_localversion_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/localversion.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/localversion.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_mach_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mach.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mach.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_marvell_88e3015_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/marvell_88e3015.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_mb442_setup_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mb442_setup.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_mmu_context_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/mmu_context.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/mmu_context.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_motorola_make_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/motorola_make.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/motorola_make.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_motorola_rootdisk_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/motorola_rootdisk.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_namespace_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/namespace.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/namespace.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_nand_flash_based_bbt_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nand_flash_based_bbt.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_nand_old_oob_layout_for_yaffs2_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nand_old_oob_layout_for_yaffs2.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_netconsole_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/netconsole.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/netconsole.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_netconsole_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/netconsole.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/netconsole.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_nfsroot_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/nfsroot.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/nfsroot.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_page_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/page.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/page.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_page_alloc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/page_alloc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/page_alloc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pgtable_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pgtable.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pgtable.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_phy_device_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/phy_device.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/phy_device.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pid_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pid.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pid.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pio_irq_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pio-irq.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pio-irq.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_pmb_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/pmb.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/pmb.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_process_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/process.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/process.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sample_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sample.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sample.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sched_cfs_v2_6_23_12_v24_1_mod_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sched-cfs-v2.6.23.12-v24.1.mod.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_stb7100_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup-stb7100.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_stx7105_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup-stx7105.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_setup_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/setup.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/setup.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sh_kernel_setup_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sh_kernel_setup.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_sh_ksyms_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/sh_ksyms.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_smsc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/smsc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/smsc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_smsc_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/smsc_makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/smsc_makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_soc_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/soc.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/soc.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert1_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert1.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert2_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert2.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_3_revert3_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.3_revert3.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_squashfs3_4_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/squashfs3.4.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/squashfs3.4.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_stasc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/stasc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/stasc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_stmmac_main_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/stmmac_main.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_suppress_igmp_report_listening_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/suppress_igmp_report_listening.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_time_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/time.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/time.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_2_5_1_for_2_6_23_17_diff(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/unionfs-2.5.1_for_2.6.23.17.diff.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_unionfs_remove_debug_printouts_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/unionfs_remove_debug_printouts.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19x0_vidmem_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19x0_vidmem.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19x3_board_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19x3_board.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19x3_board.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_nand_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx_nand.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vip19xx_nor_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vip19xx_nor.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_vt_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/vt.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/vt.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_yaffs_guts_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17-yaffs_guts.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_linux_st710x_patches_yaffs2_2008_07_15_for_2_6_23_17_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/yaffs2-2008.07.15_for_2.6.23.17.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_npapi_patches_npapi_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/npapi/patches/npapi.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/npapi/patches/npapi.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_openssl_0_9_8_patches_configure_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/openssl-0.9.8/patches/Configure.patch')
expected_file = self.get_test_loc('patch/patches/misc/openssl-0.9.8/patches/Configure.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_sqlite_patches_permissions_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/sqlite/patches/permissions.patch')
expected_file = self.get_test_loc('patch/patches/misc/sqlite/patches/permissions.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_arpping_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/arpping.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_clientpacket_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/clientpacket.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_clientpacket_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/clientpacket.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_debug_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/debug.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpc_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpc.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpc_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpc.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_dhcpd_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/dhcpd.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_makefile_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/Makefile.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_options_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/options.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/options.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_options_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/options.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/options.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_packet_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/packet.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_packet_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/packet.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_route_patch1(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/route.patch1')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/route.patch1.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_script_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/script.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/script.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_udhcp_0_9_8_patch_t1t2_patch1(self):
test_file = self.get_test_loc(u'patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1')
expected_file = self.get_test_loc('patch/patches/misc/udhcp-0.9.8/patch/t1t2.patch1.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_build_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/BUILD.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/BUILD.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_cross_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/cross.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/cross.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_uclibc_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/uclibc.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/uclibc.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_vqec_ifclient_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_ifclient.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_ifclient.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_vqec_wv_c_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_wv.c.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_wv.c.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_misc_vqec_patch_vqec_wv_h_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/vqec/patch/vqec_wv.h.patch')
expected_file = self.get_test_loc('patch/patches/misc/vqec/patch/vqec_wv.h.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_postgrey_1_30_group_patch(self):
test_file = self.get_test_loc(u'patch/patches/postgrey-1.30-group.patch')
expected_file = self.get_test_loc('patch/patches/postgrey-1.30-group.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_drupal_upload_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/drupal_upload.patch')
expected_file = self.get_test_loc('patch/patches/windows/drupal_upload.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_ether_patch_1_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/ether_patch_1.patch')
expected_file = self.get_test_loc('patch/patches/windows/ether_patch_1.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_js_delete_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/js_delete.patch')
expected_file = self.get_test_loc('patch/patches/windows/js_delete.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_plugin_explorer_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/plugin explorer.patch')
expected_file = self.get_test_loc('patch/patches/windows/plugin explorer.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_resolveentity32_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/resolveentity32.patch')
expected_file = self.get_test_loc('patch/patches/windows/resolveentity32.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_sift_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/sift.patch')
expected_file = self.get_test_loc('patch/patches/windows/sift.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_thumbnail_support_0_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/thumbnail_support_0.patch')
expected_file = self.get_test_loc('patch/patches/windows/thumbnail_support_0.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_thumbnail_support_0_patch_1(self):
test_file = self.get_test_loc(u'patch/patches/windows/thumbnail_support_0.patch.1')
expected_file = self.get_test_loc('patch/patches/windows/thumbnail_support_0.patch.1.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_webform_3_0_conditional_constructor_0_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/webform-3.0-conditional_constructor_0.patch')
expected_file = self.get_test_loc('patch/patches/windows/webform-3.0-conditional_constructor_0.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_windows_xml_rpc_addspace_patch(self):
test_file = self.get_test_loc(u'patch/patches/windows/xml_rpc_addSpace.patch')
expected_file = self.get_test_loc('patch/patches/windows/xml_rpc_addSpace.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_xvidcap_1_1_6_docdir_patch(self):
test_file = self.get_test_loc(u'patch/patches/xvidcap-1.1.6-docdir.patch')
expected_file = self.get_test_loc('patch/patches/xvidcap-1.1.6-docdir.patch.expected')
check_patch(test_file, expected_file)
def test_patch_info_patch_patches_xvidcap_xorg_patch(self):
test_file = self.get_test_loc(u'patch/patches/xvidcap-xorg.patch')
expected_file = self.get_test_loc('patch/patches/xvidcap-xorg.patch.expected')
check_patch(test_file, expected_file)<|fim▁end|> | def test_patch_info_patch_patches_misc_linux_st710x_patches_disable_unaligned_printks_patch(self):
test_file = self.get_test_loc(u'patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch')
expected_file = self.get_test_loc('patch/patches/misc/linux-st710x/patches/disable_unaligned_printks.patch.expected') |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
require("matchdep").filterDev("grunt-*").forEach(grunt.loadNpmTasks);
grunt.initConfig({
pkg: grunt.file.readJSON("package.json"),
copy: {
main: {
expand: true,
cwd: "src/",
src: ["**", "!css/**/*.scss", "!css/**/*.less"],
dest: "dist/"<|fim▁hole|> },
less: {
options: {
paths: ["src/css"]
},
src: {
expand: true,
cwd: "src/css",
src: "*.less",
ext: ".css",
dest: "src/css"
}
},
sass: {
dist:{
options:{
style: 'expanded', // values: nested, expanded, compact, compressed
noCache: true
},
files:[{
expand: true,
cwd: "src/css",
src: ["*.scss"],
dest: "src/css",
ext: ".css"
}]
}
},
watch: {
options: {
nospawn: true,
livereload: true
},
less: {
files: ["src/css/**/*.less"],
tasks: ["less"]
},
sass: {
files: ["src/css/**/*.scss"],
tasks: ["sass"]
},
copy: {
files: ["src/**"],
tasks: ["copy:main"]
}
}
});
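    // Illustrative usage (an assumption, not part of the original file): running
    // "grunt" with no arguments starts the watcher, which recompiles LESS/Sass
    // and re-copies src/ into dist/ whenever a file under src/ changes.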
grunt.registerTask("default", ["watch"]);
};<|fim▁end|> | } |
<|file_name|>result_form_unittests.py<|end_file_name|><|fim▁begin|>"""Unit tests for result_form_functional_tests.py
Systems:
- indicators.views.ResultCreate
- bad indicator id 404
- get with good ids gives form
- initial form data is correct
- correct disaggregation values
- form valid returns appropriate response
- form invalid returns appropriate response
- indicators.views.ResultUpdate
- indicators.forms.ResultForm
"""
import datetime
from indicators.views import ResultCreate, ResultUpdate
from indicators.forms import ResultForm
from indicators.models import Indicator, Result
from factories import (
indicators_models as i_factories,
workflow_models as w_factories
)
from django.urls import reverse
from django.http import Http404
from django import test
class TestResultCreateUpdate404(test.TestCase):
def setUp(self):
self.program = w_factories.ProgramFactory()
self.indicator = i_factories.IndicatorFactory(
program=self.program
)
self.result = i_factories.ResultFactory(
indicator=self.indicator
)
self.user = w_factories.UserFactory(first_name="FN", last_name="LN", username="tester", is_superuser=True)
self.user.set_password('password')
self.user.save()
self.tola_user = w_factories.TolaUserFactory(user=self.user)
self.tola_user.save()
self.client = test.Client(enforce_csrf_checks=False)
self.client.login(username='tester', password='password')
def test_create_view_raises_404_with_bad_indicator_id(self):
kwargs = {
'indicator': self.indicator.id + 1
}
bad_url = reverse('result_add', kwargs=kwargs)
response = self.client.get(bad_url)
self.assertEqual(response.status_code, 404)
def test_update_view_raises_404_with_bad_result_id(self):
kwargs = {
'pk': self.result.id + 1
}
bad_url = reverse('result_update', kwargs=kwargs)
response = self.client.get(bad_url)
self.assertEqual(response.status_code, 404)
class TestUpdateFormInitialValues(test.TestCase):
def setUp(self):
self.program = w_factories.ProgramFactory()
self.indicator = i_factories.IndicatorFactory(
program=self.program,
target_frequency=Indicator.ANNUAL
)
self.result = i_factories.ResultFactory(
indicator=self.indicator,
)
self.result.record_name = 'record name'
self.result.evidence_url = 'evidence url'
self.blank_result = i_factories.ResultFactory(
indicator=self.indicator
)
self.tola_user = w_factories.TolaUserFactory()
self.user = self.tola_user.user
self.request = type('Request', (object,), {'has_write_access': True, 'user': self.user})()
def test_initial_values(self):
form = ResultForm(user=self.user, indicator=self.indicator, program=self.program, instance=self.result, request=self.request)
self.assertEqual(form['achieved'].value(), self.result.achieved)
self.assertEqual(form['target_frequency'].value(), Indicator.ANNUAL)
self.assertEqual(form['indicator'].value(), self.indicator.id)
self.assertEqual(form['date_collected'].value(), self.result.date_collected)
self.assertEqual(form['record_name'].value(), 'record name')
self.assertEqual(form['evidence_url'].value(), 'evidence url')
def test_initial_values_no_evidence(self):
form = ResultForm(user=self.user, indicator=self.indicator, program=self.program, instance=self.blank_result, request=self.request)
self.assertEqual(form['achieved'].value(), self.result.achieved)
self.assertEqual(form['target_frequency'].value(), Indicator.ANNUAL)
self.assertEqual(form['indicator'].value(), self.indicator.id)
self.assertEqual(form['record_name'].value(), None)
self.assertEqual(form['evidence_url'].value(), None)
def test_create_form_initial_values(self):
form = ResultForm(user=self.user, indicator=self.indicator, program=self.program, request=self.request)
self.assertEqual(form['indicator'].value(), self.indicator.id)
self.assertEqual(form['program'].value(), self.program.id)
self.assertEqual(form['achieved'].value(), None)
self.assertEqual(form['record_name'].value(), None)
self.assertEqual(form['evidence_url'].value(), None)
class TestCreateValidation(test.TestCase):
def setUp(self):
self.program = w_factories.ProgramFactory(
reporting_period_start=datetime.date(2016, 1, 1),<|fim▁hole|> target_frequency=Indicator.LOP
)
self.tola_user = w_factories.TolaUserFactory()
self.user = self.tola_user.user
self.request = type('Request', (object,), {'has_write_access': True, 'user': self.user})()
self.form_kwargs = {
'user': self.user,
'indicator': self.indicator,
'program': self.program,
'request': self.request,
}
def test_good_data_validates(self):
minimal_data = {
'date_collected': '2016-01-01',
'achieved': '30',
'indicator': self.indicator.id,
'program': self.program.id,
'rationale': 'this is a rationale'
}
form = ResultForm(minimal_data, **self.form_kwargs)
self.assertTrue(form.is_valid(), "errors {0}".format(form.errors))
new_result = form.save()
self.assertIsNotNone(new_result.id)
db_result = Result.objects.get(pk=new_result.id)
self.assertEqual(db_result.date_collected, datetime.date(2016, 1, 1))
self.assertEqual(db_result.achieved, 30)
def test_good_data_with_evidence_validates(self):
minimal_data = {
'date_collected': '2016-03-31',
'achieved': '30',
'indicator': self.indicator.id,
'program': self.program.id,
'record_name': 'new record',
'evidence_url': 'http://google.com',
'rationale': 'this is a rationale'
}
form = ResultForm(minimal_data, **self.form_kwargs)
self.assertTrue(form.is_valid(), "errors {0}".format(form.errors))
new_result = form.save()
self.assertIsNotNone(new_result.id)
db_result = Result.objects.get(pk=new_result.id)
self.assertEqual(db_result.record_name, 'new record')
def test_good_data_updating_evidence_validates(self):
minimal_data = {
'date_collected': '2016-03-31',
'achieved': '30',
'indicator': self.indicator.id,
'program': self.program.id,
'record_name': 'existing record',
'evidence_url': 'http://google.com',
'rationale': 'this is a rationale'
}
form = ResultForm(minimal_data, **self.form_kwargs)
self.assertTrue(form.is_valid(), "errors {0}".format(form.errors))
new_result = form.save()
self.assertIsNotNone(new_result.id)
db_result = Result.objects.get(pk=new_result.id)
self.assertEqual(db_result.record_name, 'existing record')
self.assertEqual(db_result.evidence_url, 'http://google.com')
@test.tag('slow')
def test_adding_record_without_name_passes_validation(self):
bad_data = {
'date_collected': '2016-03-31',
'achieved': '30',
'indicator': self.indicator.id,
'program': self.program.id,
'evidence_url': 'http://google.com',
'rationale': 'this is a rationale'
}
form = ResultForm(bad_data, **self.form_kwargs)
self.assertTrue(form.is_valid())
def test_adding_record_without_url_fails_validation(self):
bad_data = {
'date_collected': '2016-03-31',
'achieved': '30',
'indicator': self.indicator.id,
'program': self.program.id,
'record_name': 'new record',
}
form = ResultForm(bad_data, **self.form_kwargs)
self.assertFalse(form.is_valid())
self.assertIn('evidence_url', form.errors)
# date_collected validation
def test_collected_date_before_program_start(self):
minimal_data = {
'date_collected': '2015-12-31',
'achieved': '30',
'indicator': self.indicator.id,
'program': self.program.id,
}
form = ResultForm(minimal_data, **self.form_kwargs)
self.assertFalse(form.is_valid())
self.assertIn('date_collected', form.errors)
def test_collected_date_after_program_end(self):
minimal_data = {
'date_collected': '2017-1-1',
'achieved': '30',
'indicator': self.indicator.id,
'program': self.program.id,
}
form = ResultForm(minimal_data, **self.form_kwargs)
self.assertFalse(form.is_valid())
self.assertIn('date_collected', form.errors)<|fim▁end|> | reporting_period_end=datetime.date(2016, 12, 31),
)
self.indicator = i_factories.IndicatorFactory(
program=self.program, |
<|file_name|>GenericItem.java<|end_file_name|><|fim▁begin|>package it.unimi.di.big.mg4j.query;
/*
* MG4J: Managing Gigabytes for Java (big)
*
* Copyright (C) 2005-2015 Sebastiano Vigna
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 3 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License<|fim▁hole|> * for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, see <http://www.gnu.org/licenses/>.
*
*/
import it.unimi.di.big.mg4j.document.Document;
import it.unimi.di.big.mg4j.document.DocumentCollection;
import it.unimi.di.big.mg4j.document.DocumentFactory;
import it.unimi.di.big.mg4j.document.DocumentFactory.FieldType;
import it.unimi.dsi.fastutil.objects.ObjectArrayList;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.Reader;
import javax.servlet.ServletConfig;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.ExtendedProperties;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.velocity.Template;
import org.apache.velocity.context.Context;
import org.apache.velocity.tools.view.servlet.VelocityViewServlet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A generic item, displaying all document fields.
*
* <P>This kind of {@link it.unimi.di.big.mg4j.query.QueryServlet} item will display each field
* of a document inside a <samp>FIELDSET</samp> element. It is mainly useful for debugging purposes.
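 *
 * <P>For example (hypothetical request), <samp>?doc=42&amp;m=text/html</samp> renders every
 * field of document 42 of the loaded collection inside such a <samp>FIELDSET</samp>.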
*/
public class GenericItem extends VelocityViewServlet {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = LoggerFactory.getLogger( GenericItem.class );
@Override
protected ExtendedProperties loadConfiguration( final ServletConfig config ) throws FileNotFoundException, IOException {
return HttpQueryServer.setLiberalResourceLoading( super.loadConfiguration( config ) );
}
public Template handleRequest( final HttpServletRequest request, final HttpServletResponse response, final Context context ) throws Exception {
if ( request.getParameter( "doc" ) != null ) {
DocumentCollection collection = (DocumentCollection)getServletContext().getAttribute( "collection" );
response.setContentType( request.getParameter( "m" ) );
response.setCharacterEncoding( "UTF-8" );
final Document document = collection.document( Long.parseLong( request.getParameter( "doc" ) ) );
final DocumentFactory factory = collection.factory();
final ObjectArrayList<String> fields = new ObjectArrayList<String>();
final int numberOfFields = factory.numberOfFields();
LOGGER.debug( "ParsingFactory declares " + numberOfFields + " fields" );
for( int field = 0; field < numberOfFields; field++ ) {
if ( factory.fieldType( field ) != FieldType.TEXT ) fields.add( StringEscapeUtils.escapeHtml( document.content( field ).toString() ) );
else fields.add( StringEscapeUtils.escapeHtml( IOUtils.toString( (Reader)document.content( field ) ) ).replaceAll( "\n", "<br>\n" ) );
}
context.put( "title", document.title() );
context.put( "fields", fields );
context.put( "factory", factory );
return getTemplate( "it/unimi/dsi/mg4j/query/generic.velocity" );
}
return null;
}
}<|fim▁end|> | |
<|file_name|>mail_message.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from email.header import decode_header
from email.utils import formataddr
from odoo import _, api, fields, models, SUPERUSER_ID, tools
from odoo.exceptions import UserError, AccessError
from odoo.osv import expression
_logger = logging.getLogger(__name__)
def decode(text):
"""Returns unicode() string conversion of the the given encoded smtp header text"""
# TDE proposal: move to tools ?
if text:
text = decode_header(text.replace('\r', ''))
# The joining space will not be needed as of Python 3.3
# See https://hg.python.org/cpython/rev/8c03fe231877
return ' '.join([tools.ustr(x[0], x[1]) for x in text])
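# Illustrative behaviour (an assumption, not from the original source): for a
# MIME-encoded header such as '=?utf-8?b?SGVsbG8=?=', decode() returns u'Hello'.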
class Message(models.Model):
""" Messages model: system notification (replacing res.log notifications),
comments (OpenChatter discussion) and incoming emails. """
_name = 'mail.message'
_description = 'Message'
_inherit = ['ir.needaction_mixin']
_order = 'id desc'
_rec_name = 'record_name'
_message_read_limit = 30
@api.model
def _get_default_from(self):
if self.env.user.alias_name and self.env.user.alias_domain:
return formataddr((self.env.user.name, '%s@%s' % (self.env.user.alias_name, self.env.user.alias_domain)))
elif self.env.user.email:
return formataddr((self.env.user.name, self.env.user.email))
raise UserError(_("Unable to send email, please configure the sender's email address or alias."))
@api.model
def _get_default_author(self):
return self.env.user.partner_id
# content
subject = fields.Char('Subject')
date = fields.Datetime('Date', default=fields.Datetime.now)
body = fields.Html('Contents', default='', strip_classes=True)
attachment_ids = fields.Many2many(
'ir.attachment', 'message_attachment_rel',
'message_id', 'attachment_id',
string='Attachments',
help='Attachments are linked to a document through model / res_id and to the message '
'through this field.')
parent_id = fields.Many2one(
'mail.message', 'Parent Message', select=True, ondelete='set null',
help="Initial thread message.")
child_ids = fields.One2many('mail.message', 'parent_id', 'Child Messages')
# related document
model = fields.Char('Related Document Model', select=1)
res_id = fields.Integer('Related Document ID', select=1)
record_name = fields.Char('Message Record Name', help="Name get of the related document.")
# characteristics
message_type = fields.Selection([
('email', 'Email'),
('comment', 'Comment'),
('notification', 'System notification')],
'Type', required=True, default='email',
help="Message type: email for email message, notification for system "
"message, comment for other messages such as user replies",
oldname='type')
subtype_id = fields.Many2one('mail.message.subtype', 'Subtype', ondelete='set null', select=1)
# origin
email_from = fields.Char(
'From', default=_get_default_from,
help="Email address of the sender. This field is set when no matching partner is found and replaces the author_id field in the chatter.")
author_id = fields.Many2one(
'res.partner', 'Author', select=1,
ondelete='set null', default=_get_default_author,
help="Author of the message. If not set, email_from may hold an email address that did not match any partner.")
author_avatar = fields.Binary("Author's avatar", related='author_id.image_small')
# recipients
partner_ids = fields.Many2many('res.partner', string='Recipients')
needaction_partner_ids = fields.Many2many(
'res.partner', 'mail_message_res_partner_needaction_rel', string='Partners with Need Action')
needaction = fields.Boolean(
'Need Action', compute='_get_needaction', search='_search_needaction',
help='Need Action')
channel_ids = fields.Many2many(
'mail.channel', 'mail_message_mail_channel_rel', string='Channels')
# user interface
starred_partner_ids = fields.Many2many(
'res.partner', 'mail_message_res_partner_starred_rel', string='Favorited By')
starred = fields.Boolean(
'Starred', compute='_get_starred', search='_search_starred',
help='Current user has a starred notification linked to this message')
# tracking
tracking_value_ids = fields.One2many(
'mail.tracking.value', 'mail_message_id',
string='Tracking values',
help='Tracked values are stored in a separate model. This field allow to reconstruct '
'the tracking and to generate statistics on the model.')
# mail gateway
no_auto_thread = fields.Boolean(
'No threading for answers',
help='Answers do not go in the original document discussion thread. This has an impact on the generated message-id.')
message_id = fields.Char('Message-Id', help='Message unique identifier', select=1, readonly=1, copy=False)
reply_to = fields.Char('Reply-To', help='Reply email address. Setting the reply_to bypasses the automatic thread creation.')
mail_server_id = fields.Many2one('ir.mail_server', 'Outgoing mail server')
@api.multi
def _get_needaction(self):
""" Need action on a mail.message = notified on my channel """
my_messages = self.sudo().filtered(lambda msg: self.env.user.partner_id in msg.needaction_partner_ids)
for message in self:
message.needaction = message in my_messages
@api.multi
def _is_accessible(self):
self.ensure_one()
return False
@api.model
def _search_needaction(self, operator, operand):
if operator == '=' and operand:
return [('needaction_partner_ids', 'in', self.env.user.partner_id.id)]
return [('needaction_partner_ids', 'not in', self.env.user.partner_id.id)]
@api.depends('starred_partner_ids')
def _get_starred(self):
""" Compute if the message is starred by the current user. """
# TDE FIXME: use SQL
starred = self.sudo().filtered(lambda msg: self.env.user.partner_id in msg.starred_partner_ids)
for message in self:
message.starred = message in starred
@api.model
def _search_starred(self, operator, operand):
if operator == '=' and operand:
return [('starred_partner_ids', 'in', [self.env.user.partner_id.id])]
return [('starred_partner_ids', 'not in', [self.env.user.partner_id.id])]
@api.model
def _needaction_domain_get(self):
return [('needaction', '=', True)]
#------------------------------------------------------
# Notification API
#------------------------------------------------------
@api.model
def mark_all_as_read(self, channel_ids=None, domain=None):
""" Remove all needactions of the current partner. If channel_ids is
given, restrict to messages written in one of those channels. """
partner_id = self.env.user.partner_id.id
if domain is None:
query = "DELETE FROM mail_message_res_partner_needaction_rel WHERE res_partner_id IN %s"
args = [(partner_id,)]
if channel_ids:
query += """
AND mail_message_id in
(SELECT mail_message_id
FROM mail_message_mail_channel_rel
WHERE mail_channel_id in %s)"""
args += [tuple(channel_ids)]
query += " RETURNING mail_message_id as id"
self._cr.execute(query, args)
self.invalidate_cache()
ids = [m['id'] for m in self._cr.dictfetchall()]
else:
# not really efficient method: it does one db request for the
# search, and one for each message in the result set to remove the
# current user from the relation.
msg_domain = [('needaction_partner_ids', 'in', partner_id)]
if channel_ids:
msg_domain += [('channel_ids', 'in', channel_ids)]
unread_messages = self.search(expression.AND([msg_domain, domain]))
unread_messages.sudo().write({'needaction_partner_ids': [(3, partner_id)]})
ids = unread_messages.mapped('id')
notification = {'type': 'mark_as_read', 'message_ids': ids, 'channel_ids': channel_ids}
self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)
return ids
@api.multi
def mark_as_unread(self, channel_ids=None):
""" Add needactions to messages for the current partner. """
partner_id = self.env.user.partner_id.id
for message in self:
message.write({'needaction_partner_ids': [(4, partner_id)]})
ids = [m.id for m in self]
notification = {'type': 'mark_as_unread', 'message_ids': ids, 'channel_ids': channel_ids}
self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)
@api.multi
def set_message_done(self):
""" Remove the needaction from messages for the current partner. """
partner_id = self.env.user.partner_id
messages = self.filtered(lambda msg: partner_id in msg.needaction_partner_ids)
if not len(messages):
return
messages.sudo().write({'needaction_partner_ids': [(3, partner_id.id)]})
# notifies changes in messages through the bus. To minimize the number of
# notifications, we need to group the messages depending on their channel_ids
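        # e.g. (illustrative): messages notified on channels {1, 2}, {1, 2} and
        # {3} collapse into two bus notifications, one for the ids grouped under
        # channels {1, 2} and one for the id under channel {3}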
groups = []
current_channel_ids = messages[0].channel_ids
current_group = []
for record in messages:
if record.channel_ids == current_channel_ids:
current_group.append(record.id)
else:
groups.append((current_group, current_channel_ids))
current_group = [record.id]
current_channel_ids = record.channel_ids
        groups.append((current_group, current_channel_ids))
for (msg_ids, channel_ids) in groups:
notification = {'type': 'mark_as_read', 'message_ids': msg_ids, 'channel_ids': [c.id for c in channel_ids]}
self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', partner_id.id), notification)
@api.model
def unstar_all(self):
""" Unstar messages for the current partner. """
partner_id = self.env.user.partner_id.id
starred_messages = self.search([('starred_partner_ids', 'in', partner_id)])
starred_messages.write({'starred_partner_ids': [(3, partner_id)]})
ids = [m.id for m in starred_messages]
notification = {'type': 'toggle_star', 'message_ids': ids, 'starred': False}
self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)
@api.multi
def toggle_message_starred(self):
""" Toggle messages as (un)starred. Technically, the notifications related
to uid are set to (un)starred.
"""
# a user should always be able to star a message he can read
self.check_access_rule('read')
starred = not self.starred
if starred:
self.sudo().write({'starred_partner_ids': [(4, self.env.user.partner_id.id)]})
else:
self.sudo().write({'starred_partner_ids': [(3, self.env.user.partner_id.id)]})
notification = {'type': 'toggle_star', 'message_ids': [self.id], 'starred': starred}
self.env['bus.bus'].sendone((self._cr.dbname, 'res.partner', self.env.user.partner_id.id), notification)
#------------------------------------------------------
# Message loading for web interface
#------------------------------------------------------
@api.model
def _message_read_dict_postprocess(self, messages, message_tree):
""" Post-processing on values given by message_read. This method will
handle partners in batch to avoid doing numerous queries.
:param list messages: list of message, as get_dict result
:param dict message_tree: {[msg.id]: msg browse record}
"""
# 1. Aggregate partners (author_id and partner_ids), attachments and tracking values
partners = self.env['res.partner']
attachments = self.env['ir.attachment']
trackings = self.env['mail.tracking.value']
for key, message in message_tree.iteritems():
if message.author_id:
partners |= message.author_id
            if message.partner_ids:
                # notified people (message with a subtype) or specified people (log without one)
                partners |= message.partner_ids
if message.attachment_ids:
attachments |= message.attachment_ids
if message.tracking_value_ids:
trackings |= message.tracking_value_ids
# Read partners as SUPERUSER -> display the names like classic m2o even if no access
partners_names = partners.sudo().name_get()
partner_tree = dict((partner[0], partner) for partner in partners_names)
# 2. Attachments as SUPERUSER, because could receive msg and attachments for doc uid cannot see
attachments_data = attachments.sudo().read(['id', 'datas_fname', 'name', 'mimetype'])
attachments_tree = dict((attachment['id'], {
'id': attachment['id'],
'filename': attachment['datas_fname'],
'name': attachment['name'],
'mimetype': attachment['mimetype'],
}) for attachment in attachments_data)
# 3. Tracking values
tracking_tree = dict((tracking.id, {
'id': tracking.id,
'changed_field': tracking.field_desc,
'old_value': tracking.get_old_display_value()[0],
'new_value': tracking.get_new_display_value()[0],
'field_type': tracking.field_type,
}) for tracking in trackings)
# 4. Update message dictionaries
for message_dict in messages:
message_id = message_dict.get('id')
message = message_tree[message_id]
if message.author_id:
author = partner_tree[message.author_id.id]
else:
author = (0, message.email_from)
            # the recipient list is exposed identically for subtyped and untyped messages
            partner_ids = [partner_tree[partner.id] for partner in message.partner_ids
                           if partner.id in partner_tree]
attachment_ids = []
for attachment in message.attachment_ids:
if attachment.id in attachments_tree:
attachment_ids.append(attachments_tree[attachment.id])
tracking_value_ids = []
for tracking_value in message.tracking_value_ids:
if tracking_value.id in tracking_tree:
tracking_value_ids.append(tracking_tree[tracking_value.id])
message_dict.update({
'author_id': author,
'partner_ids': partner_ids,
'attachment_ids': attachment_ids,<|fim▁hole|> })
return True
@api.model
def message_fetch(self, domain, limit=20):
return self.search(domain, limit=limit).message_format()
@api.multi
def message_format(self):
""" Get the message values in the format for web client. Since message values can be broadcasted,
computed fields MUST NOT BE READ and broadcasted.
:returns list(dict).
Example :
{
'body': HTML content of the message
'model': u'res.partner',
'record_name': u'Agrolait',
'attachment_ids': [
{
'file_type_icon': u'webimage',
'id': 45,
'name': u'sample.png',
'filename': u'sample.png'
}
],
'needaction_partner_ids': [], # list of partner ids
'res_id': 7,
'tracking_value_ids': [
{
'old_value': "",
'changed_field': "Customer",
'id': 2965,
'new_value': "Axelor"
}
],
'author_id': (3, u'Administrator'),
'email_from': '[email protected]' # email address or False
'subtype_id': (1, u'Discussions'),
'channel_ids': [], # list of channel ids
'date': '2015-06-30 08:22:33',
'partner_ids': [[7, "Sacha Du Bourg-Palette"]], # list of partner name_get
'message_type': u'comment',
'id': 59,
'subject': False
'is_note': True # only if the subtype is internal
}
"""
message_values = self.read([
'id', 'body', 'date', 'author_id', 'email_from', # base message fields
'message_type', 'subtype_id', 'subject', # message specific
'model', 'res_id', 'record_name', # document related
'channel_ids', 'partner_ids', # recipients
'needaction_partner_ids', # list of partner ids for whom the message is a needaction
'starred_partner_ids', # list of partner ids for whom the message is starred
])
message_tree = dict((m.id, m) for m in self)
self._message_read_dict_postprocess(message_values, message_tree)
# add subtype data (is_note flag, subtype_description). Do it as sudo
# because portal / public may have to look for internal subtypes
subtypes = self.env['mail.message.subtype'].sudo().search(
[('id', 'in', [msg['subtype_id'][0] for msg in message_values if msg['subtype_id']])]).read(['internal', 'description'])
subtypes_dict = dict((subtype['id'], subtype) for subtype in subtypes)
for message in message_values:
message['is_note'] = message['subtype_id'] and subtypes_dict[message['subtype_id'][0]]['internal']
message['subtype_description'] = message['subtype_id'] and subtypes_dict[message['subtype_id'][0]]['description']
return message_values
#------------------------------------------------------
# mail_message internals
#------------------------------------------------------
@api.model_cr
def init(self):
self._cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'mail_message_model_res_id_idx'""")
if not self._cr.fetchone():
self._cr.execute("""CREATE INDEX mail_message_model_res_id_idx ON mail_message (model, res_id)""")
@api.model
def _find_allowed_model_wise(self, doc_model, doc_dict):
doc_ids = doc_dict.keys()
allowed_doc_ids = self.env[doc_model].with_context(active_test=False).search([('id', 'in', doc_ids)]).ids
return set([message_id for allowed_doc_id in allowed_doc_ids for message_id in doc_dict[allowed_doc_id]])
@api.model
def _find_allowed_doc_ids(self, model_ids):
IrModelAccess = self.env['ir.model.access']
allowed_ids = set()
for doc_model, doc_dict in model_ids.iteritems():
if not IrModelAccess.check(doc_model, 'read', False):
continue
allowed_ids |= self._find_allowed_model_wise(doc_model, doc_dict)
return allowed_ids
@api.model
def _search(self, args, offset=0, limit=None, order=None, count=False, access_rights_uid=None):
""" Override that adds specific access rights of mail.message, to remove
ids uid could not see according to our custom rules. Please refer to
check_access_rule for more details about those rules.
        Non-employee users see only messages with a subtype (aka do not see
        internal logs).
After having received ids of a classic search, keep only:
- if author_id == pid, uid is the author, OR
- uid belongs to a notified channel, OR
- uid is in the specified recipients, OR
        - uid has read access to the related document if model, res_id
- otherwise: remove the id
"""
# Rules do not apply to administrator
if self._uid == SUPERUSER_ID:
return super(Message, self)._search(
args, offset=offset, limit=limit, order=order,
count=count, access_rights_uid=access_rights_uid)
        # Non-employees see only messages with a subtype (aka, no internal logs)
if not self.env['res.users'].has_group('base.group_user'):
args = ['&', '&', ('subtype_id', '!=', False), ('subtype_id.internal', '=', False)] + list(args)
# Perform a super with count as False, to have the ids, not a counter
ids = super(Message, self)._search(
args, offset=offset, limit=limit, order=order,
count=False, access_rights_uid=access_rights_uid)
if not ids and count:
return 0
elif not ids:
return ids
pid = self.env.user.partner_id.id
author_ids, partner_ids, channel_ids, allowed_ids = set([]), set([]), set([]), set([])
model_ids = {}
# check read access rights before checking the actual rules on the given ids
super(Message, self.sudo(access_rights_uid or self._uid)).check_access_rights('read')
self._cr.execute("""SELECT DISTINCT m.id, m.model, m.res_id, m.author_id, partner_rel.res_partner_id, channel_partner.channel_id as channel_id
FROM "%s" m
LEFT JOIN "mail_message_res_partner_rel" partner_rel
ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = (%%s)
LEFT JOIN "mail_message_mail_channel_rel" channel_rel
ON channel_rel.mail_message_id = m.id
LEFT JOIN "mail_channel" channel
ON channel.id = channel_rel.mail_channel_id
LEFT JOIN "mail_channel_partner" channel_partner
ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = (%%s)
WHERE m.id = ANY (%%s)""" % self._table, (pid, pid, ids,))
for id, rmod, rid, author_id, partner_id, channel_id in self._cr.fetchall():
if author_id == pid:
author_ids.add(id)
elif partner_id == pid:
partner_ids.add(id)
elif channel_id:
channel_ids.add(id)
elif rmod and rid:
model_ids.setdefault(rmod, {}).setdefault(rid, set()).add(id)
allowed_ids = self._find_allowed_doc_ids(model_ids)
final_ids = author_ids | partner_ids | channel_ids | allowed_ids
if count:
return len(final_ids)
else:
# re-construct a list based on ids, because set did not keep the original order
id_list = [id for id in ids if id in final_ids]
return id_list
@api.multi
def check_access_rule(self, operation):
""" Access rules of mail.message:
- read: if
- author_id == pid, uid is the author OR
- uid is in the recipients (partner_ids) OR
                - uid is a member of a listened channel (channel_ids.partner_ids) OR
                - uid has read access to the related document if model, res_id
- otherwise: raise
- create: if
- no model, no res_id (private message) OR
- pid in message_follower_ids if model, res_id OR
- uid can read the parent OR
                - uid has write or create access on the related document if model, res_id, OR
- otherwise: raise
- write: if
- author_id == pid, uid is the author, OR
- uid is in the recipients (partner_ids) OR
- uid has write or create access on the related document if model, res_id
- otherwise: raise
- unlink: if
- uid has write or create access on the related document if model, res_id
- otherwise: raise
        Specific case: non-employee users see only messages with a subtype (aka do
not see internal logs).
"""
def _generate_model_record_ids(msg_val, msg_ids):
""" :param model_record_ids: {'model': {'res_id': (msg_id, msg_id)}, ... }
:param message_values: {'msg_id': {'model': .., 'res_id': .., 'author_id': ..}}
"""
model_record_ids = {}
for id in msg_ids:
vals = msg_val.get(id, {})
if vals.get('model') and vals.get('res_id'):
model_record_ids.setdefault(vals['model'], set()).add(vals['res_id'])
return model_record_ids
if self._uid == SUPERUSER_ID:
return
        # Non-employees see only messages with a subtype (aka, not internal logs)
if not self.env['res.users'].has_group('base.group_user'):
self._cr.execute('''SELECT DISTINCT message.id, message.subtype_id, subtype.internal
FROM "%s" AS message
LEFT JOIN "mail_message_subtype" as subtype
ON message.subtype_id = subtype.id
WHERE message.message_type = %%s AND (message.subtype_id IS NULL OR subtype.internal IS TRUE) AND message.id = ANY (%%s)''' % (self._table), ('comment', self.ids,))
if self._cr.fetchall():
raise AccessError(
_('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') %
(self._description, operation))
# Read mail_message.ids to have their values
message_values = dict((res_id, {}) for res_id in self.ids)
if operation in ['read', 'write']:
self._cr.execute("""SELECT DISTINCT m.id, m.model, m.res_id, m.author_id, m.parent_id, partner_rel.res_partner_id, channel_partner.channel_id as channel_id
FROM "%s" m
LEFT JOIN "mail_message_res_partner_rel" partner_rel
ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = (%%s)
LEFT JOIN "mail_message_mail_channel_rel" channel_rel
ON channel_rel.mail_message_id = m.id
LEFT JOIN "mail_channel" channel
ON channel.id = channel_rel.mail_channel_id
LEFT JOIN "mail_channel_partner" channel_partner
ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = (%%s)
WHERE m.id = ANY (%%s)""" % self._table, (self.env.user.partner_id.id, self.env.user.partner_id.id, self.ids,))
for mid, rmod, rid, author_id, parent_id, partner_id, channel_id in self._cr.fetchall():
message_values[mid] = {
'model': rmod,
'res_id': rid,
'author_id': author_id,
'parent_id': parent_id,
'notified': any((message_values[mid].get('notified'), partner_id, channel_id))
}
else:
self._cr.execute("""SELECT DISTINCT id, model, res_id, author_id, parent_id FROM "%s" WHERE id = ANY (%%s)""" % self._table, (self.ids,))
for mid, rmod, rid, author_id, parent_id in self._cr.fetchall():
message_values[mid] = {'model': rmod, 'res_id': rid, 'author_id': author_id, 'parent_id': parent_id}
# Author condition (READ, WRITE, CREATE (private))
author_ids = []
if operation == 'read' or operation == 'write':
author_ids = [mid for mid, message in message_values.iteritems()
if message.get('author_id') and message.get('author_id') == self.env.user.partner_id.id]
elif operation == 'create':
author_ids = [mid for mid, message in message_values.iteritems()
if not message.get('model') and not message.get('res_id')]
# Parent condition, for create (check for received notifications for the created message parent)
notified_ids = []
if operation == 'create':
# TDE: probably clean me
parent_ids = [message.get('parent_id') for mid, message in message_values.iteritems()
if message.get('parent_id')]
self._cr.execute("""SELECT DISTINCT m.id, partner_rel.res_partner_id, channel_partner.partner_id FROM "%s" m
LEFT JOIN "mail_message_res_partner_rel" partner_rel
ON partner_rel.mail_message_id = m.id AND partner_rel.res_partner_id = (%%s)
LEFT JOIN "mail_message_mail_channel_rel" channel_rel
ON channel_rel.mail_message_id = m.id
LEFT JOIN "mail_channel" channel
ON channel.id = channel_rel.mail_channel_id
LEFT JOIN "mail_channel_partner" channel_partner
ON channel_partner.channel_id = channel.id AND channel_partner.partner_id = (%%s)
WHERE m.id = ANY (%%s)""" % self._table, (self.env.user.partner_id.id, self.env.user.partner_id.id, parent_ids,))
not_parent_ids = [mid[0] for mid in self._cr.fetchall() if any([mid[1], mid[2]])]
notified_ids += [mid for mid, message in message_values.iteritems()
if message.get('parent_id') in not_parent_ids]
# Recipients condition, for read and write (partner_ids) and create (message_follower_ids)
other_ids = set(self.ids).difference(set(author_ids), set(notified_ids))
model_record_ids = _generate_model_record_ids(message_values, other_ids)
if operation in ['read', 'write']:
notified_ids = [mid for mid, message in message_values.iteritems() if message.get('notified')]
elif operation == 'create':
for doc_model, doc_ids in model_record_ids.items():
followers = self.env['mail.followers'].sudo().search([
('res_model', '=', doc_model),
('res_id', 'in', list(doc_ids)),
('partner_id', '=', self.env.user.partner_id.id),
])
fol_mids = [follower.res_id for follower in followers]
notified_ids += [mid for mid, message in message_values.iteritems()
if message.get('model') == doc_model and message.get('res_id') in fol_mids]
# CRUD: Access rights related to the document
other_ids = other_ids.difference(set(notified_ids))
model_record_ids = _generate_model_record_ids(message_values, other_ids)
document_related_ids = []
for model, doc_ids in model_record_ids.items():
DocumentModel = self.env[model]
mids = DocumentModel.browse(doc_ids).exists()
if hasattr(DocumentModel, 'check_mail_message_access'):
DocumentModel.check_mail_message_access(mids.ids, operation) # ?? mids ?
else:
self.env['mail.thread'].check_mail_message_access(mids.ids, operation, model_name=model)
document_related_ids += [mid for mid, message in message_values.iteritems()
if message.get('model') == model and message.get('res_id') in mids.ids]
# Calculate remaining ids: if not void, raise an error
other_ids = other_ids.difference(set(document_related_ids))
if not other_ids:
return
raise AccessError(
_('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') %
(self._description, operation))
@api.model
def _get_record_name(self, values):
""" Return the related document name, using name_get. It is done using
SUPERUSER_ID, to be sure to have the record name correctly stored. """
model = values.get('model', self.env.context.get('default_model'))
res_id = values.get('res_id', self.env.context.get('default_res_id'))
if not model or not res_id or model not in self.pool:
return False
return self.env[model].sudo().browse(res_id).name_get()[0][1]
@api.model
def _get_reply_to(self, values):
""" Return a specific reply_to: alias of the document through
message_get_reply_to or take the email_from """
        model, res_id, email_from = values.get('model', self._context.get('default_model')), values.get('res_id', self._context.get('default_res_id')), values.get('email_from')  # ctx values / default_get res ?
if model:
# return self.env[model].browse(res_id).message_get_reply_to([res_id], default=email_from)[res_id]
return self.env[model].message_get_reply_to([res_id], default=email_from)[res_id]
else:
# return self.env['mail.thread'].message_get_reply_to(default=email_from)[None]
return self.env['mail.thread'].message_get_reply_to([None], default=email_from)[None]
@api.model
def _get_message_id(self, values):
if values.get('no_auto_thread', False) is True:
message_id = tools.generate_tracking_message_id('reply_to')
elif values.get('res_id') and values.get('model'):
message_id = tools.generate_tracking_message_id('%(res_id)s-%(model)s' % values)
else:
message_id = tools.generate_tracking_message_id('private')
return message_id
@api.model
def create(self, values):
# coming from mail.js that does not have pid in its values
if self.env.context.get('default_starred'):
self = self.with_context({'default_starred_partner_ids': [(4, self.env.user.partner_id.id)]})
if 'email_from' not in values: # needed to compute reply_to
values['email_from'] = self._get_default_from()
if not values.get('message_id'):
values['message_id'] = self._get_message_id(values)
if 'reply_to' not in values:
values['reply_to'] = self._get_reply_to(values)
if 'record_name' not in values and 'default_record_name' not in self.env.context:
values['record_name'] = self._get_record_name(values)
message = super(Message, self).create(values)
message._notify(force_send=self.env.context.get('mail_notify_force_send', True),
user_signature=self.env.context.get('mail_notify_user_signature', True))
return message
@api.multi
def read(self, fields=None, load='_classic_read'):
""" Override to explicitely call check_access_rule, that is not called
by the ORM. It instead directly fetches ir.rules and apply them. """
self.check_access_rule('read')
return super(Message, self).read(fields=fields, load=load)
@api.multi
def unlink(self):
# cascade-delete attachments that are directly attached to the message (should only happen
# for mail.messages that act as parent for a standalone mail.mail record).
self.check_access_rule('unlink')
self.mapped('attachment_ids').filtered(
lambda attach: attach.res_model == self._name and (attach.res_id in self.ids or attach.res_id == 0)
).unlink()
return super(Message, self).unlink()
#------------------------------------------------------
# Messaging API
#------------------------------------------------------
@api.multi
def _notify(self, force_send=False, user_signature=True):
""" Add the related record followers to the destination partner_ids if is not a private message.
Call mail_notification.notify to manage the email sending
"""
group_user = self.env.ref('base.group_user')
# have a sudoed copy to manipulate partners (public can go here with
# website modules like forum / blog / ...
self_sudo = self.sudo()
# TDE CHECK: add partners / channels as arguments to be able to notify a message with / without computation ??
        self.ensure_one()  # tde: not sure, just for testing, will see
partners = self.env['res.partner'] | self.partner_ids
channels = self.env['mail.channel'] | self.channel_ids
# all followers of the mail.message document have to be added as partners and notified
# and filter to employees only if the subtype is internal
if self_sudo.subtype_id and self.model and self.res_id:
followers = self.env['mail.followers'].sudo().search([
('res_model', '=', self.model),
('res_id', '=', self.res_id)
]).filtered(lambda fol: self.subtype_id in fol.subtype_ids)
if self_sudo.subtype_id.internal:
followers = followers.filtered(lambda fol: fol.channel_id or (fol.partner_id.user_ids and group_user in fol.partner_id.user_ids[0].mapped('groups_id')))
channels = self_sudo.channel_ids | followers.mapped('channel_id')
partners = self_sudo.partner_ids | followers.mapped('partner_id')
else:
channels = self_sudo.channel_ids
partners = self_sudo.partner_ids
# remove author from notified partners
if not self._context.get('mail_notify_author', False) and self_sudo.author_id:
partners = partners - self_sudo.author_id
# update message
self.write({'channel_ids': [(6, 0, channels.ids)], 'needaction_partner_ids': [(6, 0, partners.ids)]})
# notify partners and channels
partners._notify(self, force_send=force_send, user_signature=user_signature)
channels._notify(self)
# Discard cache, because child / parent allow reading and therefore
# change access rights.
if self.parent_id:
self.parent_id.invalidate_cache()
return True<|fim▁end|> | 'tracking_value_ids': tracking_value_ids, |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import re
from main import sc
__author__ = 'minh'
<|fim▁hole|>
not_allowed_chars = '[\/*?"<>|\s\t]'
numeric_regex = r"\A((\\-)?[0-9]{1,3}(,[0-9]{3})+(\\.[0-9]+)?)|((\\-)?[0-9]*\\.[0-9]+)|((\\-)?[0-9]+)|((\\-)?[0" \
r"-9]*\\.?[0-9]+([eE][-+]?[0-9]+)?)\Z"
@staticmethod
def is_number(example):
matches = re.match(Utils.numeric_regex, example.strip())
if matches and matches.span()[1] == len(example.strip()):
return True
return False
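    # Illustrative expectations (an assumption, not part of the original file):
    #   Utils.is_number("1,234.56") -> True
    #   Utils.is_number("-0.5")     -> True
    #   Utils.is_number("12ab")     -> False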
@staticmethod
def clean_examples_numeric(examples):
        # strip thousands separators before converting; use None (not "") as the
        # sentinel so legitimate 0.0 values survive the filter
        return sc.parallelize(examples).map(
            lambda x: float(x.replace(",", "")) if Utils.is_number(x) else None).filter(
            lambda x: x is not None).collect()
@staticmethod
def get_distribution(data):
        # emit each value's rank index repeated in proportion to its share of the
        # data; flatMap receives a single ((value, count), index) tuple
        return sc.parallelize(data).map(lambda word: (word, 1)).reduceByKey(lambda a, b: a + b).sortBy(
            lambda x: x).zipWithIndex().flatMap(
            lambda pair: [str(pair[1])] * int(float(pair[0][1]) / len(data) * 100))
@staticmethod
def get_index_name(index_config):
return "%s!%s" % (index_config['name'], index_config['size'])<|fim▁end|> |
class Utils:
def __init__(self):
pass |
<|file_name|>virtimage.py<|end_file_name|><|fim▁begin|>#
# Copyright 2013 Red Hat, Inc.
# Copyright 2008 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
import virtconv.formats as formats
import virtconv.vmcfg as vmcfg
import virtconv.diskcfg as diskcfg
import virtconv.netdevcfg as netdevcfg
from virtinst import virtimage
from xml.sax.saxutils import escape
import re
import logging
ide_letters = list("abcdefghijklmnopqrstuvwxyz")
pv_boot_template = \
""" <boot type="xen">
<guest>
<arch>%(arch)s</arch>
<features>
<pae/>
</features>
</guest>
<os>
<loader>pygrub</loader>
</os>
%(disks)s
</boot>"""
hvm_boot_template = \
""" <boot type="hvm">
<guest>
<arch>%(arch)s</arch>
</guest>
<os>
<loader dev="hd"/>
</os>
%(disks)s
</boot>"""
image_template = \
"""<image>
<name>%(name)s</name>
<label>%(name)s</label>
<description>%(description)s</description>
<domain>
%(boot_template)s
<devices>
<vcpu>%(nr_vcpus)s</vcpu>
<memory>%(memory)s</memory>
%(interface)s
<graphics/>
</devices>
</domain>
<storage>
%(storage)s
</storage>
</image>
"""
def export_os_params(vm):
"""
Export OS-specific parameters.
"""
from virtinst import osdict
os = osdict.lookup_os(vm.os_variant)
def get_os_val(key, default):
val = None
if os:
val = os.to_dict().get(key)
if val is None:
val = default
return val
acpi = ""
if vm.noacpi is False and get_os_val("acpi", True):
acpi = "<acpi />"
apic = ""
if vm.noapic is False and get_os_val("apic", False):
apic = "<apic />"
return acpi, apic
def export_disks(vm):
"""
Export code for the disks. Slightly tricky for two reasons.
We can't handle duplicate disks: some vmx files define SCSI/IDE devices
that point to the same storage, and Xen isn't happy about that. We
just ignore any entries that have duplicate paths.
Since there is no SCSI support in rombios, and the SCSI emulation is
troublesome with Solaris, we forcibly switch the disks to IDE, and expect
the guest OS to cope (which at least Linux does admirably).
    Note that we even go beyond hdd: devices above that only work if the domU
    has PV drivers.
"""
paths = []
disks = {}
for (bus, instance), disk in sorted(vm.disks.iteritems()):<|fim▁hole|>
if bus == "scsi":
instance = 0
while disks.get(("ide", instance)):
instance += 1
disks[("ide", instance)] = disk
if disk.path:
paths += [disk.path]
diskout = []
storage = []
for (bus, instance), disk in sorted(disks.iteritems()):
# virt-image XML cannot handle an empty CD device
if not disk.path:
continue
path = disk.path
drive_nr = ide_letters[int(instance) % 26]
disk_prefix = "xvd"
if vm.type == vmcfg.VM_TYPE_HVM:
if bus == "ide":
disk_prefix = "hd"
else:
disk_prefix = "sd"
# FIXME: needs updating for later Xen enhancements; need to
# implement capabilities checking for max disks etc.
diskout.append(""" <drive disk="%s" target="%s%s"/>\n""" %
(path, disk_prefix, drive_nr))
typ = "raw"
if disk.format in diskcfg.qemu_formats:
typ = diskcfg.qemu_formats[disk.format]
elif disk.typ == diskcfg.DISK_TYPE_ISO:
typ = "iso"
storage.append(
""" <disk file="%s" use="system" format="%s"/>\n""" %
(path, typ))
return storage, diskout
class virtimage_parser(formats.parser):
"""
Support for virt-install's image format (see virt-image man page).
"""
name = "virt-image"
suffix = ".virt-image.xml"
can_import = True
can_export = True
can_identify = True
@staticmethod
def identify_file(input_file):
"""
Return True if the given file is of this format.
"""
try:
f = file(input_file, "r")
output = f.read()
f.close()
virtimage.parse(output, input_file)
except RuntimeError:
return False
return True
@staticmethod
def import_file(input_file):
"""
Import a configuration file. Raises if the file couldn't be
opened, or parsing otherwise failed.
"""
vm = vmcfg.vm()
try:
f = file(input_file, "r")
output = f.read()
f.close()
logging.debug("Importing virt-image XML:\n%s", output)
config = virtimage.parse(output, input_file)
except Exception, e:
raise ValueError(_("Couldn't import file '%s': %s") %
(input_file, e))
domain = config.domain
boot = domain.boots[0]
if not config.name:
raise ValueError(_("No Name defined in '%s'") % input_file)
vm.name = config.name
vm.arch = boot.arch
vm.memory = int(config.domain.memory / 1024)
if config.descr:
vm.description = config.descr
vm.nr_vcpus = config.domain.vcpu
bus = "ide"
nr_disk = 0
for d in boot.drives:
disk = d.disk
format_mappings = {
virtimage.Disk.FORMAT_RAW: diskcfg.DISK_FORMAT_RAW,
virtimage.Disk.FORMAT_VMDK: diskcfg.DISK_FORMAT_VMDK,
virtimage.Disk.FORMAT_QCOW: diskcfg.DISK_FORMAT_QCOW,
virtimage.Disk.FORMAT_QCOW2: diskcfg.DISK_FORMAT_QCOW2,
virtimage.Disk.FORMAT_VDI: diskcfg.DISK_FORMAT_VDI,
}
fmt = None
if disk.format in format_mappings:
fmt = format_mappings[disk.format]
else:
raise ValueError(_("Unknown disk format '%s'"), disk.format)
devid = (bus, nr_disk)
vm.disks[devid] = diskcfg.disk(bus=bus,
typ=diskcfg.DISK_TYPE_DISK)
vm.disks[devid].format = fmt
vm.disks[devid].path = disk.file
nr_disk = nr_disk + 1
nics = domain.interface
        for nic_idx in range(nics):
            # XXX Eventually need to add support for mac addresses if given
            vm.netdevs[nic_idx] = netdevcfg.netdev(
                typ=netdevcfg.NETDEV_TYPE_UNKNOWN)
vm.validate()
return vm
@staticmethod
def export(vm):
"""
Export a configuration file as a string.
@vm vm configuration instance
Raises ValueError if configuration is not suitable.
"""
if not vm.memory:
raise ValueError(_("VM must have a memory setting"))
# xend wants the name to match r'^[A-Za-z0-9_\-\.\:\/\+]+$', and
# the schema agrees.
vmname = re.sub(r'[^A-Za-z0-9_\-\.:\/\+]+', '_', vm.name)
# Hmm. Any interface is a good interface?
interface = None
if len(vm.netdevs):
interface = " <interface/>"
acpi, apic = export_os_params(vm)
if vm.type == vmcfg.VM_TYPE_PV:
boot_template = pv_boot_template
else:
boot_template = hvm_boot_template
(storage, disks) = export_disks(vm)
boot_xml = boot_template % {
"disks" : "".join(disks).strip("\n"),
"arch" : vm.arch,
"acpi" : acpi,
"apic" : apic,
}
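        # Note: the pv/hvm boot templates above define no %(acpi)s or
        # %(apic)s placeholders, so those keys are silently ignored by the
        # %-formatting with a dict; they are kept for templates that use them.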
out = image_template % {
"boot_template": boot_xml,
"name" : vmname,
"description" : escape(vm.description),
"nr_vcpus" : vm.nr_vcpus,
# Mb to Kb
"memory" : int(vm.memory) * 1024,
"interface" : interface,
"storage" : "".join(storage).strip("\n"),
}
return out
formats.register_parser(virtimage_parser)<|fim▁end|> |
if disk.path and disk.path in paths:
continue |
<|file_name|>KafkaLogCompactionConverterTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package wherehows.ingestion.converters;
import com.linkedin.events.metadata.DatasetIdentifier;
import com.linkedin.events.metadata.DeploymentDetail;
import com.linkedin.events.metadata.MetadataChangeEvent;
import java.util.Collections;
import org.testng.annotations.Test;
import static org.testng.Assert.*;
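/**
 * Tests for the (assumed) behavior of KafkaLogCompactionConverter: MCEs for
 * log-compacted Kafka datasets get their platform URN rewritten from
 * "kafka" to "kafka-lc"; other platforms and deployments are left untouched.
 */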
public class KafkaLogCompactionConverterTest {
@Test
public void testConvert() {
MetadataChangeEvent event = new MetadataChangeEvent();
event.datasetIdentifier = new DatasetIdentifier();
event.datasetIdentifier.dataPlatformUrn = "urn:li:dataPlatform:kafka";
DeploymentDetail deployment = new DeploymentDetail();
deployment.additionalDeploymentInfo = Collections.singletonMap("EI", "compact");
event.deploymentInfo = Collections.singletonList(deployment);
MetadataChangeEvent newEvent = new KafkaLogCompactionConverter().convert(event);
assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "urn:li:dataPlatform:kafka-lc");
}
@Test
public void testNotConvert() {
KafkaLogCompactionConverter converter = new KafkaLogCompactionConverter();
MetadataChangeEvent event = new MetadataChangeEvent();
event.datasetIdentifier = new DatasetIdentifier();
event.datasetIdentifier.dataPlatformUrn = "foo";
DeploymentDetail deployment = new DeploymentDetail();
deployment.additionalDeploymentInfo = Collections.singletonMap("EI", "compact");
event.deploymentInfo = Collections.singletonList(deployment);
MetadataChangeEvent newEvent = converter.convert(event);
assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "foo");
event.datasetIdentifier.dataPlatformUrn = "urn:li:dataPlatform:kafka";
event.deploymentInfo = null;
newEvent = converter.convert(event);
assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "urn:li:dataPlatform:kafka");
event.datasetIdentifier.dataPlatformUrn = "urn:li:dataPlatform:kafka";<|fim▁hole|> assertEquals(newEvent.datasetIdentifier.dataPlatformUrn, "urn:li:dataPlatform:kafka");
}
}<|fim▁end|> | deployment.additionalDeploymentInfo = Collections.singletonMap("EI", "delete");
event.deploymentInfo = Collections.singletonList(deployment);
newEvent = converter.convert(event); |
<|file_name|>test_dn.py<|end_file_name|><|fim▁begin|>import contextlib
import unittest
import pytest
from cryptography import x509
import six
from ipapython.dn import DN, RDN, AVA
if six.PY3:
unicode = str
def cmp(a, b):
if a == b:
assert not a < b
assert not a > b
assert not a != b
assert a <= b
assert a >= b
return 0
elif a < b:
assert not a > b
assert a != b
assert a <= b
assert not a >= b
return -1
else:
assert a > b
assert a != b
assert not a <= b
assert a >= b
return 1
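# Example of the shim above: cmp(1, 2) == -1, cmp(2, 1) == 1 and
# cmp(1, 1) == 0, with the ordering relations asserted along the way.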
pytestmark = pytest.mark.tier0
def expected_class(klass, component):
if klass is AVA:
if component == 'self':
return AVA
elif klass is RDN:
if component == 'self':
return RDN
elif component == 'AVA':
return AVA
elif klass is DN:
if component == 'self':
return DN
elif component == 'AVA':
return AVA
elif component == 'RDN':
return RDN
raise ValueError("class %s with component '%s' unknown" % (klass.__name__, component))
class TestAVA(unittest.TestCase):
def setUp(self):
self.attr1 = 'cn'
self.value1 = 'Bob'
self.str_ava1 = '%s=%s' % (self.attr1, self.value1)
self.ava1 = AVA(self.attr1, self.value1)
self.attr2 = 'ou'
self.value2 = 'People'
self.str_ava2 = '%s=%s' % (self.attr2, self.value2)
self.ava2 = AVA(self.attr2, self.value2)
self.attr3 = 'c'
self.value3 = 'US'
self.str_ava3 = '%s=%s' % (self.attr3, self.value3)
self.ava3 = AVA(self.attr3, self.value3)
def assertExpectedClass(self, klass, obj, component):
self.assertIs(obj.__class__, expected_class(klass, component))
def test_create(self):
# Create with attr,value pair
ava1 = AVA(self.attr1, self.value1)
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1, self.ava1)
# Create with "attr=value" string
ava1 = AVA(self.str_ava1)
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1, self.ava1)
# Create with tuple (attr, value)
ava1 = AVA((self.attr1, self.value1))
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1, self.ava1)
# Create with list [attr, value]
ava1 = AVA([self.attr1, self.value1])
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1, self.ava1)
# Create with no args should fail
with self.assertRaises(TypeError):
AVA()
# Create with more than 3 args should fail
with self.assertRaises(TypeError):
AVA(self.attr1, self.value1, self.attr1, self.attr1)
# Create with 1 arg which is not string should fail
with self.assertRaises(TypeError):
AVA(1)
# Create with malformed AVA string should fail
with self.assertRaises(ValueError):
AVA("cn")
# Create with non-string parameters, should convert
ava1 = AVA(1, self.value1)
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1.attr, u'1')
ava1 = AVA((1, self.value1))
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1.attr, u'1')
ava1 = AVA(self.attr1, 1)
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1.value, u'1')
ava1 = AVA((self.attr1, 1))
self.assertExpectedClass(AVA, ava1, 'self')
self.assertEqual(ava1.value, u'1')
def test_indexing(self):
ava1 = AVA(self.ava1)
self.assertEqual(ava1[self.attr1], self.value1)
self.assertEqual(ava1[0], self.attr1)
self.assertEqual(ava1[1], self.value1)
with self.assertRaises(KeyError):
ava1['foo'] # pylint: disable=pointless-statement
with self.assertRaises(KeyError):
ava1[3] # pylint: disable=pointless-statement
def test_properties(self):
ava1 = AVA(self.ava1)
self.assertEqual(ava1.attr, self.attr1)
self.assertIsInstance(ava1.attr, unicode)
self.assertEqual(ava1.value, self.value1)
self.assertIsInstance(ava1.value, unicode)
def test_str(self):
ava1 = AVA(self.ava1)
self.assertEqual(str(ava1), self.str_ava1)
self.assertIsInstance(str(ava1), str)
def test_cmp(self):
# Equality
ava1 = AVA(self.attr1, self.value1)
self.assertTrue(ava1 == self.ava1)
self.assertFalse(ava1 != self.ava1)
self.assertTrue(ava1 == self.str_ava1)
self.assertFalse(ava1 != self.str_ava1)
result = cmp(ava1, self.ava1)
self.assertEqual(result, 0)
# Upper case attr should still be equal
ava1 = AVA(self.attr1.upper(), self.value1)
self.assertFalse(ava1.attr == self.attr1)
self.assertTrue(ava1.value == self.value1)
self.assertTrue(ava1 == self.ava1)
self.assertFalse(ava1 != self.ava1)
result = cmp(ava1, self.ava1)
self.assertEqual(result, 0)
# Upper case value should still be equal
ava1 = AVA(self.attr1, self.value1.upper())
self.assertTrue(ava1.attr == self.attr1)
self.assertFalse(ava1.value == self.value1)
self.assertTrue(ava1 == self.ava1)
self.assertFalse(ava1 != self.ava1)
result = cmp(ava1, self.ava1)
self.assertEqual(result, 0)
# Make ava1's attr greater
with self.assertRaises(AttributeError):
ava1.attr = self.attr1 + "1"
ava1 = AVA(self.attr1 + "1", self.value1.upper())
self.assertFalse(ava1 == self.ava1)
self.assertTrue(ava1 != self.ava1)
result = cmp(ava1, self.ava1)
self.assertEqual(result, 1)
result = cmp(self.ava1, ava1)
self.assertEqual(result, -1)
# Reset ava1's attr, should be equal again
with self.assertRaises(AttributeError):
ava1.attr = self.attr1
ava1 = AVA(self.attr1, self.value1.upper())
result = cmp(ava1, self.ava1)
self.assertEqual(result, 0)
# Make ava1's value greater
        # attr will be equal; this tests the secondary comparison component
with self.assertRaises(AttributeError):
ava1.value = self.value1 + "1"
ava1 = AVA(self.attr1, self.value1 + "1")
result = cmp(ava1, self.ava1)
self.assertEqual(result, 1)
result = cmp(self.ava1, ava1)
self.assertEqual(result, -1)
def test_hashing(self):
# create AVA's that are equal but differ in case
ava1 = AVA((self.attr1.lower(), self.value1.upper()))
ava2 = AVA((self.attr1.upper(), self.value1.lower()))
# AVAs that are equal should hash to the same value.
self.assertEqual(ava1, ava2)
self.assertEqual(hash(ava1), hash(ava2))
# Different AVA objects with the same value should
# map to 1 common key and 1 member in a set. The key and
# member are based on the object's value.
ava1_a = AVA(self.ava1)
ava1_b = AVA(self.ava1)
ava2_a = AVA(self.ava2)
ava2_b = AVA(self.ava2)
ava3_a = AVA(self.ava3)
ava3_b = AVA(self.ava3)
self.assertEqual(ava1_a, ava1_b)
self.assertEqual(ava2_a, ava2_b)
self.assertEqual(ava3_a, ava3_b)
d = dict()
s = set()
d[ava1_a] = str(ava1_a)
d[ava1_b] = str(ava1_b)
d[ava2_a] = str(ava2_a)
d[ava2_b] = str(ava2_b)
s.add(ava1_a)
s.add(ava1_b)
s.add(ava2_a)
s.add(ava2_b)
self.assertEqual(len(d), 2)
self.assertEqual(len(s), 2)
self.assertEqual(sorted(d), sorted([ava1_a, ava2_a]))
self.assertEqual(sorted(s), sorted([ava1_a, ava2_a]))
self.assertTrue(ava1_a in d)
self.assertTrue(ava1_b in d)
self.assertTrue(ava2_a in d)
self.assertTrue(ava2_b in d)
self.assertFalse(ava3_a in d)
self.assertFalse(ava3_b in d)
self.assertTrue(ava1_a in s)
self.assertTrue(ava1_b in s)
self.assertTrue(ava2_a in s)
self.assertTrue(ava2_b in s)
self.assertFalse(ava3_a in s)
self.assertFalse(ava3_b in s)
class TestRDN(unittest.TestCase):
def setUp(self):
# ava1 must sort before ava2
self.attr1 = 'cn'
self.value1 = 'Bob'
self.str_ava1 = '%s=%s' % (self.attr1, self.value1)
self.ava1 = AVA(self.attr1, self.value1)
self.str_rdn1 = '%s=%s' % (self.attr1, self.value1)
self.rdn1 = RDN((self.attr1, self.value1))
self.attr2 = 'ou'
self.value2 = 'people'
self.str_ava2 = '%s=%s' % (self.attr2, self.value2)
self.ava2 = AVA(self.attr2, self.value2)
self.str_rdn2 = '%s=%s' % (self.attr2, self.value2)
self.rdn2 = RDN((self.attr2, self.value2))
self.str_ava3 = '%s=%s+%s=%s' % (self.attr1, self.value1, self.attr2, self.value2)
self.str_rdn3 = '%s=%s+%s=%s' % (self.attr1, self.value1, self.attr2, self.value2)
self.rdn3 = RDN(self.ava1, self.ava2)
def assertExpectedClass(self, klass, obj, component):
self.assertIs(obj.__class__, expected_class(klass, component))
def test_create(self):
# Create with single attr,value pair
rdn1 = RDN((self.attr1, self.value1))
self.assertEqual(len(rdn1), 1)
self.assertEqual(rdn1, self.rdn1)
self.assertExpectedClass(RDN, rdn1, 'self')
for i in range(0, len(rdn1)):
self.assertExpectedClass(RDN, rdn1[i], 'AVA')
self.assertEqual(rdn1[0], self.ava1)
# Create with multiple attr,value pairs
rdn3 = RDN((self.attr1, self.value1), (self.attr2, self.value2))
self.assertEqual(len(rdn3), 2)
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
self.assertEqual(rdn3[0], self.ava1)
self.assertEqual(rdn3[1], self.ava2)
# Create with multiple attr,value pairs passed as lists
rdn3 = RDN([self.attr1, self.value1], [self.attr2, self.value2])
self.assertEqual(len(rdn3), 2)
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
self.assertEqual(rdn3[0], self.ava1)
self.assertEqual(rdn3[1], self.ava2)
# Create with multiple attr,value pairs but reverse
# constructor parameter ordering. RDN canonical ordering
# should remain the same
rdn3 = RDN((self.attr2, self.value2), (self.attr1, self.value1))
self.assertEqual(len(rdn3), 2)
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
self.assertEqual(rdn3[0], self.ava1)
self.assertEqual(rdn3[1], self.ava2)
# Create with single AVA object
rdn1 = RDN(self.ava1)
self.assertEqual(len(rdn1), 1)
self.assertEqual(rdn1, self.rdn1)
self.assertExpectedClass(RDN, rdn1, 'self')
for i in range(0, len(rdn1)):
self.assertExpectedClass(RDN, rdn1[i], 'AVA')
self.assertEqual(rdn1[0], self.ava1)
# Create with multiple AVA objects
rdn3 = RDN(self.ava1, self.ava2)
self.assertEqual(len(rdn3), 2)
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
self.assertEqual(rdn3[0], self.ava1)
self.assertEqual(rdn3[1], self.ava2)
# Create with multiple AVA objects but reverse constructor
# parameter ordering. RDN canonical ordering should remain
# the same
rdn3 = RDN(self.ava2, self.ava1)
self.assertEqual(len(rdn3), 2)
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
self.assertEqual(rdn3[0], self.ava1)
self.assertEqual(rdn3[1], self.ava2)
# Create with single string with 1 AVA
rdn1 = RDN(self.str_rdn1)
self.assertEqual(len(rdn1), 1)
self.assertEqual(rdn1, self.rdn1)
self.assertExpectedClass(RDN, rdn1, 'self')
for i in range(0, len(rdn1)):
self.assertExpectedClass(RDN, rdn1[i], 'AVA')
self.assertEqual(rdn1[0], self.ava1)
# Create with single string with 2 AVA's
rdn3 = RDN(self.str_rdn3)
self.assertEqual(len(rdn3), 2)
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
self.assertEqual(rdn3[0], self.ava1)
self.assertEqual(rdn3[1], self.ava2)
def test_properties(self):
rdn1 = RDN(self.rdn1)
rdn2 = RDN(self.rdn2)
rdn3 = RDN(self.rdn3)
self.assertEqual(rdn1.attr, self.attr1)
self.assertIsInstance(rdn1.attr, unicode)
self.assertEqual(rdn1.value, self.value1)
self.assertIsInstance(rdn1.value, unicode)
self.assertEqual(rdn2.attr, self.attr2)
self.assertIsInstance(rdn2.attr, unicode)
self.assertEqual(rdn2.value, self.value2)
self.assertIsInstance(rdn2.value, unicode)
self.assertEqual(rdn3.attr, self.attr1)
self.assertIsInstance(rdn3.attr, unicode)
self.assertEqual(rdn3.value, self.value1)
self.assertIsInstance(rdn3.value, unicode)
def test_str(self):
rdn1 = RDN(self.rdn1)
rdn2 = RDN(self.rdn2)
rdn3 = RDN(self.rdn3)
self.assertEqual(str(rdn1), self.str_rdn1)
self.assertIsInstance(str(rdn1), str)
self.assertEqual(str(rdn2), self.str_rdn2)
self.assertIsInstance(str(rdn2), str)
self.assertEqual(str(rdn3), self.str_rdn3)
self.assertIsInstance(str(rdn3), str)
def test_cmp(self):
# Equality
rdn1 = RDN((self.attr1, self.value1))
self.assertTrue(rdn1 == self.rdn1)
self.assertFalse(rdn1 != self.rdn1)
self.assertTrue(rdn1 == self.str_rdn1)
self.assertFalse(rdn1 != self.str_rdn1)
result = cmp(rdn1, self.rdn1)
self.assertEqual(result, 0)
# Make rdn1's attr greater
rdn1 = RDN((self.attr1 + "1", self.value1))
self.assertFalse(rdn1 == self.rdn1)
self.assertTrue(rdn1 != self.rdn1)
result = cmp(rdn1, self.rdn1)
self.assertEqual(result, 1)
result = cmp(self.rdn1, rdn1)
self.assertEqual(result, -1)
# Reset rdn1's attr, should be equal again
rdn1 = RDN((self.attr1, self.value1))
result = cmp(rdn1, self.rdn1)
self.assertEqual(result, 0)
# Make rdn1's value greater
        # attr will be equal; this tests the secondary comparison component
rdn1 = RDN((self.attr1, self.value1 + "1"))
result = cmp(rdn1, self.rdn1)
self.assertEqual(result, 1)
result = cmp(self.rdn1, rdn1)
self.assertEqual(result, -1)
# Make sure rdn's with more ava's are greater
result = cmp(self.rdn1, self.rdn3)
self.assertEqual(result, -1)
result = cmp(self.rdn3, self.rdn1)
self.assertEqual(result, 1)
def test_indexing(self):
rdn1 = RDN(self.rdn1)
rdn2 = RDN(self.rdn2)
rdn3 = RDN(self.rdn3)
self.assertEqual(rdn1[0], self.ava1)
self.assertEqual(rdn1[self.ava1.attr], self.ava1.value)
with self.assertRaises(KeyError):
rdn1['foo'] # pylint: disable=pointless-statement
self.assertEqual(rdn2[0], self.ava2)
self.assertEqual(rdn2[self.ava2.attr], self.ava2.value)
with self.assertRaises(KeyError):
rdn2['foo'] # pylint: disable=pointless-statement
self.assertEqual(rdn3[0], self.ava1)
self.assertEqual(rdn3[self.ava1.attr], self.ava1.value)
self.assertEqual(rdn3[1], self.ava2)
self.assertEqual(rdn3[self.ava2.attr], self.ava2.value)
with self.assertRaises(KeyError):
rdn3['foo'] # pylint: disable=pointless-statement
self.assertEqual(rdn1.attr, self.attr1)
self.assertEqual(rdn1.value, self.value1)
with self.assertRaises(TypeError):
rdn3[1.0] # pylint: disable=pointless-statement
# Slices
self.assertEqual(rdn3[0:1], [self.ava1])
self.assertEqual(rdn3[:], [self.ava1, self.ava2])
def test_assignments(self):
rdn = RDN((self.attr1, self.value1))
with self.assertRaises(TypeError):
# pylint: disable=unsupported-assignment-operation
rdn[0] = self.ava2
def test_iter(self):
rdn1 = RDN(self.rdn1)
rdn2 = RDN(self.rdn2)
rdn3 = RDN(self.rdn3)
self.assertEqual(len(rdn1), 1)
self.assertEqual(rdn1[:], [self.ava1])
for i, ava in enumerate(rdn1):
if i == 0:
self.assertEqual(ava, self.ava1)
else:
self.fail("got iteration index %d, but len=%d" % (i, len(rdn1)))
self.assertEqual(len(rdn2), 1)
self.assertEqual(rdn2[:], [self.ava2])
for i, ava in enumerate(rdn2):
if i == 0:
self.assertEqual(ava, self.ava2)
else:
self.fail("got iteration index %d, but len=%d" % (i, len(rdn2)))
self.assertEqual(len(rdn3), 2)
self.assertEqual(rdn3[:], [self.ava1, self.ava2])
for i, ava in enumerate(rdn3):
if i == 0:
self.assertEqual(ava, self.ava1)
elif i == 1:
self.assertEqual(ava, self.ava2)
else:
self.fail("got iteration index %d, but len=%d" % (i, len(rdn3)))
def test_concat(self):
rdn1 = RDN((self.attr1, self.value1))
rdn2 = RDN((self.attr2, self.value2))
        # in-place addition
rdn1 += rdn2
self.assertEqual(rdn1, self.rdn3)
self.assertExpectedClass(RDN, rdn1, 'self')
for i in range(0, len(rdn1)):
self.assertExpectedClass(RDN, rdn1[i], 'AVA')
rdn1 = RDN((self.attr1, self.value1))
rdn1 += self.ava2
self.assertEqual(rdn1, self.rdn3)
self.assertExpectedClass(RDN, rdn1, 'self')
for i in range(0, len(rdn1)):
self.assertExpectedClass(RDN, rdn1[i], 'AVA')
rdn1 = RDN((self.attr1, self.value1))
rdn1 += self.str_ava2
self.assertEqual(rdn1, self.rdn3)
self.assertExpectedClass(RDN, rdn1, 'self')
for i in range(0, len(rdn1)):
self.assertExpectedClass(RDN, rdn1[i], 'AVA')
# concatenation
rdn1 = RDN((self.attr1, self.value1))
rdn3 = rdn1 + rdn2
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
rdn3 = rdn1 + self.ava2
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
rdn3 = rdn1 + self.str_ava2
self.assertEqual(rdn3, self.rdn3)
self.assertExpectedClass(RDN, rdn3, 'self')
for i in range(0, len(rdn3)):
self.assertExpectedClass(RDN, rdn3[i], 'AVA')
def test_hashing(self):
# create RDN's that are equal but differ in case
rdn1 = RDN((self.attr1.lower(), self.value1.upper()))
rdn2 = RDN((self.attr1.upper(), self.value1.lower()))
# RDNs that are equal should hash to the same value.
self.assertEqual(rdn1, rdn2)
self.assertEqual(hash(rdn1), hash(rdn2))
class TestDN(unittest.TestCase):
def setUp(self):
# ava1 must sort before ava2
self.attr1 = 'cn'
self.value1 = u'Bob'
self.str_ava1 = '%s=%s' % (self.attr1, self.value1)
self.ava1 = AVA(self.attr1, self.value1)
self.str_rdn1 = '%s=%s' % (self.attr1, self.value1)
self.rdn1 = RDN((self.attr1, self.value1))
self.attr2 = 'ou'
self.value2 = u'people'
self.str_ava2 = '%s=%s' % (self.attr2, self.value2)
self.ava2 = AVA(self.attr2, self.value2)
self.str_rdn2 = '%s=%s' % (self.attr2, self.value2)
self.rdn2 = RDN((self.attr2, self.value2))
self.str_dn1 = self.str_rdn1
self.dn1 = DN(self.rdn1)
self.str_dn2 = self.str_rdn2
self.dn2 = DN(self.rdn2)
self.str_dn3 = '%s,%s' % (self.str_rdn1, self.str_rdn2)
self.dn3 = DN(self.rdn1, self.rdn2)
self.base_rdn1 = RDN(('dc', 'redhat'))
self.base_rdn2 = RDN(('dc', 'com'))
self.base_dn = DN(self.base_rdn1, self.base_rdn2)
self.container_rdn1 = RDN(('cn', 'sudorules'))
self.container_rdn2 = RDN(('cn', 'sudo'))
self.container_dn = DN(self.container_rdn1, self.container_rdn2)
self.base_container_dn = DN((self.attr1, self.value1),
self.container_dn, self.base_dn)
self.x500name = x509.Name([
x509.NameAttribute(
x509.NameOID.ORGANIZATIONAL_UNIT_NAME, self.value2),
x509.NameAttribute(x509.NameOID.COMMON_NAME, self.value1),
])
def assertExpectedClass(self, klass, obj, component):
self.assertIs(obj.__class__, expected_class(klass, component))
def test_create(self):
# Create with single attr,value pair
dn1 = DN((self.attr1, self.value1))
self.assertEqual(len(dn1), 1)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[0].attr, unicode)
self.assertIsInstance(dn1[0].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
# Create with single attr,value pair passed as a tuple
dn1 = DN((self.attr1, self.value1))
self.assertEqual(len(dn1), 1)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
# Creation with multiple attr,value string pairs should fail
with self.assertRaises(ValueError):
dn1 = DN(self.attr1, self.value1, self.attr2, self.value2)
# Create with multiple attr,value pairs passed as tuples & lists
dn1 = DN((self.attr1, self.value1), [self.attr2, self.value2])
self.assertEqual(len(dn1), 2)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
self.assertEqual(dn1[1], self.rdn2)
# Create with multiple attr,value pairs passed as tuple and RDN
dn1 = DN((self.attr1, self.value1), RDN((self.attr2, self.value2)))
self.assertEqual(len(dn1), 2)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
self.assertEqual(dn1[1], self.rdn2)
# Create with multiple attr,value pairs but reverse
# constructor parameter ordering. RDN ordering should also be
        # reversed because DN's are an ordered sequence of RDN's
dn1 = DN((self.attr2, self.value2), (self.attr1, self.value1))
self.assertEqual(len(dn1), 2)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn2)
self.assertEqual(dn1[1], self.rdn1)
# Create with single RDN object
dn1 = DN(self.rdn1)
self.assertEqual(len(dn1), 1)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
# Create with multiple RDN objects, assure ordering is preserved.
dn1 = DN(self.rdn1, self.rdn2)
self.assertEqual(len(dn1), 2)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
self.assertEqual(dn1[1], self.rdn2)
# Create with multiple RDN objects in different order, assure
# ordering is preserved.
dn1 = DN(self.rdn2, self.rdn1)
self.assertEqual(len(dn1), 2)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn2)
self.assertEqual(dn1[1], self.rdn1)
# Create with single string with 1 RDN
dn1 = DN(self.str_rdn1)
self.assertEqual(len(dn1), 1)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
# Create with single string with 2 RDN's
dn1 = DN(self.str_dn3)
self.assertEqual(len(dn1), 2)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
self.assertEqual(dn1[1], self.rdn2)
# Create with a python-cryptography 'Name'
dn1 = DN(self.x500name)
self.assertEqual(len(dn1), 2)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
self.assertIsInstance(dn1[i].attr, unicode)
self.assertIsInstance(dn1[i].value, unicode)
self.assertEqual(dn1[0], self.rdn1)
self.assertEqual(dn1[1], self.rdn2)
# Create with RDN, and 2 DN's (e.g. attr + container + base)
dn1 = DN((self.attr1, self.value1), self.container_dn, self.base_dn)
self.assertEqual(len(dn1), 5)
dn_str = ','.join([str(self.rdn1),
str(self.container_rdn1), str(self.container_rdn2),
str(self.base_rdn1), str(self.base_rdn2)])
self.assertEqual(str(dn1), dn_str)
def test_str(self):
dn1 = DN(self.dn1)
dn2 = DN(self.dn2)
dn3 = DN(self.dn3)
self.assertEqual(str(dn1), self.str_dn1)
self.assertIsInstance(str(dn1), str)
self.assertEqual(str(dn2), self.str_dn2)
self.assertIsInstance(str(dn2), str)
self.assertEqual(str(dn3), self.str_dn3)
self.assertIsInstance(str(dn3), str)
def test_cmp(self):
# Equality
dn1 = DN((self.attr1, self.value1))
self.assertTrue(dn1 == self.dn1)
self.assertFalse(dn1 != self.dn1)
self.assertTrue(dn1 == self.str_dn1)
self.assertFalse(dn1 != self.str_dn1)
result = cmp(dn1, self.dn1)
self.assertEqual(result, 0)
# Make dn1's attr greater
with self.assertRaises(AttributeError):
dn1[0].attr = self.attr1 + "1"
dn1 = DN((self.attr1 + "1", self.value1))
self.assertFalse(dn1 == self.dn1)
self.assertTrue(dn1 != self.dn1)
result = cmp(dn1, self.dn1)
self.assertEqual(result, 1)
result = cmp(self.dn1, dn1)
self.assertEqual(result, -1)
# Reset dn1's attr, should be equal again
with self.assertRaises(AttributeError):
dn1[0].attr = self.attr1
dn1 = DN((self.attr1, self.value1))
result = cmp(dn1, self.dn1)
self.assertEqual(result, 0)
# Make dn1's value greater
        # attr will be equal; this tests the secondary comparison component
with self.assertRaises(AttributeError):
dn1[0].value = self.value1 + "1"
dn1 = DN((self.attr1, self.value1 + "1"))
result = cmp(dn1, self.dn1)
self.assertEqual(result, 1)
result = cmp(self.dn1, dn1)
self.assertEqual(result, -1)
# Make sure dn's with more rdn's are greater
result = cmp(self.dn1, self.dn3)
self.assertEqual(result, -1)
result = cmp(self.dn3, self.dn1)
self.assertEqual(result, 1)
# Test startswith, endswith
container_dn = DN(self.container_dn)
base_container_dn = DN(self.base_container_dn)
self.assertTrue(base_container_dn.startswith(self.rdn1))
self.assertTrue(base_container_dn.startswith(self.dn1))
self.assertTrue(base_container_dn.startswith(self.dn1 + container_dn))
self.assertFalse(base_container_dn.startswith(self.dn2))
self.assertFalse(base_container_dn.startswith(self.rdn2))
self.assertTrue(base_container_dn.startswith((self.dn1)))
self.assertTrue(base_container_dn.startswith((self.rdn1)))
self.assertFalse(base_container_dn.startswith((self.rdn2)))
self.assertTrue(base_container_dn.startswith((self.rdn2, self.rdn1)))
self.assertTrue(base_container_dn.startswith((self.dn1, self.dn2)))
self.assertTrue(base_container_dn.endswith(self.base_dn))
self.assertTrue(base_container_dn.endswith(container_dn + self.base_dn))
self.assertFalse(base_container_dn.endswith(DN(self.base_rdn1)))
self.assertTrue(base_container_dn.endswith(DN(self.base_rdn2)))
self.assertTrue(base_container_dn.endswith((DN(self.base_rdn1), DN(self.base_rdn2))))
# Test "in" membership
self.assertTrue(self.container_rdn1 in container_dn)
self.assertTrue(container_dn in container_dn)
self.assertFalse(self.base_rdn1 in container_dn)
self.assertTrue(self.container_rdn1 in base_container_dn)
self.assertTrue(container_dn in base_container_dn)
self.assertTrue(container_dn + self.base_dn in
base_container_dn)
self.assertTrue(self.dn1 + container_dn + self.base_dn in
base_container_dn)
self.assertTrue(self.dn1 + container_dn + self.base_dn ==
base_container_dn)
self.assertFalse(self.container_rdn1 in self.base_dn)
def test_indexing(self):
dn1 = DN(self.dn1)
dn2 = DN(self.dn2)
dn3 = DN(self.dn3)
self.assertEqual(dn1[0], self.rdn1)
self.assertEqual(dn1[self.rdn1.attr], self.rdn1.value)
with self.assertRaises(KeyError):
dn1['foo'] # pylint: disable=pointless-statement
self.assertEqual(dn2[0], self.rdn2)
self.assertEqual(dn2[self.rdn2.attr], self.rdn2.value)
with self.assertRaises(KeyError):
dn2['foo'] # pylint: disable=pointless-statement
self.assertEqual(dn3[0], self.rdn1)
self.assertEqual(dn3[self.rdn1.attr], self.rdn1.value)
self.assertEqual(dn3[1], self.rdn2)
self.assertEqual(dn3[self.rdn2.attr], self.rdn2.value)
with self.assertRaises(KeyError):
dn3['foo'] # pylint: disable=pointless-statement
with self.assertRaises(TypeError):
dn3[1.0] # pylint: disable=pointless-statement
def test_assignments(self):
dn = DN('t=0,t=1,t=2,t=3,t=4,t=5,t=6,t=7,t=8,t=9')
with self.assertRaises(TypeError):
# pylint: disable=unsupported-assignment-operation
dn[0] = RDN('t=a')
with self.assertRaises(TypeError):
# pylint: disable=unsupported-assignment-operation
dn[0:1] = [RDN('t=a'), RDN('t=b')]
def test_iter(self):
dn1 = DN(self.dn1)
dn2 = DN(self.dn2)
dn3 = DN(self.dn3)
self.assertEqual(len(dn1), 1)
self.assertEqual(dn1[:], self.rdn1)
for i, ava in enumerate(dn1):
if i == 0:
self.assertEqual(ava, self.rdn1)
else:
self.fail("got iteration index %d, but len=%d" % (i, len(self.rdn1)))
self.assertEqual(len(dn2), 1)
self.assertEqual(dn2[:], self.rdn2)
for i, ava in enumerate(dn2):
if i == 0:
self.assertEqual(ava, self.rdn2)
else:
self.fail("got iteration index %d, but len=%d" % (i, len(self.rdn2)))
self.assertEqual(len(dn3), 2)
self.assertEqual(dn3[:], DN(self.rdn1, self.rdn2))
for i, ava in enumerate(dn3):
if i == 0:
self.assertEqual(ava, self.rdn1)
elif i == 1:
self.assertEqual(ava, self.rdn2)
else:
self.fail("got iteration index %d, but len=%d" % (i, len(dn3)))
def test_concat(self):
dn1 = DN((self.attr1, self.value1))
dn2 = DN([self.attr2, self.value2])
        # in-place addition
dn1 += dn2
self.assertEqual(dn1, self.dn3)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
dn1 = DN((self.attr1, self.value1))
dn1 += self.rdn2
self.assertEqual(dn1, self.dn3)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
dn1 = DN((self.attr1, self.value1))
dn1 += self.dn2
self.assertEqual(dn1, self.dn3)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
dn1 = DN((self.attr1, self.value1))
dn1 += self.str_dn2
self.assertEqual(dn1, self.dn3)
self.assertExpectedClass(DN, dn1, 'self')
for i in range(0, len(dn1)):
self.assertExpectedClass(DN, dn1[i], 'RDN')
for j in range(0, len(dn1[i])):
self.assertExpectedClass(DN, dn1[i][j], 'AVA')
# concatenation
dn1 = DN((self.attr1, self.value1))
dn3 = dn1 + dn2
self.assertEqual(dn3, self.dn3)
self.assertExpectedClass(DN, dn3, 'self')
for i in range(0, len(dn3)):
self.assertExpectedClass(DN, dn3[i], 'RDN')
for j in range(0, len(dn3[i])):
self.assertExpectedClass(DN, dn3[i][j], 'AVA')
dn1 = DN((self.attr1, self.value1))
dn3 = dn1 + self.rdn2
self.assertEqual(dn3, self.dn3)
self.assertExpectedClass(DN, dn3, 'self')
for i in range(0, len(dn3)):
self.assertExpectedClass(DN, dn3[i], 'RDN')
for j in range(0, len(dn3[i])):
self.assertExpectedClass(DN, dn3[i][j], 'AVA')
dn3 = dn1 + self.str_rdn2
self.assertEqual(dn3, self.dn3)
self.assertExpectedClass(DN, dn3, 'self')
for i in range(0, len(dn3)):
self.assertExpectedClass(DN, dn3[i], 'RDN')
self.assertExpectedClass(DN, dn3[i][0], 'AVA')
dn3 = dn1 + self.str_dn2
self.assertEqual(dn3, self.dn3)
self.assertExpectedClass(DN, dn3, 'self')
for i in range(0, len(dn3)):
self.assertExpectedClass(DN, dn3[i], 'RDN')
for j in range(0, len(dn3[i])):
self.assertExpectedClass(DN, dn3[i][j], 'AVA')
dn3 = dn1 + self.dn2
self.assertEqual(dn3, self.dn3)
self.assertExpectedClass(DN, dn3, 'self')
for i in range(0, len(dn3)):
self.assertExpectedClass(DN, dn3[i], 'RDN')
for j in range(0, len(dn3[i])):
self.assertExpectedClass(DN, dn3[i][j], 'AVA')
def test_find(self):
# -10 -9 -8 -7 -6 -5 -4 -3 -2 -1
dn = DN('t=0,t=1,cn=bob,t=3,t=4,t=5,cn=bob,t=7,t=8,t=9')
pat = DN('cn=bob')
# forward
self.assertEqual(dn.find(pat), 2)
self.assertEqual(dn.find(pat, 1), 2)
self.assertEqual(dn.find(pat, 1, 3), 2)
self.assertEqual(dn.find(pat, 2, 3), 2)
self.assertEqual(dn.find(pat, 6), 6)
self.assertEqual(dn.find(pat, 7), -1)
self.assertEqual(dn.find(pat, 1, 2), -1)
with self.assertRaises(ValueError):
self.assertEqual(dn.index(pat, 7), -1)
with self.assertRaises(ValueError):
self.assertEqual(dn.index(pat, 1, 2), -1)
# reverse
self.assertEqual(dn.rfind(pat), 6)
self.assertEqual(dn.rfind(pat, -4), 6)
self.assertEqual(dn.rfind(pat, 6), 6)
self.assertEqual(dn.rfind(pat, 6, 8), 6)
self.assertEqual(dn.rfind(pat, -8), 6)
self.assertEqual(dn.rfind(pat, -8, -4), 6)
self.assertEqual(dn.rfind(pat, -8, -5), 2)
<|fim▁hole|> self.assertEqual(dn.rfind(pat, 7), -1)
self.assertEqual(dn.rfind(pat, -3), -1)
with self.assertRaises(ValueError):
self.assertEqual(dn.rindex(pat, 7), -1)
with self.assertRaises(ValueError):
self.assertEqual(dn.rindex(pat, -3), -1)
def test_replace(self):
# pylint: disable=no-member
dn = DN('t=0,t=1,t=2,t=3,t=4,t=5,t=6,t=7,t=8,t=9')
with self.assertRaises(AttributeError):
dn.replace # pylint: disable=pointless-statement
def test_hashing(self):
# create DN's that are equal but differ in case
dn1 = DN((self.attr1.lower(), self.value1.upper()))
dn2 = DN((self.attr1.upper(), self.value1.lower()))
# DNs that are equal should hash to the same value.
self.assertEqual(dn1, dn2)
# Good, everyone's equal, now verify their hash values
self.assertEqual(hash(dn1), hash(dn2))
# Different DN objects with the same value should
# map to 1 common key and 1 member in a set. The key and
# member are based on the object's value.
dn1_a = DN(self.dn1)
dn1_b = DN(self.dn1)
dn2_a = DN(self.dn2)
dn2_b = DN(self.dn2)
dn3_a = DN(self.dn3)
dn3_b = DN(self.dn3)
self.assertEqual(dn1_a, dn1_b)
self.assertEqual(dn2_a, dn2_b)
self.assertEqual(dn3_a, dn3_b)
d = dict()
s = set()
d[dn1_a] = str(dn1_a)
d[dn1_b] = str(dn1_b)
d[dn2_a] = str(dn2_a)
d[dn2_b] = str(dn2_b)
s.add(dn1_a)
s.add(dn1_b)
s.add(dn2_a)
s.add(dn2_b)
self.assertEqual(len(d), 2)
self.assertEqual(len(s), 2)
self.assertEqual(sorted(d), sorted([dn1_a, dn2_a]))
self.assertEqual(sorted(s), sorted([dn1_a, dn2_a]))
self.assertTrue(dn1_a in d)
self.assertTrue(dn1_b in d)
self.assertTrue(dn2_a in d)
self.assertTrue(dn2_b in d)
self.assertFalse(dn3_a in d)
self.assertFalse(dn3_b in d)
self.assertTrue(dn1_a in s)
self.assertTrue(dn1_b in s)
self.assertTrue(dn2_a in s)
self.assertTrue(dn2_b in s)
self.assertFalse(dn3_a in s)
self.assertFalse(dn3_b in s)
def test_x500_text(self):
# null DN x500 ordering and LDAP ordering are the same
nulldn = DN()
self.assertEqual(nulldn.ldap_text(), nulldn.x500_text())
# reverse a DN with a single RDN
self.assertEqual(self.dn1.ldap_text(), self.dn1.x500_text())
# reverse a DN with 2 RDNs
dn3_x500 = self.dn3.x500_text()
dn3_rev = DN(self.rdn2, self.rdn1)
self.assertEqual(dn3_rev.ldap_text(), dn3_x500)
# reverse a longer DN
longdn_x500 = self.base_container_dn.x500_text()
longdn_rev = DN(longdn_x500)
        n = len(self.base_container_dn)
        for i in range(n):
            self.assertEqual(longdn_rev[i], self.base_container_dn[n-1-i])
class TestEscapes(unittest.TestCase):
def setUp(self):
self.privilege = 'R,W privilege'
self.dn_str_hex_escape = 'cn=R\\2cW privilege,cn=privileges,cn=pbac,dc=idm,dc=lab,dc=bos,dc=redhat,dc=com'
self.dn_str_backslash_escape = 'cn=R\\,W privilege,cn=privileges,cn=pbac,dc=idm,dc=lab,dc=bos,dc=redhat,dc=com'
def test_escape(self):
dn = DN(self.dn_str_hex_escape)
self.assertEqual(dn['cn'], self.privilege)
self.assertEqual(dn[0].value, self.privilege)
dn = DN(self.dn_str_backslash_escape)
self.assertEqual(dn['cn'], self.privilege)
self.assertEqual(dn[0].value, self.privilege)
class TestInternationalization(unittest.TestCase):
def setUp(self):
# Hello in Arabic
self.arabic_hello_utf8 = (b'\xd9\x85\xd9\x83\xd9\x8a\xd9\x84' +
b'\xd8\xb9\x20\xd9\x85\xd8\xa7\xd9' +
b'\x84\xd9\x91\xd8\xb3\xd9\x84\xd8\xa7')
self.arabic_hello_unicode = self.arabic_hello_utf8.decode('utf-8')
def assert_equal_utf8(self, obj, b):
if six.PY2:
self.assertEqual(str(obj), b)
else:
self.assertEqual(str(obj), b.decode('utf-8'))
@contextlib.contextmanager
def fail_py3(self, exception_type):
try:
yield
except exception_type:
if six.PY2:
raise
def test_i18n(self):
self.assertEqual(self.arabic_hello_utf8,
self.arabic_hello_unicode.encode('utf-8'))
# AVA's
# test attr i18n
ava1 = AVA(self.arabic_hello_unicode, 'foo')
self.assertIsInstance(ava1.attr, unicode)
self.assertIsInstance(ava1.value, unicode)
self.assertEqual(ava1.attr, self.arabic_hello_unicode)
self.assert_equal_utf8(ava1, self.arabic_hello_utf8 + b'=foo')
with self.fail_py3(TypeError):
ava1 = AVA(self.arabic_hello_utf8, 'foo')
if six.PY2:
self.assertIsInstance(ava1.attr, unicode)
self.assertIsInstance(ava1.value, unicode)
self.assertEqual(ava1.attr, self.arabic_hello_unicode)
self.assert_equal_utf8(ava1, self.arabic_hello_utf8 + b'=foo')
# test value i18n
ava1 = AVA('cn', self.arabic_hello_unicode)
self.assertIsInstance(ava1.attr, unicode)
self.assertIsInstance(ava1.value, unicode)
self.assertEqual(ava1.value, self.arabic_hello_unicode)
self.assert_equal_utf8(ava1, b'cn=' + self.arabic_hello_utf8)
with self.fail_py3(TypeError):
ava1 = AVA('cn', self.arabic_hello_utf8)
if six.PY2:
self.assertIsInstance(ava1.attr, unicode)
self.assertIsInstance(ava1.value, unicode)
self.assertEqual(ava1.value, self.arabic_hello_unicode)
self.assert_equal_utf8(ava1, b'cn=' + self.arabic_hello_utf8)
# RDN's
# test attr i18n
rdn1 = RDN((self.arabic_hello_unicode, 'foo'))
self.assertIsInstance(rdn1.attr, unicode)
self.assertIsInstance(rdn1.value, unicode)
self.assertEqual(rdn1.attr, self.arabic_hello_unicode)
self.assert_equal_utf8(rdn1, self.arabic_hello_utf8 + b'=foo')
with self.fail_py3(TypeError):
rdn1 = RDN((self.arabic_hello_utf8, 'foo'))
if six.PY2:
self.assertIsInstance(rdn1.attr, unicode)
self.assertIsInstance(rdn1.value, unicode)
self.assertEqual(rdn1.attr, self.arabic_hello_unicode)
self.assertEqual(str(rdn1), self.arabic_hello_utf8 + b'=foo')
# test value i18n
rdn1 = RDN(('cn', self.arabic_hello_unicode))
self.assertIsInstance(rdn1.attr, unicode)
self.assertIsInstance(rdn1.value, unicode)
self.assertEqual(rdn1.value, self.arabic_hello_unicode)
self.assert_equal_utf8(rdn1, b'cn=' + self.arabic_hello_utf8)
with self.fail_py3(TypeError):
rdn1 = RDN(('cn', self.arabic_hello_utf8))
if six.PY2:
self.assertIsInstance(rdn1.attr, unicode)
self.assertIsInstance(rdn1.value, unicode)
self.assertEqual(rdn1.value, self.arabic_hello_unicode)
self.assertEqual(str(rdn1), b'cn=' + self.arabic_hello_utf8)
# DN's
# test attr i18n
dn1 = DN((self.arabic_hello_unicode, 'foo'))
self.assertIsInstance(dn1[0].attr, unicode)
self.assertIsInstance(dn1[0].value, unicode)
self.assertEqual(dn1[0].attr, self.arabic_hello_unicode)
self.assert_equal_utf8(dn1, self.arabic_hello_utf8 + b'=foo')
with self.fail_py3(TypeError):
dn1 = DN((self.arabic_hello_utf8, 'foo'))
if six.PY2:
self.assertIsInstance(dn1[0].attr, unicode)
self.assertIsInstance(dn1[0].value, unicode)
self.assertEqual(dn1[0].attr, self.arabic_hello_unicode)
self.assertEqual(str(dn1), self.arabic_hello_utf8 + b'=foo')
# test value i18n
dn1 = DN(('cn', self.arabic_hello_unicode))
self.assertIsInstance(dn1[0].attr, unicode)
self.assertIsInstance(dn1[0].value, unicode)
self.assertEqual(dn1[0].value, self.arabic_hello_unicode)
self.assert_equal_utf8(dn1, b'cn=' + self.arabic_hello_utf8)
with self.fail_py3(TypeError):
dn1 = DN(('cn', self.arabic_hello_utf8))
if six.PY2:
self.assertIsInstance(dn1[0].attr, unicode)
self.assertIsInstance(dn1[0].value, unicode)
self.assertEqual(dn1[0].value, self.arabic_hello_unicode)
self.assertEqual(str(dn1), b'cn=' + self.arabic_hello_utf8)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>GumbelFit_opt3.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
To be used in conjunction with:
NR099910-004-10006 - Repeated lowering, OrcaFlex Gumbel Script
NR099910-004-10001 - Lifting Analysis Methodology - Probabilistic Approach
===============================================================================================
Version 13
Corrected bug in WriteResults():
Exception: Excel worksheet name 'pennant line Max Effective Tension 3.50m' must be <= 31 chars.
14.03.2016
===============================================================================================
Version 12
Small change: confidence_Level passed as an argument to plotProbability().
rarossi, 29.02.2016
===============================================================================================
Version 11
Performance optimisation in gumbelFit() and summaryDataFrame().
Minor change required in createResultsPanel().
Basically, accessing pandas objects by individual indexing is slow;
it is better to move things around in bunches and assign by slicing.
Test case: > 4x faster
%timeit runfile('GumbelFit.py')
1 loops, best of 3: 20.2 s per loop
%timeit runfile('GumbelFit_opt.py')
1 loops, best of 3: 4.47 s per loop
by rarossi, 05.01.2016
===============================================================================================
Version 10
Changes:
- Both moment estimator and MLE distribution parameters are used, and results for both are
presented in the Excel files and plots.
- Revised the Gumbel fit plot. Plots can be found in a separate subfolder. The different confidence
levels analysed are shown in the plot, together with the corresponding estimates.
- For minimum samples containing zero values a warning is now given and a Gumbel fit is not
performed. Instead, the empirical sample value for the considered confidence level is reported.
- Results files updated.
by rlohne, 12.11.2015
===============================================================================================
Version 9
Changes:
- Major change in format of output spreadsheets:
Statistical results: name identifier replaced by 3 columns: Hs, Tp and WaveDir
Summary of predicted min max: idem as above. Also added one column at the start with the
confidence level and merged all confidence levels tabs into the same sheet. This is to have all
results in the same page. This file also saved as a text file for convenience
- Roll back to allowing white spaces in names, since this is unavoidable due to Orcaflex loads
names, e.g, 'Bend Moment'. The error was caused due to empty column in the end of results file
resultant from a small bug in postCalcActions.py. postCalcActions.py corrected.
- Removal ambiguous flag UseMLE. Only UseMomentEstimators is kept. If set to False then MLE is used
- Add support for Abs variable, fitting then like Max variables.
- Fix identation keeping 4-spaces throughout the code and max line width of 100 characters.
by rarossi, 25.08.2015
===============================================================================================
Version 8
Changes from previous version:
Some cleanup in code
Restructured plotting<|fim▁hole|>names do not have any spaces in them. Use underscores etc.
Some small changes to make it Python 3 compatible. It has also been tested and found
working on Python 2.7.
===============================================================================================
@author: rlohne
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats as ss
import os
# from time import time
def readResultFile(InputFile):
"""Read input file, and return data frame with results, together with number of columns"""
df = pd.read_table(InputFile)
sample = pd.DataFrame(df)
numRowsSample = len(sample)
numColsSample = len(sample.columns)
return sample, numRowsSample, numColsSample
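# Illustrative (assumed) layout of the tab-separated input file read above:
# the first three columns give the sea state (Hs, Tp, WaveDir) and every
# remaining column is one result variable whose name contains Max, Min or
# Abs; each sea state occupies seedNo consecutive rows, one per seed, e.g.
#
#   Hs    Tp    WaveDir   Hook Max Tension   Wire Min Tension
#   2.5   8.0   180.0     1023.4             15.2
#   2.5   8.0   180.0     998.7              14.8
#   ...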
def createResultsPanel(numRows, numCols, seedNo, confidence_Level, sample):
""""Create empty data panel for results (matrix)"""
    ind = ['none']*(11+3*len(confidence_Level))
    ind[0:11] = ('Hs', 'Tp', 'WaveDir', 'StDev', 'Mean', 'Max', 'Min',
                 'beta ME', 'mu ME', 'beta MLE', 'mu MLE')
count = 0
for i in range(len(confidence_Level)):
count = i + 11
a = str(confidence_Level[i])
ind[count] = 'g ME (' + a + ')'
for i in range(len(confidence_Level)):
count = i + 11 + len(confidence_Level)
a = str(confidence_Level[i])
ind[count] = 'g MLE(' + a + ')'
for i in range(len(confidence_Level)):
count = i + 11 + 2*len(confidence_Level)
a = str(confidence_Level[i])
ind[count] = 'sample (' + a + ')'
seaStates = numRows/seedNo
colnames = [_ for _ in range(int(seaStates))]
    # Names of the result variables (one per load column in the sample)
name = ['none']*(len(sample.columns)-3)
for i in range(len(sample.columns)-3):
name[i] = sample.columns[i+3]
    #
    # Sketch for converting the old Panel layout into a MultiIndex'd DataFrame.
# First try to make the result as similar as possible to the Panel.
# Alternativelly, the index could be replaced by the sea state tuple (Hs, Tp, WaveDir), but
# doing so would mean a lot more work here...
#
mindex = pd.MultiIndex.from_product([name, ind], names=['loads', 'params'])
res_df = pd.DataFrame(np.zeros(shape=(len(colnames), len(mindex))),
index=colnames, columns=mindex)
#
# Convertions:
#
# Using Panel Using DataFrame
# results.major_axis == res_df.index
# mindex = res_df.columns
# results.minor_axis == mindex.levels[1] # !!!sorting order not kept!!! not used
# results.items == mindex.levels[0]
# results.iloc[row, column, :] == res_df.iloc[column][mindex.levels[0][row]]
# results.iloc[row, column]['Hs'] == res_df.iloc[column][mindex.levels[0][row]]['Hs']
return res_df # , colnames
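# Illustrative access pattern for the MultiIndex frame returned above, with
# a hypothetical load name: res_df.iloc[seastate]['Hook Max Tension']['mu ME']
# mirrors the old Panel's results.iloc[row, seastate]['mu ME'].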
def gumbelFit(confidence_Level, sample, results, seedNo, colnames):
"""Fill in results, Calculate statistics"""
evalv = ['none']*(seedNo)
# Define Euler constant used for Gumbel statistics
gamma = 0.5772 # Euler constant
noCL = len(confidence_Level)
for row in range(len(sample.columns)-3):
c = 0
        tmp_data = [0]*(11+3*noCL)  # 11 fixed stats + 3 result blocks per confidence level
for column in range(len(results.index)):
evalv = sample.iloc[c:c+seedNo, row+3].tolist()
sortEvalv = sorted(evalv)
c = (column+1)*seedNo
tmp_data[0] = sample.iloc[column*seedNo, 0] # Hs
tmp_data[1] = sample.iloc[column*seedNo, 1] # Tp
tmp_data[2] = sample.iloc[column*seedNo, 2] # WaveDir
tmp_data[3] = np.std(evalv)
tmp_data[4] = np.average(evalv)
tmp_data[5] = np.max(evalv)
tmp_data[6] = np.min(evalv)
# Check if column name contains 'Min'.
# If true, sample is assumed to be minima, and left skewed distribution is used
if 'Min' in sample.columns[row+3]:
muMLE, betaMLE = ss.gumbel_l.fit(evalv)
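                # Method-of-moments estimators for the left-skewed Gumbel:
                # std = pi*beta/sqrt(6) and mean = mu - gamma*beta, inverted
                # below to obtain beta and mu from the sample moments.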
betaMoment = tmp_data[3]*(np.sqrt(6))/np.pi
muMoment = tmp_data[4]+gamma*betaMoment
tmp_data[7] = betaMoment # beta ME
tmp_data[8] = muMoment # mu ME
tmp_data[9] = betaMLE # beta MLE
tmp_data[10] = muMLE # mu MLE
count = 0
for i in range(len(confidence_Level)):
count = i + 11
if 0 not in evalv:
tmp_data[count] = ss.gumbel_l.ppf((1-confidence_Level[i]),
muMoment, betaMoment)
tmp_data[count+noCL] = ss.gumbel_l.ppf((1-confidence_Level[i]),
muMLE, betaMLE)
else:
tmp_data[count] = 'use sample value'
tmp_data[count+noCL] = 'use sample value'
sampleIndex = seedNo-(confidence_Level[i])*seedNo
enoughSeeds = seedNo >= round(1/(1-confidence_Level[i]), 4)
if enoughSeeds:
tmp_data[count+2*noCL] = sortEvalv[int(sampleIndex)-1]
else:
tmp_data[count+2*noCL] = 'need to run more seeds for this confidence level'
elif 'Max' in sample.columns[row+3] or 'Abs' in sample.columns[row+3]:
# Else, sample is maxima or max absolute, right skewed distribution is to be used.
muMLE, betaMLE = ss.gumbel_r.fit(evalv)
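                # Mirror of the Min branch for the right-skewed Gumbel:
                # here mean = mu + gamma*beta, so mu = mean - gamma*beta.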
betaMoment = tmp_data[3]*(np.sqrt(6))/np.pi
muMoment = tmp_data[4]-gamma*betaMoment
tmp_data[7] = betaMoment # beta ME
tmp_data[8] = muMoment # mu ME
tmp_data[9] = betaMLE # beta MLE
tmp_data[10] = muMLE # mu MLE
count = 0
for i in range(len(confidence_Level)):
count = i + 11
if 0 not in evalv:
tmp_data[count] = ss.gumbel_r.ppf((confidence_Level[i]),
muMoment, betaMoment)
tmp_data[count+noCL] = ss.gumbel_r.ppf((confidence_Level[i]),
muMLE, betaMLE)
else:
tmp_data[count] = 'use sample value'
tmp_data[count+noCL] = 'use sample value'
sampleIndex = confidence_Level[i]*seedNo
enoughSeeds = seedNo >= round(1/(1-confidence_Level[i]), 4)
if enoughSeeds:
tmp_data[count+2*noCL] = sortEvalv[int(sampleIndex)-1]
else:
tmp_data[count+2*noCL] = 'need to run more seeds for this confidence level'
else:
tmp_data[7] = 'Error! Name must contain Max, Min or Abs.'
# finally feed tmp_data into the results dataframe
            # this is done in one shot for performance, since item-by-item
            # assignment in pandas is VERY slow...
results.iloc[column][results.columns.levels[0][row]] = tmp_data
return results
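# Minimal usage sketch under assumed inputs (file name and 10 seeds); note
# that createResultsPanel() now returns only the DataFrame and gumbelFit()'s
# colnames argument is unused:
#   sample, nrows, ncols = readResultFile('results.txt')
#   res = createResultsPanel(nrows, ncols, 10, [0.9], sample)
#   res = gumbelFit([0.9], sample, res, 10, None)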
def plotProbability(results, sample, colnames, seedNo, confidence_Level,
Objectplot, PlotWd, PlotHs, PlotT):
""""Make diagnosis plots"""
if not os.path.isdir('Plots'):
os.mkdir('Plots')
evalv = ['none']*(seedNo)
loads_names = results.columns.levels[0]
for row in range(len(loads_names)):
c = 0
        for column in range(len(results.index)):  # one iteration per sea state
evalv = sample.iloc[c:c+seedNo, row+3].tolist()
sortEvalv = sorted(evalv)
c = (column+1)*seedNo
if (loads_names[row] in Objectplot and sample.iloc[c-seedNo, 2] in PlotWd and
sample.iloc[c-seedNo, 0] in PlotHs and sample.iloc[c-seedNo, 1] in PlotT):
fig = plt.figure(num=None, figsize=(12, 12), dpi=240, facecolor='w', edgecolor='k')
                savepng = True
                if savepng:
                    figpng = plt.figure(num=None, figsize=(165/25.4, 90/25.4),
                                        dpi=96, facecolor='w', edgecolor='k')
betaME = results.iloc[column][loads_names[row]]['beta ME']
muME = results.iloc[column][loads_names[row]]['mu ME']
betaMLE = results.iloc[column][loads_names[row]]['beta MLE']
muMLE = results.iloc[column][loads_names[row]]['mu MLE']
# First supblot is histogram of observations and pdf of the fitted distribution
ax = fig.add_subplot(211)
                n, bins, patches = ax.hist(evalv, 10, histtype='bar',
                                           normed=True, cumulative=False)
plt.setp(patches, 'facecolor', 'g', 'alpha', 0.5)
name = sample.columns[row+3]
ax.set_xlabel(name)
a = min(evalv)-0.05*min(evalv)
b = max(evalv)+0.05*min(evalv)
pdfsample = np.linspace(a, b, 1000)
if 'Min' in sample.columns[row+3]:
yME = ss.gumbel_l.pdf(pdfsample, muME, betaME) # Create Gumbel PDF
yMLE = ss.gumbel_l.pdf(pdfsample, muMLE, betaMLE)
elif 'Max' in sample.columns[row+3] or 'Abs' in sample.columns[row+3]:
                    yME = ss.gumbel_r.pdf(pdfsample, muME, betaME)
                    yMLE = ss.gumbel_r.pdf(pdfsample, muMLE, betaMLE)
ax.plot(pdfsample, yME, 'r', pdfsample, yMLE, 'b')
ax.legend(('Gumbel - ME', 'Gumbel - MLE'), bbox_to_anchor=(0.01, 0.99),
loc=2, borderaxespad=0.)
# Second subplot is the Gumbel plot (log log) showing fitted distribution
# as a straight line, and observations as scatter points
ae = fig.add_subplot(212)
if savepng: aepng = figpng.add_subplot(111)
sampleRange = np.array(range(1, seedNo+1))
factor = float(1)/float((seedNo+1))
sampleCDF = np.multiply(sampleRange, factor)
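                # Empirical plotting positions i/(N+1) for the sorted sample;
                # these become the y-coordinates of the scatter points below.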
if 'Min' in sample.columns[row+3]:
loglogValueME = [ss.gumbel_l.ppf(1-conf, muME, betaME)
for conf in confidence_Level]
loglogValueMLE = [ss.gumbel_l.ppf(1-conf, muMLE, betaMLE)
for conf in confidence_Level]
a = sorted(evalv)
a.append(loglogValueME[-1])
b = sorted(evalv)
b.append(loglogValueMLE[-1])
loglog_cdfME = -np.log(-ss.gumbel_l.logsf(a, muME, betaME))
loglog_cdfMLE = -np.log(-ss.gumbel_l.logsf(b, muMLE, betaMLE))
ae.scatter(sorted(evalv), -np.log(-np.log(1-sampleCDF)),
marker='*', color='k')
ae.plot(a, loglog_cdfME, 'r')
ae.plot(b, loglog_cdfMLE, 'b')
ae.set_ylabel('Cumulative probability')
ylim = [-np.log(-np.log(1-confidence_Level[0]))-1,
max(-np.log(-np.log(confidence_Level[-1]))+1,
-np.log(-np.log(1-sampleCDF[-1]))+1)]
ae.set_ylim(ylim[0], ylim[1])
loglogConf = [-np.log(-np.log(conf)) for conf in confidence_Level]
xlim = [min(sorted(evalv)[0], min(loglogValueME), min(loglogValueMLE)),
sorted(evalv)[-1]]
ae.set_xlim(xlim[0], xlim[1])
if savepng:
aepng.scatter(sorted(evalv), -np.log(-np.log(1-sampleCDF)),
marker='*', color='k')
aepng.plot(a, loglog_cdfME, 'r')
aepng.plot(b, loglog_cdfMLE, 'b')
aepng.set_ylabel('Cumulative probability')
aepng.set_ylim(ylim[0], ylim[1])
aepng.set_xlim(xlim[0], xlim[1])
for i in range(len(confidence_Level)):
ae.plot([xlim[0], xlim[1]], [loglogConf[i], loglogConf[i]],
'k--', alpha=0.2)
ae.annotate(str(round(confidence_Level[i], 4)), xy=(xlim[1],
loglogConf[i]), xytext=(xlim[1], loglogConf[i]))
ae.plot([loglogValueME[i], loglogValueME[i]], [ylim[0], loglogConf[i]],
'r--')
ae.annotate(str(round(loglogValueME[i], 2)),
xy=(loglogValueME[i], ylim[0]),
xytext=(loglogValueME[i], ylim[0]-2),
arrowprops=dict(arrowstyle="->", color='red'))
ae.plot([loglogValueMLE[i], loglogValueMLE[i]], [ylim[0], loglogConf[i]],
'b--')
ae.annotate(str(round(loglogValueMLE[i], 2)),
xy=(loglogValueMLE[i], ylim[0]),
xytext=(loglogValueMLE[i], ylim[0]-1),
arrowprops=dict(arrowstyle="->", color='blue'))
if savepng:
aepng.plot([xlim[0], xlim[1]], [loglogConf[i], loglogConf[i]], 'k--',
alpha=0.2)
aepng.annotate(str(round(confidence_Level[i], 4)),
xy=(xlim[1], loglogConf[i]),
xytext=(xlim[1], loglogConf[i]))
aepng.plot([loglogValueME[i], loglogValueME[i]],
[ylim[0], loglogConf[i]], 'r--')
aepng.annotate(str(round(loglogValueME[i], 2)),
xy=(loglogValueME[i], ylim[0]),
xytext=(loglogValueME[i], ylim[0]-2),
arrowprops=dict(arrowstyle="->", color='red'))
aepng.plot([loglogValueMLE[i], loglogValueMLE[i]],
[ylim[0], loglogConf[i]], 'b--')
aepng.annotate(str(round(loglogValueMLE[i], 2)),
xy=(loglogValueMLE[i], ylim[0]),
xytext=(loglogValueMLE[i], ylim[0]-1),
arrowprops=dict(arrowstyle="->", color='blue'))
rank = seedNo-(confidence_Level[i])*seedNo
enoughSeeds = seedNo >= round(1/(1-confidence_Level[i]), 4)
if enoughSeeds:
x = sortEvalv[int(rank)-1]
y = -np.log(-np.log(1-sampleCDF[int(rank)-1]))
ae.annotate('p'+str(confidence_Level[i])+' = '+str(round(x, 2)),
xy=(x, y), xytext=(x, y+1.0),
arrowprops=dict(arrowstyle="->", color='black'))
if savepng:
aepng.annotate('p'+str(confidence_Level[i])+' = '+str(round(x, 2)),
xy=(x, y), xytext=(x, y+1.0),
arrowprops=dict(arrowstyle="->", color='black'))
elif 'Max' in sample.columns[row+3] or 'Abs' in sample.columns[row+3]:
loglogValueME = [ss.gumbel_r.ppf(conf, muME, betaME)
for conf in confidence_Level]
loglogValueMLE = [ss.gumbel_r.ppf(conf, muMLE, betaMLE)
for conf in confidence_Level]
a = sorted(evalv)
a.append(loglogValueME[-1])
b = sorted(evalv)
b.append(loglogValueMLE[-1])
loglog_cdfME = -np.log(-ss.gumbel_r.logcdf(a, muME, betaME))
loglog_cdfMLE = -np.log(-ss.gumbel_r.logcdf(b, muMLE, betaMLE))
ae.scatter(sorted(evalv), -np.log(-np.log(sampleCDF)), marker='*', color='k')
ae.plot(a, loglog_cdfME, 'r')
ae.plot(b, loglog_cdfMLE, 'b')
ae.set_ylabel('Cumulative probability')
ylim = [-np.log(-np.log(1-confidence_Level[0]))-1,
max(-np.log(-np.log(confidence_Level[-1]))+1,
-np.log(-np.log(1-sampleCDF[-1]))+1)]
ae.set_ylim(ylim[0], ylim[1])
loglogConf = [-np.log(-np.log(conf)) for conf in confidence_Level]
xlim = [sorted(evalv)[0], max(sorted(evalv)[-1], max(loglogValueME),
max(loglogValueMLE))]
ae.set_xlim(xlim[0], xlim[1])
if savepng:
aepng.scatter(sorted(evalv), -np.log(-np.log(sampleCDF)),
marker='*', color='k')
aepng.plot(a, loglog_cdfME, 'r')
aepng.plot(b, loglog_cdfMLE, 'b')
aepng.set_ylabel('Cumulative probability')
aepng.set_ylim(ylim[0], ylim[1])
aepng.set_xlim(xlim[0], xlim[1])
for i in range(len(confidence_Level)):
ae.plot([xlim[0], xlim[1]], [loglogConf[i], loglogConf[i]],
'k--', alpha=0.2)
ae.annotate(str(round(confidence_Level[i], 4)),
xy=(xlim[1], loglogConf[i]),
xytext=(xlim[1], loglogConf[i]))
ae.plot([loglogValueME[i], loglogValueME[i]],
[ylim[0], loglogConf[i]], 'r--')
ae.annotate(str(round(loglogValueME[i], 2)),
xy=(loglogValueME[i], ylim[0]),
xytext=(loglogValueME[i], ylim[0]-2),
arrowprops=dict(arrowstyle="->", color='red'))
ae.plot([loglogValueMLE[i], loglogValueMLE[i]], [-2, loglogConf[i]], 'b--')
ae.annotate(str(round(loglogValueMLE[i], 2)),
xy=(loglogValueMLE[i], ylim[0]),
xytext=(loglogValueMLE[i], ylim[0]-1),
arrowprops=dict(arrowstyle="->", color='blue'))
if savepng:
aepng.plot([xlim[0], xlim[1]], [loglogConf[i], loglogConf[i]],
'k--', alpha=0.2)
aepng.annotate(str(round(confidence_Level[i], 4)),
xy=(xlim[1], loglogConf[i]),
xytext=(xlim[1], loglogConf[i]))
aepng.plot([loglogValueME[i], loglogValueME[i]],
[ylim[0], loglogConf[i]], 'r--')
aepng.annotate(str(round(loglogValueME[i], 2)),
xy=(loglogValueME[i], ylim[0]),
xytext=(loglogValueME[i], ylim[0]-2),
arrowprops=dict(arrowstyle="->", color='red'))
aepng.plot([loglogValueMLE[i], loglogValueMLE[i]],
[-2, loglogConf[i]], 'b--')
aepng.annotate(str(round(loglogValueMLE[i], 2)),
xy=(loglogValueMLE[i], ylim[0]),
xytext=(loglogValueMLE[i], ylim[0]-1),
arrowprops=dict(arrowstyle="->", color='blue'))
rank = confidence_Level[i]*seedNo
enoughSeeds = seedNo >= round(1/(1-confidence_Level[i]), 4)
if enoughSeeds:
x = sortEvalv[int(rank)-1]
y = -np.log(-np.log(sampleCDF[int(rank)-1]))
ae.annotate('p'+str(confidence_Level[i])+' = '+str(round(x, 2)),
xy=(x, y), xytext=(x, y-1.0),
arrowprops=dict(arrowstyle="->", color='black'))
if savepng:
aepng.annotate('p'+str(confidence_Level[i])+' = '+str(round(x, 2)),
xy=(x, y), xytext=(x, y-1.0),
arrowprops=dict(arrowstyle="->", color='black'))
name = '%s Hs %.2f Tp %d wdir %d' % (sample.columns[row+3],
sample.iloc[c-seedNo, 0],
sample.iloc[c-seedNo, 1],
sample.iloc[c-seedNo, 2])
fig.tight_layout(pad=0, w_pad=0, h_pad=0)
if savepng: figpng.tight_layout(pad=0, w_pad=0, h_pad=0)
os.chdir('Plots')
fig.savefig('Gumbel-plot '+name+'.pdf', bbox_inches='tight')
plt.close(fig)
if savepng:
figpng.savefig('Gumbel-plot '+name+'.png', bbox_inches='tight')
plt.close(figpng)
os.chdir('..')
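# A short note on the Gumbel plots above: for maxima, the Gumbel CDF is
# F(x) = exp(-exp(-(x - mu)/beta)), so -ln(-ln(F(x))) = (x - mu)/beta, which is
# linear in x. For minima (gumbel_l) the survival function
# S(x) = exp(-exp((x - mu)/beta)) is transformed instead, giving
# -ln(-ln(S(x))) = -(x - mu)/beta. Hence the fitted distributions appear as
# straight lines in these axes while the sample points are drawn as scatter.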
def summaryDataFrame(results, confidence_Level):
""""Create summary data frame containing Gumbel estimates"""
# Swap params and loads at the columns' hierarchy
res_swap = results.swaplevel(i=0, j=1, axis=1)
    # create ME, MLE and sample DataFrames and add them to a summary panel
index = ['Confidence level', 'Hs', 'Tp', 'WaveDir']
# python >= 3.5 - cool!:
# for k, nm in enumerate([['ME', *['g ME (%s)' % str(c) for c in confidence_Level]],
# ['MLE', *['g MLE(%s)' % str(c) for c in confidence_Level]],
# ['sample', *['sample (%s)' % str(c) for c in confidence_Level]]]):
# python <= 3.4 - bleh!:
for k, nm in enumerate([
[item for sublist in
[['ME'], ['g ME (%s)' % str(c) for c in confidence_Level]]
for item in sublist],
[item for sublist in
[['MLE'], ['g MLE(%s)' % str(c) for c in confidence_Level]]
for item in sublist],
[item for sublist in
[['sample'], ['sample (%s)' % str(c) for c in confidence_Level]]
for item in sublist]
]):
for i, c in enumerate(confidence_Level):
            df = res_swap[nm[i+1]]
df['Confidence level'] = [c]*len(df)
df['Hs'] = results.iloc[0, :, 0]
df['Tp'] = results.iloc[0, :, 1]
df['WaveDir'] = results.iloc[0, :, 2]
if i == 0: # In the 1st i-iteraction
df0 = df.set_index(index).reset_index() # create a DataFrame with the
else: # 1st conf. level. Then update
df0 = df0.append(df.set_index(index).reset_index()) # this df in the next iters.
if k == 0: # In the first k-iteraction, create a Panel with
summary = pd.Panel({nm[0]: df0}) # the 'ME' DataFrame. Then in the next iteractions,
else: # update this panel with the 'MLE' and finally
summary[nm[0]] = df0 # with the 'sample' DataFrames.
summary['ME and sample'] = summary['ME'] # Add to panel the 'XX and sample' DataFrames,
summary['MLE and sample'] = summary['MLE'] # starting with a bare copy of the existing DFs,
for method in ['ME', 'MLE']: # and then replacing occurrences of
for varname in results.items: # 'use sample value' by the sample value.
idx = summary[method][varname] == 'use sample value'
summary[method+' and sample'][varname][idx] = summary['sample'][varname][idx]
return summary
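# The nested comprehensions above are just the Python <= 3.4 spelling of list
# flattening; on 3.5+ the same label lists could be written with starred
# unpacking, e.g. ['ME', *('g ME (%s)' % str(c) for c in confidence_Level)].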
def writeResults(results, summaryResults, StatResultsFile, SummaryFile, seaStates):
"""Write results to Excel and text file"""
# results.to_excel(StatResultsFile, index=False)
# excel tab name max length is 31 characters
results_short = results.rename(items=lambda s: s[:31])
results_short.to_excel(StatResultsFile, index=False)
summaryResults.to_excel(SummaryFile, index=False)
return None
def main(InputFile, confidence_Level, seedNo, StatResultsFile, SummaryFile, Plot, Objectplot,
PlotWd, PlotHs, PlotT):
""""========================MAIN=============================="""
# #t00 = time()
# Read result file
# #print('Reading input')
sample, numRowsSample, numColsSample = readResultFile(InputFile)
# Create panel for all results
# #t0 = time()
results, colnames = createResultsPanel(numRowsSample, numColsSample, seedNo,
confidence_Level, sample)
# #t_cr = time() - t0
# Do Gumbel fit
# #print('Gumbel fit')
# #t0 = time()
results = gumbelFit(confidence_Level, sample, results, seedNo, colnames)
# #t_gf = time()-t0
# Creates a summary file giving predicted max/min for each load case and object analysed
# #print('Summarising and writing results to file')
# #t0 = time()
SummaryResults = summaryDataFrame(results, confidence_Level)
# #t_sm = time()-t0
# Creates a result file giving all statistical results for Gumbel fit
seaStates = int(numRowsSample/seedNo)
# #t0 = time()
writeResults(results, SummaryResults, StatResultsFile, SummaryFile, seaStates)
# # t_wr = time()-t0
# Plot if required
if Plot:
print('Plotting')
plotProbability(results, sample, colnames, seedNo, confidence_Level,
Objectplot, PlotWd, PlotHs, PlotT)
# #print('Done')
# #ttot = time()-t00
# #print('tot\tgumbel\tsummary\twrite\tcreate')
# #print('%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t' % (ttot, t_gf, t_sm, t_wr, t_cr))
return results, SummaryResults
if __name__ == '__main__':
# --------------------USER INPUT----------------------------#
# Define number of seeds run for each load case
seedNo = 10
# Define confidence level
confidence_Level = [0.9, 0.99]
# Define if you want histogram and corresponding Gumbel fit plot for each load case
# True = yes, False = no
Plot = True
# Also, specify which object you want plots for (Fill in object name (column header from
# Input file)), and for which sea states you want plotting for
Objectplot = ['CraneWire Max Tension', 'CraneWire Min Tension',
'sling1 Max Tension', 'sling1 Min Tension',
'sling2 Max Tension', 'sling2 Min Tension',
'sling3 Max Tension', 'sling3 Min Tension',
'sling4 Max Tension', 'sling4 Min Tension']
PlotWd = [165, 180, 195]
PlotHs = [2.3, 3.5]
PlotT = [7, 8, 14]
# Specify input file that contains data
InputFile = 'Results.txt'
# Specify file name for summary results
SummaryFile = 'Summary of predicted max_min_opt.xlsx'
# Specify file name for statistical results
StatResultsFile = 'Statistical results_opt.xlsx'
# #-----------------END USER INPUT------------------------#
Results, SummaryResults = main(InputFile, confidence_Level, seedNo, StatResultsFile,
SummaryFile, Plot, Objectplot, PlotWd, PlotHs, PlotT)<|fim▁end|> | Changed the way result file is read. Previous version required that the Result txt-file be opened
and saved using Excel, as an error occurred if not. Now this is fixed, but requires that object
<|file_name|>dom_html_head_element.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files.git)
// DO NOT EDIT
use crate::DOMElement;
use crate::DOMEventTarget;
use crate::DOMHTMLElement;
use crate::DOMNode;
use crate::DOMObject;
use glib::object::Cast;
use glib::object::IsA;
use glib::signal::connect_raw;
use glib::signal::SignalHandlerId;
use glib::translate::*;
use std::boxed::Box as Box_;
use std::fmt;
use std::mem::transmute;
glib::wrapper! {
pub struct DOMHTMLHeadElement(Object<ffi::WebKitDOMHTMLHeadElement, ffi::WebKitDOMHTMLHeadElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget;
match fn {
type_ => || ffi::webkit_dom_html_head_element_get_type(),
}
}
pub const NONE_DOMHTML_HEAD_ELEMENT: Option<&DOMHTMLHeadElement> = None;
pub trait DOMHTMLHeadElementExt: 'static {
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_head_element_get_profile")]
fn profile(&self) -> Option<glib::GString>;
#[cfg_attr(feature = "v2_22", deprecated)]
#[doc(alias = "webkit_dom_html_head_element_set_profile")]
fn set_profile(&self, value: &str);
fn connect_property_profile_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<DOMHTMLHeadElement>> DOMHTMLHeadElementExt for O {
fn profile(&self) -> Option<glib::GString> {
unsafe {
from_glib_full(ffi::webkit_dom_html_head_element_get_profile(
self.as_ref().to_glib_none().0,
))
}
}
fn set_profile(&self, value: &str) {
unsafe {
ffi::webkit_dom_html_head_element_set_profile(
self.as_ref().to_glib_none().0,
value.to_glib_none().0,
);
}
}
fn connect_property_profile_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe extern "C" fn notify_profile_trampoline<P, F: Fn(&P) + 'static>(
this: *mut ffi::WebKitDOMHTMLHeadElement,
_param_spec: glib::ffi::gpointer,
f: glib::ffi::gpointer,
) where
P: IsA<DOMHTMLHeadElement>,
{
let f: &F = &*(f as *const F);
f(&DOMHTMLHeadElement::from_glib_borrow(this).unsafe_cast_ref())
}
unsafe {
let f: Box_<F> = Box_::new(f);
connect_raw(
self.as_ptr() as *mut _,
b"notify::profile\0".as_ptr() as *const _,
Some(transmute::<_, unsafe extern "C" fn()>(
notify_profile_trampoline::<Self, F> as *const (),
)),
Box_::into_raw(f),
)
}
}<|fim▁hole|>impl fmt::Display for DOMHTMLHeadElement {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str("DOMHTMLHeadElement")
}
}<|fim▁end|> | }
|
<|file_name|>test_gpu.py<|end_file_name|><|fim▁begin|>import unittest
import slowboy.gpu
import slowboy.interrupts
from tests.mock_interrupt_controller import MockInterruptController
STAT_IE_ALL_MASK = (slowboy.gpu.STAT_LYC_IE_MASK |
slowboy.gpu.STAT_OAM_IE_MASK |
slowboy.gpu.STAT_HBLANK_IE_MASK |
slowboy.gpu.STAT_VBLANK_IE_MASK)
class TestGPU(unittest.TestCase):
def setUp(self):
self.gpu = slowboy.gpu.GPU()
self.interrupt_controller = MockInterruptController()
def test_constructor(self):
self.assertEqual(len(self.gpu.vram), 0x2000)
self.assertEqual(len(self.gpu.oam), 0xa0)
self.assertEqual(self.gpu.lcdc, 0x91)
self.assertEqual(self.gpu.scy, 0x00)
self.assertEqual(self.gpu.scx, 0x00)
self.assertEqual(self.gpu.ly, 0x00)
self.assertEqual(self.gpu.lyc, 0x00)
self.assertEqual(self.gpu.bgp, 0xfc)
self.assertEqual(self.gpu.obp0, 0xff)
self.assertEqual(self.gpu.obp1, 0xff)
self.assertEqual(self.gpu.wy, 0x00)
self.assertEqual(self.gpu.wx, 0x00)
# LYC=LY, Mode.OAM_READ
self.assertEqual(self.gpu.stat, 0x04 | 0x02)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.OAM_READ)
self.assertEqual(self.gpu.mode_clock, 0)
def test_mode(self):
# Force ClockListener.notify and verify mode state transitions
for i in range(144):
# OAM_READ (2)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.OAM_READ)
self.assertEqual(self.gpu.mode_clock, 0)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_READ.value)
# OAM_VRAM_READ (3)
self.gpu.notify(0, 80)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.OAM_VRAM_READ)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_VRAM_READ.value)
self.assertEqual(self.gpu.mode_clock, 0)
# HBLANK (0)
self.gpu.notify(0, 172)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.H_BLANK)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.H_BLANK.value)
self.assertEqual(self.gpu.mode_clock, 0)
self.gpu.notify(0, 204)
# VBLANK (1)
self.assertEqual(self.gpu.mode, slowboy.gpu.Mode.V_BLANK)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.V_BLANK.value)
self.assertEqual(self.gpu.mode_clock, 0)
def test_stat_mode(self):
# Initial mode is OAM_READ
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_READ.value)
<|fim▁hole|>
self.gpu.mode = slowboy.gpu.Mode.H_BLANK
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.H_BLANK.value)
self.gpu.mode = slowboy.gpu.Mode.V_BLANK
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.V_BLANK.value)
def test_stat_oam_interrupt(self):
self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_OAM_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_OAM_IE_MASK
self.gpu.mode = slowboy.gpu.Mode.OAM_READ
self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test_stat_lyc_interrupt(self):
self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_LYC_IE_MASK
self.gpu.ly = self.gpu.lyc
self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test_stat_hblank_interrupt(self):
self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_HBLANK_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_HBLANK_IE_MASK
self.gpu.mode = slowboy.gpu.Mode.H_BLANK
self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test_stat_vblank_interrupt(self):
self.gpu.load_interrupt_controller(self.interrupt_controller)
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_VBLANK_IE_MASK, 0)
self.gpu.stat |= slowboy.gpu.STAT_VBLANK_IE_MASK
self.gpu.mode = slowboy.gpu.Mode.V_BLANK
self.assertEqual(self.interrupt_controller.last_interrupt,
slowboy.interrupts.InterruptType.stat)
def test__update_vram(self):
# TODO
self.fail('Not implemented: test__update_vram')
def test_colorto8bit(self):
self.assertRaises(ValueError, slowboy.gpu.colorto8bit, 4)
self.assertEqual(slowboy.gpu.colorto8bit(0), 0xff)
self.assertEqual(slowboy.gpu.colorto8bit(1), 0xaa)
self.assertEqual(slowboy.gpu.colorto8bit(2), 0x55)
self.assertEqual(slowboy.gpu.colorto8bit(3), 0x00)
def test_bgp(self):
# 11 11 11 00
self.assertEqual(self.gpu.bgp, 0xfc)
self.assertEqual(self.gpu._palette, [0xff, 0x00, 0x00, 0x00])
# 00 01 10 11
self.gpu.bgp = 0x1b
self.assertEqual(self.gpu.bgp, 0x1b)
self.assertEqual(self.gpu._palette, [0x00, 0x55, 0xaa, 0xff])
def test_obp(self):
self.assertEqual(self.gpu.obp0, 0xff)
self.assertEqual(self.gpu._sprite_palette0, [0xff, 0x00, 0x00, 0x00])
self.assertEqual(self.gpu.obp1, 0xff)
self.assertEqual(self.gpu._sprite_palette1, [0xff, 0x00, 0x00, 0x00])
# 00 01 10 11
self.gpu.obp0 = 0x1b
self.assertEqual(self.gpu.obp0, 0x1b)
self.assertEqual(self.gpu._sprite_palette0, [0xff, 0x55, 0xaa, 0xff])
# 11 10 01 00
self.gpu.obp1 = 0xe4
self.assertEqual(self.gpu.obp1, 0xe4)
self.assertEqual(self.gpu._sprite_palette1, [0xff, 0xaa, 0x55, 0x00])
def test_ly_lyc(self):
self.assertEqual(self.gpu.ly, 0)
# Changing LYC so that LYC != LY should clear STAT LYC flag
self.gpu.lyc = 5
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK, 0)
# Make LY = LYC -- STAT LYC flag should be set
self.gpu.ly = 5
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK,
slowboy.gpu.STAT_LYC_FLAG_MASK)
# Changing LY so that LYC != LY should *also* clear STAT LYC flag
self.gpu.ly = 6
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK, 0)
# Make LYC = LY -- should also set STAT LYC flag
self.gpu.lyc = 6
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_LYC_FLAG_MASK,
slowboy.gpu.STAT_LYC_FLAG_MASK)
def test_wx_wy(self):
self.assertEqual(self.gpu.wx, 0)
self.assertEqual(self.gpu.wy, 0)
self.gpu.wx = 7
self.assertEqual(self.gpu._wx, 0)
self.gpu.wy = 0
self.assertEqual(self.gpu._wy, 0)<|fim▁end|> | self.gpu.mode = slowboy.gpu.Mode.OAM_VRAM_READ
self.assertEqual(self.gpu.stat & slowboy.gpu.STAT_MODE_MASK,
slowboy.gpu.Mode.OAM_VRAM_READ.value) |
<|file_name|>revertable.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2005 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Actions that can be reverted, i.e. undone and redone."""
import aeidon
__all__ = ("RevertableAction", "RevertableActionGroup",)
class RevertableAction:
"""
Action that can be reverted, i.e. undone and redone.
:ivar description: Short one line description
:ivar docs: Sequence of :attr:`aeidon.documents` items affected
:ivar register: :attr:`aeidon.registers` item for action taken
:ivar revert_args: Arguments passed to the revert method
:ivar revert_function: Method called to revert this action
:ivar revert_kwargs: Keyword arguments passed to the revert method
"""
def __init__(self, **kwargs):
"""
Initialize a :class:`RevertableAction` instance.
`kwargs` can contain any of the names of public instance variables,
of which :attr:`description`, :attr:`docs`, :attr:`register` and
:attr:`revert_function` are required to be set eventually, either with
`kwargs` or direct assignment later.
"""
self.description = None
self.docs = None
self.register = None
self.revert_args = ()
self.revert_function = None
self.revert_kwargs = {}
for key, value in kwargs.items():
setattr(self, key, value)
def _get_reversion_register(self):
"""Return the :attr:`aeidon.registers` item for reversion."""
if self.register.shift == 1:
return aeidon.registers.UNDO
if self.register.shift == -1:
return aeidon.registers.REDO
raise ValueError("Invalid register: {!r}"
.format(self.register))
def revert(self):
"""Call the reversion function."""
kwargs = self.revert_kwargs.copy()
kwargs["register"] = self._get_reversion_register()
return self.revert_function(*self.revert_args, **kwargs)<|fim▁hole|>
class RevertableActionGroup:
"""
Group of :class:`RevertableAction`.
:ivar actions: Sequence of :class:`RevertableAction` in group
:ivar description: Short one line description
"""
def __init__(self, **kwargs):
"""
Initialize a :class:`RevertableAction` instance.
`kwargs` can contain any of the names of public instance variables,
of which :attr:`actions` and :attr:`description` are required to be
set eventually, either with `kwargs` or direct assignment later.
"""
self.actions = None
self.description = None
for key, value in kwargs.items():
setattr(self, key, value)<|fim▁end|> | |
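# A minimal, hypothetical usage sketch (the register value and the no-op
# lambda are illustrative assumptions; real callers pass a project method
# as the revert_function):
def _example_revertable_action():
    action = RevertableAction(
        description="example edit",
        docs=(aeidon.documents.MAIN,),
        register=aeidon.registers.DO,
        revert_function=lambda *args, **kwargs: None,
    )
    return action.revert()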
<|file_name|>SessionOperationExecutor.ts<|end_file_name|><|fim▁begin|>import { OperationExecutor } from "./OperationExecutor";
import { InMemoryDocumentSessionOperations } from "../Session/InMemoryDocumentSessionOperations";
import { DocumentStoreBase } from "../DocumentStoreBase";
import * as deprecate from "deprecate";
import { throwError } from "../../Exceptions";
/**
* For internal session use only
*/
export class SessionOperationExecutor extends OperationExecutor {
private readonly _session: InMemoryDocumentSessionOperations;
/**
* This constructor should not be used
*/
public constructor(store: DocumentStoreBase);
public constructor(store: DocumentStoreBase, databaseName: string);
public constructor(session: InMemoryDocumentSessionOperations);
public constructor(sessionOrStore: DocumentStoreBase | InMemoryDocumentSessionOperations, databaseName?: string) {
if (arguments.length === 1 && sessionOrStore instanceof DocumentStoreBase) {
super(sessionOrStore);
deprecate("Passing document store only to SessionOperationExecutor ctor is deprecated.");
this._session = null;
} else if (arguments.length === 1 && sessionOrStore instanceof InMemoryDocumentSessionOperations) {
super(sessionOrStore.documentStore, sessionOrStore.databaseName);
this._session = sessionOrStore;
} else {
super(sessionOrStore as DocumentStoreBase, databaseName);
this._session = null;
}
<|fim▁hole|> }
public forDatabase(databaseName: string): never {
return throwError("InvalidOperationException", "The method is not supported.");
}
}<|fim▁end|> | |
<|file_name|>ScrollableLabelLarge.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
ORCA Open Remote Control Application
Copyright (C) 2013-2020 Carsten Thielepape
Please contact me by : http://www.orca-remote.org/
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from kivy.lang import Builder
from kivy.uix.recycleview import RecycleView
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.widget import Widget
from kivy.metrics import dp
from kivy.uix.label import Label
from kivy.properties import StringProperty
from kivy.properties import Property
from kivy.properties import BoundedNumericProperty
from kivy.properties import NumericProperty
from kivy.properties import AliasProperty
# noinspection PyProtectedMember
from kivy.properties import dpi2px
from kivy.graphics.opengl import GL_MAX_TEXTURE_SIZE
from ORCA.widgets.core.Label import cLabel
from ORCA.widgets.core.TouchRectangle import cTouchRectangle
from ORCA.utils.TypeConvert import ToUnicode
from ORCA.utils.TypeConvert import ToHex
from ORCA.utils.RemoveNoClassArgs import RemoveNoClassArgs
__all__ = ['cScrollableLabelLarge']
Builder.load_string('''
<cScrollableLabelLargeInner>:
RecycleBoxLayout:
default_size_hint: 1, None
size_hint: None,None
height: self.minimum_height
''')
# noinspection PyUnusedLocal
class cScrollableLabelLarge(Widget):
""" Main Widget to display a large text
By default, x and y scrolling is enabled
Horizontal scrolling can be disabled by passing
noxscroll = False
Supports background color for the Label
As implementation, it is a Widget which contains a Background (if color is given)
and a customized RecycleView
"""
text = StringProperty('')
#font_size = Property('20sp')
def __init__(self, **kwargs):
kwargsInner={}
for k in kwargs:
if k not in ["size_hint","size","pos","pos_hint"]:
kwargsInner[k]=kwargs[k]
self.oScrollableLabelLargeInner=cScrollableLabelLargeInner(**kwargsInner)
super(self.__class__, self).__init__(**RemoveNoClassArgs(dInArgs=kwargs,oObject=Widget))
self.oBackGround = None
if "background_color" in kwargs:
self.oBackGround=cTouchRectangle(size=self.size,pos=self.pos, background_color=kwargs["background_color"])
self.add_widget(self.oBackGround)
del kwargs["background_color"]
self.oScrollableLabelLargeInner.size = self.size
self.oScrollableLabelLargeInner.pos = self.pos
self.add_widget(self.oScrollableLabelLargeInner)
self.bind(pos=self.update_graphics_pos,size=self.update_graphics_size)
def update_graphics_pos(self, instance, value):
""" Updates the child widget position (Backgrund and Recycleview) """
if self.oBackGround is not None:
<|fim▁hole|> self.oScrollableLabelLargeInner.pos = value
def update_graphics_size(self, instance, value):
""" Updates the child widget size (Backgrund and Recycleview) """
if self.oBackGround is not None:
self.oBackGround.size = value
self.oScrollableLabelLargeInner.size = value
def IncreaseFontSize(self,*args):
""" Pass through function for the Recycleview """
self.oScrollableLabelLargeInner.IncreaseFontSize(args)
def DecreaseFontSize(self,*args):
""" Pass through function for the Recycleview """
self.oScrollableLabelLargeInner.DecreaseFontSize(args)
def on_text(self, instance, value):
""" Pass through function for the Recycleview """
self.oScrollableLabelLargeInner.text=value
def on_oOrcaWidget(self, instance, value):
""" Passes the OrcaWidget to the Childs """
if self.oBackGround is not None:
self.oBackGround.oOrcaWidget=value
self.oScrollableLabelLargeInner.oOrcaWidget=value
def _get_font_size(self):
"""Returns the Font Size """
return self.oScrollableLabelLargeInner.fFontSize
def _set_font_size(self, value):
"""Passes the change of font size """
self.oScrollableLabelLargeInner.font_size = value
def EnableWidget(self, *, bEnable:bool) -> bool:
if bEnable:
if self.oBackGround:
self.oBackGround.opacity = self.oScrollableLabelLargeInner.oOrcaWidget.fOrgOpacity
self.oScrollableLabelLargeInner.opacity = self.oScrollableLabelLargeInner.oOrcaWidget.fOrgOpacity
else:
if self.oBackGround:
self.oBackGround.opacity = 0.0
self.oScrollableLabelLargeInner.opacity = 0.0
font_size = AliasProperty(_get_font_size, _set_font_size)
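# A hypothetical usage sketch (the text, sizes and colors are assumptions):
# oLabel = cScrollableLabelLarge(text=uLongText, font_size='16sp',
#                                background_color=(0, 0, 0, 1),
#                                noxscroll=True,  # True disables horizontal scrolling
#                                size=(400, 300), size_hint=(None, None))
# oParent.add_widget(oLabel)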
# noinspection PyUnusedLocal
class cLineLayoutBase(BoxLayout):
""" embedded class to present a single line of text """
text = StringProperty("")
font_size = NumericProperty(0)
def __init__(self, **kwargs):
super(self.__class__,self).__init__(**RemoveNoClassArgs(dInArgs=kwargs,oObject=BoxLayout))
self.oLabel = cLabel(**self.oScrollableLabelLargeInner.kwFontArgs)
if self.oScrollableLabelLargeInner.oOrcaWidget is not None:
self.oLabel.oOrcaWidget = self.oScrollableLabelLargeInner.oOrcaWidget
self.add_widget(self.oLabel)
def on_size(self,*largs):
""" Updates the child widget size (label) """
self.oLabel.height = self.height
self.oLabel.text_size = self.size
def on_text(self,instance,value):
""" sets the text """
self.oLabel.text=value
def on_font_size(self,instance,value):
""" sets the font size """
self.oLabel.font_size=value
# noinspection PyProtectedMember,PyUnusedLocal
class cScrollableLabelLargeInner(RecycleView):
""" The "real' scrollable label (without background) """
# to have similar properties as a Label
font_size = Property('20sp')
text = StringProperty('')
oOrcaWidget = Property(None)
    # Internal Property which handles font resizing (not working, as RecycleView can't manage changes of cached widgets)
fFontSize = BoundedNumericProperty(dpi2px(20,'sp'), min=4.0, max=96.0,errorhandler=lambda x: 96.0 if x > 96.0 else 4.0)
def __init__(self, **kwargs):
        # we create a new class on the fly to pass the font args to the creation process, as the view adapter creates widgets without arguments
self.cLineLayout=type('cLineLayout', cLineLayoutBase.__bases__, dict(cLineLayoutBase.__dict__))
        # pass self to the embedded class. Not good style, but RecycleView limits passing customized parameters
self.cLineLayout.oScrollableLabelLargeInner=self
self.oOrcaWidget = kwargs.get('ORCAWIDGET',None)
        # maximal length (in chars) of a single line of the given text
self.iMaxLen = 0
        # Setting the scroll types / bars for the RecycleView
self.scroll_type = ['bars', 'content']
self.scroll_wheel_distance = dp(114)
self.bar_width = dp(10)
# The original passed Data array
self.aData = []
# Internal Flag to distinguish between first show and (re) setting text
self.bInit = False
# The maximum width of a char
self.iMaxCharwidth = 0
# The maximum characters per line
self.iMaxCharsPerLine = 0
if "font_size" in kwargs:
self.on_font_size(None,kwargs["font_size"])
        # Retrieving the genuine font properties of a label to pass only those arguments to the label (removing pos, hints, background colors, etc.)
self.aFontProperties = Label._font_properties+("background_color",)
# standard font args, if nothing is given
self.kwFontArgs = {"halign" : "left","valign": "top", "max_lines":1,"font_size":20}
# add / update the font args to be passed to the Label
for k in kwargs:
if k in self.aFontProperties:
self.kwFontArgs[k]=kwargs[k]
self.kwFontArgs["font_size"]=self.fFontSize
self.kwFontArgs.pop("text",None)
# Parameter Flag to disable horizontal scrolling
self.bNoXScroll = kwargs.get("noxscroll",False)
self.bMarkup = kwargs.get("markup", False)
        # A dummy label to get the width of the largest character
self.oLabel = Label(**RemoveNoClassArgs(dInArgs=self.kwFontArgs,oObject=Label))
super(self.__class__, self).__init__(**RemoveNoClassArgs(dInArgs=kwargs,oObject=RecycleView))
# This manages the distance between lines
self.layout_manager.default_size = (None,self.oLabel._label.get_extents('W')[1])
#self.layout_manager.default_size = (None, self.fFontSize*1.1)
self.layout_manager.orientation = 'vertical'
# we need to handle size changes
self.bind(size=self.update_size)
self.bind(text=self.on_textinner)
self.text = kwargs.get("text","")
def on_fFontSize(self, instance, value):
""" Will handle font size changes """
if self.layout_manager is not None:
self.kwFontArgs["font_size"]=self.fFontSize
self.oLabel.font_size = self.fFontSize
self.layout_manager.default_size = (None,self.oLabel._label.get_extents('W')[1])
self.SetData(self.aData)
def on_font_size(self, instance, value):
"""Helper function to manage strings with metrics passed as arguments (eg '12dp') """
try:
fValue=float(value)
except:
fValue=dpi2px(value[:-2],value[-2:])
self.fFontSize=fValue
def on_textinner(self, instance, value):
""" helper to have a Label like functionality to set the caption """
self.update_size(None,None)
def IncreaseFontSize(self,*args):
""" Increase the Font size """
self.fFontSize +=1.0
def DecreaseFontSize(self,*args):
""" Decrease the Font size """
self.fFontSize -=1.0
def SetData(self, aData):
""" Passes the data to the Recycle view and sets the layout manager size """
self.data = [{'text': ToUnicode(x),"font_size":self.fFontSize} for x in aData]
if self.bNoXScroll:
self.layout_manager.width=self.width
else:
self.layout_manager.width= self.iMaxCharwidth * self.iMaxCharsPerLine
self.viewclass = self.cLineLayout
self.refresh_from_data()
def update_size(self, instance, value):
""" Fits the text into layout_manager line.
If noxscroll, all line with be split up to fit to the widget size.
if x scrolling is enabled, we look, if the the maximum line length exceed the TEXTURE SIZE.
In that case we split the lines as well and set the scrolling window size to the texture size.
if x scrolling is enabled, and all lines fit to the texture size, we pass the unchanged array """
if self.size==[100,100]:
return
aData = []
bDoLineBreak = False
self.iMaxCharwidth = self.oLabel._label.get_extents('W')[0]
self.iMaxCharsPerLine = int(self.width/self.iMaxCharwidth)
if not self.bNoXScroll:
self.aData = self.text.split('\n')
self.iMaxLen=len(max(self.aData,key=len))
if (self.iMaxCharwidth*self.iMaxLen)>GL_MAX_TEXTURE_SIZE:
self.iMaxCharsPerLine=int(GL_MAX_TEXTURE_SIZE/self.iMaxCharwidth)
bDoLineBreak = True
else:
self.iMaxCharsPerLine=self.iMaxLen
else:
bDoLineBreak = True
if bDoLineBreak:
if self.oLabel is not None:
if len(self.text)>10000:
aData = self.text.split('\n')
i=0
iEnd=len(aData)
while i<iEnd:
if len(aData[i])>self.iMaxCharsPerLine:
aData.insert(i+1,aData[i][self.iMaxCharsPerLine:])
aData[i]=aData[i][:self.iMaxCharsPerLine]
iEnd+=1
i+=1
else:
self.oLabel.size = self.size
self.oLabel.text_size = (self.width,None)
self.oLabel.text = self.text
self.oLabel._label.render()
aData=[]
for oLine in self.oLabel._label._cached_lines:
if len(oLine.words)>0:
uText= u''
for oWord in oLine.words:
if self.bMarkup:
uText+=self.AddMarkUps(oWord)
else:
uText+=oWord.text
aData.append(uText)
else:
aData.append(u'')
self.oLabel.text = ""
self.aData = aData
self.SetData(aData)
else:
self.SetData(self.aData)
def AddMarkUps(self,oWord):
uText=oWord.text
if oWord.options["bold"]:
uText=self.AddMarkUp(uText,"b")
if oWord.options["italic"]:
uText=self.AddMarkUp(uText,"i")
if oWord.options["underline"]:
uText=self.AddMarkUp(uText,"u")
if oWord.options["strikethrough"]:
uText=self.AddMarkUp(uText,"s")
if oWord.options["font_name"] != "Roboto":
uText=self.AddMarkUp(uText,"font",oWord.options["font_name"])
if oWord.options["font_size"] != self.fFontSize:
uText=self.AddMarkUp(uText,"size",ToUnicode(oWord.options["font_size"]))
if oWord.options["color"] != [1,1,1,1]:
uHexColor = u''
for iColor in oWord.options["color"]:
uHexColor+=ToHex(int(iColor*255))
uText=self.AddMarkUp(uText,"color",'#'+uHexColor)
return uText
# noinspection PyMethodMayBeStatic
def AddMarkUp(self,uText,uMarkUp,uValue=None):
if uValue is None:
return "[{1}]{0}[/{1}]".format(uText,uMarkUp)
else:
return "[{1}={2}]{0}[/{1}]".format(uText,uMarkUp,uValue)<|fim▁end|> | self.oBackGround.pos = value
|
<|file_name|>rpc_server.go<|end_file_name|><|fim▁begin|>package rpc
import (
"github.com/golang/glog"
"github.com/oikomi/FishChatServer2/common/ecode"
"github.com/oikomi/FishChatServer2/protocol/external"
"github.com/oikomi/FishChatServer2/protocol/rpc"
"github.com/oikomi/FishChatServer2/server/access/conf"
"github.com/oikomi/FishChatServer2/server/access/global"
sd "github.com/oikomi/FishChatServer2/service_discovery/etcd"
"golang.org/x/net/context"
"google.golang.org/grpc"
"net"
)
type RPCServer struct {
}
func (s *RPCServer) SendP2PMsgFromJob(ctx context.Context, in *rpc.ASSendP2PMsgFromJobReq) (res *rpc.ASSendP2PMsgFromJobRes, err error) {
glog.Info("access recive SendP2PMsgFromJob")
glog.Info(global.GSessions)
if session, ok := global.GSessions[in.TargetUID]; ok {
glog.Info("session is online")
if err = session.Send(&external.ResSendP2PMsg{
Cmd: external.SendP2PMsgCMD,
ErrCode: ecode.OK.Uint32(),
ErrStr: ecode.OK.String(),
SourceUID: in.SourceUID,
TargetUID: in.TargetUID,
MsgID: in.MsgID,
Msg: in.Msg,
}); err != nil {<|fim▁hole|> ErrCode: ecode.ServerErr.Uint32(),
ErrStr: ecode.ServerErr.String(),
}
return
}
} else {
// offline msg
}
res = &rpc.ASSendP2PMsgFromJobRes{
ErrCode: ecode.OK.Uint32(),
ErrStr: ecode.OK.String(),
}
return
}
func (s *RPCServer) SendNotify(ctx context.Context, in *rpc.ASSendNotifyReq) (res *rpc.ASSendNotifyRes, err error) {
glog.Info("access recive SendNotify")
glog.Info(global.GSessions)
if session, ok := global.GSessions[in.UID]; ok {
glog.Info("session is online")
if err = session.Send(&external.ResNotify{
Cmd: external.NotifyCMD,
ErrCode: ecode.OK.Uint32(),
ErrStr: ecode.OK.String(),
CurrentID: in.CurrentID,
}); err != nil {
glog.Error(err)
res = &rpc.ASSendNotifyRes{
ErrCode: ecode.ServerErr.Uint32(),
ErrStr: ecode.ServerErr.String(),
}
return
}
} else {
// offline
}
res = &rpc.ASSendNotifyRes{
ErrCode: ecode.OK.Uint32(),
ErrStr: ecode.OK.String(),
}
return
}
// func (s *RPCServer) SendGroupMsg(ctx context.Context, in *rpc.ASSendGroupMsgReq) (res *rpc.ASSendGroupMsgRes, err error) {
// glog.Info("access recive SendGroupMsg")
// return
// }
func RPCServerInit() {
glog.Info("[access] rpc server init at " + conf.Conf.RPCServer.Addr)
lis, err := net.Listen(conf.Conf.RPCServer.Proto, conf.Conf.RPCServer.Addr)
if err != nil {
glog.Error(err)
panic(err)
}
err = sd.Register(conf.Conf.ServiceDiscoveryServer.ServiceName, conf.Conf.ServiceDiscoveryServer.RPCAddr, conf.Conf.ServiceDiscoveryServer.EtcdAddr, conf.Conf.ServiceDiscoveryServer.Interval, conf.Conf.ServiceDiscoveryServer.TTL)
if err != nil {
glog.Error(err)
panic(err)
}
s := grpc.NewServer()
rpc.RegisterAccessServerRPCServer(s, &RPCServer{})
s.Serve(lis)
}<|fim▁end|> | glog.Error(err)
res = &rpc.ASSendP2PMsgFromJobRes{ |
<|file_name|>all_14.js<|end_file_name|><|fim▁begin|>var searchData=
[
['udp',['Udp',['../class_cool_time.html#a4e23216a8121ca79d0fb019f30884b92',1,'CoolTime']]],
['unsubscribe',['unsubscribe',['../class_cool_pub_sub_client.html#a850554280e314d6b5c33c73fd9e809fc',1,'CoolPubSubClient']]],
['update',['update',['../class_cool_board.html#a8612756d3f73198cdde857a66f0fe690',1,'CoolBoard::update()'],['../class_cool_time.html#aae601f795452cfa48d9fb337aed483a8',1,'CoolTime::update()']]],
['updateconfigfiles',['updateConfigFiles',['../class_cool_file_system.html#adfa8e2e80641ae6f0cceabd348a9b841',1,'CoolFileSystem']]],<|fim▁hole|> ['uv',['uv',['../struct_cool_board_sensors_1_1light_active.html#a0e6cfc311425a31f32c32fc3b834ffb8',1,'CoolBoardSensors::lightActive']]]
];<|fim▁end|> | ['user',['user',['../class_cool_m_q_t_t.html#a8cd47e45d457f908d4b4390b35aaee83',1,'CoolMQTT']]],
['useractive',['userActive',['../class_cool_board.html#a6395459131d6889a3005f79c7a35e964',1,'CoolBoard']]],
['userdata',['userData',['../class_cool_board.html#ae7358fb6e623cfc81b775f5f1734909b',1,'CoolBoard']]], |
<|file_name|>take_until.rs<|end_file_name|><|fim▁begin|>use consumer::*;
use std::cell::Cell;
use std::rc::Rc;
use stream::*;
/// Emit items until it receive an item from another stream.
///
/// This struct is created by the
/// [`take_until()`](./trait.Stream.html#method.take_until) method on
/// [Stream](./trait.Stream.html). See its documentation for more.
#[must_use = "stream adaptors are lazy and do nothing unless consumed"]
pub struct TakeUntil<S, T> {
stream: S,
trigger: T,
}
struct TakeUntilState<C> {
consumer: C,
is_closed: Rc<Cell<bool>>,
}
struct TriggerConsumer {
is_closed: Rc<Cell<bool>>,
}
impl<T> Consumer<T> for TriggerConsumer {
fn emit(&mut self, _: T) -> bool {
self.is_closed.set(true);
false
}
}
impl<C, T> Consumer<T> for TakeUntilState<C>
where C: Consumer<T>
{
fn emit(&mut self, item: T) -> bool {
!self.is_closed.get() && self.consumer.emit(item)
}
}
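// A short note on the mechanics above: the trigger stream is consumed first by
// a TriggerConsumer holding a shared Rc<Cell<bool>> flag; its first item flips
// the flag and emit() returns false to stop the trigger. The main stream's
// consumer then checks that flag before each emit, so `stream.take_until(trigger)`
// forwards items only until the trigger fires.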
impl<S, T> Stream for TakeUntil<S, T>
where S: Stream,
T: Stream
{
type Item = S::Item;
fn consume<C>(self, consumer: C)
where C: Consumer<Self::Item>
{
let is_closed = Rc::new(Cell::new(false));
self.trigger.consume(TriggerConsumer { is_closed: is_closed.clone() });
self.stream.consume(TakeUntilState {
consumer: consumer,
is_closed: is_closed,
});
}
}
impl<S, T> TakeUntil<S, T> {
pub fn new(stream: S, trigger: T) -> Self {
TakeUntil {
stream: stream,<|fim▁hole|>}<|fim▁end|> | trigger: trigger,
}
} |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup#, find_packages, Extension
import distutils.command.build as _build
import setuptools.command.install as _install
import sys
import os
import os.path as op
import distutils.spawn as ds
import distutils.dir_util as dd
import posixpath
def run_cmake(arg=""):
"""
Forcing to run cmake
"""
if ds.find_executable('cmake') is None:
print "CMake is required to build zql"
print "Please install cmake version >= 2.8 and re-run setup"
sys.exit(-1)
print "Configuring zql build with CMake.... "
cmake_args = arg
try:
build_dir = op.join(op.split(__file__)[0], 'build')
dd.mkpath(build_dir)
os.chdir("build")
ds.spawn(['cmake', '..'] + cmake_args.split())
ds.spawn(['make', 'clean'])
ds.spawn(['make'])
os.chdir("..")
except ds.DistutilsExecError:<|fim▁hole|> sys.exit(-1)
class build(_build.build):
def run(self):
run_cmake()
# Now populate the extension module attribute.
#self.distribution.ext_modules = get_ext_modules()
_build.build.run(self)
class install(_install.install):
def run(self):
if not posixpath.exists("src/zq.so"):
run_cmake()
ds.spawn(['make', 'install'])
#self.distribution.ext_modules = get_ext_modules()
self.do_egg_install()
with open('README.txt') as file:
clips6_long_desc = file.read()
setup(
name = "zq",
version = '0.6',
description = 'ZQL - Zabbix Query Language',
install_requires = ["cython", "msgpack-python", "simplejson", "hy", "pyfiglet",
"gevent", "json", "termcolor", "humanfriendly", "ipaddr", "pyfscache",
"Cheetah", "dateparser", "pygithub",
],
requires = [],
include_package_data = True,
url = 'https://github.com/vulogov/zq/',
author='Vladimir Ulogov',
author_email = '[email protected]',
maintainer_email = '[email protected]',
license = "GNU GPL Versin 3",
long_description = clips6_long_desc,
keywords = "zql, monitoring, zabbix",
platforms = ['GNU/Linux','Unix','Mac OS-X'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Monitoring',
'Topic :: System :: Networking :: Monitoring',
'Topic :: System :: Systems Administration',
'Environment :: Console',
'Environment :: Console :: Curses'
],
# ext_modules is not present here. This will be generated through CMake via the
# build or install commands
cmdclass={'install':install,'build': build},
zip_safe=False,
packages = ['zq'],
package_data = {
'zq': ['zq.so', '*.pyx', '*.pyi']
}
)<|fim▁end|> | print "Error while running cmake"
print "run 'setup.py build --help' for build options"
print "You may also try editing the settings in CMakeLists.txt file and re-running setup" |
<|file_name|>if-check.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|> if x < 2u {
return false;
} else if x == 2u { return true; } else { return even(x - 2u); }
}
fn foo(x: uint) {
if even(x) {
info2!("{}", x);
} else {
fail2!();
}
}
pub fn main() { foo(2u); }<|fim▁end|> | // except according to those terms.
fn even(x: uint) -> bool { |
<|file_name|>RoutingNodes.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.routing;
import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
/**
* {@link RoutingNodes} represents a copy the routing information contained in
* the {@link ClusterState cluster state}.
*/
public class RoutingNodes implements Iterable<RoutingNode> {
private final MetaData metaData;
private final ClusterBlocks blocks;
private final RoutingTable routingTable;
private final Map<String, RoutingNode> nodesToShards = new HashMap<>();
private final UnassignedShards unassignedShards = new UnassignedShards(this);
private final Map<ShardId, List<ShardRouting>> assignedShards = new HashMap<>();
private final ImmutableOpenMap<String, ClusterState.Custom> customs;
private final boolean readOnly;
private int inactivePrimaryCount = 0;
private int inactiveShardCount = 0;
private int relocatingShards = 0;
private final Map<String, ObjectIntHashMap<String>> nodesPerAttributeNames = new HashMap<>();
private final Map<String, Recoveries> recoveryiesPerNode = new HashMap<>();
public RoutingNodes(ClusterState clusterState) {
this(clusterState, true);
}
public RoutingNodes(ClusterState clusterState, boolean readOnly) {
this.readOnly = readOnly;
this.metaData = clusterState.metaData();
this.blocks = clusterState.blocks();
this.routingTable = clusterState.routingTable();
this.customs = clusterState.customs();
Map<String, List<ShardRouting>> nodesToShards = new HashMap<>();
// fill in the nodeToShards with the "live" nodes
for (ObjectCursor<DiscoveryNode> cursor : clusterState.nodes().dataNodes().values()) {
nodesToShards.put(cursor.value.id(), new ArrayList<>());
}
// fill in the inverse of node -> shards allocated
// also fill replicaSet information
for (ObjectCursor<IndexRoutingTable> indexRoutingTable : routingTable.indicesRouting().values()) {
for (IndexShardRoutingTable indexShard : indexRoutingTable.value) {
assert indexShard.primary != null;
for (ShardRouting shard : indexShard) {
// to get all the shards belonging to an index, including the replicas,
// we define a replica set and keep track of it. A replica set is identified
// by the ShardId, as this is common for primary and replicas.
                    // A replica set might have at most one replica in the RELOCATING state.
if (shard.assignedToNode()) {
List<ShardRouting> entries = nodesToShards.computeIfAbsent(shard.currentNodeId(), k -> new ArrayList<>());
final ShardRouting sr = getRouting(shard, readOnly);
entries.add(sr);
assignedShardsAdd(sr);
if (shard.relocating()) {
relocatingShards++;
entries = nodesToShards.computeIfAbsent(shard.relocatingNodeId(), k -> new ArrayList<>());
// add the counterpart shard with relocatingNodeId reflecting the source from which
                        // it is relocating.
ShardRouting targetShardRouting = shard.buildTargetRelocatingShard();
addInitialRecovery(targetShardRouting);
if (readOnly) {
targetShardRouting.freeze();
}
entries.add(targetShardRouting);
assignedShardsAdd(targetShardRouting);
} else if (shard.active() == false) { // shards that are initializing without being relocated
if (shard.primary()) {
inactivePrimaryCount++;
}
inactiveShardCount++;
addInitialRecovery(shard);
}
} else {
final ShardRouting sr = getRouting(shard, readOnly);
assignedShardsAdd(sr);
unassignedShards.add(sr);
}
}
}
}
for (Map.Entry<String, List<ShardRouting>> entry : nodesToShards.entrySet()) {
String nodeId = entry.getKey();
this.nodesToShards.put(nodeId, new RoutingNode(nodeId, clusterState.nodes().get(nodeId), entry.getValue()));
}
}
private void addRecovery(ShardRouting routing) {
addRecovery(routing, true, false);
}
private void removeRecovery(ShardRouting routing) {
addRecovery(routing, false, false);
}
public void addInitialRecovery(ShardRouting routing) {
        addRecovery(routing, true, true);
}
private void addRecovery(final ShardRouting routing, final boolean increment, final boolean initializing) {
final int howMany = increment ? 1 : -1;
assert routing.initializing() : "routing must be initializing: " + routing;
Recoveries.getOrAdd(recoveryiesPerNode, routing.currentNodeId()).addIncoming(howMany);
final String sourceNodeId;
if (routing.relocatingNodeId() != null) { // this is a relocation-target
sourceNodeId = routing.relocatingNodeId();
if (routing.primary() && increment == false) { // primary is done relocating
int numRecoveringReplicas = 0;
for (ShardRouting assigned : assignedShards(routing)) {
if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) {
numRecoveringReplicas++;
}
}
// we transfer the recoveries to the relocated primary
recoveryiesPerNode.get(sourceNodeId).addOutgoing(-numRecoveringReplicas);
recoveryiesPerNode.get(routing.currentNodeId()).addOutgoing(numRecoveringReplicas);
}
} else if (routing.primary() == false) { // primary without relocationID is initial recovery
ShardRouting primary = findPrimary(routing);
if (primary == null && initializing) {
primary = routingTable.index(routing.index().getName()).shard(routing.shardId().id()).primary;
} else if (primary == null) {
throw new IllegalStateException("replica is initializing but primary is unassigned");
}
sourceNodeId = primary.currentNodeId();
} else {
sourceNodeId = null;
}
if (sourceNodeId != null) {
Recoveries.getOrAdd(recoveryiesPerNode, sourceNodeId).addOutgoing(howMany);
}
}
public int getIncomingRecoveries(String nodeId) {
return recoveryiesPerNode.getOrDefault(nodeId, Recoveries.EMPTY).getIncoming();
}
public int getOutgoingRecoveries(String nodeId) {
return recoveryiesPerNode.getOrDefault(nodeId, Recoveries.EMPTY).getOutgoing();
}
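    // A short note on the accounting above: every initializing shard adds one
    // incoming recovery on the node that hosts it. The matching outgoing
    // recovery is booked on the relocation source node for relocation targets,
    // or on the node of the active primary for newly recovering replicas;
    // getIncomingRecoveries/getOutgoingRecoveries report these totals per node.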
private ShardRouting findPrimary(ShardRouting routing) {
List<ShardRouting> shardRoutings = assignedShards.get(routing.shardId());
ShardRouting primary = null;
if (shardRoutings != null) {
for (ShardRouting shardRouting : shardRoutings) {
if (shardRouting.primary()) {
if (shardRouting.active()) {
return shardRouting;
} else if (primary == null) {
primary = shardRouting;
} else if (primary.relocatingNodeId() != null) {
primary = shardRouting;
}
}
}
}
return primary;
}
private static ShardRouting getRouting(ShardRouting src, boolean readOnly) {
if (readOnly) {
src.freeze(); // we just freeze and reuse this instance if we are read only
} else {
src = new ShardRouting(src);
}
return src;
}
@Override
public Iterator<RoutingNode> iterator() {
return Collections.unmodifiableCollection(nodesToShards.values()).iterator();
}
public RoutingTable routingTable() {
return routingTable;
}
public RoutingTable getRoutingTable() {
return routingTable();
}
public MetaData metaData() {
return this.metaData;
}
public MetaData getMetaData() {
return metaData();
}
public ClusterBlocks blocks() {
return this.blocks;
}
public ClusterBlocks getBlocks() {
return this.blocks;
}
public ImmutableOpenMap<String, ClusterState.Custom> customs() {
return this.customs;
}
public <T extends ClusterState.Custom> T custom(String type) { return (T) customs.get(type); }
public UnassignedShards unassigned() {
return this.unassignedShards;
}
public RoutingNodesIterator nodes() {
return new RoutingNodesIterator(nodesToShards.values().iterator());
}
public RoutingNode node(String nodeId) {
return nodesToShards.get(nodeId);
}
public ObjectIntHashMap<String> nodesPerAttributesCounts(String attributeName) {
ObjectIntHashMap<String> nodesPerAttributesCounts = nodesPerAttributeNames.get(attributeName);
if (nodesPerAttributesCounts != null) {
return nodesPerAttributesCounts;
}
nodesPerAttributesCounts = new ObjectIntHashMap<>();
for (RoutingNode routingNode : this) {
String attrValue = routingNode.node().attributes().get(attributeName);
nodesPerAttributesCounts.addTo(attrValue, 1);
}
nodesPerAttributeNames.put(attributeName, nodesPerAttributesCounts);
return nodesPerAttributesCounts;
}
/**
* Returns <code>true</code> iff this {@link RoutingNodes} instance has any unassigned primaries even if the
* primaries are marked as temporarily ignored.
*/
public boolean hasUnassignedPrimaries() {
return unassignedShards.getNumPrimaries() + unassignedShards.getNumIgnoredPrimaries() > 0;
}
/**
* Returns <code>true</code> iff this {@link RoutingNodes} instance has any unassigned shards even if the
* shards are marked as temporarily ignored.
* @see UnassignedShards#isEmpty()
* @see UnassignedShards#isIgnoredEmpty()
*/
public boolean hasUnassignedShards() {
return unassignedShards.isEmpty() == false || unassignedShards.isIgnoredEmpty() == false;
}
public boolean hasInactivePrimaries() {
return inactivePrimaryCount > 0;
}
public boolean hasInactiveShards() {
return inactiveShardCount > 0;
}
public int getRelocatingShardCount() {
return relocatingShards;
}
/**
* Returns the active primary shard for the given ShardRouting or <code>null</code> if
* no primary is found or the primary is not active.
*/
public ShardRouting activePrimary(ShardRouting shard) {
for (ShardRouting shardRouting : assignedShards(shard.shardId())) {
if (shardRouting.primary() && shardRouting.active()) {
return shardRouting;
}
}
return null;
}
/**
* Returns one active replica shard for the given ShardRouting shard ID or <code>null</code> if
* no active replica is found.
*/
public ShardRouting activeReplica(ShardRouting shard) {
for (ShardRouting shardRouting : assignedShards(shard.shardId())) {
if (!shardRouting.primary() && shardRouting.active()) {
return shardRouting;
}
}
return null;
}
/**
* Returns all shards that are not in the state UNASSIGNED with the same shard
* ID as the given shard.
*/
public Iterable<ShardRouting> assignedShards(ShardRouting shard) {
return assignedShards(shard.shardId());
}
/**
* Returns <code>true</code> iff all replicas are active for the given shard routing. Otherwise <code>false</code>
*/
public boolean allReplicasActive(ShardRouting shardRouting) {
final List<ShardRouting> shards = assignedShards(shardRouting.shardId());
if (shards.isEmpty() || shards.size() < this.routingTable.index(shardRouting.index().getName()).shard(shardRouting.id()).size()) {
return false; // if we are empty nothing is active if we have less than total at least one is unassigned
}
for (ShardRouting shard : shards) {
if (!shard.active()) {
return false;
}
}
return true;
}
public List<ShardRouting> shards(Predicate<ShardRouting> predicate) {
List<ShardRouting> shards = new ArrayList<>();
for (RoutingNode routingNode : this) {
for (ShardRouting shardRouting : routingNode) {
if (predicate.test(shardRouting)) {
shards.add(shardRouting);
}
}
}
return shards;
}
public List<ShardRouting> shardsWithState(ShardRoutingState... state) {
// TODO these are used on tests only - move into utils class
List<ShardRouting> shards = new ArrayList<>();
for (RoutingNode routingNode : this) {
shards.addAll(routingNode.shardsWithState(state));
}
for (ShardRoutingState s : state) {
if (s == ShardRoutingState.UNASSIGNED) {
unassigned().forEach(shards::add);
break;
}
}
return shards;
}
public List<ShardRouting> shardsWithState(String index, ShardRoutingState... state) {
// TODO these are used on tests only - move into utils class
List<ShardRouting> shards = new ArrayList<>();
for (RoutingNode routingNode : this) {
shards.addAll(routingNode.shardsWithState(index, state));
}
for (ShardRoutingState s : state) {
if (s == ShardRoutingState.UNASSIGNED) {
for (ShardRouting unassignedShard : unassignedShards) {
if (unassignedShard.index().equals(index)) {
shards.add(unassignedShard);
}
}
break;
}
}
return shards;
}
public String prettyPrint() {
StringBuilder sb = new StringBuilder("routing_nodes:\n");
for (RoutingNode routingNode : this) {
sb.append(routingNode.prettyPrint());
}
sb.append("---- unassigned\n");
for (ShardRouting shardEntry : unassignedShards) {
sb.append("--------").append(shardEntry.shortSummary()).append('\n');
}
return sb.toString();
}
/**
* Moves a shard from unassigned to initialize state
*
* @param existingAllocationId allocation id to use. If null, a fresh allocation id is generated.
*/
public void initialize(ShardRouting shard, String nodeId, @Nullable String existingAllocationId, long expectedSize) {
ensureMutable();
assert shard.unassigned() : shard;
shard.initialize(nodeId, existingAllocationId, expectedSize);
node(nodeId).add(shard);
inactiveShardCount++;
if (shard.primary()) {
inactivePrimaryCount++;
}
addRecovery(shard);
assignedShardsAdd(shard);
}
/**
* Relocates a shard to another node, adding the target initializing
* shard to that node and assigning it. Returns the target initializing
* shard.
*/
public ShardRouting relocate(ShardRouting shard, String nodeId, long expectedShardSize) {
ensureMutable();
relocatingShards++;
shard.relocate(nodeId, expectedShardSize);
ShardRouting target = shard.buildTargetRelocatingShard();
node(target.currentNodeId()).add(target);
assignedShardsAdd(target);
addRecovery(target);
return target;
}
/**
* Mark a shard as started and adjusts internal statistics.
*/
public void started(ShardRouting shard) {
ensureMutable();
assert !shard.active() : "expected an initializing shard " + shard;
if (shard.relocatingNodeId() == null) {
// if this is not a target shard for relocation, we need to update statistics
inactiveShardCount--;
if (shard.primary()) {
inactivePrimaryCount--;
}
}
removeRecovery(shard);
shard.moveToStarted();
}
/**
* Cancels the relocation of a shard. The shard must currently be relocating.
*/
public void cancelRelocation(ShardRouting shard) {
ensureMutable();
relocatingShards--;
shard.cancelRelocation();
}
/**
* Swaps the status of a shard, making replicas primary and vice versa.
*
* @param shards the shards to have their primary status swapped.
*/
public void swapPrimaryFlag(ShardRouting... shards) {
ensureMutable();
for (ShardRouting shard : shards) {
if (shard.primary()) {
shard.moveFromPrimary();
if (shard.unassigned()) {
unassignedShards.primaries--;
}
} else {
shard.moveToPrimary();
if (shard.unassigned()) {
unassignedShards.primaries++;
}
}
}
}
private static final List<ShardRouting> EMPTY = Collections.emptyList();
private List<ShardRouting> assignedShards(ShardId shardId) {
final List<ShardRouting> replicaSet = assignedShards.get(shardId);
return replicaSet == null ? EMPTY : Collections.unmodifiableList(replicaSet);
}
/**
* Removes the given shard from the RoutingNodes' internal statistics and cancels
* the relocation if the shard is relocating.
*/
private void remove(ShardRouting shard) {
ensureMutable();
if (!shard.active() && shard.relocatingNodeId() == null) {
inactiveShardCount--;
assert inactiveShardCount >= 0;
if (shard.primary()) {
inactivePrimaryCount--;
}
} else if (shard.relocating()) {
cancelRelocation(shard);
}
assignedShardsRemove(shard);
if (shard.initializing()) {
removeRecovery(shard);
}
}
private void assignedShardsAdd(ShardRouting shard) {
if (shard.unassigned()) {
// unassigned shards are not tracked in assignedShards
return;
}
List<ShardRouting> shards = assignedShards.computeIfAbsent(shard.shardId(), k -> new ArrayList<>());
assert assertInstanceNotInList(shard, shards);
shards.add(shard);
}
private boolean assertInstanceNotInList(ShardRouting shard, List<ShardRouting> shards) {
for (ShardRouting s : shards) {
assert s != shard;
}
return true;
}
private void assignedShardsRemove(ShardRouting shard) {
ensureMutable();
final List<ShardRouting> replicaSet = assignedShards.get(shard.shardId());
if (replicaSet != null) {
final Iterator<ShardRouting> iterator = replicaSet.iterator();
while(iterator.hasNext()) {
// yes we check identity here
if (shard == iterator.next()) {
iterator.remove();
return;
}
}
assert false : "Illegal state";
}
}
public boolean isKnown(DiscoveryNode node) {
return nodesToShards.containsKey(node.getId());
}
public void addNode(DiscoveryNode node) {
ensureMutable();
RoutingNode routingNode = new RoutingNode(node.id(), node);
nodesToShards.put(routingNode.nodeId(), routingNode);
}
public RoutingNodeIterator routingNodeIter(String nodeId) {
final RoutingNode routingNode = nodesToShards.get(nodeId);
if (routingNode == null) {
return null;
}
return new RoutingNodeIterator(routingNode);
}
public RoutingNode[] toArray() {
return nodesToShards.values().toArray(new RoutingNode[nodesToShards.size()]);
}
public void reinitShadowPrimary(ShardRouting candidate) {
ensureMutable();
if (candidate.relocating()) {
cancelRelocation(candidate);
}
candidate.reinitializeShard();
inactivePrimaryCount++;
inactiveShardCount++;
}
/**
* Returns the number of routing nodes
*/
public int size() {
return nodesToShards.size();
}
public static final class UnassignedShards implements Iterable<ShardRouting> {
private final RoutingNodes nodes;
private final List<ShardRouting> unassigned;
private final List<ShardRouting> ignored;
private int primaries = 0;
private int ignoredPrimaries = 0;
public UnassignedShards(RoutingNodes nodes) {
this.nodes = nodes;
unassigned = new ArrayList<>();
ignored = new ArrayList<>();
}
public void add(ShardRouting shardRouting) {
if(shardRouting.primary()) {
primaries++;
}
unassigned.add(shardRouting);
}
public void sort(Comparator<ShardRouting> comparator) {
CollectionUtil.timSort(unassigned, comparator);
}
/**
* Returns the size of the non-ignored unassigned shards
*/
public int size() { return unassigned.size(); }
/**
* Returns the size of the temporarily marked as ignored unassigned shards
*/
public int ignoredSize() { return ignored.size(); }
/**
* Returns the number of non-ignored unassigned primaries
*/
public int getNumPrimaries() {
return primaries;
}
/**
* Returns the number of temporarily marked as ignored unassigned primaries
*/
public int getNumIgnoredPrimaries() { return ignoredPrimaries; }
@Override
public UnassignedIterator iterator() {
return new UnassignedIterator();
}
/**
* The list of ignored unassigned shards (read only). The ignored unassigned shards
* are not part of the formal unassigned list, but are kept around and used to build
* back the list of unassigned shards as part of the routing table.
*/
public List<ShardRouting> ignored() {
return Collections.unmodifiableList(ignored);
}
/**
* Marks a shard as temporarily ignored and adds it to the ignore unassigned list.
* Should be used with caution; typically the correct usage is to call
* removeAndIgnore from the iterator.
* @see #ignored()
* @see UnassignedIterator#removeAndIgnore()
* @see #isIgnoredEmpty()
*/
public void ignoreShard(ShardRouting shard) {
if (shard.primary()) {
ignoredPrimaries++;
}
ignored.add(shard);
}
public class UnassignedIterator implements Iterator<ShardRouting> {
private final Iterator<ShardRouting> iterator;
private ShardRouting current;
public UnassignedIterator() {
this.iterator = unassigned.iterator();
}
@Override
public boolean hasNext() {
return iterator.hasNext();
}
@Override
public ShardRouting next() {
return current = iterator.next();
}
/**
* Initializes the current unassigned shard and moves it from the unassigned list.
*
* @param existingAllocationId allocation id to use. If null, a fresh allocation id is generated.
*/
public void initialize(String nodeId, @Nullable String existingAllocationId, long expectedShardSize) {
innerRemove();
nodes.initialize(new ShardRouting(current), nodeId, existingAllocationId, expectedShardSize);
}
/**
* Removes and ignores the unassigned shard (will be ignored for this run, but
* will be added back to unassigned once the metadata is constructed again).
* Typically this is used when an allocation decision prevents a shard from being allocated such
* that subsequent consumers of this API won't try to allocate this shard again.
*/
public void removeAndIgnore() {
innerRemove();
ignoreShard(current);
}
/**
* Unsupported operation, just there for the interface. Use {@link #removeAndIgnore()} or
* {@link #initialize(String, String, long)}.
*/
@Override
public void remove() {
throw new UnsupportedOperationException("remove is not supported in unassigned iterator, use removeAndIgnore or initialize");
}
private void innerRemove() {
nodes.ensureMutable();
iterator.remove();
if (current.primary()) {
primaries--;
}
}
}
/**
* Returns <code>true</code> iff this collection contains one or more non-ignored unassigned shards.
*/
public boolean isEmpty() {
return unassigned.isEmpty();
}
/**
* Returns <code>true</code> iff any unassigned shards are marked as temporarily ignored.
* @see UnassignedShards#ignoreShard(ShardRouting)
* @see UnassignedIterator#removeAndIgnore()
*/
public boolean isIgnoredEmpty() {
return ignored.isEmpty();
}
public void shuffle() {
Randomness.shuffle(unassigned);
}
/**
* Drains all unassigned shards and returns them.
* This method will not drain ignored shards.
*/
public ShardRouting[] drain() {
ShardRouting[] mutableShardRoutings = unassigned.toArray(new ShardRouting[unassigned.size()]);
unassigned.clear();
primaries = 0;
return mutableShardRoutings;
}
}
/**
* Calculates RoutingNodes statistics by iterating over all {@link ShardRouting}s
* in the cluster to ensure the book-keeping is correct.
* For performance reasons, this should only be called from asserts
*
* @return this method always returns <code>true</code> or throws an assertion error. If assertions are not enabled
* this method does nothing.
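* <p>An illustrative call site (the boolean return value exists so the
* whole check can live inside an {@code assert} statement):
* <pre>assert RoutingNodes.assertShardStats(routingNodes);</pre>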
*/
public static boolean assertShardStats(RoutingNodes routingNodes) {
boolean run = false;
assert (run = true); // only run if assertions are enabled!
if (!run) {
return true;
}
int unassignedPrimaryCount = 0;
int unassignedIgnoredPrimaryCount = 0;
int inactivePrimaryCount = 0;
int inactiveShardCount = 0;
int relocating = 0;
Map<Index, Integer> indicesAndShards = new HashMap<>();
for (RoutingNode node : routingNodes) {
for (ShardRouting shard : node) {
if (!shard.active() && shard.relocatingNodeId() == null) {
if (!shard.relocating()) {
inactiveShardCount++;
if (shard.primary()) {
inactivePrimaryCount++;
}
}
}
if (shard.relocating()) {
relocating++;
}
Integer i = indicesAndShards.get(shard.index());
if (i == null) {
i = shard.id();
}
indicesAndShards.put(shard.index(), Math.max(i, shard.id()));
}
}
// Assert that the active shard routings are identical.
Set<Map.Entry<Index, Integer>> entries = indicesAndShards.entrySet();
final List<ShardRouting> shards = new ArrayList<>();
for (Map.Entry<Index, Integer> e : entries) {
Index index = e.getKey();
for (int i = 0; i < e.getValue(); i++) {
for (RoutingNode routingNode : routingNodes) {
for (ShardRouting shardRouting : routingNode) {
if (shardRouting.index().equals(index) && shardRouting.id() == i) {
shards.add(shardRouting);
}
}
}
List<ShardRouting> mutableShardRoutings = routingNodes.assignedShards(new ShardId(index, i));
assert mutableShardRoutings.size() == shards.size();
for (ShardRouting r : mutableShardRoutings) {
assert shards.contains(r);
shards.remove(r);
}
assert shards.isEmpty();
}
}
for (ShardRouting shard : routingNodes.unassigned()) {
if (shard.primary()) {
unassignedPrimaryCount++;
}
}
for (ShardRouting shard : routingNodes.unassigned().ignored()) {
if (shard.primary()) {
unassignedIgnoredPrimaryCount++;
}
}
for (Map.Entry<String, Recoveries> recoveries : routingNodes.recoveryiesPerNode.entrySet()) {<|fim▁hole|> int outgoing = 0;
RoutingNode routingNode = routingNodes.nodesToShards.get(node);
if (routingNode != null) { // node might have dropped out of the cluster
for (ShardRouting routing : routingNode) {
if (routing.initializing()) {
incoming++;
} else if (routing.relocating()) {
outgoing++;
}
if (routing.primary() && (routing.initializing() && routing.relocatingNodeId() != null) == false) { // we don't count the initialization end of the primary relocation
List<ShardRouting> shardRoutings = routingNodes.assignedShards.get(routing.shardId());
for (ShardRouting assigned : shardRoutings) {
if (assigned.primary() == false && assigned.initializing() && assigned.relocatingNodeId() == null) {
outgoing++;
}
}
}
}
}
assert incoming == value.incoming : incoming + " != " + value.incoming;
assert outgoing == value.outgoing : outgoing + " != " + value.outgoing + " node: " + routingNode;
}
assert unassignedPrimaryCount == routingNodes.unassignedShards.getNumPrimaries() :
"Unassigned primaries is [" + unassignedPrimaryCount + "] but RoutingNodes returned unassigned primaries [" + routingNodes.unassigned().getNumPrimaries() + "]";
assert unassignedIgnoredPrimaryCount == routingNodes.unassignedShards.getNumIgnoredPrimaries() :
"Unassigned ignored primaries is [" + unassignedIgnoredPrimaryCount + "] but RoutingNodes returned unassigned ignored primaries [" + routingNodes.unassigned().getNumIgnoredPrimaries() + "]";
assert inactivePrimaryCount == routingNodes.inactivePrimaryCount :
"Inactive Primary count [" + inactivePrimaryCount + "] but RoutingNodes returned inactive primaries [" + routingNodes.inactivePrimaryCount + "]";
assert inactiveShardCount == routingNodes.inactiveShardCount :
"Inactive Shard count [" + inactiveShardCount + "] but RoutingNodes returned inactive shards [" + routingNodes.inactiveShardCount + "]";
assert routingNodes.getRelocatingShardCount() == relocating : "Relocating shards mismatch [" + routingNodes.getRelocatingShardCount() + "] but expected [" + relocating + "]";
return true;
}
public class RoutingNodesIterator implements Iterator<RoutingNode>, Iterable<ShardRouting> {
private RoutingNode current;
private final Iterator<RoutingNode> delegate;
public RoutingNodesIterator(Iterator<RoutingNode> iterator) {
delegate = iterator;
}
@Override
public boolean hasNext() {
return delegate.hasNext();
}
@Override
public RoutingNode next() {
return current = delegate.next();
}
public RoutingNodeIterator nodeShards() {
return new RoutingNodeIterator(current);
}
@Override
public void remove() {
delegate.remove();
}
@Override
public Iterator<ShardRouting> iterator() {
return nodeShards();
}
}
public final class RoutingNodeIterator implements Iterator<ShardRouting>, Iterable<ShardRouting> {
private final RoutingNode iterable;
private ShardRouting shard;
private final Iterator<ShardRouting> delegate;
private boolean removed = false;
public RoutingNodeIterator(RoutingNode iterable) {
this.delegate = iterable.mutableIterator();
this.iterable = iterable;
}
@Override
public boolean hasNext() {
return delegate.hasNext();
}
@Override
public ShardRouting next() {
removed = false;
return shard = delegate.next();
}
@Override
public void remove() {
ensureMutable();
delegate.remove();
RoutingNodes.this.remove(shard);
removed = true;
}
/** returns true if {@link #remove()} or {@link #moveToUnassigned(UnassignedInfo)} were called on the current shard */
public boolean isRemoved() {
return removed;
}
@Override
public Iterator<ShardRouting> iterator() {
return iterable.iterator();
}
public void moveToUnassigned(UnassignedInfo unassignedInfo) {
ensureMutable();
if (isRemoved() == false) {
remove();
}
ShardRouting unassigned = new ShardRouting(shard); // protective copy of the mutable shard
unassigned.moveToUnassigned(unassignedInfo);
unassigned().add(unassigned);
}
public ShardRouting current() {
return shard;
}
}
private void ensureMutable() {
if (readOnly) {
throw new IllegalStateException("can't modify RoutingNodes - readonly");
}
}
private static final class Recoveries {
private static final Recoveries EMPTY = new Recoveries();
private int incoming = 0;
private int outgoing = 0;
int getTotal() {
return incoming + outgoing;
}
void addOutgoing(int howMany) {
assert outgoing + howMany >= 0 : outgoing + howMany + " must be >= 0";
outgoing += howMany;
}
void addIncoming(int howMany) {
assert incoming + howMany >= 0 : incoming + howMany + " must be >= 0";
incoming += howMany;
}
int getOutgoing() {
return outgoing;
}
int getIncoming() {
return incoming;
}
public static Recoveries getOrAdd(Map<String, Recoveries> map, String key) {
Recoveries recoveries = map.get(key);
if (recoveries == null) {
recoveries = new Recoveries();
map.put(key, recoveries);
}
return recoveries;
}
}
}<|fim▁end|> | String node = recoveries.getKey();
final Recoveries value = recoveries.getValue();
int incoming = 0; |
<|file_name|>0006_auto_20150827_1019.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('formbuilder', '0005_auto_20150826_1600'),
]
operations = [
migrations.RemoveField(
model_name='choiceanswer',
name='option',
),
migrations.AddField(
model_name='choiceanswer',
name='choices',
field=models.ManyToManyField(related_name='answers', to='formbuilder.Option'),
),
migrations.AddField(
model_name='choiceanswer',
name='other',
field=models.TextField(blank=True),<|fim▁hole|> model_name='choiceanswer',
name='question',
field=models.ForeignKey(related_name='answers', to='formbuilder.Choice', null=True),
),
]<|fim▁end|> | ),
migrations.AddField( |
<|file_name|>missing_values_numeric.py<|end_file_name|><|fim▁begin|>import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from include.dataset_fnames import generate_station_data_fname
from include.feature_lists import numeric_features
from xgboost import XGBRegressor
def numeric_df_missing_values_summary():
for i, station_id in enumerate(sorted(numeric_features)):
# print station_id,
fname = generate_station_data_fname(station_id, sample_type='train', data_type='numeric', allow_nan_values=False)
features = ['Id'] + numeric_features[station_id]
station_df = pd.read_csv(fname, usecols=features, index_col=['Id'], dtype=object)
for feature in station_df.columns:
if station_df[feature].isnull().any():
print feature
# print
del station_df
# if (i == 10): break
def regresor_function(X_train, y_train):
regresor = XGBRegressor(objective='reg:linear', n_estimators=1000, learning_rate=0.01, base_score=0.005)
regresor.fit(X_train, y_train)
return regresor
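# A rough MICE-style sketch (characterization assumed from the loop below,
# not stated by the original author): each feature with missing values is
# regressed on all other features, the fitted model's predictions fill the
# holes, and the pass repeats for `iter` rounds so imputed values refine
# one another.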
def numeric_df_impute_missing_values_iteration(df, null_features, null_indices, iter):
print "Features with null data:", null_features
print "Iterations:", iter
full_features = df.columns
new_df = df.copy()
for iter_number in range(iter):
iter_df = new_df.copy()
for feature in null_features:
print "Iteration: ", iter_number, "Feature:", feature
train_features = list((set(full_features) - set([feature])))
train_indices = df[df[feature].notnull()].index
test_indices = df[df[feature].isnull()].index
X_train = new_df.loc[train_indices, train_features]
y_train = new_df.loc[train_indices, feature]
X_test = new_df.loc[test_indices, train_features] # feature == null
regresor = regresor_function(X_train.values, y_train.values)
y_predict = regresor.predict(X_test.values)
iter_df.loc[test_indices, feature] = y_predict
print iter_df.loc[2342802]
print iter_df.loc[2347536]
del new_df
new_df = iter_df.copy()<|fim▁hole|> # for each iteration
# cross_validation_check(new_df)
print new_df.loc[2342802]
print new_df.loc[2347536]
return new_df
def numeric_df_impute_missing_values(station_id, sample_type='train', iter=1):
fname = generate_station_data_fname(station_id=station_id, sample_type=sample_type, data_type='numeric', allow_nan_values=False)
features = ['Id'] + numeric_features[station_id]
station_df = pd.read_csv(fname, usecols=features, index_col=['Id'], dtype=np.float32)
station_df_null_features = []
station_df_null_indices = {}
for feature in station_df.columns:
if station_df[feature].isnull().any():
station_df_null_features.append(feature)
station_df_null_indices[feature] = station_df[station_df[feature].isnull()].index.tolist()
ret_df = numeric_df_impute_missing_values_iteration(station_df, station_df_null_features, station_df_null_indices, iter)
del station_df
# print station_df_null_features
# print station_df_null_indices
return ret_df
if __name__ == '__main__':
df = numeric_df_impute_missing_values('L0S09')
print df.head()<|fim▁end|> |
del iter_df |
<|file_name|>action_dlcontent.py<|end_file_name|><|fim▁begin|>"""
Downloads bootloader content for all arches for when the user doesn't want to supply their own.
Copyright 2009, Red Hat, Inc
Michael DeHaan <[email protected]>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import os
import urlgrabber
import clogger
class ContentDownloader:
def __init__(self,config,logger=None):
"""
Constructor
"""
self.config = config
self.settings = config.settings()
if logger is None:
logger = clogger.Logger()
self.logger = logger
def run(self,force=False):
"""
Download bootloader content for all of the latest bootloaders, since the user
has chosen to not supply their own. You may ask "why not get this from yum", though
Fedora has no IA64 repo, for instance, and we also want this to be able to work on Debian and
further do not want folks to have to install a cross compiler. For those that don't like this approach
they can still source their cross-arch bootloader content manually.
"""
content_server = "http://mdehaan.fedorapeople.org/loaders"
dest = "/var/lib/cobbler/loaders"
<|fim▁hole|> ( "%s/COPYING.elilo" % content_server, "%s/COPYING.elilo" % dest ),
( "%s/COPYING.yaboot" % content_server, "%s/COPYING.yaboot" % dest),
( "%s/COPYING.syslinux" % content_server, "%s/COPYING.syslinux" % dest),
( "%s/elilo-3.8-ia64.efi" % content_server, "%s/elilo-ia64.efi" % dest ),
( "%s/yaboot-1.3.14-12" % content_server, "%s/yaboot" % dest),
( "%s/pxelinux.0-3.61" % content_server, "%s/pxelinux.0" % dest),
( "%s/menu.c32-3.61" % content_server, "%s/menu.c32" % dest),
)
self.logger.info("downloading content required to netboot all arches")
for f in files:
src = f[0]
dst = f[1]
if os.path.exists(dst) and not force:
self.logger.info("path %s already exists, not overwriting existing content, use --force if you wish to update" % dst)
continue
self.logger.info("downloading %s to %s" % (src,dst))
urlgrabber.urlgrab(src,dst)
return True<|fim▁end|> | files = (
( "%s/README" % content_server, "%s/README" % dest ), |
<|file_name|>domtokenlist.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::{Attr, AttrHelpers};
use dom::bindings::codegen::Bindings::DOMTokenListBinding;
use dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods;
use dom::bindings::error::{ErrorResult, Fallible};
use dom::bindings::error::Error::{InvalidCharacter, Syntax};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::element::{Element, AttributeHandlers};
use dom::node::window_from_node;
use util::str::{DOMString, HTML_SPACE_CHARACTERS, str_join};
use string_cache::Atom;
use std::borrow::ToOwned;
#[dom_struct]
#[derive(HeapSizeOf)]
pub struct DOMTokenList {
reflector_: Reflector,
element: JS<Element>,
local_name: Atom,
}
impl DOMTokenList {
pub fn new_inherited(element: &Element, local_name: Atom) -> DOMTokenList {
DOMTokenList {
reflector_: Reflector::new(),
element: JS::from_ref(element),
local_name: local_name,
}
}
pub fn new(element: &Element, local_name: &Atom) -> Root<DOMTokenList> {
let window = window_from_node(element);
reflect_dom_object(box DOMTokenList::new_inherited(element, local_name.clone()),
GlobalRef::Window(window.r()),
DOMTokenListBinding::Wrap)
}
}
trait PrivateDOMTokenListHelpers {
fn attribute(self) -> Option<Root<Attr>>;
fn check_token_exceptions(self, token: &str) -> Fallible<Atom>;
}
impl<'a> PrivateDOMTokenListHelpers for &'a DOMTokenList {
fn attribute(self) -> Option<Root<Attr>> {
let element = self.element.root();
element.r().get_attribute(&ns!(""), &self.local_name)
}
<|fim▁hole|> "" => Err(Syntax),
slice if slice.find(HTML_SPACE_CHARACTERS).is_some() => Err(InvalidCharacter),
slice => Ok(Atom::from_slice(slice))
}
}
}
// https://dom.spec.whatwg.org/#domtokenlist
impl<'a> DOMTokenListMethods for &'a DOMTokenList {
// https://dom.spec.whatwg.org/#dom-domtokenlist-length
fn Length(self) -> u32 {
self.attribute().map(|attr| {
let attr = attr.r();
attr.value().tokens().map(|tokens| tokens.len()).unwrap_or(0)
}).unwrap_or(0) as u32
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-item
fn Item(self, index: u32) -> Option<DOMString> {
self.attribute().and_then(|attr| {
let attr = attr.r();
attr.value().tokens().and_then(|tokens| {
tokens.get(index as usize).map(|token| (**token).to_owned())
})
})
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-contains
fn Contains(self, token: DOMString) -> Fallible<bool> {
self.check_token_exceptions(&token).map(|token| {
self.attribute().map(|attr| {
let attr = attr.r();
attr.value()
.tokens()
.expect("Should have parsed this attribute")
.iter()
.any(|atom| *atom == token)
}).unwrap_or(false)
})
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-add
fn Add(self, tokens: Vec<DOMString>) -> ErrorResult {
let element = self.element.root();
let mut atoms = element.r().get_tokenlist_attribute(&self.local_name);
for token in &tokens {
let token = try!(self.check_token_exceptions(&token));
if !atoms.iter().any(|atom| *atom == token) {
atoms.push(token);
}
}
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-remove
fn Remove(self, tokens: Vec<DOMString>) -> ErrorResult {
let element = self.element.root();
let mut atoms = element.r().get_tokenlist_attribute(&self.local_name);
for token in &tokens {
let token = try!(self.check_token_exceptions(&token));
atoms.iter().position(|atom| *atom == token).map(|index| {
atoms.remove(index)
});
}
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-toggle
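// Toggle semantics, as implemented by the match arms below:
// force == Some(true) always leaves the token present, Some(false)
// always leaves it absent, and None flips membership; the Ok(bool)
// reports whether the token is present after the call.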
fn Toggle(self, token: DOMString, force: Option<bool>) -> Fallible<bool> {
let element = self.element.root();
let mut atoms = element.r().get_tokenlist_attribute(&self.local_name);
let token = try!(self.check_token_exceptions(&token));
match atoms.iter().position(|atom| *atom == token) {
Some(index) => match force {
Some(true) => Ok(true),
_ => {
atoms.remove(index);
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(false)
}
},
None => match force {
Some(false) => Ok(false),
_ => {
atoms.push(token);
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(true)
}
}
}
}
// https://dom.spec.whatwg.org/#stringification-behavior
fn Stringifier(self) -> DOMString {
let tokenlist = self.element.root().r().get_tokenlist_attribute(&self.local_name);
str_join(&tokenlist, "\x20")
}
// check-tidy: no specs after this line
fn IndexedGetter(self, index: u32, found: &mut bool) -> Option<DOMString> {
let item = self.Item(index);
*found = item.is_some();
item
}
}<|fim▁end|> | fn check_token_exceptions(self, token: &str) -> Fallible<Atom> {
match token { |
<|file_name|>tutorial_5_solved.py<|end_file_name|><|fim▁begin|># ------------------------------------------------------------------------------------------------
# Copyright (c) 2016 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
# Tutorial sample #5: Observations
import MalmoPython
import os
import sys
import time
import json
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 0) # flush print output immediately
def Menger(xorg, yorg, zorg, size, blocktype, variant, holetype):
#draw solid chunk
genstring = GenCuboidWithVariant(xorg,yorg,zorg,xorg+size-1,yorg+size-1,zorg+size-1,blocktype,variant) + "\n"
#now remove holes
unit = size
while (unit >= 3):<|fim▁hole|> y=yorg+j
genstring += GenCuboid(x+w,y+w,zorg,(x+2*w)-1,(y+2*w)-1,zorg+size-1,holetype) + "\n"
y=yorg+i
z=zorg+j
genstring += GenCuboid(xorg,y+w,z+w,xorg+size-1, (y+2*w)-1,(z+2*w)-1,holetype) + "\n"
genstring += GenCuboid(x+w,yorg,z+w,(x+2*w)-1,yorg+size-1,(z+2*w)-1,holetype) + "\n"
unit/=3
return genstring
def GenCuboid(x1, y1, z1, x2, y2, z2, blocktype):
return '<DrawCuboid x1="' + str(x1) + '" y1="' + str(y1) + '" z1="' + str(z1) + '" x2="' + str(x2) + '" y2="' + str(y2) + '" z2="' + str(z2) + '" type="' + blocktype + '"/>'
def GenCuboidWithVariant(x1, y1, z1, x2, y2, z2, blocktype, variant):
return '<DrawCuboid x1="' + str(x1) + '" y1="' + str(y1) + '" z1="' + str(z1) + '" x2="' + str(x2) + '" y2="' + str(y2) + '" z2="' + str(z2) + '" type="' + blocktype + '" variant="' + variant + '"/>'
missionXML='''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<Mission xmlns="http://ProjectMalmo.microsoft.com" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<About>
<Summary>Hello world!</Summary>
</About>
<ServerSection>
<ServerInitialConditions>
<Time>
<StartTime>1000</StartTime>
<AllowPassageOfTime>false</AllowPassageOfTime>
</Time>
<Weather>clear</Weather>
</ServerInitialConditions>
<ServerHandlers>
<FlatWorldGenerator generatorString="3;7,44*49,73,35:1,159:4,95:13,35:13,159:11,95:10,159:14,159:6,35:6,95:6;12;"/>
<DrawingDecorator>
<DrawSphere x="-27" y="70" z="0" radius="30" type="air"/>''' + Menger(-40, 40, -13, 27, "stone", "smooth_granite", "air") + '''
<DrawCuboid x1="-25" y1="39" z1="-2" x2="-29" y2="39" z2="2" type="lava"/>
<DrawCuboid x1="-26" y1="39" z1="-1" x2="-28" y2="39" z2="1" type="obsidian"/>
<DrawBlock x="-27" y="39" z="0" type="diamond_block"/>
</DrawingDecorator>
<ServerQuitFromTimeUp timeLimitMs="30000"/>
<ServerQuitWhenAnyAgentFinishes/>
</ServerHandlers>
</ServerSection>
<AgentSection mode="Survival">
<Name>MalmoTutorialBot</Name>
<AgentStart>
<Placement x="0.5" y="56.0" z="0.5" yaw="90"/>
<Inventory>
<InventoryItem slot="8" type="diamond_pickaxe"/>
</Inventory>
</AgentStart>
<AgentHandlers>
<ObservationFromFullStats/>
<ObservationFromGrid>
<Grid name="floor3x3">
<min x="-1" y="-1" z="-1"/>
<max x="1" y="-1" z="1"/>
</Grid>
</ObservationFromGrid>
<ContinuousMovementCommands turnSpeedDegs="180"/>
<InventoryCommands/>
<AgentQuitFromTouchingBlockType>
<Block type="diamond_block" />
</AgentQuitFromTouchingBlockType>
</AgentHandlers>
</AgentSection>
</Mission>'''
# Create default Malmo objects:
agent_host = MalmoPython.AgentHost()
try:
agent_host.parse( sys.argv )
except RuntimeError as e:
print 'ERROR:',e
print agent_host.getUsage()
exit(1)
if agent_host.receivedArgument("help"):
print agent_host.getUsage()
exit(0)
my_mission = MalmoPython.MissionSpec(missionXML, True)
my_mission_record = MalmoPython.MissionRecordSpec()
# Attempt to start a mission:
max_retries = 3
for retry in range(max_retries):
try:
agent_host.startMission( my_mission, my_mission_record )
break
except RuntimeError as e:
if retry == max_retries - 1:
print "Error starting mission:",e
exit(1)
else:
time.sleep(2)
# Loop until mission starts:
print "Waiting for the mission to start ",
world_state = agent_host.getWorldState()
while not world_state.is_mission_running:
sys.stdout.write(".")
time.sleep(0.1)
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
print
print "Mission running ",
agent_host.sendCommand("hotbar.9 1") #Press the hotbar key
agent_host.sendCommand("hotbar.9 0") #Release hotbar key - agent should now be holding diamond_pickaxe
agent_host.sendCommand("pitch 0.2") #Start looking downward slowly
time.sleep(1) #Wait a second until we are looking in roughly the right direction
agent_host.sendCommand("pitch 0") #Stop tilting the camera
agent_host.sendCommand("move 1") #And start running...
agent_host.sendCommand("attack 1") #Whilst flailing our pickaxe!
jumping = False
# Loop until mission ends:
while world_state.is_mission_running:
sys.stdout.write(".")
time.sleep(0.1)
world_state = agent_host.getWorldState()
for error in world_state.errors:
print "Error:",error.text
if world_state.number_of_observations_since_last_state > 0:
msg = world_state.observations[-1].text
observations = json.loads(msg)
grid = observations.get(u'floor3x3', 0)
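# the 3x3 grid is flattened row-major from the <min> to <max> bounds of
# the ObservationFromGrid element: index 4 is the block directly beneath
# the agent, and index 3 is presumably the block one step ahead given the
# spawn yaw of 90 -- an assumption inferred from the grid bounds above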
if jumping and grid[4]!=u'lava':
agent_host.sendCommand("jump 0")
jumping = False
if grid[3]==u'lava':
agent_host.sendCommand("jump 1")
jumping = True
print
print "Mission ended"
# Mission has ended.<|fim▁end|> | w=unit/3
for i in xrange(0, size, unit):
for j in xrange(0, size, unit):
x=xorg+i |
<|file_name|>graph.py<|end_file_name|><|fim▁begin|>"""
graph.py
-------------
Deal with graph operations. Primarily deal with graphs in (n, 2)
edge list form, and abstract the backend graph library being used.
Currently uses networkx or scipy.sparse.csgraph backend.
"""
import numpy as np
import collections
from . import util
from . import grouping
from . import exceptions
from .constants import log, tol
from .geometry import faces_to_edges
try:
from scipy.sparse import csgraph, coo_matrix
except BaseException as E:
# re-raise exception when used
csgraph = exceptions.ExceptionModule(E)
coo_matrix = exceptions.closure(E)
try:
import networkx as nx
except BaseException as E:
# create a dummy module which will raise the ImportError
# or other exception only when someone tries to use networkx
nx = exceptions.ExceptionModule(E)
def face_adjacency(faces=None,
mesh=None,
return_edges=False):
"""
Returns an (n, 2) list of face indices.
Each pair of faces in the list shares an edge, making them adjacent.
Parameters
-----------
faces : (n, 3) int, or None
Vertex indices representing triangles
mesh : Trimesh object
If passed will used cached edges
instead of generating from faces
return_edges : bool
Return the edges shared by adjacent faces
Returns
----------
adjacency : (m, 2) int
Indexes of faces that are adjacent
edges: (m, 2) int
Only returned if return_edges is True
Indexes of vertices which make up the
edges shared by the adjacent faces
Examples
----------
This is useful for lots of things such as finding
face- connected components:
>>> graph = nx.Graph()
>>> graph.add_edges_from(mesh.face_adjacency)
>>> groups = nx.connected_components(graph)
"""
if mesh is None:
# first generate the list of edges for the current faces
# also return the index for which face the edge is from
edges, edges_face = faces_to_edges(faces, return_index=True)
# make sure edge rows are sorted
edges.sort(axis=1)
else:
# if passed a mesh, used the cached values
edges = mesh.edges_sorted
edges_face = mesh.edges_face
# this will return the indices for duplicate edges
# every edge appears twice in a well constructed mesh
# so for every row in edge_idx:
# edges[edge_idx[*][0]] == edges[edge_idx[*][1]]
# in this call to group rows we discard edges which
# don't occur twice
edge_groups = grouping.group_rows(edges, require_count=2)
if len(edge_groups) == 0:
log.warning('No adjacent faces detected! Did you merge vertices?')
# the pairs of all adjacent faces
# so for every row in face_idx, self.faces[face_idx[*][0]] and
# self.faces[face_idx[*][1]] will share an edge
adjacency = edges_face[edge_groups]
# degenerate faces may appear in adjacency as the same value
nondegenerate = adjacency[:, 0] != adjacency[:, 1]
adjacency = adjacency[nondegenerate]
# sort pairs in-place so we can search for indexes with ordered pairs
adjacency.sort(axis=1)
if return_edges:
adjacency_edges = edges[edge_groups[:, 0][nondegenerate]]
assert len(adjacency_edges) == len(adjacency)
return adjacency, adjacency_edges
return adjacency
def face_adjacency_unshared(mesh):
"""
Return the vertex index of the two vertices not in the shared
edge between two adjacent faces
Parameters
----------
mesh : Trimesh object
Input mesh
Returns
-----------
vid_unshared : (len(mesh.face_adjacency), 2) int
Indexes of mesh.vertices
for degenerate faces without exactly
one unshared vertex per face it will be -1
"""
# the non- shared vertex index is the same shape
# as face_adjacency holding vertex indices vs face indices
vid_unshared = np.zeros_like(mesh.face_adjacency,
dtype=np.int64) - 1
# get the shared edges between adjacent faces
edges = mesh.face_adjacency_edges
# loop through the two columns of face adjacency
for i, fid in enumerate(mesh.face_adjacency.T):
# faces from the current column of face adjacency
faces = mesh.faces[fid]
# should have one True per row of (3,)
# index of vertex not included in shared edge
unshared = np.logical_not(np.logical_or(
faces == edges[:, 0].reshape((-1, 1)),
faces == edges[:, 1].reshape((-1, 1))))
# each row should have exactly one uncontained vertex
row_ok = unshared.sum(axis=1) == 1
# any degenerate row should be ignored
unshared[~row_ok, :] = False
# set the unshared vertex index for the valid rows
vid_unshared[row_ok, i] = faces[unshared]
return vid_unshared
def face_adjacency_radius(mesh):
"""
Compute an approximate radius between adjacent faces.
Parameters
--------------
mesh : trimesh.Trimesh
Returns
-------------
radii : (len(self.face_adjacency),) float
Approximate radius between faces
Parallel faces will have a value of np.inf
span : (len(self.face_adjacency),) float
Perpendicular projection distance of two
unshared vertices onto the shared edge
"""
# solve for the radius of the adjacent faces
# distance
# R = ------------------
# 2 * sin(theta / 2)
nonzero = mesh.face_adjacency_angles > np.radians(.01)
denominator = np.abs(
2.0 * np.sin(mesh.face_adjacency_angles[nonzero] / 2.0))
# consider the distance between the non- shared vertices of the
# face adjacency pair as the key distance
point_pairs = mesh.vertices[mesh.face_adjacency_unshared]
vectors = np.diff(point_pairs,
axis=1).reshape((-1, 3))
# the vertex indices of the shared edge for the adjacency pair
edges = mesh.face_adjacency_edges
# unit vector along the shared edge
edges_vec = util.unitize(np.diff(mesh.vertices[edges],
axis=1).reshape((-1, 3)))
# the vector of the perpendicular projection to the shared edge
perp = np.subtract(
vectors, (util.diagonal_dot(
vectors, edges_vec).reshape(
(-1, 1)) * edges_vec))
# the length of the perpendicular projection
span = util.row_norm(perp)
# complete the values for non- infinite radii
radii = np.ones(len(mesh.face_adjacency)) * np.inf
radii[nonzero] = span[nonzero] / denominator
return radii, span
def vertex_adjacency_graph(mesh):
"""
Returns a networkx graph representing the vertices and
their connections in the mesh.
Parameters
----------
mesh : Trimesh object
Returns
---------
graph : networkx.Graph
Graph representing vertices and edges between
them where vertices are nodes and edges are edges
Examples
----------
This is useful for getting nearby vertices for a given vertex,
potentially for some simple smoothing techniques.
>>> graph = mesh.vertex_adjacency_graph
>>> graph.neighbors(0)
> [1, 3, 4]
"""
g = nx.Graph()
g.add_edges_from(mesh.edges_unique)
return g
def shared_edges(faces_a, faces_b):
"""
Given two sets of faces, find the edges which are in both sets.
Parameters
---------
faces_a : (n, 3) int
Array of faces
faces_b : (m, 3) int
Array of faces
Returns
---------
shared : (p, 2) int
Edges shared between faces
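Examples
---------
A minimal sketch: two triangles sharing the edge (1, 2),
with input values assumed purely for illustration:

>>> shared_edges(np.array([[0, 1, 2]]), np.array([[1, 2, 3]]))
> [[1 2]]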
"""
e_a = np.sort(faces_to_edges(faces_a), axis=1)
e_b = np.sort(faces_to_edges(faces_b), axis=1)
shared = grouping.boolean_rows(
e_a, e_b, operation=np.intersect1d)
return shared
def facets(mesh, engine=None):
"""
Find the list of parallel adjacent faces.
Parameters
-----------
mesh : trimesh.Trimesh
engine : str
Which graph engine to use:
('scipy', 'networkx')
Returns
---------
facets : sequence of (n,) int
Groups of face indexes of
parallel adjacent faces.
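Examples
---------
A hedged usage sketch, assuming a Trimesh instance `mesh`:

>>> parallel_groups = facets(mesh)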
"""
# what is the radius of a circle that passes through the perpendicular
# projection of the vector between the two non- shared vertices
# onto the shared edge, with the face normal from the two adjacent faces
radii = mesh.face_adjacency_radius
# what is the span perpendicular to the shared edge
span = mesh.face_adjacency_span
# a very arbitrary formula for declaring two adjacent faces
# parallel in a way that is hopefully (and anecdotally) robust
# to numeric error
# a common failure mode is two faces that are very narrow with a slight
# angle between them, so here we divide by the perpendicular span
# to penalize very narrow faces, and then square it just for fun
parallel = np.ones(len(radii), dtype=np.bool)
# if span is zero we know faces are small/parallel
nonzero = np.abs(span) > tol.zero
# faces with a radii/span ratio larger than a threshold pass
parallel[nonzero] = (radii[nonzero] /
span[nonzero]) ** 2 > tol.facet_threshold
# run connected components on the parallel faces to group them
components = connected_components(
mesh.face_adjacency[parallel],
nodes=np.arange(len(mesh.faces)),
min_len=2,
engine=engine)
return components
def split(mesh, only_watertight=True, adjacency=None, engine=None, **kwargs):
"""
Split a mesh into multiple meshes from face
connectivity.
If only_watertight is true it will only return
watertight meshes and will attempt to repair
single triangle or quad holes.
Parameters
----------
mesh : trimesh.Trimesh
only_watertight: bool
Only return watertight components
adjacency : (n, 2) int
Face adjacency to override full mesh
engine : str or None
Which graph engine to use
Returns
----------
meshes : (m,) trimesh.Trimesh
Results of splitting
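Examples
----------
A usage sketch, assuming a Trimesh instance `mesh`:

>>> pieces = split(mesh, only_watertight=True)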
"""
if adjacency is None:
adjacency = mesh.face_adjacency
# if only watertight the shortest thing we can split has 3 triangles
if only_watertight:
min_len = 4
else:
min_len = 1
components = connected_components(
edges=adjacency,
nodes=np.arange(len(mesh.faces)),
min_len=min_len,
engine=engine)
meshes = mesh.submesh(
components, only_watertight=only_watertight, **kwargs)
return meshes
def connected_components(edges,
min_len=1,
nodes=None,
engine=None):
"""
Find groups of connected nodes from an edge list.
Parameters
-----------
edges : (n, 2) int
Edges between nodes
nodes : (m, ) int or None
List of nodes that exist
min_len : int
Minimum length of a component group to return
engine : str or None
Which graph engine to use (None for automatic):
(None, 'networkx', 'scipy')
Returns
-----------
components : (n,) sequence of (*,) int
Nodes which are connected
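Examples
-----------
A minimal sketch on two disjoint edges; the grouping shown is
illustrative and ordering may vary by engine:

>>> connected_components(np.array([[0, 1], [2, 3]]))
> [[0 1]
> [2 3]]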
"""
def components_networkx():
"""
Find connected components using networkx
"""
graph = nx.from_edgelist(edges)
# make sure every face has a node, so single triangles
# aren't discarded (as they aren't adjacent to anything)
if min_len <= 1:
graph.add_nodes_from(nodes)
iterable = nx.connected_components(graph)
# newer versions of networkx return sets rather than lists
components = np.array(
[np.array(list(i), dtype=np.int64)
for i in iterable if len(i) >= min_len])
return components
def components_csgraph():
"""
Find connected components using scipy.sparse.csgraph
"""
# label each node
labels = connected_component_labels(edges,
node_count=node_count)
# we have to remove results that contain nodes outside
# of the specified node set and reindex
contained = np.zeros(node_count, dtype=np.bool)
contained[nodes] = True
index = np.arange(node_count, dtype=np.int64)[contained]
components = grouping.group(labels[contained], min_len=min_len)
components = np.array([index[c] for c in components])
return components
# check input edges
edges = np.asanyarray(edges, dtype=np.int64)
# if no nodes were specified just use unique
if nodes is None:
nodes = np.unique(edges)
# exit early if we have no nodes
if len(nodes) == 0:
return np.array([])
elif len(edges) == 0:
if min_len <= 1:
return np.reshape(nodes, (-1, 1))
else:
return np.array([])
if not util.is_shape(edges, (-1, 2)):
raise ValueError('edges must be (n, 2)!')
# find the maximum index referenced in either nodes or edges
counts = [0]
if len(edges) > 0:
counts.append(edges.max())
if len(nodes) > 0:
counts.append(nodes.max())
node_count = np.max(counts) + 1
# remove edges that don't have both nodes in the node set
mask = np.zeros(node_count, dtype=np.bool)
mask[nodes] = True
edges_ok = mask[edges].all(axis=1)
edges = edges[edges_ok]
# networkx is pure python and is usually 5-10x slower than scipy
engines = collections.OrderedDict((
('scipy', components_csgraph),
('networkx', components_networkx)))
# if a graph engine has explicitly been requested use it
if engine in engines:
return engines[engine]()
# otherwise, go through our ordered list of graph engines
# until we get to one that has actually been installed
for function in engines.values():
try:
return function()
# will be raised if the library didn't import correctly above
except NameError:
continue
raise ImportError('No connected component engines available!')
def connected_component_labels(edges, node_count=None):
"""
Label graph nodes from an edge list, using scipy.sparse.csgraph
Parameters
-----------
edges : (n, 2) int
Edges of a graph
node_count : int, or None
The largest node in the graph.
Returns
----------
labels : (node_count,) int
Component labels for each node
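Examples
----------
A minimal sketch; the label values themselves are arbitrary:

>>> connected_component_labels(np.array([[0, 1], [2, 3]]), node_count=4)
> [0 0 1 1]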
"""
matrix = edges_to_coo(edges, node_count)
body_count, labels = csgraph.connected_components(
matrix, directed=False)
if node_count is not None:
assert len(labels) == node_count
return labels
def split_traversal(traversal,
edges,
edges_hash=None):
"""
Given a traversal as a list of nodes, split the traversal
if a sequential index pair is not in the given edges.
Parameters
--------------
edges : (n, 2) int
Graph edge indexes
traversal : (m,) int
Traversal through edges
edge_hash : (n,)
Edges sorted on axis=1 and
passed to grouping.hashable_rows
Returns
---------------
split : sequence of (p,) int
"""
traversal = np.asanyarray(traversal,
dtype=np.int64)
# hash edge rows for contains checks
if edges_hash is None:
edges_hash = grouping.hashable_rows(
np.sort(edges, axis=1))
# turn the (n,) traversal into (n-1, 2) edges
trav_edge = np.column_stack((traversal[:-1],
traversal[1:]))
# hash each edge so we can compare to edge set
trav_hash = grouping.hashable_rows(
np.sort(trav_edge, axis=1))
# check if each edge is contained in edge set
contained = np.in1d(trav_hash, edges_hash)
# exit early if every edge of traversal exists
if contained.all():
# just reshape one traversal
split = [traversal]
else:
# find contiguous groups of contained edges
blocks = grouping.blocks(contained,
min_len=1,
only_nonzero=True)
# turn edges back in to sequence of traversals
split = [np.append(trav_edge[b][:, 0],
trav_edge[b[-1]][1])
for b in blocks]
# close traversals if necessary
for i, t in enumerate(split):
# make sure elements of sequence are numpy arrays
split[i] = np.asanyarray(split[i], dtype=np.int64)
# don't close if its a single edge
if len(t) <= 2:
continue
# make sure it's not already closed
edge = np.sort([t[0], t[-1]])
if edge.ptp() == 0:
continue
close = grouping.hashable_rows(edge.reshape((1, 2)))[0]
# if we need the edge add it
if close in edges_hash:
split[i] = np.append(t, t[0]).astype(np.int64)
result = np.array(split)
return result
def fill_traversals(traversals, edges, edges_hash=None):
"""
Convert a traversal of a list of edges into a sequence of
traversals where every pair of consecutive node indexes
is an edge in a passed edge list
Parameters
-------------
traversals : sequence of (m,) int
Node indexes of traversals of a graph
edges : (n, 2) int
Pairs of connected node indexes
edges_hash : None, or (n,) int
Edges sorted along axis 1 then hashed
using grouping.hashable_rows
Returns
--------------
splits : sequence of (p,) int
Node indexes of connected traversals
"""
# make sure edges are correct type
edges = np.asanyarray(edges, dtype=np.int64)
# make sure edges are sorted
edges.sort(axis=1)
# if there are no traversals just return edges
if len(traversals) == 0:
return edges.copy()
# hash edges for contains checks
if edges_hash is None:
edges_hash = grouping.hashable_rows(edges)
splits = []
for nodes in traversals:
# split traversals to remove edges
# that don't actually exist
splits.extend(split_traversal(
traversal=nodes,
edges=edges,
edges_hash=edges_hash))
# turn the split traversals back into (n, 2) edges
included = util.vstack_empty([np.column_stack((i[:-1], i[1:]))
for i in splits])
if len(included) > 0:
# sort included edges in place
included.sort(axis=1)
# make sure any edges not included in split traversals
# are just added as a length 2 traversal
splits.extend(grouping.boolean_rows(
edges,
included,
operation=np.setdiff1d))
else:
# no edges were included, so our filled traversal
# is just the original edges copied over
splits = edges.copy()
return splits
def traversals(edges, mode='bfs'):
"""
Given an edge list generate a sequence of ordered breadth-first
or depth-first traversals using scipy.csgraph routines.
Parameters
------------
edges : (n, 2) int
Undirected edges of a graph
mode : str
Traversal type, 'bfs' or 'dfs'
Returns
-----------
traversals : (m,) sequence of (p,) int
Ordered DFS or BFS traversals of the graph.
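Examples
-----------
A sketch on a short path graph; the start node is popped from a
set, so the traversal order is not guaranteed:

>>> traversals(np.array([[0, 1], [1, 2]]), mode='bfs')
> [array([0, 1, 2])]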
"""
edges = np.array(edges, dtype=np.int64)
if len(edges) == 0:
return []
elif not util.is_shape(edges, (-1, 2)):
raise ValueError('edges are not (n, 2)!')
# pick the traversal method
mode = str(mode).lower().strip()
if mode == 'bfs':
func = csgraph.breadth_first_order
elif mode == 'dfs':
func = csgraph.depth_first_order
else:<|fim▁hole|> raise ValueError('traversal mode must be either dfs or bfs')
# make sure edges are sorted so we can query
# an ordered pair later
edges.sort(axis=1)
# set of nodes to make sure we get every node
nodes = set(edges.reshape(-1))
# coo_matrix for csgraph routines
graph = edges_to_coo(edges)
# we're going to make a sequence of traversals
traversals = []
while len(nodes) > 0:
# starting at any node
start = nodes.pop()
# get an (n,) ordered traversal
ordered = func(graph,
i_start=start,
return_predecessors=False,
directed=False).astype(np.int64)
traversals.append(ordered)
# remove the nodes we've consumed
nodes.difference_update(ordered)
return traversals
def edges_to_coo(edges, count=None, data=None):
"""
Given an edge list, return a boolean scipy.sparse.coo_matrix
representing the edges in matrix form.
Parameters
------------
edges : (n, 2) int
Edges of a graph
count : int
The total number of nodes in the graph
if None: count = edges.max() + 1
data : (n,) any
Assign data to each edge, if None will
be bool True for each specified edge
Returns
------------
matrix: (count, count) scipy.sparse.coo_matrix
Sparse COO
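Examples
------------
A minimal sketch on two edges:

>>> edges_to_coo(np.array([[0, 1], [1, 2]])).toarray()
> [[False  True False]
> [False False  True]
> [False False False]]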
"""
edges = np.asanyarray(edges, dtype=np.int64)
if not (len(edges) == 0 or
util.is_shape(edges, (-1, 2))):
raise ValueError('edges must be (n, 2)!')
# if count isn't specified just set it to largest
# value referenced in edges
if count is None:
count = edges.max() + 1
count = int(count)
# if no data is specified set every specified edge
# to True
if data is None:
data = np.ones(len(edges), dtype=np.bool)
matrix = coo_matrix((data, edges.T),
dtype=data.dtype,
shape=(count, count))
return matrix
def neighbors(edges, max_index=None, directed=False):
"""
Find the neighbors for each node in an edgelist graph.
TODO : re-write this with sparse matrix operations
Parameters
------------
edges : (n, 2) int
Connected nodes
directed : bool
If True, only connect edges in one direction
Returns
---------
neighbors : sequence
Vertex index corresponds to set of other vertex indices
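Examples
---------
A minimal sketch; ordering inside each list is not guaranteed:

>>> neighbors(np.array([[0, 1], [1, 2]]))
> [[1], [0, 2], [1]]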
"""
neighbors = collections.defaultdict(set)
if directed:
[neighbors[edge[0]].add(edge[1])
for edge in edges]
else:
[(neighbors[edge[0]].add(edge[1]),
neighbors[edge[1]].add(edge[0]))
for edge in edges]
if max_index is None:
max_index = edges.max() + 1
array = [list(neighbors[i]) for i in range(max_index)]
return array
def smoothed(mesh, angle=None, facet_minarea=15):
"""
Return a non- watertight version of the mesh which
will render nicely with smooth shading by
disconnecting faces at sharp angles to each other.
Parameters
-----------
mesh : trimesh.Trimesh
Source geometry
angle : float or None
Angle in radians face pairs with angles
smaller than this will appear smoothed
facet_minarea : float or None
Minimum area fraction to consider
IE for `facets_minarea=25` only facets larger
than `mesh.area / 25` will be considered.
Returns
---------
smooth : trimesh.Trimesh
Geometry with disconnected face patches
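Examples
---------
A usage sketch, assuming a Trimesh instance `mesh`:

>>> smooth = smoothed(mesh, angle=np.radians(20))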
"""
if angle is None:
angle = np.radians(30)
# if the mesh has no adjacent faces return a copy
if len(mesh.face_adjacency) == 0:
return mesh.copy()
# face pairs below angle threshold
angle_ok = mesh.face_adjacency_angles <= angle
# subset of face adjacency
adjacency = mesh.face_adjacency[angle_ok]
# coplanar groups of faces
facets = []
nodes = None
# collect coplanar regions for smoothing
if facet_minarea is not None:
areas = mesh.area_faces
min_area = mesh.area / facet_minarea
try:
# we can survive not knowing facets
# exclude facets with few faces
facets = [f for f in mesh.facets
if areas[f].sum() > min_area]
if len(facets) > 0:
# mask for removing adjacency pairs where
# one of the faces is contained in a facet
mask = np.ones(len(mesh.faces),
dtype=np.bool)
mask[np.hstack(facets)] = False
# apply the mask to adjacency
adjacency = adjacency[
mask[adjacency].all(axis=1)]
# nodes are no longer every faces
nodes = np.unique(adjacency)
except BaseException:
log.warning('failed to calculate facets',
exc_info=True)
# run connected components on facet adjacency
components = connected_components(
adjacency,
min_len=1,
nodes=nodes).tolist()
# add back coplanar groups if any exist
if len(facets) > 0:
components.extend(facets)
if len(components) == 0:
# if no components for some reason
# just return a copy of the original mesh
return mesh.copy()
# add back any faces that were missed
unique = np.unique(np.hstack(components))
if len(unique) != len(mesh.faces):
# things like single loose faces
# or groups below the facet_minarea threshold
broke = np.setdiff1d(
np.arange(len(mesh.faces)), unique)
components.extend(broke.reshape((-1, 1)))
# get a submesh as a single appended Trimesh
smooth = mesh.submesh(components,
only_watertight=False,
append=True)
# store face indices from original mesh
smooth.metadata['original_components'] = components
# smoothed should have exactly the same number of faces
if len(smooth.faces) != len(mesh.faces):
log.warning('face count in smooth wrong!')
return smooth
def is_watertight(edges, edges_sorted=None):
"""
Parameters
-----------
edges : (n, 2) int
List of vertex indices
edges_sorted : (n, 2) int
Pass vertex indices sorted on axis 1 as a speedup
Returns
---------
watertight : boolean
Whether every edge is shared by an even
number of faces
winding : boolean
Whether every shared edge is reversed
"""
# passing edges_sorted is a speedup only
if edges_sorted is None:
edges_sorted = np.sort(edges, axis=1)
# group sorted edges
groups = grouping.group_rows(
edges_sorted, require_count=2)
watertight = bool((len(groups) * 2) == len(edges))
# are opposing edges reversed
opposing = edges[groups].reshape((-1, 4))[:, 1:3].T
# wrap the weird numpy bool
winding = bool(np.equal(*opposing).all())
return watertight, winding
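# Sketch: for a closed, consistently wound surface both values are True
# (assumes `mesh.edges` comes from a Trimesh):
#   watertight, winding = is_watertight(mesh.edges)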
def graph_to_svg(graph):
"""
Turn a networkx graph into an SVG string
using graphviz `dot`.
    Parameters
    ----------
    graph : networkx graph
      Source graph to lay out

    Returns
    ---------
    svg : string
      Pictorial layout in SVG format
"""
import tempfile
import subprocess
with tempfile.NamedTemporaryFile() as dot_file:
nx.drawing.nx_agraph.write_dot(graph, dot_file.name)
svg = subprocess.check_output(['dot', dot_file.name, '-Tsvg'])
return svg
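# Note: this requires the Graphviz `dot` binary on PATH and the
# pygraphviz backend used by nx_agraph; both are optional dependencies.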
def multigraph_paths(G, source, cutoff=None):
"""
For a networkx MultiDiGraph, find all paths from a source node
to leaf nodes. This function returns edge instance numbers
in addition to nodes, unlike networkx.all_simple_paths.
Parameters
---------------
G : networkx.MultiDiGraph
Graph to evaluate
source : hashable
Node to start traversal at
cutoff : int
Number of nodes to visit
If None will visit all nodes
Returns
----------
traversals : (n,) list of [(node, edge instance index), ] paths
Traversals of the multigraph
"""
if cutoff is None:
cutoff = (len(G.edges()) * len(G.nodes())) + 1
# the path starts at the node specified
current = [(source, 0)]
# traversals we need to go back and do
queue = []
# completed paths
traversals = []
for i in range(cutoff):
# paths are stored as (node, instance) so
# get the node of the last place visited
current_node = current[-1][0]
# get all the children of the current node
child = G[current_node]
if len(child) == 0:
# we have no children, so we are at the end of this path
# save the path as a completed traversal
traversals.append(current)
# if there is nothing on the queue, we are done
if len(queue) == 0:
break
# otherwise continue traversing with the next path
# on the queue
current = queue.pop()
else:
            # the node has children; the first (node, instance)
            # pair continues this traversal, the rest are queued
            start = True
# iterate through child nodes and edge instances
for node in child.keys():
for instance in child[node].keys():
if start:
# if this is the first edge, keep it on the
# current traversal and save the others for later
current.append((node, instance))
start = False
else:
# this child has multiple instances
# so we will need to traverse them multiple times
# we appended a node to current, so only take the
# first n-1 visits
queue.append(current[:-1] + [(node, instance)])
return traversals
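# Sketch: with two parallel edges a->b (edge instances 0 and 1),
# multigraph_paths(G, 'a') yields one traversal per instance:
#   [[('a', 0), ('b', 0)], [('a', 0), ('b', 1)]]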
def multigraph_collect(G, traversal, attrib=None):
"""
Given a MultiDiGraph traversal, collect attributes along it.
    Parameters
    -------------
    G : networkx.MultiDiGraph
      Graph to collect attributes from
    traversal : (n,) list
      Path as (node, edge instance) tuples
    attrib : hashable or None
      Dict key to collect; if None, return all attributes

    Returns
    -------------
    collected : (len(traversal) - 1,) list
      Attributes of each edge along the traversal
    """
collected = []
for u, v in util.pairwise(traversal):
attribs = G[u[0]][v[0]][v[1]]
if attrib is None:
collected.append(attribs)
else:
collected.append(attribs[attrib])
return collected<|fim▁end|> | |
<|file_name|>web.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import time
import random
import socket
from flask import Flask, render_template, redirect, url_for, request, jsonify
import config
log = None
# classes
class Agent():
def __init__(self, ip, cw=True, node=None, state='initial'):
self.ip = ip
self.cw = cw
self.state = state
self.node = node
def __repr__(self):
return 'Agent: ip {}, direction CW: {}, state: {}, node: {}'.format(self.ip, self.cw, self.state, self.node)
class Node():
def __init__(self, label):
assert isinstance(label, int), 'Node constructor accepts numeric label only'
self.label = label
# list of agent ips in the current node
self.agents = []
def add_agent(self, agent_ip):
# add an agent ip to the list of agents in the current node
self.agents.append(agent_ip)
def __repr__(self):
return '<Node {}: [{}]>'.format(self.label, ' | '.join(str(app.agents[ip]) for ip in self.agents))
class Ring():
def __init__(self, n_nodes):
self._nodes = [Node(i) for i in range(n_nodes)]
self.n_nodes = n_nodes
def get_node(self, label):
return self._nodes[label]
def next(self, agent):
"""Return next node."""
i = 1 if agent.cw else -1
return self._nodes[(agent.node+i) % self.n_nodes]
def prev(self, agent):
"""Return prev node."""
i = -1 if agent.cw else 1
return self._nodes[(agent.node+i) % self.n_nodes]
def blocked(self, agent):
"""Check if the next node is blocked."""
next_node = self.next(agent)
if agent.ip == app.malicious_ip:
return len(next_node.agents) > 0
else:
return app.malicious_ip in next_node.agents
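    # Blocking rule sketch: the malicious agent is blocked by *any*
    # occupant of the next node, while honest agents are blocked only
    # when the malicious agent occupies it.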
def random_place_agents(self):
"""Randomly place agents in the ring."""
#a = app.agents[app.agents_ips[0]]
#a.node = 1
#self.get_node(1).add_agent(a.ip)
#a.cw = False
#a = app.agents[app.agents_ips[1]]
#a.node = 2
#self.get_node(2).add_agent(a.ip)
#a.cw = False
#a = app.agents[app.agents_ips[2]]
#a.node = 4
#self.get_node(4).add_agent(a.ip)
#a.cw = True
#a = app.agents[app.malicious_ip]
#a.node = 6
#self.get_node(6).add_agent(a.ip)
#a.cw = True
# True = clockwise
# False = counterclockwise
a = app.agents[app.agents_ips[0]]
a.node = 3
self.get_node(3).add_agent(a.ip)
a.cw = False
a = app.agents[app.agents_ips[1]]
a.node = 6
self.get_node(6).add_agent(a.ip)
a.cw = False
a = app.agents[app.agents_ips[2]]
a.node = 5
self.get_node(5).add_agent(a.ip)
a.cw = True
a = app.agents[app.malicious_ip]
a.node = 1
self.get_node(1).add_agent(a.ip)
a.cw = False
return
# at most 1 agent per node, randomize direction in case of unoriented ring
for agent, node in zip(app.agents.values(), random.sample(self._nodes, len(app.agents.keys()))):
agent.cw = True if config.oriented else random.choice([True, False])
agent.node = node.label
self.get_node(node.label).add_agent(agent.ip)
def dump(self):
ring = dict()
for node in self._nodes:
ring[str(node.label)] = [(app.agents[a].ip, str(app.agents[a].cw), app.agents[a].state, app.agents[a].node) for a in node.agents]
return ring
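    # dump() shape sketch (values illustrative):
    #   {"0": [("10.0.0.1", "True", "initial", 0)], "1": [], ...}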
def __repr__(self):
return ', '.join(str(node) for node in self._nodes)
class MTFGRServer(Flask):
'''Wrapper around the Flask class used to store additional information.'''
def __init__(self, *args, **kwargs):
super(MTFGRServer, self).__init__(*args, **kwargs)
self.ring = Ring(config.n_nodes)
self.agents_ips = config.agents_ips
self.agents = dict()
self.malicious_ip = config.malicious_ip
self.oriented = config.oriented
self.started = False
# instance of the web application
app = MTFGRServer(__name__)
<|fim▁hole|># auxiliary functions
def _reset():
"""Reset the global variables by parsing again the config file."""
import config
global log
app.ring = Ring(config.n_nodes)
app.agents = {ip: Agent(ip) for ip in config.agents_ips}
app.malicious_ip = config.malicious_ip
app.agents[app.malicious_ip] = Agent(app.malicious_ip, state='malicious')
app.oriented = config.oriented
app.started = False
app.ring.random_place_agents()
log = open('/tmp/ev3.log', 'a')
    log.write('\n\nIIIIIIIIIINNNNNNNNNIIIIIIIIIIITTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTTT\n\n')
# views
def _communicate_start():
"""Instruct each bot to start."""
port = 31337
for ip in app.agents_ips[::-1] + [app.malicious_ip]:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((ip, port))
# s.sendall(b'Go!\n')
s.close()
@app.route('/start')
def start():
app.started = True
try:
_communicate_start()
except Exception:
pass
return redirect(url_for('index'))
@app.route('/reset')
def reset():
_reset()
return redirect(url_for('index'))
@app.route('/status')
def global_status():
"""Get the whole ring status."""
return jsonify(**app.ring.dump())
@app.route('/get/<agent_ip>')
def get_status(agent_ip):
"""Get the list of agents in the current node."""
agent = app.agents[agent_ip]
    # also report whether the agent's next node is blocked
return jsonify(agents=[app.agents[ip].state for ip in app.ring.get_node(agent.node).agents if ip != agent_ip],
blocked=app.ring.blocked(agent))
@app.route('/set/<agent_ip>', methods=['GET'])
def set_status(agent_ip):
global log
turned = request.args.get('turned') == '1'
state = request.args.get('state')
stopped = request.args.get('stopped') == '1'
# logging
sss = '\n\n[Request] {} - ip: {}, turned: {}, state: {}, stopped: {}\n'.format(time.time(), agent_ip, turned, state, stopped)
log.write(sss)
log.write('[Status pre]\n')
log.write(str(app.ring.dump()))
agent = app.agents[agent_ip]
agent.state = state
agent.cw = agent.cw if not turned else not agent.cw
blocked = app.ring.blocked(agent)
if not blocked and not stopped:
# advance to the next node if not blocked
node = app.ring.get_node(agent.node)
next_node = app.ring.next(agent)
agent.node = next_node.label
node.agents.remove(agent_ip)
next_node.add_agent(agent_ip)
log.write('\n[Status post]\n')
log.write(str(app.ring.dump()))
return jsonify(blocked=blocked)
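# Agent protocol sketch (query values illustrative): an agent reports
#   GET /set/<ip>?turned=0&state=searching&stopped=0
# and receives {"blocked": bool}; it can poll GET /get/<ip> for the
# states of co-located agents before deciding its next move.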
@app.route('/')
def index():
return render_template('base.html', started=app.started)
def main():
app.run(host='0.0.0.0', debug=config.debug)
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>construct_erc2-chromatin15state-all_files.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import os
output_dir = "erc2-chromatin15state-all-files"
if not os.path.exists(output_dir):
sys.stderr.write("Creating dir [%s]...\n" % (output_dir))
os.makedirs(output_dir)
prefix = "/home/cbreeze/for_Alex"
suffix = "_15_coreMarks_mnemonics.bed"
marks = [ '1_TssA',
'2_TssAFlnk',
'3_TxFlnk',
'4_Tx',
'5_TxWk',
'6_EnhG',
'7_Enh',
'8_ZNF/Rpts',
'9_Het',
'10_TssBiv',
'11_BivFlnk',
'12_EnhBiv',
'13_ReprPC',
'14_ReprPCWk',
'15_Quies' ]
all = [ 'E001',
'E002',
'E003',
'E004',
'E005',
'E006',
'E007',
'E008',
'E009',
'E010',
'E011',
'E012',
'E013',
'E014',
'E015',
'E016',
'E017',
'E018',
'E019',
'E020',
'E021',
'E022',
'E023',
'E024',
'E025',
'E026',
'E027',
'E028',
'E029',
'E030',
'E031',
'E032',<|fim▁hole|> 'E034',
'E035',
'E036',
'E037',
'E038',
'E039',
'E040',
'E041',
'E042',
'E043',
'E044',
'E045',
'E046',
'E047',
'E048',
'E049',
'E050',
'E051',
'E052',
'E053',
'E054',
'E055',
'E056',
'E057',
'E058',
'E059',
'E061',
'E062',
'E063',
'E065',
'E066',
'E067',
'E068',
'E069',
'E070',
'E071',
'E072',
'E073',
'E074',
'E075',
'E076',
'E077',
'E078',
'E079',
'E080',
'E081',
'E082',
'E083',
'E084',
'E085',
'E086',
'E087',
'E088',
'E089',
'E090',
'E091',
'E092',
'E093',
'E094',
'E095',
'E096',
'E097',
'E098',
'E099',
'E100',
'E101',
'E102',
'E103',
'E104',
'E105',
'E106',
'E107',
'E108',
'E109',
'E110',
'E111',
'E112',
'E113',
'E114',
'E115',
'E116',
'E117',
'E118',
'E119',
'E120',
'E121',
'E122',
'E123',
'E124',
'E125',
'E126',
'E127',
'E128',
'E129' ]
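# Expected mnemonics input sketch: one tab-separated BED-like line per
# region, e.g. "chr1<TAB>0<TAB>10600<TAB>15_Quies"; the fourth column
# must match one of the entries in `marks`.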
# prefix, suffix, marks, all
for sample in all:
fns = {}
fhs = {}
# set up output file handles for all combinations of per-sample and marks
for mark in marks:
fns[mark] = os.path.join(output_dir, "%s_%s.bed" % (sample, mark.replace('/', '-')))
sys.stderr.write("Setting up output handle to [%s]...\n" % (fns[mark]))
fhs[mark] = open(fns[mark], "w")
# split per-sample mnemonics to per-sample, per-mark file
psm_fn = "%s/%s%s" % (prefix, sample, suffix)
sys.stderr.write("Reading PSM [%s]...\n" % (psm_fn))
with open(psm_fn, "r") as psm_fh:
for line in psm_fh:
(chr, start, stop, state_call) = line.strip().split('\t')
fhs[state_call].write('\t'.join([chr, start, stop]) + '\n')
# close handles
for mark in marks:
sys.stderr.write("Closing output handle to [%s]...\n" % (fns[mark]))
fhs[mark].close()
fns[mark] = None
fhs[mark] = None<|fim▁end|> | 'E033', |
<|file_name|>transfer.py<|end_file_name|><|fim▁begin|>import logging
import multiprocessing
import os
from mimetypes import guess_type
from django.conf import settings
from django.core.cache import cache
from django.db import connection
from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.upload import S3UploadBackend, upload_image_to_s3
from zerver.models import Attachment, RealmEmoji, UserProfile
s3backend = S3UploadBackend()
def transfer_uploads_to_s3(processes: int) -> None:
# TODO: Eventually, we'll want to add realm icon and logo
transfer_avatars_to_s3(processes)
transfer_message_files_to_s3(processes)
transfer_emoji_to_s3(processes)
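# Invocation sketch: typically driven from a management command, e.g.
#   transfer_uploads_to_s3(processes=4)
# (any wrapping command name is assumed, not defined in this module).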
def _transfer_avatar_to_s3(user: UserProfile) -> None:
avatar_path = user_avatar_path(user)
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", avatar_path) + ".original"
try:
with open(file_path, "rb") as f:
s3backend.upload_avatar_image(f, user, user)
logging.info("Uploaded avatar for %s in realm %s", user.id, user.realm.name)
except FileNotFoundError:
pass
def transfer_avatars_to_s3(processes: int) -> None:
users = list(UserProfile.objects.all())
if processes == 1:
for user in users:
_transfer_avatar_to_s3(user)
else: # nocoverage
connection.close()
cache._cache.disconnect_all()
with multiprocessing.Pool(processes) as p:
for out in p.imap_unordered(_transfer_avatar_to_s3, users):
pass
def _transfer_message_files_to_s3(attachment: Attachment) -> None:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "files", attachment.path_id)
try:
with open(file_path, "rb") as f:
guessed_type = guess_type(attachment.file_name)[0]
upload_image_to_s3(
s3backend.uploads_bucket,
attachment.path_id,
guessed_type,
attachment.owner,
f.read(),
)
logging.info("Uploaded message file in path %s", file_path)
except FileNotFoundError: # nocoverage
pass
def transfer_message_files_to_s3(processes: int) -> None:<|fim▁hole|> for attachment in attachments:
_transfer_message_files_to_s3(attachment)
else: # nocoverage
connection.close()
cache._cache.disconnect_all()
with multiprocessing.Pool(processes) as p:
for out in p.imap_unordered(_transfer_message_files_to_s3, attachments):
pass
def _transfer_emoji_to_s3(realm_emoji: RealmEmoji) -> None:
if not realm_emoji.file_name or not realm_emoji.author:
return # nocoverage
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
realm_id=realm_emoji.realm.id,
emoji_file_name=realm_emoji.file_name,
)
emoji_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_path) + ".original"
try:
with open(emoji_path, "rb") as f:
s3backend.upload_emoji_image(f, realm_emoji.file_name, realm_emoji.author)
logging.info("Uploaded emoji file in path %s", emoji_path)
except FileNotFoundError: # nocoverage
pass
def transfer_emoji_to_s3(processes: int) -> None:
realm_emojis = list(RealmEmoji.objects.filter())
if processes == 1:
for realm_emoji in realm_emojis:
_transfer_emoji_to_s3(realm_emoji)
else: # nocoverage
connection.close()
cache._cache.disconnect_all()
with multiprocessing.Pool(processes) as p:
for out in p.imap_unordered(_transfer_emoji_to_s3, realm_emojis):
pass<|fim▁end|> | attachments = list(Attachment.objects.all())
if processes == 1: |
<|file_name|>lambdaFluc2dCreator.py<|end_file_name|><|fim▁begin|>import subprocess
import sys
import os
# This code is meant to manage running multiple instances of my KMCLib codes at the same time,
# in the name of time efficiency
numLambda = 512
numStepsEquilib = 1600000
numStepsAnal = 16000
numStepsSnapshot = 1000
numStepsReq = 16000
sysWidth = 32
sysLength = 32
analInterval = 1
numPasses = 100
timeInterval = 1.0
dataLocation = "dim2Runs/lambdaScan1/"
lambdaMin = 0.05
lambdaMax = 1.25
rateStepSize = (lambdaMax-lambdaMin)/float(numLambda-1)
runningJobs = []
jobIndex = 0
for rateIndex in range(0, numLambda):
currentRate = lambdaMin + rateStepSize*rateIndex
botConc = 0.99
topConc = 0.01
jobInput = "2dSteadyFlow.py "+str(botConc)+" "+str(topConc)+" "+str(currentRate)+" "+str(sysWidth)+" "+str(sysLength)+" "+str(analInterval)+" "+str(numStepsEquilib)+" "+str(numStepsSnapshot)+" "+str(numStepsAnal)+" "+str(numStepsReq)+" "+str(numPasses)+" "+str(timeInterval)+" "+dataLocation+str(rateIndex)+"\n"
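    # Resulting input line sketch (first index, illustrative values):
    #   2dSteadyFlow.py 0.99 0.01 0.05 32 32 1 1600000 1000 16000 16000 100 1.0 dim2Runs/lambdaScan1/0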
with open("jobInputs/testInput."+str(jobIndex), 'w') as f:<|fim▁hole|> f.write(jobInput)
jobIndex += 1<|fim▁end|> | |
<|file_name|>userInfo.js<|end_file_name|><|fim▁begin|>/*=============================
= Views =
=============================*/
App.Views.UserInfo = Backbone.View.extend({
el: $('#user-info'),
events: {
"click #settings" : "clickSettings",
"click #logout" : "clickLogout"
},
clickLogout: function(event) {
$.ajax({
url: 'auth/logout',
type: 'get',<|fim▁hole|> dataType: 'json',
success: function(data) {
if (data.result == 'Success') {
window.location = '/'; //Reload index page
} else {
alert('Logout failed'); //Alert on fail
}
},
error: function (xhr, ajaxOptions, thrownError) {
console.log('Logout failed (hard)');
}
});
},
clickSettings: function(event) {
console.log('Settings clicked');
}
});<|fim▁end|> | |
<|file_name|>IUser.java<|end_file_name|><|fim▁begin|>/*
* jHears, acoustic fingerprinting framework.
* Copyright (C) 2009-2010 Juha Heljoranta.
*
* This file is part of jHears.
*
* jHears is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* jHears is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with jHears. If not, see <http://www.gnu.org/licenses/>.
*/
/**
*
*/
package org.jhears.server;
import java.util.Map;
<|fim▁hole|>
Long getId();
Map<String, String> getProperties();
}<|fim▁end|> | public interface IUser {
String getName(); |
<|file_name|>jmlib.js<|end_file_name|><|fim▁begin|>'use strict';
/*
* jmlib-js - Portable JuggleMaster Library (JavaScript Version)
* Version 2.0
* (C) Per Johan Groland 2006-2016
*
* Based on JMLib 2.0, (C) Per Johan Groland and Gary Briggs
*
* Based on JuggleMaster Version 1.60
* Copyright (c) 1995-1996 Ken Matsuoka
*
* You may redistribute and/or modify JMLib_js under the terms of the
* Modified BSD License as published in various places online or in the
* COPYING.jmlib file in the package you downloaded.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* Modified BSD License for more details.
*/
/* eslint-disable no-var, eqeqeq, no-constant-condition */
var Ball = require('./ball'),
Arm = require('./arm'),
Hand = require('./hand'),
validator = require('./validator');
// JMLib class
function JMLib(errorCallback) {
var i;
this.errorCallback = errorCallback;
// public variables
this.ap = new Arm();
this.rhand = new Ball();
this.lhand = new Ball();
this.handpoly = new Hand();
this.handpoly_ex = new Hand();
this.b = new Array(JMLib.BMAX);
for (i = 0; i < JMLib.BMAX; i++) {
this.b[i] = new Ball();
}
this.dpm = 0;
this.gx_max = 0;
this.gx_min = 0;
this.gy_max = 0;
this.gy_min = 0;
this.imageWidth = 0;
this.imageHeight = 0;
this.status = 0;
// protected variables
this.balln = 0;
this.bm1 = 0;
this.arm_x = 0;
this.arm_y = 0;
this.hand_x = 0;
this.hand_y = 0;
this.horCenter = 0;
this.verCenter = 0;
this.styledata = new Array(JMLib.STYLEMAX * 4);
this.style_len = 0;
this.ga = 0.0;
this.dwell_ratio = 0.0;
this.height_ratio = 0.0;
this.base = 0;
this.mirror = 0;
this.time_count = 0;
this.time_period = 0;
this.cSpeed = 0.0;
this.beep = 0;
this.syn = 0;
this.hand_on = 0;
this.fpu = true;
this.tw = 0;
this.aw = 0;
this.pmax = 0;
this.lastError = null;
//JML_INT32 patt[LMAX][MMAX];
this.patt = new Array(JMLib.LMAX);
for (i = 0; i < JMLib.LMAX; i++) {
this.patt[i] = new Array(JMLib.MMAX);
for (var j = 0; j < JMLib.MMAX; j++) {
this.patt[i][j] = 0;
}
}
this.patts = new Array(JMLib.LMAX);
this.pattw = 0;
this.r = new Array(JMLib.LMAX * 2);
this.smode = 0;
this.high0 = new Array(JMLib.BMAX + 1);
this.high = new Array(JMLib.BMAX + 1);
this.kw0 = 0; // XR/KW [m]
this.siteswap = ""; // The current siteswap
this.pattname = ""; // The name of the current pattern
this.steps = new Array(JMLib.LMAX); // used to print the site on screen
this.stylename = ""; // The name of the current style
// Scaling
this.scalingMethod = 0;
this.scaleImageWidth = 0;
this.scaleImageHeight = 0;
this.possible_styles = new Array();
this.possible_styles[0] = "Normal";
this.possible_styles[1] = "Reverse";
this.possible_styles[2] = "Shower";
this.possible_styles[3] = "Mills Mess";
this.possible_styles[4] = "Center";
this.possible_styles[5] = "Windmill";
//this.possible_styles[6] = "Random";
//this.self = this;
this.initialize();
}
// Misc. utility functions
JMLib.max = function (a, b) {
if (a > b) return a;
return b;
};
JMLib.min = function (a, b) {
if (a < b) return a;
return b;
};
JMLib.jijo = function (x) {
return x * x;
};
// Calculates |x|
JMLib.xabs = function (x) {
if (x < 0) return -x;
return x;
};
// Calculates x^y
JMLib.xpow = function (x, y) {
for (var i = 0; i < y - 1; i++) {
x *= y;
//document.write("i=" + i + " x=" + x + "<br>");
}
return x;
};
JMLib.random = function (x) {
return Math.floor(x * Math.random() % 1);
};
// For status
JMLib.ST_NONE = 0;
JMLib.ST_PAUSE = 1;
JMLib.ST_JUGGLE = 2;
// Misc. constants
JMLib.KW = 0.25;
JMLib.DW = 290; // max of dpm
/* low mem profile
JMLib.XR = 255; // Accuracy of x axis, set higher for large screens 255
JMLib.BMAX = 35; // max number of balls
JMLib.MMAX = 11; // Max multiplex, double+1
*/
// high mem profile
JMLib.XR = 1024; // Accuracy of x axis, set higher for large screens 255
JMLib.BMAX = 630; // max number of balls
JMLib.MMAX = 71; // Max multiplex, double+1
//
JMLib.LMAX = 76; // max column of a pattern
JMLib.OBJECT_HAND = 0x01;
JMLib.OBJECT_UNDER = 0x02;
JMLib.OBJECT_MOVE = 0x04;
JMLib.OBJECT_MOVE2 = 0x08;
JMLib.STYLEMAX = 3000; // maximum length of style data
JMLib.JML_MAX_SECTIONLEN = 40;
JMLib.JML_MAX_NAMELEN = 56;
JMLib.JML_MAX_SITELEN = 56;
JMLib.JML_MAX_STYLELEN = 56;
// high mem profile
//JMLib.JML_MAX_STYLELEN = 500;
JMLib.SPEED_MAX = 2.0;
JMLib.SPEED_MIN = 0.1;
JMLib.SPEED_DEF = 1.0;
JMLib.HR_MAX = 1.00;
JMLib.HR_MIN = 0.04;
JMLib.HR_DEF = 0.17;
JMLib.DR_MAX = 0.90;
JMLib.DR_MIN = 0.10;
JMLib.DR_DEF = 0.50;
JMLib.FS_MAX = 10;
JMLib.FS_MIN = 1;
JMLib.FS_DEF = 1;
// Scaling methods
JMLib.SCALING_METHOD_CLASSIC = 1;
JMLib.SCALING_METHOD_DYNAMIC = 2;
JMLib.prototype.getSiteposStart = function () {
if (this.syn && this.time_period % 2 == 1) {
return this.steps[this.time_period - 1];
}
return this.steps[this.time_period];
};
JMLib.prototype.getSiteposStop = function () {
if (this.syn && this.time_period % 2 == 0) {
return this.steps[this.time_period + 2];
}
return this.steps[this.time_period + 1];
};
JMLib.prototype.getSiteposLen = function () {
return this.getSiteposStop() - this.getSiteposStart();
};
JMLib.prototype.getiterations = function () {
return this.dpm;
};
JMLib.prototype.initialize = function () {
// Set default values
this.ga = 9.8;
this.dwell_ratio = 0.5;
this.height_ratio = 0.20;
this.mirror = 0;
this.cSpeed = JMLib.SPEED_DEF;
this.syn = 0;
this.hand_on = 1;
this.hand_x = 0;
this.hand_y = 0;
this.scalingMethod = JMLib.SCALING_METHOD_CLASSIC;
this.smode = 50.0;
this.status = JMLib.ST_NONE;
//fixme: add random number seed here
this.setWindowSize(480, 400);
this.setPatternDefault();
};
JMLib.prototype.shutdown = function () {};
JMLib.prototype.error = function (msg) {
this.lastError = msg;
if (this.errorCallback) {
this.errorCallback(msg);
}
};
JMLib.prototype.setWindowSizeDefault = function () {
this.setWindowSize(480, 400);
};
JMLib.prototype.setWindowSize = function (width, height) {
if (width <= 0 || height <= 0) return false;
if (this.scalingMethod == JMLib.SCALING_METHOD_DYNAMIC) {
this.scaleImageWidth = width;
this.scaleImageHeight = height;
return true;
}
// store current status and stop juggling
var oldStatus = this.status;
this.stopJuggle();
// set size
this.imageWidth = width;
this.imageHeight = height;
this.horCenter = parseInt(this.imageWidth / 2);
this.verCenter = parseInt(this.imageHeight / 2);
// recalculate pattern
if (oldStatus != JMLib.ST_NONE) this.startJuggle();
// restore state
this.status = oldStatus;
return true;
};
JMLib.prototype.setScalingMethod = function (scalingMethod) {
// no change
if (this.scalingMethod == scalingMethod) return;
if (scalingMethod == JMLib.SCALING_METHOD_DYNAMIC) {
this.scaleImageWidth = this.imageWidth;
this.scaleImageHeight = this.imageHeight;
this.setWindowSizeDefault();
this.scalingMethod = JMLib.SCALING_METHOD_DYNAMIC;
} else {
//SCALING_METHOD_CLASSIC
    this.scalingMethod = JMLib.SCALING_METHOD_CLASSIC;
this.setWindowSize(this.scaleImageWidth, this.scaleImageHeight);
}
};
JMLib.prototype.SCALING_METHOD_CLASSIC = JMLib.SCALING_METHOD_CLASSIC;
JMLib.prototype.SCALING_METHOD_DYNAMIC = JMLib.SCALING_METHOD_DYNAMIC;
JMLib.prototype.getImageWidth = function () {
  if (this.scalingMethod == JMLib.SCALING_METHOD_CLASSIC) return this.imageWidth;
  // SCALING_METHOD_DYNAMIC
  return this.scaleImageWidth;
};
JMLib.prototype.getImageHeight = function () {
  if (this.scalingMethod == JMLib.SCALING_METHOD_CLASSIC) return this.imageHeight;
  // SCALING_METHOD_DYNAMIC
  return this.scaleImageHeight;
};
JMLib.prototype.getBallRadius = function () {
var baseRadius = parseInt(11 * this.dpm / JMLib.DW);
if (this.scalingMethod == JMLib.SCALING_METHOD_CLASSIC) {
return baseRadius;
} else {
var zoomFactorY = this.scaleImageHeight / this.imageHeight;
zoomFactorY *= 0.9;
if (this.scaleImageWidth < this.scaleImageHeight) zoomFactorY *= this.scaleImageWidth / this.scaleImageHeight;
return parseInt(baseRadius * zoomFactorY); // convert return value to int
}
};
JMLib.prototype.doCoordTransform = function () {
var i;
var zoomFactorX = this.scaleImageWidth / this.imageWidth;
var zoomFactorY = this.scaleImageHeight / this.imageHeight;
// adjust for aspect ratio
if (this.scaleImageWidth < this.scaleImageHeight) zoomFactorY *= this.scaleImageWidth / this.scaleImageHeight;else zoomFactorX *= this.scaleImageHeight / this.scaleImageWidth;
zoomFactorX *= 1.1;
zoomFactorY *= 0.9;
// Adjust coordinates
// head
this.ap.hx -= parseInt(this.imageWidth / 2);
this.ap.hy -= parseInt(this.imageHeight / 2);
this.ap.hx = parseInt(this.ap.hx * zoomFactorX);
this.ap.hy = parseInt(this.ap.hy * zoomFactorY);
this.ap.hr = parseInt(this.ap.hr * zoomFactorY);
this.ap.hx += parseInt(this.scaleImageWidth / 2);
this.ap.hy += parseInt(this.scaleImageHeight / 2);
// juggler
for (i = 0; i < 6; i++) {
this.ap.rx[i] -= parseInt(this.imageWidth / 2);
this.ap.ry[i] -= parseInt(this.imageHeight / 2);
this.ap.rx[i] = parseInt(this.ap.rx[i] * zoomFactorX);
this.ap.ry[i] = parseInt(this.ap.ry[i] * zoomFactorY);
this.ap.rx[i] += parseInt(this.scaleImageWidth / 2);
this.ap.ry[i] += parseInt(this.scaleImageHeight / 2);
this.ap.lx[i] -= parseInt(this.imageWidth / 2);
this.ap.ly[i] -= parseInt(this.imageHeight / 2);
this.ap.lx[i] = parseInt(this.ap.lx[i] * zoomFactorX);
this.ap.ly[i] = parseInt(this.ap.ly[i] * zoomFactorY);
this.ap.lx[i] += parseInt(this.scaleImageWidth / 2);
this.ap.ly[i] += parseInt(this.scaleImageHeight / 2);
}
// hands
this.rhand.gx -= parseInt(this.imageWidth / 2);
this.rhand.gy -= parseInt(this.imageHeight / 2);
this.rhand.gx = parseInt(this.rhand.gx * zoomFactorX);
this.rhand.gy = parseInt(this.rhand.gy * zoomFactorY);
this.rhand.gx += parseInt(this.scaleImageWidth / 2);
this.rhand.gy += parseInt(this.scaleImageHeight / 2);
this.lhand.gx -= parseInt(this.imageWidth / 2);
this.lhand.gy -= parseInt(this.imageHeight / 2);
this.lhand.gx = parseInt(this.lhand.gx * zoomFactorX);
this.lhand.gy = parseInt(this.lhand.gy * zoomFactorY);
this.lhand.gx += parseInt(this.scaleImageWidth / 2);
this.lhand.gy += parseInt(this.scaleImageHeight / 2);
for (i = 0; i <= 9; i++) {
this.handpoly.rx[i] = parseInt(this.handpoly_ex.rx[i] * zoomFactorX);
this.handpoly.ry[i] = parseInt(this.handpoly_ex.ry[i] * zoomFactorY);
this.handpoly.lx[i] = parseInt(this.handpoly_ex.lx[i] * zoomFactorX);
this.handpoly.ly[i] = parseInt(this.handpoly_ex.ly[i] * zoomFactorY);
}
// balls
for (i = this.numBalls() - 1; i >= 0; i--) {
this.b[i].gx -= parseInt(this.imageWidth / 2);
this.b[i].gy -= parseInt(this.imageHeight / 2);
this.b[i].gx = parseInt(this.b[i].gx * zoomFactorX);
this.b[i].gy = parseInt(this.b[i].gy * zoomFactorY);
this.b[i].gx += parseInt(this.scaleImageWidth / 2);
this.b[i].gy += parseInt(this.scaleImageHeight / 2);
}
};
JMLib.prototype.setMirror = function (mir) {
// store current status and stop juggling
var oldStatus = this.status;
this.stopJuggle();
// set mirror
this.mirror = mir;
// recalculate pattern
  if (oldStatus != JMLib.ST_NONE) this.startJuggle();
// restore state
this.status = oldStatus;
};
JMLib.prototype.setPattern = function (name, site, hr, dr) {
if (site.length > JMLib.JML_MAX_SITELEN) {
this.error("Siteswap too long");
return false;
}
if (name.length > JMLib.JML_MAX_NAMELEN) {
this.error("Pattern name too long");
return false;
}
if (!validator.validateSite(site)) {
this.error("Invalid siteswap");
return false;
}
this.siteswap = site;
this.pattname = name;
this.height_ratio = hr;
this.dwell_ratio = dr;
// Turn off beep
this.beep = 0;
// Check ratios
if (this.height_ratio < JMLib.HR_MIN || this.height_ratio > JMLib.HR_MAX) this.height_ratio = JMLib.HR_DEF;
if (this.dwell_ratio < JMLib.DR_MIN || this.dwell_ratio > JMLib.DR_MAX) this.dwell_ratio = JMLib.DR_DEF;
// Set pattern
if ((this.jml_errno = this.set_patt(site)) == 0) {
this.doStepcalc();
return true;
} else {
switch (this.jml_errno) {
case 4:
this.error("Syntax error");
break;
case 5:
this.error("Invalid pattern");
break;
case 6:
this.error("Invalid character in pattern");
break;
case 7:
this.error("Synchronous number must be even");
break;
case 8:
this.error("Max 6 balls may be multiplexed");
break;
case 9:
this.error("Too many balls in pattern");
break;
case 10:
this.error("Pattern too long");
break;
case 13:
this.error("0 inside [] is invalid");
break;
default:
this.error("Unexpected error");
break;
}
return false;
}
};
JMLib.prototype.setStyle = function (name, length, data) {
if (data.length > JMLib.JML_MAX_STYLELEN) {
this.error("Style too large");
return false;
}
if (name.length > JMLib.JML_MAX_NAMELEN) {
this.error("Style name too long");
return false;
}
this.stylename = name;
this.styledata = data;
this.style_len = length;
return true;
};
//fixme: style data is passed as an array
// should perhaps add a style data class
JMLib.prototype.setStyleEx = function (name) {
var style;
if (name == "Normal") {
style = new Array(13, 0, 4, 0);
this.setStyle(name, 1, style);
} else if (name == "Reverse") {
style = new Array(4, 0, 13, 0);
this.setStyle(name, 1, style);
} else if (name == "Shower") {
style = new Array(5, 0, 10, 0, 10, 0, 5, 0);
this.setStyle(name, 2, style);
} else if (name == "Mills Mess") {
style = new Array(-1, 0, -12, 0, 0, 0, 12, 0, 1, 0, -12, 0);
this.setStyle(name, 3, style);
} else if (name == "Center") {
style = new Array(13, 0, 0, 0);
this.setStyle(name, 1, style);
} else if (name == "Windmill") {
style = new Array(10, 0, -8, 0, -8, 0, 10, 0);
this.setStyle(name, 2, style);
}
// placeholder for adding random style support here
else {
// anything else is interpreted as "Normal"
this.setStyleDefault();
}
return true;
};
JMLib.prototype.getStyles = function () {
return this.possible_styles;
};
JMLib.prototype.numStyles = function () {
return this.possible_styles.length;
};
JMLib.prototype.setPatternDefault = function () {
this.setPattern("3 Cascade", "3", JMLib.HR_DEF, JMLib.DR_DEF);
this.setStyleDefault();
};
JMLib.prototype.setStyleDefault = function () {
var defStyle = new Array(13, 0, 4, 0);
//this.setStyle("Normal", 1, defStyle);
this.stylename = "Normal";
this.styledata = defStyle;
this.style_len = 1;
};
JMLib.prototype.setHR = function (HR) {
if (HR > JMLib.HR_MAX) {
this.height_ratio = JMLib.HR_MAX;
} else if (HR < JMLib.HR_MIN) {
this.height_ratio = JMLib.HR_MIN;
} else {
this.height_ratio = HR;
}
};
JMLib.prototype.getHR = function () {
return this.height_ratio;
};
JMLib.prototype.setDR = function (DR) {
if (DR > JMLib.DR_MAX) {
this.dwell_ratio = JMLib.DR_MAX;
} else if (DR < JMLib.DR_MIN) {
this.dwell_ratio = JMLib.DR_MIN;
} else {
this.dwell_ratio = DR;
}
};
JMLib.prototype.getDR = function () {
return this.dwell_ratio;
};
JMLib.prototype.numBalls = function () {
return this.balln;
};
// Internal functions
JMLib.prototype.arm_line = function () {
var mx, my, sx, sy;
if (this.hand_on) {
// only bother calculating if hands are drawn
// Notes:
// * gx/gy may need to be replaced by gx0/gy0 if erasing old values
// the method used in the X11 version
// * JMWin uses 11*dpm/DW instead of 11, which causes incorrect
// hand placement for some reason.
this.ap.rx[0] = this.rhand.gx + 11 + this.arm_x;
this.ap.ry[0] = this.rhand.gy + 11 + this.arm_y;
this.ap.lx[0] = this.lhand.gx + 11 - this.arm_x;
this.ap.ly[0] = this.lhand.gy + 11 + this.arm_y;
sx = parseInt(this.dpm * JMLib.XR / this.kw0);
sy = this.base - parseInt(this.dpm / 3);
this.ap.rx[1] = parseInt((this.ap.rx[0] + (this.horCenter + sx) * 2) / 3 + this.dpm / 12);
this.ap.lx[1] = parseInt((this.ap.lx[0] + (this.horCenter - sx) * 2) / 3 - this.dpm / 12);
this.ap.ry[1] = parseInt((this.ap.ry[0] + sy) / 2 + this.dpm / 8);
this.ap.ly[1] = parseInt((this.ap.ly[0] + sy) / 2 + this.dpm / 8);
this.ap.rx[2] = parseInt((this.ap.rx[1] + (this.horCenter + sx) * 3) / 4);
this.ap.lx[2] = parseInt((this.ap.lx[1] + (this.horCenter - sx) * 3) / 4);
this.ap.ry[2] = parseInt((this.ap.ry[1] + sy * 2) / 3 - this.dpm / 25);
this.ap.ly[2] = parseInt((this.ap.ly[1] + sy * 2) / 3 - this.dpm / 25);
this.ap.rx[3] = parseInt((this.ap.rx[2] + (this.horCenter + sx) * 2) / 3 - this.dpm / 13);
this.ap.lx[3] = parseInt((this.ap.lx[2] + (this.horCenter - sx) * 2) / 3 + this.dpm / 13);
this.ap.ry[3] = parseInt((this.ap.ry[2] + sy * 2) / 3 - this.dpm / 40);
this.ap.ly[3] = parseInt((this.ap.ly[2] + sy * 2) / 3 - this.dpm / 40);
mx = (this.ap.rx[3] + this.ap.lx[3]) / 2;
my = (this.ap.ry[3] + this.ap.ly[3]) / 2;
this.ap.rx[4] = parseInt((mx * 2 + this.ap.rx[3]) / 3);
this.ap.lx[4] = parseInt((mx * 2 + this.ap.lx[3]) / 3);
this.ap.ry[4] = parseInt((my * 2 + this.ap.ry[3]) / 3);
this.ap.ly[4] = parseInt((my * 2 + this.ap.ly[3]) / 3);<|fim▁hole|> this.ap.hr = parseInt(this.dpm / 11);
this.ap.rx[5] = this.ap.hx + parseInt(this.dpm / 20);
this.ap.lx[5] = this.ap.hx - parseInt(this.dpm / 20);
this.ap.ry[5] = this.ap.hy + parseInt(this.dpm / 13);
this.ap.ly[5] = parseInt(this.ap.ry[5]);
}
};
JMLib.prototype.applyCorrections = function () {
// Correct ball coordinates
for (var i = this.balln - 1; i >= 0; i--) {
this.b[i].gx += this.bm1;
this.b[i].gy += this.bm1;
}
};
JMLib.prototype.hand_pos = function (c, h, x, z) {
//fixme: for testing only, remove
// x and z must be arrays with one element (pass-by-reference emulation)
//if (!(x instanceof Array)) document.write("hand_pos assert failure (x)<br>");
//if (!(z instanceof Array)) document.write("hand_pos assert failure (z)<br>");
var a;
if (this.mirror) {
if (!this.syn && h) c--;
if (c & 1) a = (--c + h) % this.style_len * 4 + 2;else a = (c + h) % this.style_len * 4;
} else {
if (!this.syn && !h) c--;
if (c & 1) a = (c - h) % this.style_len * 4 + 2;else a = (c + 1 - h) % this.style_len * 4;
}
if (h) x[0] = this.styledata[a];else x[0] = -this.styledata[a];
z[0] = this.styledata[a + 1];
};
JMLib.prototype.juggle = function ( /*Ball*/d) {
var tp;
var flag = 0;
var h, t;
var tpox = new Array(1);
tpox[0] = 0;
var rpox = new Array(1);
rpox[0] = 0;
var tpoz = new Array(1);
tpoz[0] = 0;
var rpoz = new Array(1);
rpoz[0] = 0;
var x = 0;
var y = 0;
// Save old values
d.gx1 = d.gx0;
d.gy1 = d.gy0;
d.gx0 = d.gx;
d.gy0 = d.gy;
if (d.c < 0) {
if (this.time_count >= -d.c * this.tw) d.c = -d.c;
}
while (true) {
tp = this.time_count - this.tw * JMLib.xabs(d.c);
if (tp < this.aw) break;
d.st &= ~JMLib.OBJECT_UNDER;
d.c0 = d.c;
if (d.st & JMLib.OBJECT_HAND) {
d.c += 2;
      flag = 1;
} else {
t = d.c;
if (this.syn) {
if (this.mirror && !d.chand) t++;else if (!this.mirror && d.chand) t++;
}
t %= this.pattw;
d.bh = this.patt[t][this.r[t]];
d.c += JMLib.xabs(d.bh);
if (++this.r[t] >= this.patts[t]) this.r[t] = 0;
d.thand = d.chand;
if (d.bh & 1 || d.bh < 0) d.chand = 1 - d.chand;
flag = 1;
}
}
if (d.c >= 0 && tp >= 0 && !(d.st & JMLib.OBJECT_UNDER)) {
d.st |= JMLib.OBJECT_UNDER;
if (d.st & JMLib.OBJECT_HAND) {
if (d.st & JMLib.OBJECT_MOVE2) {
d.st |= JMLib.OBJECT_MOVE;
d.st &= ~JMLib.OBJECT_MOVE2;
} else {
d.st &= ~JMLib.OBJECT_MOVE;
}
} else {
t = d.c;
if (this.syn) {
if (this.mirror && !d.chand) t++;else if (!this.mirror && d.chand) t++;
}
t %= this.pattw;
if (d.bh == 1) d.st |= JMLib.OBJECT_MOVE;else d.st &= ~JMLib.OBJECT_MOVE;
for (var i = 0; i < this.patts[t]; i++) {
h = this.patt[t][i];
if (h == 1) {
if (d.chand) this.lhand.st |= JMLib.OBJECT_MOVE2;else this.rhand.st |= JMLib.OBJECT_MOVE2;
}
if (h != 2) {
if (d.chand) this.rhand.st |= JMLib.OBJECT_MOVE2;else this.lhand.st |= JMLib.OBJECT_MOVE2;
d.st |= JMLib.OBJECT_MOVE;
}
}
}
}
if (!(d.st & JMLib.OBJECT_MOVE)) {
if (d.c < 0) {
//opera.postError("BEFORE hand_pos(" + -d.c + ", " + d.chand + ", " + tpox[0] + ", " + tpoz[0] + ")");
this.hand_pos(-d.c, d.chand, tpox, tpoz);
//opera.postError("AFTER hand_pos(" + -d.c + ", " + d.chand + ", " + tpox[0] + ", " + tpoz[0] + ")");
rpox[0] = tpox[0];
rpoz[0] = tpoz[0];
} else {
if (d.st & JMLib.OBJECT_UNDER) {
//opera.postError("BEFORE hand_pos(" + d.c + ", " + d.chand + ", " + tpox[0] + ", " + tpoz[0] + ")");
this.hand_pos(d.c, d.chand, tpox, tpoz);
//opera.postError("AFTER hand_pos(" + d.c + ", " + d.chand + ", " + tpox[0] + ", " + tpoz[0] + ")");
//opera.postError("BEFORE hand_pos(" + (d.c + 2) + ", " + d.chand + ", " + rpox[0] + ", " + rpoz[0] + ")");
this.hand_pos(d.c + 2, d.chand, rpox, rpoz);
//opera.postError("AFTER hand_pos(" + (d.c + 2) + ", " + d.chand + ", " + rpox[0] + ", " + rpoz[0] + ")");
if (tpox[0] != rpox[0] || tpoz[0] != rpoz[0]) {
this.hand_pos(d.c + 1, d.chand, rpox, rpoz);
if (tpox[0] != rpox[0] || tpoz[0] != rpoz[0]) d.st |= JMLib.OBJECT_MOVE;
}
} else {
this.hand_pos(d.c - 2, d.chand, tpox, tpoz);
this.hand_pos(d.c, d.chand, rpox, rpoz);
if (tpox[0] != rpox[0] || tpoz[0] != rpoz[0]) {
this.hand_pos(d.c - 1, d.chand, tpox, tpoz);
if (tpox[0] != rpox[0] || tpoz[0] != rpoz[0]) d.st |= JMLib.OBJECT_MOVE;
}
}
}
}
if (d.st & JMLib.OBJECT_MOVE) {
if (d.bh == 1) {
this.hand_pos(d.c0 + 1, d.thand, tpox, tpoz);
this.hand_pos(d.c + 1, d.chand, rpox, rpoz);
} else if (d.st & JMLib.OBJECT_UNDER) {
this.hand_pos(d.c, d.chand, tpox, tpoz);
this.hand_pos(d.c + 1, d.chand, rpox, rpoz);
} else {
this.hand_pos(d.c0 + 1, d.thand, tpox, tpoz);
this.hand_pos(d.c, d.chand, rpox, rpoz);
}
}
if (this.fpu) {
var fx;
if (!(d.st & JMLib.OBJECT_HAND) && d.c < 0) {
if (tpox[0] == 0) {
fx = 0;
y = parseInt(tpoz[0] * this.dpm / 20 - tp * this.dpm / 12 / this.tw);
//opera.postError("x=" + x + ", y=" + y);
//opera.postError(y, tpoz[0], this.dpm, tp, this.dpm, this.tw);
} else {
if (tpox[0] > 0) fx = tpox[0] / 10 - tp / 6 / this.tw;else fx = tpox[0] / 10 + tp / 6 / this.tw;
y = parseInt(tpoz[0] * this.dpm / 20);
}
} else if (!(d.st & JMLib.OBJECT_MOVE)) {
fx = tpox[0] / 10;
y = tpoz[0] * this.dpm / 20;
} else {
if (d.bh == 1) {
fx = (tp - this.aw) / this.tw * 2 + 1;
y = parseInt(this.high[1] * (1 - JMLib.jijo(fx)));
} else if (d.st & JMLib.OBJECT_UNDER) {
fx = tp / this.aw * 2 - 1;
y = parseInt(this.high[0] * (1 - JMLib.jijo(fx)));
} else {
fx = tp / (this.tw * JMLib.xabs(d.bh) - this.aw) * 2 + 1;
y = parseInt(this.high[JMLib.xabs(d.bh)] * (1 - JMLib.jijo(fx)));
}
y += parseInt((fx * (rpoz[0] - tpoz[0]) + rpoz[0] + tpoz[0]) * this.dpm / 40);
d.t = fx; // spin
fx = (fx * (rpox[0] - tpox[0]) + rpox[0] + tpox[0]) / 20;
}
x = parseInt(fx * this.dpm * JMLib.KW);
} else {
if (!(d.st & JMLib.OBJECT_HAND) && d.c < 0) {
if (tpox[0] == 0) {
x = 0;
y = parseInt(tpoz[0] * this.dpm / 20 - tp * this.dpm / 12 / this.tw);
} else {
if (tpox[0] > 0) x = parseInt(JMLib.XR * tpox[0] / 10 - JMLib.XR * tp / 6 / this.tw);else x = parseInt(JMLib.XR * tpox[0] / 10 + JMLib.XR * tp / 6 / this.tw);
y = parseInt(tpoz[0] * this.dpm / 20);
}
} else if (!(d.st & JMLib.OBJECT_MOVE)) {
x = parseInt(JMLib.XR * tpox[0] / 10);
y = parseInt(tpoz[0] * this.dpm / 20);
} else {
if (d.bh == 1) {
x = parseInt(JMLib.XR * (tp - this.aw) * 2 / this.tw + JMLib.XR);
y = parseInt((JMLib.jijo(JMLib.XR) - JMLib.jijo(x)) / this.high0[1]);
} else if (d.st & JMLib.OBJECT_UNDER) {
x = parseInt(JMLib.XR * tp * 2 / this.aw - JMLib.XR);
y = parseInt((JMLib.jijo(JMLib.XR) - JMLib.jijo(x)) / this.high0[0]);
} else {
x = parseInt(JMLib.XR * tp * 2 / (this.tw * JMLib.xabs(d.bh) - this.aw) + JMLib.XR);
y = parseInt((JMLib.jijo(JMLib.XR) - JMLib.jijo(x)) / this.high0[JMLib.xabs(d.bh)]);
}
y += parseInt((x * (rpoz[0] - tpoz[0]) + JMLib.XR * (rpoz[0] + tpoz[0])) * this.dpm / JMLib.XR / 40);
x = parseInt((x * (rpox[0] - tpox[0]) + JMLib.XR * (rpox[0] + tpox[0])) / 20);
}
x = parseInt(x * this.dpm / this.kw0);
}
// NOTE:
// * The alternative calulations of d->gx and gy below are
// from JMWin. They cause the entire pattern to be skewed.
//opera.postError(d);
d.gx = this.horCenter + x - 11;
//d->gx=horCenter + x - 11 * dpm / DW;
//opera.postError(d);
//opera.postError("hand_x=" + this.hand_x + " hand_y=" + this.hand_y);
if (d.st & JMLib.OBJECT_HAND) {
if (d.chand) d.gx += this.hand_x;else d.gx -= this.hand_x;
y -= this.hand_y;
}
d.gy = this.base - y - 11;
//d->gy = base - y - 11 * dpm / DW;
return flag;
};
JMLib.prototype.set_ini = function (rr) {
var i, j;
var tw0;
var aw0;
this.balln = 0;
this.pmax = 0;
if (this.pattw > JMLib.LMAX) return 10;
if (this.pattw == 0) return 1;
for (i = 0; i < this.pattw; i++) {
for (j = 0; j < this.patts[i]; j++) {
this.balln += JMLib.xabs(this.patt[i][j]);
this.pmax = JMLib.max(this.pmax, JMLib.xabs(this.patt[i][j]));
}
}
if (this.balln % this.pattw) return 5;
this.balln = parseInt(this.balln / this.pattw);
if (this.balln == 0) return 9;
if (this.balln > JMLib.BMAX) return 9;
for (i = 0; i < JMLib.LMAX * 2; i++) {
this.r[i] = 0;
  }
  for (i = 0; i <= this.balln; i++) {
j = 0;
while (this.r[j] == this.patts[j % this.pattw] && j < this.pattw + this.pmax) {
j++;
    }
    if (i == this.balln) {
if (j == this.pattw + this.pmax) break;else return 5;
}
this.b[i].st = 0;
if (this.mirror) {
if ((j + this.syn) % 2) {
this.b[i].thand = 1;
this.b[i].chand = 1;
} else {
this.b[i].thand = 0;
this.b[i].chand = 0;
}
} else {
if ((j + this.syn) % 2) {
this.b[i].thand = 0;
this.b[i].chand = 0;
} else {
this.b[i].thand = 1;
this.b[i].chand = 1;
}
}
if (this.syn) this.b[i].c = -parseInt(j / 2) * 2;else this.b[i].c = -j;
while (j < this.pattw + this.pmax) {
if (this.r[j] == this.patts[j % this.pattw]) return 5;else this.r[j]++;
var k = this.patt[j % this.pattw][this.patts[j % this.pattw] - this.r[j]];
if (this.syn && k < 0) {
if (j % 2 == 0) j += -k + 1;else j += -k - 1;
} else {
j += k;
}
}
}
if (rr == 0) return 0;
if (this.pmax < 3) this.pmax = 3;
tw0 = Math.sqrt(2 / this.ga * this.pmax * this.height_ratio) * 2 / (this.pmax - this.dwell_ratio * 2) * this.smode / this.cSpeed;
this.tw = parseInt(this.fadd(tw0, 0, 0));
if (this.tw == 0) return 15;
aw0 = tw0 * this.dwell_ratio * 2;
this.aw = parseInt(this.fadd(aw0, 0, 0));
if (this.aw < 1) this.aw = 1;
if (this.aw > this.tw * 2 - 1) this.aw = this.tw * 2 - 1;
this.kw0 = parseInt(JMLib.XR / JMLib.KW);
if (this.fpu) {
this.high[0] = -0.2 * this.dpm;
this.high[1] = this.ga * JMLib.jijo(tw0 / this.smode * this.cSpeed) / 8 * this.dpm;
for (i = 2; i <= this.pmax; i++) {
this.high[i] = this.ga * JMLib.jijo((tw0 * i - aw0) / this.smode * this.cSpeed) / 8 * this.dpm;
}
} else {
this.high0[0] = parseInt(-JMLib.jijo(JMLib.XR) / 0.2 * this.dpm);
this.high0[1] = parseInt(JMLib.jijo(JMLib.XR) / (this.ga * JMLib.jijo(tw0 / this.smode * this.cSpeed) / 8 * this.dpm));
for (i = 2; i <= this.pmax; i++) {
this.high0[i] = parseInt(JMLib.jijo(JMLib.XR) / (this.ga * JMLib.jijo((tw0 * i - aw0) / this.smode * this.cSpeed) / 8 * this.dpm));
}
}
for (i = 0; i < this.balln; i++) {
this.b[i].bh = 0;
this.b[i].gx = this.horCenter;
this.b[i].gy = this.verCenter;
this.b[i].gx0 = this.horCenter;
this.b[i].gy0 = this.verCenter;
this.b[i].gx1 = this.horCenter;
this.b[i].gy1 = this.verCenter;
}
if (this.mirror) {
this.lhand.c = 0;
if (this.syn) this.rhand.c = 0;else this.rhand.c = -1;
} else {
this.rhand.c = 0;
if (this.syn) this.lhand.c = 0;else this.lhand.c = -1;
}
this.rhand.bh = 2;
this.rhand.st = JMLib.OBJECT_HAND;
this.rhand.thand = 1;
this.rhand.chand = 1;
this.rhand.gx = this.horCenter;
this.rhand.gy = this.verCenter;
this.rhand.gx0 = this.horCenter;
this.rhand.gy0 = this.verCenter;
this.rhand.gx1 = this.horCenter;
this.rhand.gy1 = this.verCenter;
this.lhand.bh = 2;
this.lhand.st = JMLib.OBJECT_HAND;
this.lhand.thand = 0;
this.lhand.chand = 0;
this.lhand.gx = this.horCenter;
this.lhand.gy = this.verCenter;
this.lhand.gx0 = this.horCenter;
this.lhand.gy0 = this.verCenter;
this.lhand.gx1 = this.horCenter;
this.lhand.gy1 = this.verCenter;
for (i = 0; i < this.pattw; i++) {
this.r[i] = 0;
  }
  return 0;
};
JMLib.prototype.set_dpm = function () {
var cSpeed0;
cSpeed0 = this.cSpeed;
this.cSpeed = 2.0;
this.base = 0;
this.dpm = 400;
this.gy_max = 80 - 11;
this.gy_min = -200 - 11;
this.gx_max = -1000;
this.gx_min = 1000;
if (this.set_ini(1) == 0) {
for (this.time_count = 0; this.time_count < this.tw * (this.pattw + this.pmax + this.style_len); this.time_count++) {
for (var i = 0; i < this.balln; i++) {
this.juggle(this.b[i]);
this.gy_max = JMLib.max(this.gy_max, this.b[i].gy);
this.gy_min = JMLib.min(this.gy_min, this.b[i].gy);
        this.gx_max = JMLib.max(this.gx_max, this.b[i].gx + 2 * 11 * this.dpm / JMLib.DW); // changed from X11 version
this.gx_min = JMLib.min(this.gx_min, this.b[i].gx);
}
this.juggle(this.rhand);
this.juggle(this.lhand);
this.gy_max = JMLib.max(this.gy_max, this.rhand.gy);
this.gy_min = JMLib.min(this.gy_min, this.rhand.gy);
this.gy_max = JMLib.max(this.gy_max, this.lhand.gy);
this.gy_min = JMLib.min(this.gy_min, this.lhand.gy);
this.gx_max = JMLib.max(this.gx_max, this.rhand.gx);
this.gx_min = JMLib.min(this.gx_min, this.rhand.gx);
this.gx_max = JMLib.max(this.gx_max, this.lhand.gx);
this.gx_min = JMLib.min(this.gx_min, this.lhand.gx);
this.arm_x = parseInt((22 - 11) * this.dpm / JMLib.DW);
this.arm_y = parseInt((16 - 11) * this.dpm / JMLib.DW);
// from JMWin:
//ap.rx[0]=rhand.gx +11*dpm/DW+arm_x;
//ap.ry[0]=rhand.gy +11*dpm/DW+arm_y;
//ap.lx[0]=lhand.gx +11*dpm/DW-arm_x;
//ap.ly[0]=lhand.gy +11*dpm/DW+arm_y;
//
this.arm_line();
for (i = 0; i < 5; i++) {
this.gx_max = JMLib.max(this.gx_max, this.ap.rx[i]);
this.gx_max = JMLib.max(this.gx_max, this.ap.lx[i]);
this.gx_min = JMLib.min(this.gx_min, this.ap.rx[i]);
this.gx_min = JMLib.min(this.gx_min, this.ap.lx[i]);
}
}
}
if (this.gy_max - this.gy_min > 0) {
// special handling for smaller screens
if (this.imageWidth <= 320) {
if (this.imageWidth > 160) {
// 160-320 width
this.dpm = parseInt(this.imageHeight * 280 / (this.gy_max - this.gy_min));
this.base = parseInt(this.imageHeight - this.gy_max * this.dpm / this.imageHeight - 5);
} else {
// 0-160 width
this.dpm = parseInt(this.imageHeight * 280 / (this.gy_max - this.gy_min));
this.base = parseInt(this.imageHeight - this.imageHeight / 4);
}
} else {
this.dpm = parseInt(400.0 * (this.imageHeight - 30 * 2) / (this.gy_max - this.gy_min));
this.base = parseInt(this.imageHeight - 30 - this.gy_max * this.dpm / 400);
}
this.gx_min = parseInt(this.horCenter - (this.horCenter - this.gx_min) * this.dpm / 400);
this.gx_max = parseInt(this.horCenter - (this.horCenter - this.gx_max) * this.dpm / 400);
// original version
//dpm=(JML_INT32)(400.0*340/(gy_max-gy_min));
//if(dpm>DW) dpm=DW;
//base=370-(JML_INT32)( (JML_FLOAT)gy_max*dpm/400 );
}
this.cSpeed = cSpeed0;
};
JMLib.prototype.set_patt = function (s) {
var flag = 0;
var flag2 = 0;
var a = 0;
var pos = 0;
if (s.length > JMLib.LMAX) return 10;
this.pattw = 0;
this.balln = 0;
if (s.charAt(0) == '(') this.syn = 1;else this.syn = 0;
for (var i = 0; i < JMLib.LMAX; i++) {
if (i >= s.length) {
if (flag != 0 || flag2 != 0) return 4;
break;
}
if (s.charAt(pos) == '[') {
flag2 = 1;
this.patts[this.pattw] = 0;
pos++;
continue;
}
if (s.charAt(pos) == ']') {
if (flag2 == 0) return 4;
flag2 = 0;
this.pattw++;
pos++;
continue;
}
if (this.syn == 1) {
switch (s.charAt(pos)) {
case '(':
if (flag != 0) return 4;
flag = 1;
pos++;
continue;
case ')':
if (flag != 5) return 4;
flag = 0;
pos++;
continue;
case ',':
if (flag != 2) return 4;
flag = 4;
pos++;
continue;
case 'X':
case 'x':
if (flag != 2 && flag != 5) return 4;
if (flag2) this.patt[this.pattw][this.patts[this.pattw] - 1] = -a;else this.patt[this.pattw - 1][0] = -a;
pos++;
continue;
}
}
a = this.ctod(s.charAt(pos));
if (a == -1) return 6;
if (this.syn) {
if (a % 2) return 7;
if (flag2 == 0 && flag != 1 && flag != 4) return 4;
if (flag == 1) flag = 2;
if (flag == 4) flag = 5;
}
if (flag2) {
if (a == 0) return 13;
this.patt[this.pattw][this.patts[this.pattw]++] = a;
if (this.patts[this.pattw] > JMLib.MMAX) return 8;
} else {
this.patt[this.pattw][0] = a;
if (a == 0) this.patts[this.pattw++] = 0;else this.patts[this.pattw++] = 1;
}
pos++;
this.balln += a;
}
if (this.pattw == 0) return 9;
this.balln = parseInt(this.balln / this.pattw);
if (this.balln > JMLib.BMAX) return 9;
return 0;
};
// fixme: this function won't work
// use similar function from validator instead
/*
JMLib.prototype.ctod = function(c) {
if (c >= '0' && c <= '9') return c - '0';
else if (c >= 'a' && c <= 'z') return c - 'a' + 10;
else if (c >= 'A' && c <= 'Z') return c - 'A' + 10;
return -1;
}*/
JMLib.prototype.ctod = function (s) {
var str_0 = new String("0");
var str_a = new String("a");
var str_A = new String("A");
if (s >= '0' && s <= '9') return s.charCodeAt(0) - str_0.charCodeAt(0);else if (s >= 'a' && s <= 'z') return s.charCodeAt(0) - str_a.charCodeAt(0) + 10;else if (s >= 'A' && s <= 'Z') return s.charCodeAt(0) - str_A.charCodeAt(0) + 10;else return -1;
};
JMLib.prototype.startJuggle = function () {
this.set_dpm();
if (this.set_ini(1) != 0) return;
this.xbitset();
// apply corrections
this.bm1 = 11 - parseInt(11 * this.dpm / JMLib.DW);
this.time_count = 0;
this.time_period = 0;
this.status = JMLib.ST_JUGGLE;
};
JMLib.prototype.stopJuggle = function () {
this.status = JMLib.ST_NONE;
};
JMLib.prototype.togglePause = function () {
if (this.status == JMLib.ST_JUGGLE) this.status = JMLib.ST_PAUSE;else if (this.status == JMLib.ST_PAUSE) this.status = JMLib.ST_JUGGLE;
};
JMLib.prototype.setPause = function (pauseOn) {
if (this.status != JMLib.ST_NONE) {
if (pauseOn) this.status = JMLib.ST_PAUSE;else this.status = JMLib.ST_JUGGLE;
}
};
JMLib.prototype.getStatus = function () {
return this.status;
};
// adds .5 scaled by 10^k so the caller's parseInt rounds to nearest
JMLib.prototype.fadd = function (x, k, t) {
return ((x + t) * JMLib.xpow(10, k) + .5) / JMLib.xpow(10, k);
};
JMLib.prototype.speedUp = function () {
this.cSpeed = JMLib.SPEED_MAX;
this.set_ini(0);
};
JMLib.prototype.speedDown = function () {
this.cSpeed = JMLib.SPEED_MIN;
this.set_ini(0);
};
JMLib.prototype.speedReset = function () {
this.cSpeed = JMLib.SPEED_DEF;
this.set_ini(0);
};
JMLib.prototype.setSpeed = function (s) {
this.cSpeed = s;
this.set_ini(0);
};
JMLib.prototype.speed = function () {
return this.cSpeed;
};
JMLib.prototype.doJuggle = function () {
var i,
tone = 0;
if (this.status == JMLib.ST_PAUSE || this.status == JMLib.ST_NONE) {
return 0;
}
this.time_count++;
if (this.time_count < this.aw) this.time_count = this.aw;
this.time_period = parseInt((this.time_count - this.aw) / this.tw);
this.time_period %= this.pattw;
//this.time_period = time_count % pattw;
for (i = 0; i < this.balln; i++) {
if (this.juggle(this.b[i]) && this.beep) tone = JMLib.max(tone, JMLib.xabs(this.b[i].bh));
  }
  if (this.juggle(this.rhand) + this.juggle(this.lhand) > 0) {
//if (back_on==1) patt_print(1);
}
this.arm_line();
this.applyCorrections();
if (this.scalingMethod == JMLib.SCALING_METHOD_DYNAMIC) this.doCoordTransform();
return tone;
};
JMLib.prototype.xbitset = function () {
var i,
j = 0;
// data is used to create the hand bitmaps
var data = [0, 18, 0, 23, 17, 23, 20, 22, 22, 20, 23, 17, 23, 12, 18, 12, 18, 16, 16, 18, 0, 18, 12, 15, 23, 17, 99, 99];
// initialize the data array.
for (i = 0; data[i] < 99; i++) {
data[i] = parseInt((data[i] - 11) * this.dpm / JMLib.DW);
} // apply hand placement offsets
this.hand_x = data[i - 4] + 2;
this.hand_y = data[i - 3] + 2;
this.arm_x = data[i - 2];
this.arm_y = data[i - 1];
// calculate hand polygons
for (i = 0; data[i + 6] < 99; i += 2) {
if (j > 9) break;
this.handpoly_ex.rx[j] = 11 + data[i];
this.handpoly_ex.ry[j] = 10 + data[i + 1];
this.handpoly_ex.lx[j] = 12 - data[i];
this.handpoly_ex.ly[j] = 10 + data[i + 1];
j++;
}
for (i = 0; i <= 9; i++) {
this.handpoly.rx[i] = this.handpoly_ex.rx[i];
this.handpoly.ry[i] = this.handpoly_ex.ry[i];
this.handpoly.lx[i] = this.handpoly_ex.lx[i];
this.handpoly.ly[i] = this.handpoly_ex.ly[i];
}
};
JMLib.prototype.doStepcalc = function () {
var i;
var stp = 0; // position in steps array
var pos = 0; // position in string
// reset
for (i = 0; i < JMLib.LMAX; i++) {
this.steps[i] = -1;
}
// Synchronous pattern
if (this.syn) {
while (pos <= this.siteswap.length) {
if (this.siteswap.charAt(pos) == '(') {
this.steps[stp] = pos;
stp += 2;
while (this.siteswap.charAt(pos) != ')') {
pos++;
}
pos++;
} else if (pos == this.siteswap.length) {
this.steps[stp] = pos;
break;
} else {
this.error("Internal error");
return;
}
}
} else {
while (pos <= this.siteswap.length) {
if (this.siteswap.charAt(pos) == '(') {
this.error("Internal error");
return;
}
// Multiplex
else if (this.siteswap.charAt(pos) == '[') {
this.steps[stp++] = pos;
while (this.siteswap.charAt(pos) != ']') {
pos++;
}
pos++;
}
// Normal throw
else {
this.steps[stp++] = pos++;
}
}
}
};
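// Minimal usage sketch (render loop and drawing are the embedder's job;
// only the JMLib API defined above is used):
//   var jm = new JMLib(function (msg) { console.log(msg); });
//   jm.setWindowSize(480, 400);
//   jm.setPattern("5 Cascade", "5", JMLib.HR_DEF, JMLib.DR_DEF);
//   jm.setStyleEx("Normal");
//   jm.startJuggle();
//   setInterval(function () {
//     jm.doJuggle(); // advance one animation step
//     // jm.b[i].gx / jm.b[i].gy now hold ball screen coordinates
//   }, 1000 / 60);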
module.exports = JMLib;<|fim▁end|> |
this.ap.hx = parseInt(mx);
this.ap.hy = parseInt((my * 2 - this.dpm * 2 / 3 + this.base) / 3); |
<|file_name|>ForestTrustInformation.py<|end_file_name|><|fim▁begin|># encoding: utf-8
# module samba.dcerpc.lsa
# from /usr/lib/python2.7/dist-packages/samba/dcerpc/lsa.so
# by generator 1.135
""" lsa DCE/RPC """
# imports
import dcerpc as __dcerpc
import talloc as __talloc
class ForestTrustInformation(__talloc.Object):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __ndr_pack__(self, *args, **kwargs): # real signature unknown
"""
S.ndr_pack(object) -> blob
NDR pack
"""
pass
def __ndr_print__(self, *args, **kwargs): # real signature unknown
"""
S.ndr_print(object) -> None
NDR print
"""
pass
def __ndr_unpack__(self, *args, **kwargs): # real signature unknown
"""
S.ndr_unpack(class, blob, allow_remaining=False) -> None
NDR unpack
"""
pass
@staticmethod # known case of __new__
def __new__(S, *more): # real signature unknown; restored from __doc__
""" T.__new__(S, ...) -> a new object with type S, a subtype of T """
pass
<|fim▁hole|><|fim▁end|> | count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
entries = property(lambda self: object(), lambda self, v: None, lambda self: None) # default |
<|file_name|>0135_auto_20160824_0348.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-08-24 03:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('crowdsourcing', '0134_userprofile_purpose_of_use'),
]
operations = [
migrations.AddField(
model_name='rawratingfeedback',
name='is_excluded',
field=models.BooleanField(default=False),
),
migrations.AlterIndexTogether(
name='rawratingfeedback',
index_together=set([('requester', 'worker', 'task', 'is_excluded')]),
),<|fim▁hole|> ]<|fim▁end|> | |
<|file_name|>load_timeline_for_day_and_user.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import *
import json
import bson.json_util as bju
import emission.core.get_database as edb
import argparse
import emission.core.wrapper.user as ecwu
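# Invocation sketch (path and email illustrative):
#   python load_timeline_for_day_and_user.py /tmp/timeline.json someone@example.com -v 100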
if __name__ == '__main__':<|fim▁hole|> parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("user_email",
help="specify the user email to load the data as")
parser.add_argument("-r", "--retain", action="store_true",
help="specify whether the entries should overwrite existing ones (default) or create new ones")
parser.add_argument("-v", "--verbose", type=int,
help="after how many lines we should print a status message.")
args = parser.parse_args()
fn = args.timeline_filename
print(fn)
print("Loading file " + fn)
tsdb = edb.get_timeseries_db()
user = ecwu.User.register(args.user_email)
override_uuid = user.uuid
print("After registration, %s -> %s" % (args.user_email, override_uuid))
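    # bju.object_hook restores BSON types (ObjectId, datetime, UUID) from the JSON dump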
entries = json.load(open(fn), object_hook = bju.object_hook)
for i, entry in enumerate(entries):
entry["user_id"] = override_uuid
if not args.retain:
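            # drop the original _id so MongoDB inserts a new document instead of overwriting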
del entry["_id"]
if args.verbose is not None and i % args.verbose == 0:
print("About to save %s" % entry)
tsdb.save(entry)<|fim▁end|> | |
<|file_name|>package.js<|end_file_name|><|fim▁begin|>Package.describe("Telescope BKX theme");<|fim▁hole|>
// api.use([
// 'jquery',
// 'underscore',
// 'templating'
// ], 'client');
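  // the theme ships only a stylesheet, served to the client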
api.add_files([
'lib/client/stylesheets/screen.css',
], ['client']);
});<|fim▁end|> |
Package.on_use(function (api) {
// api.use(['telescope-lib'], ['client', 'server']); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | import backend # hopefully fixes issues with Celery finding tasks? |
<|file_name|>bitcoin_pt_BR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="pt_BR" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Evcoin</source>
<translation>Sobre o Evcoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>Evcoin</b> version</source>
<translation>Versão do <b>Evcoin</b></translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>⏎
Este é um software experimental.⏎
⏎
Distribuido sob a licença de software MIT/X11, veja o arquivo anexo COPYING ou http://www.opensource.org/licenses/mit-license.php.⏎
⏎
Este produto inclui software desenvolvido pelo Projeto OpenSSL para uso no OpenSSL Toolkit (http://www.openssl.org/), software de criptografia escrito por Eric Young ([email protected]) e sofware UPnP escrito por Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>Copyright</translation>
</message>
<message>
<location line="+0"/>
<source>The Evcoin developers</source>
<translation>Desenvolvedores do Evcoin</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Catálogo de endereços</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Clique duas vezes para editar o endereço ou o etiqueta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Criar um novo endereço</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copie o endereço selecionado para a área de transferência do sistema</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Novo endereço</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Evcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Estes são os seus endereços Evcoin para receber pagamentos. Você pode querer enviar um endereço diferente para cada remetente, para acompanhar quem está pagando.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Copiar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Mostrar &QR Code</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Evcoin address</source>
<translation>Assine uma mensagem para provar que você é dono de um endereço Evcoin</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>&Assinar Mensagem</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Excluir os endereços selecionados da lista</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar os dados na aba atual para um arquivo</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Evcoin address</source>
<translation>Verificar mensagem para se assegurar que ela foi assinada pelo dono de um endereço Evcoin específico.</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Excluir</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Evcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
        <translation>Estes são os seus endereços Evcoin para enviar pagamentos. Sempre verifique a quantidade e o endereço de recebimento antes de enviar moedas.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Copiar &Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Enviar bit&coins</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Exportar Catálogo de Endereços</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Arquivo separado por vírgulas (*. csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Erro ao exportar</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Não foi possível gravar no arquivo %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Rótulo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(Sem rótulo)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Janela da Frase de Segurança</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Digite a frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nova frase de segurança</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repita a nova frase de segurança</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
        <translation>Digite a nova frase de segurança da sua carteira. <br/> Por favor, use uma frase de <b>10 ou mais caracteres aleatórios,</b> ou <b>oito ou mais palavras.</b></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Criptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para desbloquear a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desbloquear carteira</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operação precisa de sua frase de segurança para descriptografar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Descriptografar carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Alterar frase de segurança</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Digite a frase de segurança antiga e nova para a carteira.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirmar criptografia da carteira</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR LITECOINS</b>!</source>
<translation>Aviso: Se você criptografar sua carteira e perder sua senha, você vai <b>perder todos os seus LITECOINS!</b></translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Tem certeza de que deseja criptografar sua carteira?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: Qualquer backup prévio que você tenha feito do seu arquivo wallet deve ser substituído pelo novo e encriptado arquivo wallet gerado. Por razões de segurança, qualquer backup do arquivo wallet não criptografado se tornará inútil assim que você começar a usar uma nova carteira criptografada.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Cuidado: A tecla Caps Lock está ligada!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Carteira criptografada</translation>
</message>
<message>
<location line="-56"/>
<source>Evcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your evcoins from being stolen by malware infecting your computer.</source>
<translation>O Evcoin irá fechar agora para finalizar o processo de encriptação. Lembre-se de que encriptar sua carteira não protege totalmente suas evcoins de serem roubadas por malwares que tenham infectado o seu computador.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>A criptografia da carteira falhou</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>A criptografia da carteira falhou devido a um erro interno. Sua carteira não estava criptografada.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>A frase de segurança fornecida não confere.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>A abertura da carteira falhou</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>A frase de segurança digitada para a descriptografia da carteira estava incorreta.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>A descriptografia da carteira falhou</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>A frase de segurança da carteira foi alterada com êxito.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>&Assinar Mensagem...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sincronizando com a rede...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Visão geral</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Mostrar visão geral da carteira</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transações</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Navegar pelo histórico de transações</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Editar a lista de endereços e rótulos</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Mostrar a lista de endereços para receber pagamentos</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>S&air</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Sair da aplicação</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Evcoin</source>
<translation>Mostrar informação sobre Evcoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Sobre &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Mostrar informações sobre o Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opções...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Criptografar Carteira...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup Carteira...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Mudar frase de segurança...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Importando blocos do disco...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Reindexando blocos no disco...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Evcoin address</source>
<translation>Enviar moedas para um endereço evcoin</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Evcoin</source>
<translation>Modificar opções de configuração para evcoin</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Fazer cópia de segurança da carteira para uma outra localização</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Mudar a frase de segurança utilizada na criptografia da carteira</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>Janela de &Depuração</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Abrir console de depuração e diagnóstico</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Verificar mensagem...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Evcoin</source>
<translation>Evcoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&Enviar</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&Receber</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&Endereços</translation>
</message>
<message>
<location line="+22"/>
<source>&About Evcoin</source>
<translation>&Sobre o Evcoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Exibir/Ocultar</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Mostrar ou esconder a Janela Principal.</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Criptografar as chaves privadas que pertencem à sua carteira</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Evcoin addresses to prove you own them</source>
<translation>Assine mensagems com seus endereços Evcoin para provar que você é dono deles</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Evcoin addresses</source>
<translation>Verificar mensagens para se assegurar que elas foram assinadas pelo dono de Endereços Evcoin específicos</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Arquivo</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Configurações</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Ajuda</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Barra de ferramentas</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Evcoin client</source>
<translation>Cliente Evcoin</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Evcoin network</source>
<translation><numerusform>%n conexão ativa na rede Evcoin</numerusform><numerusform>%n conexões ativas na rede Evcoin</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>Processado %1 de %2 blocos (estimado) de histórico de transações.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Processado %1 blocos do histórico de transações.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n hora</numerusform><numerusform>%n horas</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n dia</numerusform><numerusform>%n dias</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n semana</numerusform><numerusform>%n semanas</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>%1 atrás</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>Último bloco recebido foi gerado %1 atrás.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Transações após isso ainda não estão visíveis.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Erro</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Informação</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
        <translation>Esta transação está acima do tamanho limite. Você ainda pode enviá-la com uma taxa de %1, que vai para os nós que processam sua transação e ajuda a manter a rede. Você quer pagar a taxa?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Atualizado</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Recuperando o atraso ...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Confirmar taxa de transação</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Transação enviada</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Transação recebida</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Data: %1
Quantidade: %2
Tipo: %3
Endereço: %4</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>Manipulação de URI</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Evcoin address or malformed URI parameters.</source>
<translation>URI não pode ser decodificado! Isso pode ter sido causado por um endereço Evcoin inválido ou por parâmetros URI malformados.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>desbloqueada</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Carteira está <b>criptografada</b> e atualmente <b>bloqueada</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Evcoin can no longer continue safely and will quit.</source>
<translation>Um erro fatal ocorreu. Evcoin não pode continuar em segurança e irá fechar.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Alerta da Rede</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editar Endereço</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>A etiqueta associada a esse endereço do catálogo</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Endereço</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>O endereço associado à essa entrada do seu catálogo de endereços. Isso só pode ser modificado para endereço de envio.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Novo endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Novo endereço de envio</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editar endereço de recebimento</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editar endereço de envio</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>O endereço digitado "%1" já se encontra no catálogo de endereços.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Evcoin address.</source>
<translation>O endereço digitado "%1" não é um endereço Evcoin válido.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Não foi possível destravar a carteira.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>A geração de nova chave falhou.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Evcoin-Qt</source>
<translation>Evcoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versão</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>opções da linha de comando</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>opções da UI</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Escolher língua, por exemplo "de_DE" (padrão: localização do sistema)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Inicializar minimizado</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Mostrar tela inicial ao ligar (padrão: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opções</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>Principal</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Pagar taxa de &transação</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Evcoin after logging in to the system.</source>
<translation>Iniciar Evcoin automaticamente após se logar no sistema.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Evcoin on system login</source>
<translation>Iniciar Evcoin no login do sistema</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>Redefinir todas as opções do cliente para opções padrão.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>&Redefinir opções</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Evcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Abrir as portas do cliente Evcoin automaticamente no roteador. Isto só funcionará se seu roteador suportar UPnP e esta função estiver habilitada.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Mapear porta usando &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Evcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Conectar à rede Evcoin através de um proxy SOCKS (ex. quando estiver usando através do Tor)</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Conectar através de um proxy SOCKS:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP do proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
        <translation>Endereço IP do proxy (ex. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Porta:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Porta do serviço de proxy (ex. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>&Versão do SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Versão do proxy SOCKS (ex. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Janela</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Mostrar apenas um ícone na bandeja ao minimizar a janela.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimizar para a bandeja em vez da barra de tarefas.</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimizar em vez de sair do aplicativo quando a janela for fechada. Quando esta opção é escolhida, o aplicativo só será fechado selecionando Sair no menu Arquivo.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimizar ao sair</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Mostrar</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Língua da interface com usuário:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Evcoin.</source>
<translation>A língua da interface com usuário pode ser escolhida aqui. Esta configuração só surtirá efeito após reiniciar o Evcoin.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unidade usada para mostrar quantidades:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
        <translation>Escolha a unidade padrão de subdivisão a ser mostrada na interface e ao enviar moedas.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Evcoin addresses in the transaction list or not.</source>
<translation>Mostrar ou não endereços Evcoin na lista de transações.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Mostrar en&dereços na lista de transações</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Cancelar</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Aplicar</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>padrão</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>Confirmar redefinição de opções</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>Algumas configurações requerem reinicialização para surtirem efeito.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>Você quer continuar?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Evcoin.</source>
<translation>Esta configuração surtirá efeito após reinicializar o aplicativo Evcoin</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>O endereço proxy fornecido é inválido.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulário</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Evcoin network after a connection is established, but this process has not completed yet.</source>
<translation>A informação mostrada pode estar desatualizada. Sua carteira sincroniza automaticamente com a rede Evcoin depois que a conexão é estabelecida, mas este processo pode não estar completo ainda.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Não confirmadas:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Carteira</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Imaturo:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Saldo minerado que ainda não maturou</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transações recentes</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Seu saldo atual</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Total de transações ainda não confirmadas, e que ainda não contam no saldo atual</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>fora de sincronia</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start evcoin: click-to-pay handler</source>
<translation>Não foi possível iniciar evcoin: manipulador clique-para-pagar</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>Janela do código QR</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Requisitar Pagamento</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Quantia:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etiqueta:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Mensagem:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Salvar como...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
        <translation>Erro ao codificar o URI em código QR.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>A quantidade digitada é inválida, favor verificar.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>URI resultante muito longa. Tente reduzir o texto do rótulo ou da mensagem.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Salvar código QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>Imagens PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nome do cliente</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versão do cliente</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informação</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Usando OpenSSL versão</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Horário de inicialização</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Rede</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Número de conexões</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Na rede de teste</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Corrente de blocos</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Quantidade atual de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Total estimado de blocos</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Horário do último bloco</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Abrir</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>Opções da linha de comando</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Evcoin-Qt help message to get a list with possible Evcoin command-line options.</source>
<translation>Mostrar mensagem de ajuda do Evcoin-Qt para obter uma lista com possíveis opções da linha de comando do Evcoin.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Mostrar</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Data do 'build'</translation>
</message>
<message>
<location line="-104"/>
<source>Evcoin - Debug window</source>
<translation>Evcoin - Janela de Depuração</translation>
</message>
<message>
<location line="+25"/>
<source>Evcoin Core</source>
<translation>Núcleo Evcoin</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Arquivo de log de Depuração</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Evcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Abrir o arquivo de log de depuração do Evcoin do diretório atual de dados. Isso pode levar alguns segundos para arquivos de log grandes.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Limpar console</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Evcoin RPC console.</source>
<translation>Bem-vindo ao console Evcoin RPC.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Use as setas para cima e para baixo para navegar pelo histórico, e <b>Ctrl-L</b> para limpar a tela.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Digite <b>help</b> para uma visão geral dos comandos disponíveis.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Enviar dinheiro</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Enviar para vários destinatários de uma só vez</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Adicionar destinatário</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Remover todos os campos da transação</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123.456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirmar o envio</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>Enviar</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> para %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmar envio de dinheiro</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Você tem certeza que deseja enviar %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
        <translation> e </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>O endereço do destinatário não é válido, favor verificar.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>A quantidade a ser paga precisa ser maior que 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>A quantidade excede seu saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>O total excede seu saldo quando uma taxa de transação de %1 é incluída.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Endereço duplicado: pode-se enviar para cada endereço apenas uma vez por transação.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>Erro: Criação da transação falhou!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Erro: A transação foi rejeitada. Isso pode acontecer se alguns dos evcoins de sua carteira já haviam sido gastos, por exemplo se você usou uma cópia do arquivo wallet.dat e alguns evcoins foram gastos na cópia mas não foram marcados como gastos aqui.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Formulário</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Q&uantidade:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Pagar &Para:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>O endereço para onde enviar o pagamento (ex. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Digite uma etiqueta para este endereço para adicioná-lo ao catálogo de endereços</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Escolha um endereço do seu catálogo</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Remover este destinatário</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Evcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Digite um endereço Evcoin (exemplo: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Assinaturas - Assinar / Verificar uma mensagem</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Assinar Mensagem</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
        <translation>Você pode assinar mensagens com seus endereços para provar que você é o dono deles. Seja cuidadoso para não assinar algo vago, pois ataques de phishing podem tentar te enganar para dar sua assinatura de identidade para eles. Apenas assine afirmações completamente detalhadas com as quais você concorda.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Endereço a ser usado para assinar a mensagem (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Escolha um endereço do catálogo</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Colar o endereço da área de transferência</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Entre a mensagem que você quer assinar aqui</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Assinatura</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Copiar a assinatura para a área de transferência do sistema</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Evcoin address</source>
<translation>Assinar mensagem para provar que você é dono deste endereço Evcoin</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Assinar &Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Limpar todos os campos de assinatura da mensagem</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Limpar Tudo</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Verificar Mensagem</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
        <translation>Forneça o endereço da assinatura, a mensagem (se assegure que você copiou quebras de linha, espaços, tabs, etc. exatamente) e a assinatura abaixo para verificar a mensagem. Cuidado para não ler mais na assinatura do que está escrito na mensagem propriamente, para evitar ser vítima de um ataque do tipo "man-in-the-middle".</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>O endereço usado para assinar a mensagem (ex. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Evcoin address</source>
<translation>Verificar mensagem para se assegurar que ela foi assinada pelo dono de um endereço Evcoin específico.</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
        <translation>Verificar &Mensagem</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
        <translation>Limpar todos os campos de verificação da mensagem</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Evcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Digite um endereço Evcoin (exemplo: Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Clique em "Assinar Mensagem" para gerar a assinatura</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Evcoin signature</source>
<translation>Entre com a assinatura Evcoin</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>O endereço fornecido é inválido.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Por favor, verifique o endereço e tente novamente.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>O endereço fornecido não se refere a uma chave.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Destravamento da Carteira foi cancelado.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>A chave privada para o endereço fornecido não está disponível.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Assinatura da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Mensagem assinada.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>A assinatura não pode ser decodificada.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Por favor, verifique a assinatura e tente novamente.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>A assinatura não corresponde ao "resumo da mensagem".</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Verificação da mensagem falhou.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Mensagem verificada.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Evcoin developers</source>
<translation>Desenvolvedores do Evcoin</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/não confirmadas</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 confirmações</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
        <translation><numerusform>, difundida através de %n nó</numerusform><numerusform>, difundida através de %n nós</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Fonte</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Gerados</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>De</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Para</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>seu próprio endereço</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Crédito</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>matura em mais %n bloco</numerusform><numerusform>matura em mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>não aceito</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Débito</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Taxa de transação</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Valor líquido</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Mensagem</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Comentário</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID da transação</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Evcoins gerados precisam maturar por 120 blocos antes de serem gastos. Quando você gerou este bloco, ele foi difundido na rede para ser adicionado ao blockchain. Se ele falhar ao ser acrescentado no blockchain, seu estado mudará para "não aceito" e não poderá ser gasto. Isso pode ocasionalmente acontecer se outro nó gerar um bloco poucos segundos antes do seu.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Informação de depuração</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transação</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Entradas</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>verdadeiro</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>falso</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ainda não foi propagada na rede com sucesso</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Aberto por mais %n bloco</numerusform><numerusform>Aberto por mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>desconhecido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detalhes da transação</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Este painel mostra uma descrição detalhada da transação</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Aberto por mais %n bloco</numerusform><numerusform>Aberto por mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Aberto até %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 confirmações)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Não confirmado (%1 de %2 confirmações)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmado (%1 confirmações)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Saldo minerado vai estar disponível quando ele maturar em mais %n bloco</numerusform><numerusform>Saldo minerado vai estar disponível quando ele maturar em mais %n blocos</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Este bloco não foi recebido por nenhum outro participante da rede e provavelmente não será aceito!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Gerado mas não aceito</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Recebido de</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagamento para você mesmo</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status da transação. Passe o mouse sobre este campo para mostrar o número de confirmações.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Data e hora em que a transação foi recebida.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipo de transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Endereço de destino da transação.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Quantidade debitada ou creditada ao saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Todos</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hoje</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Esta semana</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Este mês</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Mês passado</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Este ano</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Intervalo...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Recebido por</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Enviado para</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Para você mesmo</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minerado</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Outro</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Digite um endereço ou etiqueta para pesquisar</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Quantidade mínima</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copiar endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copiar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar quantia</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Copiar ID da transação</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Editar etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Mostrar detalhes da transação</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Exportar Dados das Transações</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Arquivo separado por vírgulas (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Endereço</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Quantidade</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Erro ao exportar</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Não foi possível gravar no arquivo %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Intervalo: </translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>até</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Enviar Moedas</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation>&Exportar</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Exportar os dados na aba atual para um arquivo</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>Fazer cópia de segurança da Carteira</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Dados da Carteira (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Cópia de segurança falhou</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Houve um erro ao tentar salvar os dados da carteira para uma nova localização.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Backup feito com sucesso</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>Os dados da carteira foram salvos com sucesso na nova localização.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Evcoin version</source>
<translation>Versão do Evcoin</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or evcoind</source>
<translation>Enviar comando para -server ou evcoind</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Lista de comandos</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Obtenha ajuda sobre um comando</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Opções:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: evcoin.conf)</source>
<translation>Especifique um arquivo de configurações (padrão: evcoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: evcoind.pid)</source>
<translation>Especifique um arquivo de pid (padrão: evcoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Especificar diretório de dados</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Definir o tamanho do cache do banco de dados em megabytes (padrão: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9333 or testnet: 19333)</source>
<translation>Escutar conexões em <port> (padrão: 9333 ou testnet: 19333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Manter no máximo <n> conexões aos peers (padrão: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Conectar a um nó para receber endereços de participantes, e desconectar.</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Especificar seu próprio endereço público</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Limite para desconectar peers mal comportados (padrão: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Número de segundos para impedir que peers mal comportados reconectem (padrão: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9332 or testnet: 19332)</source>
<translation>Escutar conexões JSON-RPC na porta <porta> (padrão: 9332 ou testnet: 19332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceitar linha de comando e comandos JSON-RPC</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Rodar em segundo plano como serviço e aceitar comandos</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Usar rede de teste</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Aceitar conexões externas (padrão: 1 se opções -proxy ou -connect não estiverem presentes)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=evcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Evcoin Alert" [email protected]
</source>
<translation>%s, você deve especificar uma senha rpcpassword no arquivo de configuração:
%s
É recomendado que você use a seguinte senha aleatória:
rpcuser=evcoinrpc
rpcpassword=%s
(você não precisa lembrar esta senha)
O nome de usuário e a senha NÃO PODEM ser os mesmos.
Se o arquivo não existir, crie um com permissão de leitura apenas para o dono.
É recomendado também definir um alertnotify para que você seja notificado de problemas;
por exemplo: alertnotify=echo %%s | mail -s "Evcoin Alert" [email protected]
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Um erro ocorreu ao configurar a porta RPC %u para escuta em IPv6, voltando ao IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Vincular ao endereço fornecido e sempre escutar nele. Use a notação [host]:port para IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Evcoin is probably already running.</source>
<translation>Não foi possível obter acesso exclusivo ao diretório de dados %s. O Evcoin provavelmente já está rodando.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Erro: A transação foi rejeitada. Isso pode acontecer se alguns dos evcoins de sua carteira já haviam sido gastos, por exemplo se você usou uma cópia do arquivo wallet.dat e alguns evcoins foram gastos na cópia mas não foram marcados como gastos aqui.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Erro: Esta transação requer uma taxa de transação de pelo menos %s, por causa de sua quantidade, complexidade ou uso de dinheiro recebido recentemente.</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>Executar comando quando um alerta relevante for recebido (%s no comando será substituído pela mensagem)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Executar comando quando uma transação da carteira mudar (%s no comando será substituído por TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Determinar tamanho máximo de transações de alta-prioridade/baixa-taxa em bytes (padrão: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Este é um build de teste pré-lançamento - use por sua conta e risco - não use para mineração ou aplicações de comércio.</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Cuidado: valor de -paytxfee escolhido é muito alto! Este é o valor da taxa de transação que você irá pagar se enviar a transação.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Cuidado: Transações mostradas podem não estar corretas! Você pode precisar atualizar, ou outros nós podem precisar atualizar o cliente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Evcoin will not work properly.</source>
<translation>Cuidado: Por favor, verifique que a data e hora do seu computador estão corretas! Se o seu relógio estiver errado, o Evcoin não irá funcionar corretamente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Cuidado: erro ao ler arquivo wallet.dat! Todas as chaves foram lidas corretamente, mas dados de transações e do catálogo de endereços podem estar faltando ou incorretos.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Aviso: wallet.dat corrompido, dados recuperados! Arquivo wallet.dat original salvo como wallet.{timestamp}.bak em %s; se seu saldo ou transações estiverem incorretos, você deve restaurar a partir de um backup.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Tentar recuperar chaves privadas de um arquivo wallet.dat corrompido</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Opções de criação de blocos:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Conectar apenas a nó(s) específico(s)</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Detectado banco de dados de blocos corrompido</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Descobrir os próprios endereços IP (padrão: 1 quando no modo listening e opção -externalip não estiver presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Você quer reconstruir o banco de dados de blocos agora?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>Erro ao inicializar banco de dados de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Erro ao inicializar ambiente de banco de dados de carteira %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>Erro ao carregar banco de dados de blocos</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Erro ao abrir banco de dados de blocos</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>Erro: Espaço em disco insuficiente!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Erro: Carteira travada, impossível criar transação!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>Erro: erro de sistema: </translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Falha ao escutar em qualquer porta. Use -listen=0 se você quiser isso.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>Falha ao ler informação de bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>Falha ao ler bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>Falha ao sincronizar índice de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>Falha ao escrever índice de blocos</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>Falha ao escrever informações de bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>Falha ao escrever bloco</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>Falha ao escrever informações de arquivo</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>Falha ao escrever no banco de dados de moedas</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>Falha ao escrever índice de transações</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>Falha ao escrever dados para desfazer ações</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Procurar pares usando consulta de DNS (padrão: 1 a menos que a opção -connect esteja presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation>Gerar moedas (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Quantos blocos checar ao inicializar (padrão: 288, 0 = todos)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>Quão minuciosa é a verificação dos blocos (0-4, padrão: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation>Não há descritores de arquivo suficientes disponíveis.</translation>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Reconstruir índice de blockchain a partir dos arquivos atuais blk000??.dat</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>Define o número de threads para atender chamadas RPC (padrão: 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Verificando blocos...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Verificando carteira...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importar blocos de um arquivo externo blk000??.dat</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation>Define o número de threads de verificação de scripts (até 16, 0 = automático, <0 = deixar essa quantidade de núcleos livres, padrão: 0)</translation>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>Informação</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Endereço -tor inválido: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -minrelaytxfee=<quantidade>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -mintxfee=<quantidade>: '%s'</translation>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>Manter índice completo de transações (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Buffer máximo de recebimento por conexão, <n>*1000 bytes (padrão: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Buffer máximo de envio por conexão, <n>*1000 bytes (padrão: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>Apenas aceitar cadeia de blocos correspondente a marcas de verificação internas (padrão: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Apenas conectar em nós na rede <net> (IPv4, IPv6, ou Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Mostrar informações extras de depuração. Implica em todas as outras opções -debug*</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Mostrar informações extras de depuração da rede</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Adicionar estampa de tempo no início da saída de depuração</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Evcoin Wiki for SSL setup instructions)</source>
<translation>Opções SSL: (veja a Wiki do Evcoin para instruções de configuração SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Escolher versão do proxy socks a ser usada (4-5, padrão: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Mandar informação de trace/debug para o console em vez de para o arquivo debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Mandar informação de trace/debug para o debugger</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Determinar tamanho máximo de bloco em bytes (padrão: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Determinar tamanho mínimo de bloco em bytes (padrão: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Encolher arquivo debug.log ao iniciar o cliente (padrão 1 se opção -debug não estiver presente)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation>Assinatura da transação falhou</translation>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Especifique o tempo limite (timeout) da conexão em milissegundos (padrão: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>Erro de sistema: </translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation>Quantidade da transação muito pequena</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation>As quantidades das transações devem ser positivas</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation>Transação muito grande</translation>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Usar UPnP para mapear porta de escuta (padrão: 1 quando estiver escutando)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Usar proxy para alcançar serviços ocultos do Tor (padrão: mesmo que -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Nome de usuário para conexões JSON-RPC</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>Cuidado</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Cuidado: Esta versão está obsoleta, atualização exigida!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>Você precisa reconstruir os bancos de dados usando -reindex para mudar -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corrompido, recuperação falhou</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Senha para conexões JSON-RPC</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Permitir conexões JSON-RPC de endereços IP específicos</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Enviar comandos para o nó rodando em <ip> (padrão: 127.0.0.1)</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Executar comando quando o melhor bloco mudar (%s no comando será substituído pelo hash do bloco)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Atualizar carteira para o formato mais recente</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Determinar tamanho do pool de endereços para <n> (padrão: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Re-escanear blocos procurando por transações perdidas da carteira</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Usar OpenSSL (https) para conexões JSON-RPC</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Arquivo de certificado do servidor (padrão: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Chave privada do servidor (padrão: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Algoritmos de criptografia aceitos (padrão: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Esta mensagem de ajuda</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Impossível vincular a %s neste computador (bind retornou erro %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Conectar através de um proxy socks</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permitir consultas DNS para -addnode, -seednode e -connect</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Carregando endereços...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Erro ao carregar wallet.dat: Carteira corrompida</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Evcoin</source>
<translation>Erro ao carregar wallet.dat: Carteira requer uma versão mais nova do Evcoin</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Evcoin to complete</source>
<translation>A Carteira precisou ser reescrita: reinicie o Evcoin para completar</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Erro ao carregar wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Endereço -proxy inválido: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Rede desconhecida especificada em -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Versão desconhecida do proxy -socks requisitada: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Impossível encontrar o endereço -bind: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Impossível encontrar endereço -externalip: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Quantidade inválida para -paytxfee=<quantidade>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Quantidade inválida</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Saldo insuficiente</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Carregando índice de blocos...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Adicionar um nó com o qual se conectar e tentar manter a conexão ativa</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Evcoin is probably already running.</source>
<translation>Impossível vincular a %s neste computador. O Evcoin provavelmente já está rodando.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Taxa por KB a ser acrescida nas transações que você enviar</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Carregando carteira...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Não é possível fazer downgrade da carteira</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Não foi possível escrever o endereço padrão</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Re-escaneando...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Carregamento terminado</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>Para usar a opção %s</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Erro</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Você precisa especificar rpcpassword=<senha> no arquivo de configurações:
%s
Se o arquivo não existir, crie um com permissão de leitura apenas pelo dono.</translation>
</message>
</context>
</TS>