prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>debugger.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import division
import logging
import numpy
from pandas import merge, concat, DataFrame
from openfisca_france_data import default_config_files_directory as config_files_directory
from openfisca_france_data.input_data_builders.build_openfisca_survey_data.base import year_specific_by_generic_data_frame_name
from openfisca_france_data.model.common import mark_weighted_percentiles as mwp
from openfisca_survey_manager.survey_collections import SurveyCollection
from openfisca_france_data.utils import simulation_results_as_data_frame
from openfisca_france_data.erf import get_erf2of, get_of2erf
from openfisca_plugin_aggregates.aggregates import Aggregates
from openfisca_parsers import input_variables_extractors
log = logging.getLogger(__name__)
def clean(parameter):
    """Return *parameter* with a trailing '_holder' suffix removed, if any."""
    suffix = '_holder'
    if parameter.endswith(suffix):
        return parameter[:-len(suffix)]
    return parameter
class Debugger(object):
def __init__(self):
    """Initialise an empty debugger; call the set_* methods before use."""
    super(Debugger, self).__init__()
    # Raw ERF/EEC survey tables (populated later).
    self.erf_menage = None
    self.erf_eec_indivi = None
    # OpenFisca simulation outputs (populated later).
    self.of_menages_data_frame = None
    self.of_individus_data_frame = None
    # Variable under scrutiny and the scenario it is computed in.
    self.variable = None
    self.survey_scenario = None
def set_survey_scenario(self, survey_scenario = None):
    """Attach *survey_scenario* and cache its simulation and column metadata.

    The scenario must already hold a built simulation; this is now checked
    *before* it is dereferenced, so a missing simulation fails with an
    explicit message instead of an opaque AttributeError on None.
    """
    assert survey_scenario is not None
    # Bug fix: the original asserted simulation-is-not-None only after
    # accessing simulation.tax_benefit_system, so the assert never fired.
    assert survey_scenario.simulation is not None, \
        "The simulation attribute of survey_scenario is None"
    self.survey_scenario = survey_scenario
    self.column_by_name = survey_scenario.simulation.tax_benefit_system.column_by_name
    self.simulation = survey_scenario.simulation
def set_variable(self, variable):
    """Select the variable to debug; a list is collapsed to its first item."""
    self.variable = variable[0] if isinstance(variable, list) else variable
def show_aggregates(self):
    """Print aggregate amounts for the selected variable, comparing the
    OpenFisca simulation against the ERF reference aggregates."""
    from openfisca_france_data.erf.aggregates import build_erf_aggregates
    assert self.survey_scenario is not None, 'simulation attribute is None'
    assert self.variable is not None, 'variable attribute is None'
    variable = self.variable
    openfisca_aggregates = Aggregates()
    openfisca_aggregates.set_survey_scenario(self.survey_scenario)
    openfisca_aggregates.compute()
    column_by_name = self.column_by_name
    # Reference aggregates computed directly from the ERF survey data.
    temp = (build_erf_aggregates(variables=[variable], year= self.survey_scenario.year))
    # The aggregates frame is keyed by the variable's human-readable label.
    selection = openfisca_aggregates.aggr_frame["Mesure"] == column_by_name[variable].label
    print openfisca_aggregates.aggr_frame[selection]
    print temp
    # TODO: clean this
    return
def extract(self, data_frame, entities = "men"):
    """Return a copy of *data_frame* restricted to known columns whose
    entity belongs to *entities*, always keeping the household id column."""
    column_by_name = self.column_by_name
    known = set(column_by_name.keys()) & set(data_frame.columns)
    selected = set(
        name for name in known
        if column_by_name[name].entity in entities
        )
    selected.add('idmen')
    return data_frame[list(selected)].copy()
def get_all_parameters(self, column_list):
    """Recursively expand *column_list* into the full list of input columns.

    For each column, the formula's input variables (obtained from the
    parsed tax and benefit system) are themselves expanded depth-first;
    columns with no input variables are returned as-is. The result keeps
    the head column first, then its inputs, then the remaining columns,
    without duplicates.

    Bug fix: removed leftover debug instrumentation (a ``global x``
    call-counter and a bare ``boum`` that raised NameError on the 20th
    call) and replaced debug ``print`` statements with logging.
    """
    if len(column_list) == 0:
        return []
    column_by_name = self.column_by_name
    tax_benefit_system = self.survey_scenario.simulation.tax_benefit_system
    extractor = input_variables_extractors.setup(tax_benefit_system)
    column_name = column_list[0].name
    log.info('Expanding parameters of {}'.format(column_name))
    input_variables = extractor.get_input_variables(column_by_name[column_name])
    if input_variables is None:
        # A pure input variable: nothing to expand.
        return column_list
    first_column = [column_list[0]]
    input_columns = self.get_all_parameters([
        column_by_name[clean(parameter)]
        for parameter in list(input_variables)
        ])
    other_columns = list(
        set(self.get_all_parameters(column_list[1:])) - set(first_column + input_columns)
        )
    log.info('input_variables: {}'.format([column.name for column in input_columns]))
    log.info('new_variables: {}'.format([column.name for column in other_columns]))
    new_column_list = first_column + input_columns + other_columns
    log.info('final list: {}'.format([column.name for column in new_column_list]))
    return new_column_list
def build_columns_to_fetch(self):
    """Determine which columns must be fetched for the selected variable.

    The recursive parameter/consumer expansion is currently disabled (see
    the commented-out code below); only the variable itself is fetched.
    """
    # NOTE(review): unused while the expansion below stays commented out.
    column_by_name = self.column_by_name
    # parameters_column = self.get_all_parameters([column_by_name.get(x) for x in [self.variable]])
    # parameters = [x.name for x in parameters_column]
    parameters = [self.variable]
    # We want to get all parameters and consumers that we're going to encounter
    # consumers = []
    # for variable in [self.variable]:
    #     column = column_by_name.get(variable)
    #     consumers = list(set(consumers).union(set(column.consumers)))
    # column_names = list(set(parameters).union(set(consumers)))
    # self.columns_to_fetch = column_names
    # self.variable_consumers = list(set(consumers))
    self.variable_parameters = list(set(parameters))
    self.columns_to_fetch = list(set(parameters))
def build_openfisca_data_frames(self):
    """Fetch the needed columns from the simulation into per-entity frames.

    Builds both directions of the idmen <-> idmen_original mapping,
    projects sub-entity values onto households via :meth:`project_on`, and
    renormalises the individual table to use original household ids.

    Bug fix: the method read the module-level global ``survey_scenario``
    (only defined when the file is run as a script) instead of the
    scenario attached with :meth:`set_survey_scenario`.
    """
    survey_scenario = self.survey_scenario
    column_names = self.columns_to_fetch
    for column in column_names:
        assert column in survey_scenario.tax_benefit_system.column_by_name.keys()
    data_frame_by_entity_key_plural = survey_scenario.create_data_frame_by_entity_key_plural(
        variables = column_names + ['idmen_original'],
        indices = True,
        roles = True,
        )
    self.data_frame_by_entity_key_plural = data_frame_by_entity_key_plural
    projected = self.project_on(data_frame_by_entity_key_plural = data_frame_by_entity_key_plural)
    menages = data_frame_by_entity_key_plural['menages']
    # Two-way lookup between internal household index and survey id.
    idmen_original_by_idmen = dict(
        zip(menages.index.values, menages["idmen_original"].values)
        )
    self.idmen_original_by_idmen = idmen_original_by_idmen
    idmen_by_idmen_original = dict(
        zip(menages["idmen_original"].values, menages.index.values)
        )
    self.idmen_by_idmen_original = idmen_by_idmen_original
    data_frame_by_entity_key_plural['menages'] = projected.rename(
        columns = {"idmen_original": "idmen"})
    data_frame_by_entity_key_plural['individus'].replace(
        {'idmen': idmen_original_by_idmen}, inplace = True)
    self.data_frame_by_entity_key_plural = data_frame_by_entity_key_plural
def project_on(self, receiving_entity_key_plural = 'menages', data_frame_by_entity_key_plural = None):
    """Project every non-person entity's values onto the receiving entity
    (households by default) and return the combined data frame.

    For each other entity, values are first copied onto that entity's head
    person, then all person rows are summed per receiving-entity id and
    concatenated column-wise with the receiving entity's own frame.
    """
    tax_benefit_system = self.survey_scenario.tax_benefit_system
    assert data_frame_by_entity_key_plural is not None
    # Projecting onto persons themselves would be meaningless here.
    assert receiving_entity_key_plural is not tax_benefit_system.person_key_plural
    entity_data_frame = data_frame_by_entity_key_plural[receiving_entity_key_plural]
    person_data_frame = data_frame_by_entity_key_plural[tax_benefit_system.person_key_plural]
    # Every entity except persons and the receiving one.
    entity_keys_plural = list(
        set(tax_benefit_system.entity_class_by_key_plural.keys()).difference(set(
            [tax_benefit_system.person_key_plural, receiving_entity_key_plural]
            ))
        )
    for entity_key_plural in entity_keys_plural:
        entity = tax_benefit_system.entity_class_by_key_plural[entity_key_plural]
        # Getting only heads of other entities present in the projected on entity
        boolean_index = person_data_frame[entity.role_for_person_variable_name] == 0  # Heads
        index_entity = person_data_frame.loc[boolean_index, entity.index_for_person_variable_name].values  # Ent.
        for column_name, column_series in self.data_frame_by_entity_key_plural[entity_key_plural].iteritems():
            person_data_frame.loc[boolean_index, column_name] \
                = column_series.iloc[index_entity].values
            # NOTE(review): fillna is not in-place, so this result is
            # discarded — presumably NaNs should be zeroed; confirm intent.
            person_data_frame[column_name].fillna(0)
    receiving_entity = tax_benefit_system.entity_class_by_key_plural[receiving_entity_key_plural]
    grouped_data_frame = person_data_frame.groupby(by = receiving_entity.index_for_person_variable_name).agg(sum)
    # The role column is meaningless once summed per entity: drop it.
    grouped_data_frame.drop(receiving_entity.role_for_person_variable_name, axis = 1, inplace = True)
    data_frame = concat([entity_data_frame, grouped_data_frame], axis = 1)
    assert data_frame.notnull().all().all()
    return data_frame
def build_erf_data_frames(self):
    """Load the ERF/EEC survey tables containing the variables to debug.

    Resolves each OpenFisca variable name to its ERF counterpart, finds
    which year-specific table each variable lives in, loads those tables
    and stores them in ``self.erf_data_frame_by_entity_key_plural`` keyed
    by 'menages' and 'individus'.

    Bug fix: the survey year was read from a module-level global ``year``
    (only defined when the file is run as a script); it now comes from the
    attached survey scenario.
    """
    year = self.survey_scenario.year
    # TODO: remove this hard-coded restriction
    self.columns_to_fetch = ['af']
    variables = self.columns_to_fetch
    erf_survey_collection = SurveyCollection.load(
        collection = "erfs", config_files_directory = config_files_directory)
    erf_survey = erf_survey_collection.get_survey("erfs_{}".format(year))
    year_specific_by_generic = year_specific_by_generic_data_frame_name(year)
    generic_by_year_specific = dict(zip(year_specific_by_generic.values(), year_specific_by_generic.keys()))
    erf_variables = list(set(variables + ["ident", "wprm", "quelfic", "noi"]))
    of2erf = get_of2erf()
    # Translate OpenFisca names to their ERF counterparts when they differ.
    for index, variable in enumerate(erf_variables):
        if variable in of2erf:
            erf_variables[index] = of2erf[variable]
    data_frame_by_table = dict(eec_indivi = None, erf_indivi = None, erf_menage = None)
    erf_variables_by_generic_table = dict(eec_indivi = [], erf_indivi = [], erf_menage = [])
    # For each variable, the year-specific tables it can be read from.
    year_specific_tables_by_erf_variable = dict(
        [
            (
                erf_variable,
                set(
                    erf_survey.find_tables(variable = erf_variable)
                    ).intersection(
                    set([year_specific_by_generic[key] for key in erf_variables_by_generic_table.keys()])
                    )
                ) for erf_variable in erf_variables
            ]
        )
    for variable, year_specific_tables in year_specific_tables_by_erf_variable.iteritems():
        if len(year_specific_tables) < 1:
            log.info("No tables are present for variable {}".format(variable))
            continue
        else:
            log.info("Variable {} is present in multiple tables : {}".format(variable, year_specific_tables))
            for table in year_specific_tables:
                log.info("Variable {} is retrieved from table {}".format(variable, table))
                erf_variables_by_generic_table[generic_by_year_specific[table]].append(variable)
    erf2of = get_erf2of()
    for table, erf_variables in erf_variables_by_generic_table.iteritems():
        if erf_variables:
            data_frame_by_table[table] = erf_survey.get_values(
                variables = erf_variables, table = year_specific_by_generic[table]
                )
            # Normalise column names back to OpenFisca conventions.
            data_frame_by_table[table].rename(columns = erf2of, inplace = True)
            data_frame_by_table[table].rename(columns = {'ident': 'idmen'}, inplace = True)
    assert not data_frame_by_table["erf_menage"].duplicated().any(), "Duplicated idmen in erf_menage"
    self.erf_data_frame_by_entity_key_plural = dict(
        menages = data_frame_by_table["erf_menage"],
        individus = data_frame_by_table["erf_indivi"].merge(data_frame_by_table["eec_indivi"])
        )
    # TODO: fichier foyer
def get_major_differences(self):
variable = self.variable
of_menages_data_frame = self.data_frame_by_entity_key_plural['menages']
erf_menages_data_frame = self.erf_data_frame_by_entity_key_plural['menages']<|fim▁hole|> erf_menages_data_frame[[variable, 'idmen']],
of_menages_data_frame[[variable, 'idmen']],
on = 'idmen',
how = 'inner',
suffixes = ('_erf', '_of')
)
log.info('Length of merged_menage_data_frame is {}'.format(len(merged_menage_data_frame)))
merged_menage_data_frame.set_index('idmen', drop = False, inplace = True)
table = merged_menage_data_frame[
numpy.logical_and(
merged_menage_data_frame[variable + '_erf'] != 0,
merged_menage_data_frame[variable + '_of'] != 0
)
]
table[variable + "_rel_diff"] = (table[variable + '_of'] - table[variable + '_erf']) \
/ table[variable + '_erf'] # Difference relative
log.info(
"Minimum difference between the two tables for {} is {}".format(
variable, str(table[variable + "_rel_diff"].min())
)
)
log.info(
"Maximum difference between the two tables for {} is {}".format(
variable, str(table[variable + "_rel_diff"].max())
)
)
table[variable + '_ratio'] = (
table[variable + '_of'] / table[variable + '_erf']
)
log.info(table[variable + "_rel_diff"].describe())
try:
assert len(table[variable + "_rel_diff"]) == len(table['wprm_of']), "PINAGS"
dec, values = mwp(
table[variable + "_rel_diff"],
numpy.arange(1, 11), table['wprm_of'],
2,
return_quantiles = True
)
log.info(sorted(values))
dec, values = mwp(
table[variable + "_rel_diff"],
numpy.arange(1, 101),
table['wprm_erf'],
2,
return_quantiles = True
)
log.info(sorted(values)[90:])
del dec, values
except:
log.info('Weighted percentile method did not work for {}'.format(variable + "_rel_diff"))
pass
table.sort(columns = variable + "_rel_diff", ascending = False, inplace = True)
print table[:10].to_string()
return table
def describe_discrepancies(self, fov = 10, consumers = False, parameters = True, descending = True, to_men = False):
    """Return the *fov* households with the largest relative discrepancy
    between the ERF and OpenFisca values of the selected variable.

    NOTE(review): the early ``return debug_data_frame`` below makes all
    the merge/enrichment code that follows unreachable dead code (it even
    references ``major_differences_data_frame`` after the ``del``) —
    presumably a debugging shortcut left in; confirm before relying on
    the *consumers*/*parameters*/*to_men* options, which only affect the
    dead part.
    """
    variable = self.variable
    major_differences_data_frame = self.get_major_differences()
    # Sort by signed relative difference, largest discrepancy first.
    major_differences_data_frame.sort(
        columns = self.variable + "_rel_diff",
        ascending = not descending,
        inplace = True
        )
    debug_data_frame = major_differences_data_frame[0:fov].copy()
    del major_differences_data_frame
    of_menages_data_frame = self.data_frame_by_entity_key_plural['menages']
    of_individus_data_frame = self.data_frame_by_entity_key_plural['individus']
    erf_individus_data_frame = self.erf_data_frame_by_entity_key_plural['individus']
    erf_menages_data_frame = self.erf_data_frame_by_entity_key_plural['menages']
    return debug_data_frame
    # --- everything below is unreachable (see docstring) ---
    kept_columns = set()
    if parameters:
        kept_columns.update(set(self.variable_parameters))
    if consumers:
        kept_columns.update(set(self.variable_consumers))
    kept_columns = list(kept_columns)
    kept_columns = list(set(kept_columns).union(
        set(['idmen', 'idfam', 'idfoy', 'quimen', 'quifam', 'quifoy'] + list(major_differences_data_frame.columns)))
        )
    if to_men:
        entities_ind = ['ind']
        entities_men = ['men', 'fam', 'foy']
    else:
        entities_ind = ['ind', 'fam', 'foy']
        entities_men = ['men']
    debug_data_frame = debug_data_frame.merge(
        self.extract(of_menages_data_frame, entities = entities_men),
        how = 'inner',
        on = 'idmen',
        )
    print debug_data_frame.to_string()
    debug_data_frame = debug_data_frame.merge(
        self.extract(of_individus_data_frame, entities = entities_ind),
        how = 'inner',
        on = 'idmen',
        )
    debug_data_frame = debug_data_frame.merge(
        erf_individus_data_frame,
        how = 'inner',
        on = 'idmen',
        )
    suffixes = ["_erf", "_of", "_rel_diff", "_ratio"]
    reordered_columns = [variable + suffixe for suffixe in suffixes] \
        + ["idmen", "quimen", "idfam", "quifam", "idfoy", "quifoy"]
    reordered_columns = reordered_columns + list(set(kept_columns) - set(reordered_columns))
    debug_data_frame = debug_data_frame[reordered_columns].copy()
    return debug_data_frame
def generate_test_case(self):
    """Extract, from the scenario input data, the rows needed to reproduce
    the household with the smallest relative discrepancy as a test case."""
    entity_class_by_key_plural = self.survey_scenario.tax_benefit_system.entity_class_by_key_plural
    menages_entity = entity_class_by_key_plural['menages']
    idmen_by_idmen_original = self.idmen_by_idmen_original
    # First row of the ascending sort: the most extreme under-estimation.
    idmen_original = self.describe_discrepancies(descending = False)[menages_entity.index_for_person_variable_name].iloc[0]
    idmen = idmen_by_idmen_original[idmen_original]
    input_data_frame = self.survey_scenario.input_data_frame
    # All individuals belonging to that household.
    individus_index = input_data_frame.index[input_data_frame[menages_entity.index_for_person_variable_name] == idmen]
    index_by_entity = {
        entity_class_by_key_plural['individus']: individus_index,
        }
    # For every other entity, the ids of the groups those individuals belong to.
    for entity in entity_class_by_key_plural.values():
        if entity.key_plural != 'individus':
            index_by_entity[entity] = input_data_frame.loc[
                individus_index, entity.index_for_person_variable_name].unique()
    extracted_indices = individus_index
    # Widen the selection to every individual sharing one of those groups.
    for entity, entity_index in index_by_entity.iteritems():
        if entity.key_plural in ['menages', 'individus']:
            continue
        extracted_indices = extracted_indices + \
            input_data_frame.index[input_data_frame[entity.index_for_person_variable_name].isin(entity_index)]
    extracted_input_data_frame = input_data_frame.loc[extracted_indices]
    return extracted_input_data_frame
if __name__ == '__main__':
    # Ad-hoc driver: build a survey scenario for 2009, debug the 'af'
    # variable, then stop deliberately (the bare `boum` raises NameError).
    import sys
    from openfisca_plugin_aggregates.tests.test_aggregates import create_survey_scenario
    logging.basicConfig(level = logging.INFO, stream = sys.stdout)
    restart = True
    if restart:
        year = 2009
        survey_scenario = create_survey_scenario(year)
        survey_scenario.simulation = survey_scenario.new_simulation()
    debugger = Debugger()
    debugger.set_survey_scenario(survey_scenario = survey_scenario)
    debugger.set_variable('af')
    debugger.build_columns_to_fetch()
    debugger.build_openfisca_data_frames()
    debugger.build_erf_data_frames()
    # df_menage = debugger.data_frame_by_entity_key_plural['menages']
    # df_famille = debugger.data_frame_by_entity_key_plural['familles']
    # df_individus = debugger.data_frame_by_entity_key_plural['individus']
    # df = debugger.get_major_differences()
    # debugger.show_aggregates()
    df = debugger.describe_discrepancies(descending = False)
    df = debugger.generate_test_case()
    boum  # NOTE(review): intentional crash-stop left in by the author
    entity_class_by_key_plural = debugger.survey_scenario.tax_benefit_system.entity_class_by_key_plural
    menages_entity = entity_class_by_key_plural['menages']
    idmen = debugger.describe_discrepancies(descending = False)[menages_entity.index_for_person_variable_name].iloc[0]
input_data_frame = debugger.survey_scenario.input_data_frame<|fim▁end|>
|
merged_menage_data_frame = merge(
|
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>###
# Copyright (c) 2015, KG-Bot
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import json
import datetime
import time
import supybot.ircmsgs as ircmsgs
import supybot.schedule as schedule
import supybot.conf as conf
import supybot.utils as utils
from supybot.commands import *
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
try:
from supybot.i18n import PluginInternationalization
_ = PluginInternationalization('RequestBot')
except ImportError:
# Placeholder that allows to run the plugin on a bot
# without the i18n module
_ = lambda x:x
class RequestBot(callbacks.Plugin):
"""Plugin is used to automate bot requests for some channel."""
threaded = True
def __init__(self, irc):
    """Initialise the plugin and cache its configuration values.

    Bug fix: the constructor used the name ``irc`` without declaring it,
    raising NameError on load; supybot plugin constructors receive the
    irc object as an argument and must forward it to the parent class.
    """
    self.__parent = super(RequestBot, self)
    self.__parent.__init__(irc)
    # Configurable knobs, read once at load time.
    self.dailyChecksInterval = conf.supybot.plugins.RequestBot.dailyCheckInterval()
    self.stopDailyCheck = conf.supybot.plugins.RequestBot.stopDailyCheck()
    self.numberOfChecks = conf.supybot.plugins.RequestBot.numberOfChecks()
    self.numberOfValidUsers = conf.supybot.plugins.RequestBot.numberOfValidUsers()
def _logError(self, message):
    """Append *message* on its own line to the plugin's error log.

    Bug fix: the path was the literal string "local\\log.txt" (backslash,
    wrong directory), inconsistent with every other data file this plugin
    reads from plugins/RequestBot/local/; aligned on that convention.
    """
    with open("plugins/RequestBot/local/log.txt", "a") as logFile:
        logFile.write("\n")
        logFile.write(message)
def _checkChannelBan(self, channel):
    """Tell whether *channel* is on the channel ban list.

    Returns "Banned", "Valid", or "Error" when the list cannot be read
    (the failure is appended to the plugin's error log).
    """
    try:
        with open("plugins/RequestBot/local/channelBans.json", "r") as bans:
            banList = json.loads(bans.read())
        return "Banned" if channel.lower() in banList.keys() else "Valid"
    except Exception as e:
        today_date = datetime.datetime.today().strftime("[%Y-%m-%d %H:%M:%S]")
        self._logError("%s - %s" % (today_date, str(e)))
        return "Error"
def _checkRequesterBan(self, nick):
try:
with open("plugins/RequestBot/local/nickBans.json", "r") as nicks:
banList = json.loads(nicks.read())
if nick.lower() not in banList.keys():
return "Valid"
else:
return "Banned"
except Exception as e:
today_date = datetime.datetime.today().strftime("[%Y-%m-%d %H:%M:%S]")
self._logError("%s - %s" % (today_date, str(e)))<|fim▁hole|>
def _populateNicks(self, users):
    """Check whether *users* contains enough valid (non-listed) nicks.

    Returns "Valid" when the number of users absent from
    invalidNicks.json reaches the configured threshold, else "Invalid".

    Bug fix: the original nested loop incremented the counter once per
    matching list entry, so a duplicate entry in invalidNicks.json could
    count a single user twice (and the scan was O(users * nicks));
    set membership counts each user at most once.
    """
    with open("plugins/RequestBot/local/invalidNicks.json", "r") as nicks:
        invalidNicks = json.loads(nicks.read())
    invalid = set(invalidNicks)
    numberOfInvalidUsers = sum(1 for user in users if user.lower() in invalid)
    numberOfValidUsers = len(users) - numberOfInvalidUsers
    return "Valid" if numberOfValidUsers >= self.numberOfValidUsers else "Invalid"
def _getDailyChecks(self, channel):
    """Return the daily-check state dict if *channel* is tracked, else "Error".

    Bug fix: the emptiness guard compared the parsed JSON object against
    the *string* "{}", which a dict never equals, so the guard was a
    no-op; it now tests the dict itself for emptiness.
    """
    with open("plugins/RequestBot/local/dailyChecks.json", "r") as dailyChecks:
        channels = json.loads(dailyChecks.read())
    if channels and channel in channels.keys():
        return channels
    return "Error"
def _dailyCheckOfUsers(self, irc, msg, adminPlugin, channel, eventName):
    """Periodic check that *channel* still has enough valid users.

    NOTE(review): _getDailyChecks returns a dict (or the string "Error"),
    so the ``<=`` comparison against an int below is suspect — the inline
    TODO acknowledges this.
    NOTE(review): ``partMsg`` is not defined in this scope; the part()
    call would raise NameError if the "Invalid" branch is ever taken.
    """
    numberOfChecks = self._getDailyChecks(channel)
    if numberOfChecks <= self.numberOfChecks:  # TODO Change this because numberOfChecks will return dict of items
        if channel in irc.state.channels:
            users = irc.state.channels[channel].users
            validNicks = self._populateNicks(users)
            if validNicks == "Invalid":
                adminPlugin.part(irc, msg, [channel, partMsg])
def _channelState(self, irc, msg, nick, channel, adminPlugin):
    """Collects users from <channel> and determines if <nick> is owner or admin.

    Runs a few seconds after joining, once irc.state has been populated.
    Parts the channel with an explanatory message whenever the request
    does not qualify; otherwise greets the channel and schedules the
    recurring user checks plus their delayed cancellation.
    """
    channels = irc.state.channels
    if channel in channels:
        users = irc.state.channels[channel].users
        # This checks number of valid users on channel
        validNicks = self._populateNicks(users)
        if validNicks == "Valid":
            owners = irc.state.channels[channel].owners
            # If owners are not empty that means ownermode is set and user must have +q
            # mode to request bot
            if len(owners) != 0:
                if nick in owners:
                    eventName = "%s_RequestBot_dailyChecks" % channel
                    stopEventName = "%s_RequestBot_stopDailyChecks" % channel
                    # We must schedule it this way because we can't pass args in schedule...
                    def startDailyChecks():
                        # We are checking channel users for few days because one might try
                        # to bring a lot of users when he requests bot and later those users
                        # will part channel and never come back again
                        self._dailyCheckOfUsers(irc, msg, adminPlugin, channel, eventName)
                    # We're scheduling this to be run few times a day for few days and at the last
                    # time we're going to check if there were minimum users on the channel
                    # for most of the time
                    # TODO: Implement last check
                    schedule.addPeriodicEvent(startDailyChecks, self.dailyChecksInterval, eventName, now=False)
                    def stopDailyChecks():
                        # We must schedule it here because if we do it elsewhere we won't be able to
                        # access new state of scheduler which holds reference to our scheduled event
                        schedule.removeEvent(eventName)
                    schedule.addEvent(stopDailyChecks, time.time() + self.stopDailyCheck, stopEventName)
                    greetMsg = "Hi, I've been assigned here thanks to %s. If you have any questions use +list or come to #KG-Bot and ask." % nick
                    irc.queueMsg(ircmsgs.privmsg(channel, greetMsg))
                else:
                    partMsg = "You're not owner (with +q set) so you can't have me in here."
                    irc.queueMsg(ircmsgs.privmsg(channel, partMsg))
                    adminPlugin.part(irc, msg, [channel, partMsg])
            # If there are no owners with +q mode set we're not going to allow admins or ops
            # to request bot, we're forcing players to use ownermode and +q so only true channel owner
            # can request bot (you never know what admins can try to do)
            else:
                partMsg = "There are no owners in here (with +q set)."
                irc.queueMsg(ircmsgs.privmsg(channel, partMsg))
                adminPlugin.part(irc, msg, [channel, partMsg])
        else:
            partMsg = "You don't have enough users in here."
            irc.queueMsg(ircmsgs.privmsg(channel, partMsg))
            adminPlugin.part(irc, msg, [channel, partMsg])
    # This should never happen, maybe only if bot is kicked from channel before
    # scheduled event for this command has been executed
    else:
        partMsg = "There was something strange internally. Please notify my owner about this."
        irc.queueMsg(ircmsgs.privmsg(channel, partMsg))
        adminPlugin.part(irc, msg, [channel, partMsg])
def request(self, irc, msg, args, channel, reason):
    """<channel> - channel name for which you make request, <reason> - reason why do you want bot (it must be some good reason, not some bullshit)

    Request bot for <channel>, you must specify <reason> why do you want it."""
    # TODO: Before anything happens we should check if <channel> is valid IRC channel name
    # because if it's not we won't be able to join it, collect irc.state and our code will
    # probably break in the unwanted manner
    # TODO: If we're already on channel nothing should be done and user should be
    # presented with explanation (we still have to implement that in our code)
    nick = msg.nick
    isChannelBanned = self._checkChannelBan(channel)
    # TODO: change this because this will probably return dict of more info about ban
    if isChannelBanned == "Valid":
        isRequesterBanned = self._checkRequesterBan(nick)
        # TODO: Change this because this will probably behave like channel ban and will return dict
        if isRequesterBanned == "Valid":
            # We're doing it this way because it's much more easier than trying to reimplement
            # admin join function with all those network, group, etc. stuff
            adminPlugin = irc.getCallback("Admin")
            adminPlugin.join(irc, msg, [channel.lower()])
            # We must schedule this command because when bot joins some channel it needs few seconds
            # to collect irc.state and we can't access those right after the join
            schedule.addEvent(self._channelState, time.time() + 5, args=[irc, msg, nick, channel, adminPlugin])
        elif isRequesterBanned == "Banned":
            irc.reply("You can't request bot becuase you're on ban list.")
        else:
            irc.reply("There was some ugly internal error. Please try again and notify my owner about this.")
    elif isChannelBanned == "Banned":
        irc.reply("This channel is banned and you can't request bot for it.")
    else:
        irc.reply("There was some ugly internal error. Please try again and notify my owner about this.")
request = wrap(request, ["channel", "something"])
Class = RequestBot
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:<|fim▁end|>
|
return "Error"
|
<|file_name|>hr_plan_wizard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class HrPlanWizard(models.TransientModel):
_name = 'hr.plan.wizard'
_description = 'Plan Wizard'
plan_id = fields.Many2one('hr.plan', default=lambda self: self.env['hr.plan'].search([], limit=1))
employee_id = fields.Many2one(
'hr.employee', string='Employee', required=True,
default=lambda self: self.env.context.get('active_id', None),
)
def action_launch(self):<|fim▁hole|> responsible = activity_type.get_responsible_id(self.employee_id)
if self.env['hr.employee'].with_user(responsible).check_access_rights('read', raise_exception=False):
date_deadline = self.env['mail.activity']._calculate_date_deadline(activity_type.activity_type_id)
self.employee_id.activity_schedule(
activity_type_id=activity_type.activity_type_id.id,
summary=activity_type.summary,
note=activity_type.note,
user_id=responsible.id,
date_deadline=date_deadline
)
return {
'type': 'ir.actions.act_window',
'res_model': 'hr.employee',
'res_id': self.employee_id.id,
'name': self.employee_id.display_name,
'view_mode': 'form',
'views': [(False, "form")],
}<|fim▁end|>
|
for activity_type in self.plan_id.plan_activity_type_ids:
|
<|file_name|>rscope.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::ty;
use std::vec;
use syntax::ast;
use syntax::codemap::Span;
use syntax::opt_vec::OptVec;
/// Defines strategies for handling regions that are omitted. For
/// example, if one writes the type `&Foo`, then the lifetime of of
/// this borrowed pointer has been omitted. When converting this
/// type, the generic functions in astconv will invoke `anon_regions`
/// on the provided region-scope to decide how to translate this
/// omitted region.
///
/// It is not always legal to omit regions, therefore `anon_regions`
/// can return `Err(())` to indicate that this is not a scope in which
/// regions can legally be omitted.
pub trait RegionScope {
    /// Produce `count` regions for omitted lifetimes appearing at `span`,
    /// or `Err(())` if anonymous regions are illegal in this scope.
    fn anon_regions(&self,
                    span: Span,
                    count: uint)
                    -> Result<~[ty::Region], ()>;
}
// A scope in which all regions must be explicitly named
pub struct ExplicitRscope;

impl RegionScope for ExplicitRscope {
    /// Always fails: omitted regions are never legal in an explicit scope.
    fn anon_regions(&self,
                    _span: Span,
                    _count: uint)
                    -> Result<~[ty::Region], ()> {
        Err(())
    }
}
/// A scope in which we generate anonymous, late-bound regions for
/// omitted regions. This occurs in function signatures.
pub struct BindingRscope {
    /// Id of the binder (e.g. the fn item) the late-bound regions attach to.
    binder_id: ast::NodeId,
    /// Running count of anonymous bindings handed out so far (shared box).
    anon_bindings: @mut uint
}

impl BindingRscope {
    /// Create a binding scope for the binder identified by `binder_id`.
    pub fn new(binder_id: ast::NodeId) -> BindingRscope {
        BindingRscope {
            binder_id: binder_id,
            anon_bindings: @mut 0
        }
    }
}

impl RegionScope for BindingRscope {
    /// Hand out `count` fresh late-bound regions, numbered consecutively
    /// from the current anonymous-binding counter.
    fn anon_regions(&self,
                    _: Span,
                    count: uint)
                    -> Result<~[ty::Region], ()> {
        let idx = *self.anon_bindings;
        // Reserve the whole range up front so later calls don't collide.
        *self.anon_bindings += count;
        Ok(vec::from_fn(count, |i| ty::ReLateBound(self.binder_id,
                                                   ty::BrAnon(idx + i))))
    }
}
pub fn bound_type_regions(defs: &[ty::RegionParameterDef])<|fim▁hole|> |(i, def)| ty::ReEarlyBound(def.def_id.node, i, def.ident)).collect()
}<|fim▁end|>
|
-> OptVec<ty::Region> {
assert!(defs.iter().all(|def| def.def_id.crate == ast::LOCAL_CRATE));
defs.iter().enumerate().map(
|
<|file_name|>faIntersection.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, '__esModule', { value: true });
var prefix = 'fas';
var iconName = 'intersection';
var width = 384;
var height = 512;
var ligatures = [];
var unicode = 'f668';
var svgPathData = 'M166.74 33.62C69.96 46.04 0 133.11 0 230.68V464c0 8.84 7.16 16 16 16h64c8.84 0 16-7.16 16-16V224c0-59.2 53.85-106.04 115.13-94.14 45.58 8.85 76.87 51.5 76.87 97.93V464c0 8.84 7.16 16 16 16h64c8.84 0 16-7.16 16-16V224c0-114.18-100.17-205.4-217.26-190.38z';
exports.definition = {
prefix: prefix,
iconName: iconName,
icon: [
width,<|fim▁hole|> svgPathData
]};
exports.faIntersection = exports.definition;
exports.prefix = prefix;
exports.iconName = iconName;
exports.width = width;
exports.height = height;
exports.ligatures = ligatures;
exports.unicode = unicode;
exports.svgPathData = svgPathData;<|fim▁end|>
|
height,
ligatures,
unicode,
|
<|file_name|>animation.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! CSS transitions and animations.
// NOTE(emilio): This code isn't really executed in Gecko, but we don't want to
// compile it out so that people remember it exists.
use crate::bezier::Bezier;
use crate::context::{CascadeInputs, SharedStyleContext};
use crate::dom::{OpaqueNode, TDocument, TElement, TNode};
use crate::properties::animated_properties::AnimationValue;
use crate::properties::longhands::animation_direction::computed_value::single_value::T as AnimationDirection;
use crate::properties::longhands::animation_fill_mode::computed_value::single_value::T as AnimationFillMode;
use crate::properties::longhands::animation_play_state::computed_value::single_value::T as AnimationPlayState;
use crate::properties::{
ComputedValues, Importance, LonghandId, LonghandIdSet, PropertyDeclarationBlock,
PropertyDeclarationId,
};
use crate::rule_tree::CascadeLevel;
use crate::style_resolver::StyleResolverForElement;
use crate::stylesheets::keyframes_rule::{KeyframesAnimation, KeyframesStep, KeyframesStepValue};
use crate::values::animated::{Animate, Procedure};
use crate::values::computed::{Time, TimingFunction};
use crate::values::generics::box_::AnimationIterationCount;
use crate::values::generics::easing::{StepPosition, TimingFunction as GenericTimingFunction};
use crate::Atom;
use servo_arc::Arc;
use std::fmt;
/// Represents an animation for a given property.
#[derive(Clone, Debug, MallocSizeOf)]
pub struct PropertyAnimation {
    /// The value we are animating from.
    from: AnimationValue,
    /// The value we are animating to.
    to: AnimationValue,
    /// The timing function of this `PropertyAnimation`.
    timing_function: TimingFunction,
    /// The duration of this `PropertyAnimation` in seconds.
    pub duration: f64,
}
impl PropertyAnimation {
    /// Returns the given property longhand id.
    pub fn property_id(&self) -> LonghandId {
        debug_assert_eq!(self.from.id(), self.to.id());
        self.from.id()
    }
    /// Builds a `PropertyAnimation` for `longhand` between two styles, or
    /// `None` when there is nothing to animate: the property has no animatable
    /// value in either style, the endpoints are equal, or the duration is zero.
    fn from_longhand(
        longhand: LonghandId,
        timing_function: TimingFunction,
        duration: Time,
        old_style: &ComputedValues,
        new_style: &ComputedValues,
    ) -> Option<PropertyAnimation> {
        // FIXME(emilio): Handle the case where old_style and new_style's writing mode differ.
        let longhand = longhand.to_physical(new_style.writing_mode);
        let from = AnimationValue::from_computed_values(longhand, old_style)?;
        let to = AnimationValue::from_computed_values(longhand, new_style)?;
        let duration = duration.seconds() as f64;
        if from == to || duration == 0.0 {
            return None;
        }
        Some(PropertyAnimation {
            from,
            to,
            timing_function,
            duration,
        })
    }
    /// The output of the timing function given the progress ratio of this animation.
    fn timing_function_output(&self, progress: f64) -> f64 {
        // Solver precision target for the cubic bezier, scaled with duration so
        // longer animations are solved more precisely.
        let epsilon = 1. / (200. * self.duration);
        match self.timing_function {
            GenericTimingFunction::CubicBezier { x1, y1, x2, y2 } => {
                Bezier::new(x1, y1, x2, y2).solve(progress, epsilon)
            },
            GenericTimingFunction::Steps(steps, pos) => {
                let mut current_step = (progress * (steps as f64)).floor() as i32;
                // Start-type positions jump immediately at the beginning of
                // each interval.
                if pos == StepPosition::Start ||
                    pos == StepPosition::JumpStart ||
                    pos == StepPosition::JumpBoth
                {
                    current_step = current_step + 1;
                }
                // FIXME: We should update current_step according to the "before flag".
                // In order to get the before flag, we have to know the current animation phase
                // and whether the iteration is reversed. For now, we skip this calculation.
                // (i.e. Treat before_flag is unset,)
                // https://drafts.csswg.org/css-easing/#step-timing-function-algo
                if progress >= 0.0 && current_step < 0 {
                    current_step = 0;
                }
                // Number of discrete jumps depends on the step position keyword.
                let jumps = match pos {
                    StepPosition::JumpBoth => steps + 1,
                    StepPosition::JumpNone => steps - 1,
                    StepPosition::JumpStart |
                    StepPosition::JumpEnd |
                    StepPosition::Start |
                    StepPosition::End => steps,
                };
                if progress <= 1.0 && current_step > jumps {
                    current_step = jumps;
                }
                (current_step as f64) / (jumps as f64)
            },
            GenericTimingFunction::Keyword(keyword) => {
                let (x1, x2, y1, y2) = keyword.to_bezier();
                Bezier::new(x1, x2, y1, y2).solve(progress, epsilon)
            },
        }
    }
    /// Update the given animation at a given point of progress.
    fn update(&self, style: &mut ComputedValues, progress: f64) {
        let procedure = Procedure::Interpolate {
            progress: self.timing_function_output(progress),
        };
        // Interpolation can fail for non-interpolable pairs; in that case the
        // style is simply left untouched.
        if let Ok(new_value) = self.from.animate(&self.to, procedure) {
            new_value.set_in_style_for_servo(style);
        }
    }
}
/// This structure represents the state of an animation.
///
/// NOTE(review): `Paused` stores progress as a fraction in the range [0, 1],
/// not a 0-100 percentage value.
#[derive(Clone, Debug, MallocSizeOf, PartialEq)]
pub enum AnimationState {
    /// The animation has been created, but is not running yet. This state
    /// is also used when an animation is still in the first delay phase.
    Pending,
    /// This animation is currently running.
    Running,
    /// This animation is paused. The inner field is the percentage of progress
    /// when it was paused, from 0 to 1.
    Paused(f64),
    /// This animation has finished.
    Finished,
    /// This animation has been canceled.
    Canceled,
}
impl AnimationState {
    /// Whether or not this state requires its owning animation to be ticked:
    /// only pending and running animations need timer ticks.
    fn needs_to_be_ticked(&self) -> bool {
        match *self {
            AnimationState::Running | AnimationState::Pending => true,
            _ => false,
        }
    }
}
/// This structure represents a keyframes animation current iteration state.
///
/// If the iteration count is infinite, there's no other state, otherwise we
/// have to keep track the current iteration and the max iteration count.
///
/// Counts are `f64` because `animation-iteration-count` accepts fractional
/// values.
#[derive(Clone, Debug, MallocSizeOf)]
pub enum KeyframesIterationState {
    /// Infinite iterations with the current iteration count.
    Infinite(f64),
    /// Current and max iterations, as `(current, max)`.
    Finite(f64, f64),
}
/// A temporary data structure used when calculating ComputedKeyframes for an
/// animation. This data structure is used to collapse information for steps
/// which may be spread across multiple keyframe declarations into a single
/// instance per `start_percentage`.
struct IntermediateComputedKeyframe {
    // Accumulated declarations from every keyframe rule sharing this step's
    // start percentage.
    declarations: PropertyDeclarationBlock,
    // Per-step timing function override, if any keyframe declared one.
    timing_function: Option<TimingFunction>,
    // Offset of this step in the animation, as a fraction in [0, 1].
    start_percentage: f32,
}
impl IntermediateComputedKeyframe {
    /// Creates an empty step at the given offset.
    fn new(start_percentage: f32) -> Self {
        IntermediateComputedKeyframe {
            declarations: PropertyDeclarationBlock::new(),
            timing_function: None,
            start_percentage,
        }
    }
    /// Walk through all keyframe declarations and combine all declarations with the
    /// same `start_percentage` into individual `IntermediateComputedKeyframe`s.
    /// Relies on `animation.steps` being sorted by start percentage.
    fn generate_for_keyframes(
        animation: &KeyframesAnimation,
        context: &SharedStyleContext,
        base_style: &ComputedValues,
    ) -> Vec<Self> {
        let mut intermediate_steps: Vec<Self> = Vec::with_capacity(animation.steps.len());
        let mut current_step = IntermediateComputedKeyframe::new(0.);
        for step in animation.steps.iter() {
            let start_percentage = step.start_percentage.0;
            // A new offset closes the step being accumulated and starts a
            // fresh one.
            if start_percentage != current_step.start_percentage {
                let new_step = IntermediateComputedKeyframe::new(start_percentage);
                intermediate_steps.push(std::mem::replace(&mut current_step, new_step));
            }
            current_step.update_from_step(step, context, base_style);
        }
        intermediate_steps.push(current_step);
        // We should always have a first and a last step, even if these are just
        // generated by KeyframesStepValue::ComputedValues.
        debug_assert!(intermediate_steps.first().unwrap().start_percentage == 0.);
        debug_assert!(intermediate_steps.last().unwrap().start_percentage == 1.);
        intermediate_steps
    }
    /// Folds one keyframe rule's timing function and declarations into this
    /// intermediate step.
    fn update_from_step(
        &mut self,
        step: &KeyframesStep,
        context: &SharedStyleContext,
        base_style: &ComputedValues,
    ) {
        // Each keyframe declaration may optionally specify a timing function, falling
        // back to the one defined global for the animation.
        let guard = &context.guards.author;
        if let Some(timing_function) = step.get_animation_timing_function(&guard) {
            self.timing_function = Some(timing_function.to_computed_value_without_context());
        }
        let block = match step.value {
            KeyframesStepValue::ComputedValues => return,
            KeyframesStepValue::Declarations { ref block } => block,
        };
        // Filter out !important, non-animatable properties, and the
        // 'display' property (which is only animatable from SMIL).
        let guard = block.read_with(&guard);
        for declaration in guard.normal_declaration_iter() {
            if let PropertyDeclarationId::Longhand(id) = declaration.id() {
                if id == LonghandId::Display {
                    continue;
                }
                if !id.is_animatable() {
                    continue;
                }
            }
            self.declarations.push(
                declaration.to_physical(base_style.writing_mode),
                Importance::Normal,
            );
        }
    }
    /// Resolves the full computed style for this step by inserting its
    /// declarations at the Animations cascade level on top of `base_style`'s
    /// rule node and re-cascading. Returns `base_style` unchanged when the
    /// step contributes no declarations.
    fn resolve_style<E>(
        self,
        element: E,
        context: &SharedStyleContext,
        base_style: &Arc<ComputedValues>,
        resolver: &mut StyleResolverForElement<E>,
    ) -> Arc<ComputedValues>
    where
        E: TElement,
    {
        if !self.declarations.any_normal() {
            return base_style.clone();
        }
        let document = element.as_node().owner_doc();
        let locked_block = Arc::new(document.shared_lock().wrap(self.declarations));
        let mut important_rules_changed = false;
        let rule_node = base_style.rules().clone();
        let new_node = context.stylist.rule_tree().update_rule_at_level(
            CascadeLevel::Animations,
            Some(locked_block.borrow_arc()),
            &rule_node,
            &context.guards,
            &mut important_rules_changed,
        );
        // `None` means the rule node was unchanged, so the base style stands.
        if new_node.is_none() {
            return base_style.clone();
        }
        let inputs = CascadeInputs {
            rules: new_node,
            visited_rules: base_style.visited_rules().cloned(),
        };
        resolver
            .cascade_style_and_visited_with_default_parents(inputs)
            .0
    }
}
/// A single computed keyframe for a CSS Animation.
#[derive(Clone, MallocSizeOf)]
struct ComputedKeyframe {
    /// The timing function to use for transitions between this step
    /// and the next one.
    timing_function: TimingFunction,
    /// The starting percentage (a number between 0 and 1) which represents
    /// at what point in an animation iteration this step is.
    start_percentage: f32,
    /// The animation values to transition to and from when processing this
    /// keyframe animation step.
    /// These are parallel to the set of animating properties of the animation.
    values: Vec<AnimationValue>,
}
impl ComputedKeyframe {
    /// Computes one `ComputedKeyframe` per distinct step offset of
    /// `animation`, resolving each step's style and extracting an
    /// `AnimationValue` for every animating property.
    fn generate_for_keyframes<E>(
        element: E,
        animation: &KeyframesAnimation,
        context: &SharedStyleContext,
        base_style: &Arc<ComputedValues>,
        default_timing_function: TimingFunction,
        resolver: &mut StyleResolverForElement<E>,
    ) -> Vec<Self>
    where
        E: TElement,
    {
        let mut animating_properties = LonghandIdSet::new();
        for property in animation.properties_changed.iter() {
            debug_assert!(property.is_animatable());
            animating_properties.insert(property.to_physical(base_style.writing_mode));
        }
        // Baseline values taken from the element's own style; used to fill in
        // properties missing from the first and last keyframes.
        let animation_values_from_style: Vec<AnimationValue> = animating_properties
            .iter()
            .map(|property| {
                AnimationValue::from_computed_values(property, &**base_style)
                    .expect("Unexpected non-animatable property.")
            })
            .collect();
        let intermediate_steps =
            IntermediateComputedKeyframe::generate_for_keyframes(animation, context, base_style);
        let mut computed_steps: Vec<Self> = Vec::with_capacity(intermediate_steps.len());
        for (step_index, step) in intermediate_steps.into_iter().enumerate() {
            let start_percentage = step.start_percentage;
            let timing_function = step.timing_function.unwrap_or(default_timing_function);
            let properties_changed_in_step = step.declarations.longhands().clone();
            let step_style = step.resolve_style(element, context, base_style, resolver);
            let values = {
                // If a value is not set in a property declaration we use the value from
                // the style for the first and last keyframe. For intermediate ones, we
                // use the value from the previous keyframe.
                //
                // TODO(mrobinson): According to the spec, we should use an interpolated
                // value for properties missing from keyframe declarations.
                let default_values = if start_percentage == 0. || start_percentage == 1.0 {
                    &animation_values_from_style
                } else {
                    debug_assert!(step_index != 0);
                    &computed_steps[step_index - 1].values
                };
                // For each property that is animating, pull the value from the resolved
                // style for this step if it's in one of the declarations. Otherwise, we
                // use the default value from the set we calculated above.
                animating_properties
                    .iter()
                    .zip(default_values.iter())
                    .map(|(longhand, default_value)| {
                        if properties_changed_in_step.contains(longhand) {
                            AnimationValue::from_computed_values(longhand, &step_style)
                                .unwrap_or_else(|| default_value.clone())
                        } else {
                            default_value.clone()
                        }
                    })
                    .collect()
            };
            computed_steps.push(ComputedKeyframe {
                timing_function,
                start_percentage,
                values,
            });
        }
        computed_steps
    }
}
/// A CSS Animation
#[derive(Clone, MallocSizeOf)]
pub struct Animation {
    /// The node associated with this animation.
    pub node: OpaqueNode,
    /// The name of this animation as defined by the style.
    pub name: Atom,
    /// The properties that change in this animation.
    properties_changed: LonghandIdSet,
    /// The computed style for each keyframe of this animation.
    computed_steps: Vec<ComputedKeyframe>,
    /// The time this animation started at, which is the current value of the animation
    /// timeline when this animation was created plus any animation delay.
    pub started_at: f64,
    /// The duration of this animation.
    pub duration: f64,
    /// The delay of the animation.
    pub delay: f64,
    /// The `animation-fill-mode` property of this animation.
    pub fill_mode: AnimationFillMode,
    /// The current iteration state for the animation.
    pub iteration_state: KeyframesIterationState,
    /// Whether this animation is paused.
    pub state: AnimationState,
    /// The declared animation direction of this animation.
    pub direction: AnimationDirection,
    /// The current animation direction. This can only be `normal` or `reverse`.
    pub current_direction: AnimationDirection,
    /// The original cascade style, needed to compute the generated keyframes of
    /// the animation.
    #[ignore_malloc_size_of = "ComputedValues"]
    pub cascade_style: Arc<ComputedValues>,
    /// Whether or not this animation is new and or has already been tracked
    /// by the script thread.
    pub is_new: bool,
}
impl Animation {
    /// Whether or not this animation is cancelled by changes from a new style.
    /// An animation is cancelled when its name no longer appears in
    /// `animation-name` or when its matching duration becomes zero.
    fn is_cancelled_in_new_style(&self, new_style: &Arc<ComputedValues>) -> bool {
        let index = new_style
            .get_box()
            .animation_name_iter()
            .position(|animation_name| Some(&self.name) == animation_name.as_atom());
        let index = match index {
            Some(index) => index,
            None => return true,
        };
        new_style.get_box().animation_duration_mod(index).seconds() == 0.
    }
    /// Given the current time, advances this animation to the next iteration,
    /// updates times, and then toggles the direction if appropriate. Otherwise
    /// does nothing. Returns true if this animation has iterated.
    pub fn iterate_if_necessary(&mut self, time: f64) -> bool {
        if !self.iteration_over(time) {
            return false;
        }
        // Only iterate animations that are currently running.
        if self.state != AnimationState::Running {
            return false;
        }
        if let KeyframesIterationState::Finite(ref mut current, max) = self.iteration_state {
            // If we are already on the final iteration, just exit now. This prevents
            // us from updating the direction, which might be needed for the correct
            // handling of animation-fill-mode and also firing animationiteration events
            // at the end of animations.
            *current = (*current + 1.).min(max);
            if *current == max {
                return false;
            }
        }
        // Update the next iteration direction if applicable.
        // TODO(mrobinson): The duration might now be wrong for floating point iteration counts.
        self.started_at += self.duration + self.delay;
        match self.direction {
            AnimationDirection::Alternate | AnimationDirection::AlternateReverse => {
                self.current_direction = match self.current_direction {
                    AnimationDirection::Normal => AnimationDirection::Reverse,
                    AnimationDirection::Reverse => AnimationDirection::Normal,
                    _ => unreachable!(),
                };
            },
            _ => {},
        }
        true
    }
    /// Whether the current iteration has run past its full duration at `time`.
    fn iteration_over(&self, time: f64) -> bool {
        time > (self.started_at + self.duration)
    }
    /// Whether or not this animation has finished at the provided time. This does
    /// not take into account canceling i.e. when an animation or transition is
    /// canceled due to changes in the style.
    pub fn has_ended(&self, time: f64) -> bool {
        match self.state {
            AnimationState::Running => {},
            AnimationState::Finished => return true,
            AnimationState::Pending | AnimationState::Canceled | AnimationState::Paused(_) => {
                return false
            },
        }
        if !self.iteration_over(time) {
            return false;
        }
        // If we have a limited number of iterations and we cannot advance to another
        // iteration, then we have ended.
        return match self.iteration_state {
            KeyframesIterationState::Finite(current, max) => max == current,
            KeyframesIterationState::Infinite(..) => false,
        };
    }
    /// Updates the appropiate state from other animation.
    ///
    /// This happens when an animation is re-submitted to layout, presumably
    /// because of an state change.
    ///
    /// There are some bits of state we can't just replace, over all taking in
    /// account times, so here's that logic.
    pub fn update_from_other(&mut self, other: &Self, now: f64) {
        use self::AnimationState::*;
        debug!(
            "KeyframesAnimationState::update_from_other({:?}, {:?})",
            self, other
        );
        // NB: We shall not touch the started_at field, since we don't want to
        // restart the animation.
        let old_started_at = self.started_at;
        let old_duration = self.duration;
        let old_direction = self.current_direction;
        let old_state = self.state.clone();
        let old_iteration_state = self.iteration_state.clone();
        *self = other.clone();
        self.started_at = old_started_at;
        self.current_direction = old_direction;
        // Don't update the iteration count, just the iteration limit.
        // TODO: see how changing the limit affects rendering in other browsers.
        // We might need to keep the iteration count even when it's infinite.
        match (&mut self.iteration_state, old_iteration_state) {
            (
                &mut KeyframesIterationState::Finite(ref mut iters, _),
                KeyframesIterationState::Finite(old_iters, _),
            ) => *iters = old_iters,
            _ => {},
        }
        // Don't pause or restart animations that should remain finished.
        // We call mem::replace because `has_ended(...)` looks at `Animation::state`.
        let new_state = std::mem::replace(&mut self.state, Running);
        if old_state == Finished && self.has_ended(now) {
            self.state = Finished;
        } else {
            self.state = new_state;
        }
        // If we're unpausing the animation, fake the start time so we seem to
        // restore it.
        //
        // If the animation keeps paused, keep the old value.
        //
        // If we're pausing the animation, compute the progress value.
        match (&mut self.state, &old_state) {
            (&mut Pending, &Paused(progress)) => {
                self.started_at = now - (self.duration * progress);
            },
            (&mut Paused(ref mut new), &Paused(old)) => *new = old,
            (&mut Paused(ref mut progress), &Running) => {
                *progress = (now - old_started_at) / old_duration
            },
            _ => {},
        }
        // Try to detect when we should skip straight to the running phase to
        // avoid sending multiple animationstart events.
        if self.state == Pending && self.started_at <= now && old_state != Pending {
            self.state = Running;
        }
    }
    /// Update the given style to reflect the values specified by this `Animation`
    /// at the time provided by the given `SharedStyleContext`.
    fn update_style(&self, context: &SharedStyleContext, style: &mut Arc<ComputedValues>) {
        let duration = self.duration;
        let started_at = self.started_at;
        // For paused animations, freeze the clock at the pause point.
        let now = match self.state {
            AnimationState::Running | AnimationState::Pending | AnimationState::Finished => {
                context.current_time_for_animations
            },
            AnimationState::Paused(progress) => started_at + duration * progress,
            AnimationState::Canceled => return,
        };
        debug_assert!(!self.computed_steps.is_empty());
        let mut total_progress = (now - started_at) / duration;
        // Outside the active interval, only apply styles if the fill mode
        // requires it (backwards before the start, forwards after the end).
        if total_progress < 0. &&
            self.fill_mode != AnimationFillMode::Backwards &&
            self.fill_mode != AnimationFillMode::Both
        {
            return;
        }
        if total_progress > 1. &&
            self.fill_mode != AnimationFillMode::Forwards &&
            self.fill_mode != AnimationFillMode::Both
        {
            return;
        }
        total_progress = total_progress.min(1.0).max(0.0);
        // Get the indices of the previous (from) keyframe and the next (to) keyframe.
        let next_keyframe_index;
        let prev_keyframe_index;
        let num_steps = self.computed_steps.len();
        debug_assert!(num_steps > 0);
        match self.current_direction {
            AnimationDirection::Normal => {
                next_keyframe_index = self
                    .computed_steps
                    .iter()
                    .position(|step| total_progress as f32 <= step.start_percentage);
                prev_keyframe_index = next_keyframe_index
                    .and_then(|pos| if pos != 0 { Some(pos - 1) } else { None })
                    .unwrap_or(0);
            },
            AnimationDirection::Reverse => {
                // In reverse, keyframe offsets are mirrored around 0.5.
                next_keyframe_index = self
                    .computed_steps
                    .iter()
                    .rev()
                    .position(|step| total_progress as f32 <= 1. - step.start_percentage)
                    .map(|pos| num_steps - pos - 1);
                prev_keyframe_index = next_keyframe_index
                    .and_then(|pos| {
                        if pos != num_steps - 1 {
                            Some(pos + 1)
                        } else {
                            None
                        }
                    })
                    .unwrap_or(num_steps - 1)
            },
            _ => unreachable!(),
        }
        debug!(
            "Animation::update_style: keyframe from {:?} to {:?}",
            prev_keyframe_index, next_keyframe_index
        );
        let prev_keyframe = &self.computed_steps[prev_keyframe_index];
        let next_keyframe = match next_keyframe_index {
            Some(index) => &self.computed_steps[index],
            None => return,
        };
        // Applies a keyframe's values directly, with no interpolation.
        let update_with_single_keyframe_style = |style, keyframe: &ComputedKeyframe| {
            let mutable_style = Arc::make_mut(style);
            for value in keyframe.values.iter() {
                value.set_in_style_for_servo(mutable_style);
            }
        };
        if total_progress <= 0.0 {
            update_with_single_keyframe_style(style, &prev_keyframe);
            return;
        }
        if total_progress >= 1.0 {
            update_with_single_keyframe_style(style, &next_keyframe);
            return;
        }
        // Interpolate between the two keyframes using progress relative to the
        // span between them, not to the whole animation.
        let relative_timespan =
            (next_keyframe.start_percentage - prev_keyframe.start_percentage).abs();
        let relative_duration = relative_timespan as f64 * duration;
        let last_keyframe_ended_at = match self.current_direction {
            AnimationDirection::Normal => {
                self.started_at + (duration * prev_keyframe.start_percentage as f64)
            },
            AnimationDirection::Reverse => {
                self.started_at + (duration * (1. - prev_keyframe.start_percentage as f64))
            },
            _ => unreachable!(),
        };
        let relative_progress = (now - last_keyframe_ended_at) / relative_duration;
        let mut new_style = (**style).clone();
        for (from, to) in prev_keyframe.values.iter().zip(next_keyframe.values.iter()) {
            PropertyAnimation {
                from: from.clone(),
                to: to.clone(),
                timing_function: prev_keyframe.timing_function,
                duration: relative_duration as f64,
            }
            .update(&mut new_style, relative_progress);
        }
        *Arc::make_mut(style) = new_style;
    }
}
impl fmt::Debug for Animation {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Animation")
            .field("name", &self.name)
            .field("started_at", &self.started_at)
            .field("duration", &self.duration)
            .field("delay", &self.delay)
            .field("iteration_state", &self.iteration_state)
            .field("state", &self.state)
            .field("direction", &self.direction)
            .field("current_direction", &self.current_direction)
            .field("cascade_style", &())
            .finish()
    }
}
/// A CSS Transition
#[derive(Clone, Debug, MallocSizeOf)]
pub struct Transition {
    /// The node associated with this animation.
    pub node: OpaqueNode,
    /// The start time of this transition, which is the current value of the animation
    /// timeline when this transition was created plus any animation delay.
    pub start_time: f64,
    /// The delay used for this transition.
    pub delay: f64,
    /// The internal style `PropertyAnimation` for this transition.
    pub property_animation: PropertyAnimation,
    /// The state of this transition.
    pub state: AnimationState,
    /// Whether or not this transition is new and or has already been tracked
    /// by the script thread.
    pub is_new: bool,
    /// If this `Transition` has been replaced by a new one this field is
    /// used to help produce better reversed transitions.
    pub reversing_adjusted_start_value: AnimationValue,
    /// If this `Transition` has been replaced by a new one this field is
    /// used to help produce better reversed transitions.
    pub reversing_shortening_factor: f64,
}
impl Transition {
    /// Adjusts this (new) transition's start value, start time, duration, and
    /// reversing bookkeeping when it replaces a running transition in the
    /// opposite direction, per the spec's reversing algorithm.
    fn update_for_possibly_reversed_transition(
        &mut self,
        replaced_transition: &Transition,
        delay: f64,
        now: f64,
    ) {
        // If we reach here, we need to calculate a reversed transition according to
        // https://drafts.csswg.org/css-transitions/#starting
        //
        // "...if the reversing-adjusted start value of the running transition
        // is the same as the value of the property in the after-change style (see
        // the section on reversing of transitions for why these case exists),
        // implementations must cancel the running transition and start
        // a new transition..."
        if replaced_transition.reversing_adjusted_start_value != self.property_animation.to {
            return;
        }
        // "* reversing-adjusted start value is the end value of the running transition"
        let replaced_animation = &replaced_transition.property_animation;
        self.reversing_adjusted_start_value = replaced_animation.to.clone();
        // "* reversing shortening factor is the absolute value, clamped to the
        // range [0, 1], of the sum of:
        // 1. the output of the timing function of the old transition at the
        //    time of the style change event, times the reversing shortening
        //    factor of the old transition
        // 2. 1 minus the reversing shortening factor of the old transition."
        let transition_progress = replaced_transition.progress(now);
        let timing_function_output = replaced_animation.timing_function_output(transition_progress);
        let old_reversing_shortening_factor = replaced_transition.reversing_shortening_factor;
        self.reversing_shortening_factor = ((timing_function_output *
            old_reversing_shortening_factor) +
            (1.0 - old_reversing_shortening_factor))
            .abs()
            .min(1.0)
            .max(0.0);
        // "* start time is the time of the style change event plus:
        // 1. if the matching transition delay is nonnegative, the matching
        //    transition delay, or.
        // 2. if the matching transition delay is negative, the product of the new
        //    transition's reversing shortening factor and the matching transition delay,"
        self.start_time = if delay >= 0. {
            now + delay
        } else {
            now + (self.reversing_shortening_factor * delay)
        };
        // "* end time is the start time plus the product of the matching transition
        //    duration and the new transition's reversing shortening factor,"
        self.property_animation.duration *= self.reversing_shortening_factor;
        // "* start value is the current value of the property in the running transition,
        //  * end value is the value of the property in the after-change style,"
        let procedure = Procedure::Interpolate {
            progress: timing_function_output,
        };
        match replaced_animation
            .from
            .animate(&replaced_animation.to, procedure)
        {
            Ok(new_start) => self.property_animation.from = new_start,
            Err(..) => {},
        }
    }
    /// Whether or not this animation has ended at the provided time. This does
    /// not take into account canceling i.e. when an animation or transition is
    /// canceled due to changes in the style.
    pub fn has_ended(&self, time: f64) -> bool {
        time >= self.start_time + (self.property_animation.duration)
    }
    /// The raw progress of this transition at time `now`, clamped to a
    /// maximum of 1.0 (it may be negative during the delay phase).
    #[inline]
    fn progress(&self, now: f64) -> f64 {
        let progress = (now - self.start_time) / (self.property_animation.duration);
        progress.min(1.0)
    }
    /// Update a style to the value specified by this `Transition` given a `SharedStyleContext`.
    fn update_style(&self, context: &SharedStyleContext, style: &mut Arc<ComputedValues>) {
        // Never apply canceled transitions to a style.
        if self.state == AnimationState::Canceled {
            return;
        }
        let progress = self.progress(context.current_time_for_animations);
        if progress >= 0.0 {
            self.property_animation
                .update(Arc::make_mut(style), progress);
        }
    }
}
/// Holds the animation state for a particular element.
#[derive(Debug, Default, MallocSizeOf)]
pub struct ElementAnimationSet {
    /// The animations for this element.
    pub animations: Vec<Animation>,
    /// The transitions for this element.
    pub transitions: Vec<Transition>,
}
impl ElementAnimationSet {
    /// Cancel all animations in this `ElementAnimationSet`. This is typically called
    /// when the element has been removed from the DOM.
    pub fn cancel_all_animations(&mut self) {
        for animation in self.animations.iter_mut() {
            animation.state = AnimationState::Canceled;
        }
        for transition in self.transitions.iter_mut() {
            transition.state = AnimationState::Canceled;
        }
    }
    /// Apply all active animations and transitions for this element to the
    /// given style, in that order (so transitions win over animations for
    /// the same property).
    pub(crate) fn apply_active_animations(
        &mut self,
        context: &SharedStyleContext,
        style: &mut Arc<ComputedValues>,
    ) {
        for animation in &self.animations {
            animation.update_style(context, style);
        }
        for transition in &self.transitions {
            transition.update_style(context, style);
        }
    }
    /// Clear all canceled animations and transitions from this `ElementAnimationSet`.
    pub fn clear_canceled_animations(&mut self) {
        self.animations
            .retain(|animation| animation.state != AnimationState::Canceled);
        self.transitions
            .retain(|animation| animation.state != AnimationState::Canceled);
    }
    /// Whether this `ElementAnimationSet` is empty, which means it doesn't
    /// hold any animations in any state.
    pub fn is_empty(&self) -> bool {
        self.animations.is_empty() && self.transitions.is_empty()
    }
    /// Whether or not this state needs animation ticks for its transitions
    /// or animations.
    pub fn needs_animation_ticks(&self) -> bool {
        self.animations
            .iter()
            .any(|animation| animation.state.needs_to_be_ticked()) ||
            self.transitions
                .iter()
                .any(|transition| transition.state.needs_to_be_ticked())
    }
    /// The number of running animations and transitions for this `ElementAnimationSet`.
    pub fn running_animation_and_transition_count(&self) -> usize {
        self.animations
            .iter()
            .filter(|animation| animation.state.needs_to_be_ticked())
            .count() +
            self.transitions
                .iter()
                .filter(|transition| transition.state.needs_to_be_ticked())
                .count()
    }
    /// If this `ElementAnimationSet` has any active animations.
    pub fn has_active_animation(&self) -> bool {
        self.animations
            .iter()
            .any(|animation| animation.state != AnimationState::Canceled)
    }
    /// If this `ElementAnimationSet` has any active transitions.
    pub fn has_active_transition(&self) -> bool {
        self.transitions
            .iter()
            .any(|transition| transition.state != AnimationState::Canceled)
    }
    /// Update our animations given a new style, canceling or starting new animations
    /// when appropriate.
    pub fn update_animations_for_new_style<E>(
        &mut self,
        element: E,
        context: &SharedStyleContext,
        new_style: &Arc<ComputedValues>,
        resolver: &mut StyleResolverForElement<E>,
    ) where
        E: TElement,
    {
        for animation in self.animations.iter_mut() {
            if animation.is_cancelled_in_new_style(new_style) {
                animation.state = AnimationState::Canceled;
            }
        }
        maybe_start_animations(element, &context, &new_style, self, resolver);
    }
    /// Update our transitions given a new style, canceling or starting new animations
    /// when appropriate.
    pub fn update_transitions_for_new_style(
        &mut self,
        context: &SharedStyleContext,
        opaque_node: OpaqueNode,
        old_style: Option<&Arc<ComputedValues>>,
        after_change_style: &Arc<ComputedValues>,
    ) {
        // If this is the first style, we don't trigger any transitions and we assume
        // there were no previously triggered transitions.
        let mut before_change_style = match old_style {
            Some(old_style) => Arc::clone(old_style),
            None => return,
        };
        // We convert old values into `before-change-style` here.
        // See https://drafts.csswg.org/css-transitions/#starting. We need to clone the
        // style because this might still be a reference to the original `old_style` and
        // we want to preserve that so that we can later properly calculate restyle damage.
        if self.has_active_transition() || self.has_active_animation() {
            before_change_style = before_change_style.clone();
            self.apply_active_animations(context, &mut before_change_style);
        }
        // BUGFIX: the last three arguments of this call were missing, which
        // left the call incomplete; restored to match the signature of
        // `start_transitions_if_applicable`.
        let transitioning_properties = start_transitions_if_applicable(
            context,
            opaque_node,
            &before_change_style,
            after_change_style,
            self,
        );
        // Cancel any non-finished transitions that have properties which no longer transition.
        for transition in self.transitions.iter_mut() {
            if transition.state == AnimationState::Finished {
                continue;
            }
            if transitioning_properties.contains(transition.property_animation.property_id()) {
                continue;
            }
            transition.state = AnimationState::Canceled;
        }
    }
    /// Start a new transition for the given longhand if the property value
    /// actually changes between `old_style` and `new_style`, handling
    /// replacement of a running (possibly reversed) transition for the same
    /// property.
    fn start_transition_if_applicable(
        &mut self,
        context: &SharedStyleContext,
        opaque_node: OpaqueNode,
        longhand_id: LonghandId,
        index: usize,
        old_style: &ComputedValues,
        new_style: &Arc<ComputedValues>,
    ) {
        let box_style = new_style.get_box();
        let timing_function = box_style.transition_timing_function_mod(index);
        let duration = box_style.transition_duration_mod(index);
        let delay = box_style.transition_delay_mod(index).seconds() as f64;
        let now = context.current_time_for_animations;
        // Only start a new transition if the style actually changes between
        // the old style and the new style.
        let property_animation = match PropertyAnimation::from_longhand(
            longhand_id,
            timing_function,
            duration,
            old_style,
            new_style,
        ) {
            Some(property_animation) => property_animation,
            None => return,
        };
        // Per [1], don't trigger a new transition if the end state for that
        // transition is the same as that of a transition that's running or
        // completed. We don't take into account any canceled animations.
        // [1]: https://drafts.csswg.org/css-transitions/#starting
        if self
            .transitions
            .iter()
            .filter(|transition| transition.state != AnimationState::Canceled)
            .any(|transition| transition.property_animation.to == property_animation.to)
        {
            return;
        }
        // We are going to start a new transition, but we might have to update
        // it if we are replacing a reversed transition.
        let reversing_adjusted_start_value = property_animation.from.clone();
        let mut new_transition = Transition {
            node: opaque_node,
            start_time: now + delay,
            delay,
            property_animation,
            state: AnimationState::Pending,
            is_new: true,
            reversing_adjusted_start_value,
            reversing_shortening_factor: 1.0,
        };
        if let Some(old_transition) = self
            .transitions
            .iter_mut()
            .filter(|transition| transition.state == AnimationState::Running)
            .find(|transition| transition.property_animation.property_id() == longhand_id)
        {
            // We always cancel any running transitions for the same property.
            old_transition.state = AnimationState::Canceled;
            new_transition.update_for_possibly_reversed_transition(old_transition, delay, now);
        }
        self.transitions.push(new_transition);
    }
}
/// Kick off any new transitions for this node and return all of the properties that are
/// transitioning. This is at the end of calculating style for a single node.
pub fn start_transitions_if_applicable(
    context: &SharedStyleContext,
    opaque_node: OpaqueNode,
    old_style: &ComputedValues,
    new_style: &Arc<ComputedValues>,
    animation_state: &mut ElementAnimationSet,
) -> LonghandIdSet {
    // If the style of this element is display:none, then we don't start any transitions
    // and we cancel any currently running transitions by returning an empty LonghandIdSet.
    if new_style.get_box().clone_display().is_none() {
        return LonghandIdSet::new();
    }

    let mut transitioning = LonghandIdSet::new();
    for declared_transition in new_style.transition_properties() {
        // Map logical longhands onto their physical equivalents and consider
        // each physical property only once.
        let physical = declared_transition
            .longhand_id
            .to_physical(new_style.writing_mode);
        if !transitioning.contains(physical) {
            transitioning.insert(physical);
            animation_state.start_transition_if_applicable(
                context,
                opaque_node,
                physical,
                declared_transition.index,
                old_style,
                new_style,
            );
        }
    }
    transitioning
}
/// Triggers animations for a given node looking at the animation property
/// values.
pub fn maybe_start_animations<E>(
    element: E,
    context: &SharedStyleContext,
    new_style: &Arc<ComputedValues>,
    animation_state: &mut ElementAnimationSet,
    resolver: &mut StyleResolverForElement<E>,
) where
    E: TElement,
{
    let box_style = new_style.get_box();
    for (i, name) in box_style.animation_name_iter().enumerate() {
        // Skip `animation-name` entries with no atom value (e.g. `none`).
        let name = match name.as_atom() {
            Some(atom) => atom,
            None => continue,
        };
        debug!("maybe_start_animations: name={}", name);
        // A zero duration can never produce visible animation.
        let duration = box_style.animation_duration_mod(i).seconds();
        if duration == 0. {
            continue;
        }
        // Look up the @keyframes rule for this name; no rule, no animation.
        let keyframe_animation = match context.stylist.get_animation(name, element) {
            Some(animation) => animation,
            None => continue,
        };
        debug!("maybe_start_animations: animation {} found", name);
        // If this animation doesn't have any keyframe, we can just continue
        // without submitting it to the compositor, since both the first and
        // the second keyframes would be synthetised from the computed
        // values.
        if keyframe_animation.steps.is_empty() {
            continue;
        }
        let delay = box_style.animation_delay_mod(i).seconds();
        let animation_start = context.current_time_for_animations + delay as f64;
        // Track the current iteration (starting at 0) alongside the limit.
        let iteration_state = match box_style.animation_iteration_count_mod(i) {
            AnimationIterationCount::Infinite => KeyframesIterationState::Infinite(0.0),
            AnimationIterationCount::Number(n) => KeyframesIterationState::Finite(0.0, n.into()),
        };
        let animation_direction = box_style.animation_direction_mod(i);
        // Alternate variants begin on their base direction; flipping per
        // iteration is handled elsewhere via `current_direction`.
        let initial_direction = match animation_direction {
            AnimationDirection::Normal | AnimationDirection::Alternate => {
                AnimationDirection::Normal
            },
            AnimationDirection::Reverse | AnimationDirection::AlternateReverse => {
                AnimationDirection::Reverse
            },
        };
        // A paused animation records the progress (0.) at which it paused.
        let state = match box_style.animation_play_state_mod(i) {
            AnimationPlayState::Paused => AnimationState::Paused(0.),
            AnimationPlayState::Running => AnimationState::Pending,
        };
        let computed_steps = ComputedKeyframe::generate_for_keyframes(
            element,
            &keyframe_animation,
            context,
            new_style,
            new_style.get_box().animation_timing_function_mod(i),
            resolver,
        );
        let new_animation = Animation {
            node: element.as_node().opaque(),
            name: name.clone(),
            properties_changed: keyframe_animation.properties_changed,
            computed_steps,
            started_at: animation_start,
            duration: duration as f64,
            fill_mode: box_style.animation_fill_mode_mod(i),
            delay: delay as f64,
            iteration_state,
            state,
            direction: animation_direction,
            current_direction: initial_direction,
            cascade_style: new_style.clone(),
            is_new: true,
        };
        // If the animation was already present in the list for the node, just update its state.
        for existing_animation in animation_state.animations.iter_mut() {
            if existing_animation.state == AnimationState::Canceled {
                continue;
            }
            if new_animation.name == existing_animation.name {
                existing_animation
                    .update_from_other(&new_animation, context.current_time_for_animations);
                // NOTE(review): this `return` exits the whole function, so any
                // remaining animation-name entries are not examined on this
                // pass — confirm the early exit (vs. continuing the outer
                // loop) is intentional.
                return;
            }
        }
        animation_state.animations.push(new_animation);
    }
}
|
&before_change_style,
after_change_style,
self,
|
<|file_name|>service.go<|end_file_name|><|fim▁begin|>package udp
import (
"errors"
"log"
"net"
"strings"
"sync"
"github.com/influxdata/influxdb/models"
"github.com/influxdata/kapacitor"
"github.com/influxdata/kapacitor/expvar"
)
const (
UDPPacketSize = 65536
)
// statistics gathered by the UDP package.
const (
statPointsReceived = "points_rx"
statBytesReceived = "bytes_rx"
statPointsParseFail = "points_parse_fail"
statReadFail = "read_fail"
statPointsTransmitted = "points_tx"
statTransmitFail = "tx_fail"
)
//
// Service represents here an UDP service
// that will listen for incoming packets
// formatted with the inline protocol
//
type Service struct {
conn *net.UDPConn
addr *net.UDPAddr
wg sync.WaitGroup
done chan struct{}
packets chan []byte
config Config
PointsWriter interface {
WritePoints(database, retentionPolicy string, consistencyLevel models.ConsistencyLevel, points []models.Point) error
}
Logger *log.Logger
statMap *expvar.Map
statKey string
}
// NewService builds a UDP service from the given configuration (with
// defaults applied) and logger. The service is inert until Open is called.
func NewService(c Config, l *log.Logger) *Service {
	cfg := *c.WithDefaults()
	s := &Service{
		config: cfg,
		done:   make(chan struct{}),
		Logger: l,
	}
	return s
}
func (s *Service) Open() (err error) {
if s.config.BindAddress == "" {
return errors.New("bind address has to be specified in config")
}
if s.config.Database == "" {
return errors.New("database has to be specified in config")
}
s.addr, err = net.ResolveUDPAddr("udp", s.config.BindAddress)
if err != nil {
s.Logger.Printf("E! Failed to resolve UDP address %s: %s", s.config.BindAddress, err)
return err
}
s.conn, err = net.ListenUDP("udp", s.addr)
if err != nil {
s.Logger.Printf("E! Failed to set up UDP listener at address %s: %s", s.addr, err)
return err
}
//save fully resolved and bound addr. Useful if port given was '0'.
s.addr = s.conn.LocalAddr().(*net.UDPAddr)<|fim▁hole|> tags := map[string]string{"bind": s.addr.String()}
s.statKey, s.statMap = kapacitor.NewStatistics("udp", tags)
if s.config.ReadBuffer != 0 {
err = s.conn.SetReadBuffer(s.config.ReadBuffer)
if err != nil {
s.Logger.Printf("E! Failed to set UDP read buffer to %d: %s", s.config.ReadBuffer, err)
return err
}
}
s.Logger.Printf("I! Started listening on UDP: %s", s.addr.String())
// Start reading and processing packets
s.packets = make(chan []byte, s.config.Buffer)
s.wg.Add(1)
go s.serve()
s.wg.Add(1)
go s.processPackets()
return nil
}
// serve reads datagrams from the UDP connection until Close is called and
// forwards each payload to the processPackets goroutine. The read buffer is
// reused across iterations, so every payload is copied into a fresh slice
// before being sent on the channel.
func (s *Service) serve() {
	defer s.wg.Done()
	// Closing s.packets lets processPackets drain and exit.
	defer close(s.packets)
	buf := make([]byte, UDPPacketSize)
	for {
		select {
		case <-s.done:
			// We closed the connection, time to go.
			return
		default:
			// Keep processing.
		}
		n, _, err := s.conn.ReadFromUDP(buf)
		if err != nil {
			// Close() closes the connection to unblock this read; that
			// expected error is not counted as a read failure.
			if !strings.Contains(err.Error(), "use of closed network connection") {
				s.statMap.Add(statReadFail, 1)
				s.Logger.Printf("E! Failed to read UDP message: %s", err)
			}
			continue
		}
		s.statMap.Add(statBytesReceived, int64(n))
		p := make([]byte, n)
		copy(p, buf[:n])
		s.packets <- p
	}
}
// processPackets parses each received payload as line protocol and writes the
// resulting points to the configured database. It runs until s.packets is
// closed by serve().
func (s *Service) processPackets() {
	defer s.wg.Done()
	for p := range s.packets {
		points, err := models.ParsePoints(p)
		if err != nil {
			s.statMap.Add(statPointsParseFail, 1)
			s.Logger.Printf("E! Failed to parse points: %s", err)
			continue
		}
		if err := s.PointsWriter.WritePoints(
			s.config.Database,
			s.config.RetentionPolicy,
			models.ConsistencyLevelAll,
			points,
		); err == nil {
			s.statMap.Add(statPointsTransmitted, int64(len(points)))
		} else {
			s.Logger.Printf("E! failed to write points to database %q: %s", s.config.Database, err)
			s.statMap.Add(statTransmitFail, 1)
		}
		// points_rx counts successfully parsed points, whether or not the
		// subsequent write succeeded.
		s.statMap.Add(statPointsReceived, int64(len(points)))
	}
}
// Close stops the listener and waits for the reader and processing goroutines
// to finish. It returns an error if the service was never opened or was
// already closed. Not safe to call concurrently with itself.
func (s *Service) Close() error {
	if s.conn == nil {
		return errors.New("Service already closed")
	}
	kapacitor.DeleteStatistics(s.statKey)
	// Signal serve() to stop, then unblock its pending read by closing the
	// connection.
	close(s.done)
	s.conn.Close()
	s.wg.Wait()
	// Release all remaining resources.
	s.done = nil
	s.conn = nil
	s.packets = nil
	s.Logger.Print("I! Service closed")
	return nil
}
// Addr returns the fully resolved local UDP address the service is bound to
// (set in Open; useful when the configured port was 0).
func (s *Service) Addr() *net.UDPAddr {
	return s.addr
}
|
// Configure expvar monitoring. It's OK to do this even if the service fails to open and
// should be done before any data could arrive for the service.
|
<|file_name|>IterableComponentTypeMacro.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.template.macro;
import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.template.*;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.util.TypeConversionUtil;
import org.jetbrains.annotations.NotNull;
/**
* @author ven
*/
public class IterableComponentTypeMacro implements Macro {
  /** Returns the identifier used to invoke this macro inside live templates. */
  public String getName() {
    return "iterableComponentType";
  }
  /** Returns the localized, human-readable description shown in the template editor UI. */
  public String getDescription() {
    return CodeInsightBundle.message("macro.iterable.component.type");
  }
  /** Returns the placeholder text used when the macro cannot be evaluated. */
  public String getDefaultValue() {
    return "a";
  }
public Result calculateResult(@NotNull Expression[] params, ExpressionContext context) {
if (params.length != 1) return null;
final Result result = params[0].calculateResult(context);
if (result == null) return null;
Project project = context.getProject();
PsiDocumentManager.getInstance(project).commitAllDocuments();
PsiExpression expr = MacroUtil.resultToPsiExpression(result, context);
if (expr == null) return null;
PsiType type = expr.getType();
if (type instanceof PsiArrayType) {
return new PsiTypeResult(((PsiArrayType)type).getComponentType(), project);
}
if (type instanceof PsiClassType) {
PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics();
PsiClass aClass = resolveResult.getElement();
if (aClass != null) {
PsiClass iterableClass = JavaPsiFacade.getInstance(project).findClass("java.lang.Iterable", aClass.getResolveScope());
if (iterableClass != null) {
PsiSubstitutor substitutor = TypeConversionUtil.getClassSubstitutor(iterableClass, aClass, resolveResult.getSubstitutor());
if (substitutor != null) {
PsiType parameterType = substitutor.substitute(iterableClass.getTypeParameters()[0]);
if (parameterType instanceof PsiCapturedWildcardType) {
parameterType = ((PsiCapturedWildcardType)parameterType).getWildcard();
}
if (parameterType != null) {
if (parameterType instanceof PsiWildcardType) {
if (((PsiWildcardType)parameterType).isExtends()) {
return new PsiTypeResult(((PsiWildcardType)parameterType).getBound(), project);
}
else return null;<|fim▁hole|> }
}
}
}
}
return null;
}
  /** Quick evaluation is identical to the full evaluation for this macro. */
  public Result calculateQuickResult(@NotNull Expression[] params, ExpressionContext context) {
    return calculateResult(params, context);
  }
  /** This macro offers no completion variants. */
  public LookupElement[] calculateLookupItems(@NotNull Expression[] params, ExpressionContext context) {
    return LookupElement.EMPTY_ARRAY;
  }
}<|fim▁end|>
|
}
return new PsiTypeResult(parameterType, project);
|
<|file_name|>log.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"io"
"log"
"net/http"
"os"
"time"
"github.com/elazarl/goproxy"
)
const (
AppendLog int = iota
ReopenLog int = iota
)
var (
emptyResp = &http.Response{}
emptyReq = &http.Request{}
)
type LogData struct {
action int
req *http.Request
resp *http.Response
user string
err error
time time.Time
}
type ProxyLogger struct {
path string
logChannel chan *LogData
errorChannel chan error
}
func fprintf(nr *int64, err *error, w io.Writer, pat string, a ...interface{}) {
if *err != nil {
return
}
var n int
n, *err = fmt.Fprintf(w, pat, a...)
*nr += int64(n)
}
// getAuthenticatedUserName extracts the proxy-authenticated user name stored
// in the context's UserData, or "-" when no user string is attached.
func getAuthenticatedUserName(ctx *goproxy.ProxyCtx) string {
	if user, ok := ctx.UserData.(string); ok {
		return user
	}
	return "-"
}
// writeTo renders one access-log line to w with the fixed column order
// "time remote-addr method url status content-length user", substituting "-"
// for unavailable fields. The response (and its request) is preferred over
// the bare request. Byte count and first error are returned in the
// io.WriterTo style.
func (m *LogData) writeTo(w io.Writer) (nr int64, err error) {
	if m.resp != nil {
		if m.resp.Request != nil {
			fprintf(&nr, &err, w,
				"%v %v %v %v %v %v %v\n",
				m.time.Format(time.RFC3339),
				m.resp.Request.RemoteAddr,
				m.resp.Request.Method,
				m.resp.Request.URL,
				m.resp.StatusCode,
				m.resp.ContentLength,
				m.user)
		} else {
			// Response without an attached request: dash out request columns.
			fprintf(&nr, &err, w,
				"%v %v %v %v %v %v %v\n",
				m.time.Format(time.RFC3339),
				"-",
				"-",
				"-",
				m.resp.StatusCode,
				m.resp.ContentLength,
				m.user)
		}
	} else if m.req != nil {
		// Request without a response (e.g. the exchange never completed).
		fprintf(&nr, &err, w,
			"%v %v %v %v %v %v %v\n",
			m.time.Format(time.RFC3339),
			m.req.RemoteAddr,
			m.req.Method,
			m.req.URL,
			"-",
			"-",
			m.user)
	}
	return
}
func newProxyLogger(conf *Configuration) *ProxyLogger {
var fh *os.File
if conf.AccessLog != "" {
var err error
fh, err = os.OpenFile(conf.AccessLog, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o600)
if err != nil {
log.Fatalf("Couldn't open log file: %v", err)
}
}<|fim▁hole|>
logger := &ProxyLogger{
path: conf.AccessLog,
logChannel: make(chan *LogData),
errorChannel: make(chan error),
}
go func() {
for m := range logger.logChannel {
if fh != nil {
switch m.action {
case AppendLog:
if _, err := m.writeTo(fh); err != nil {
log.Println("Can't write meta", err)
}
case ReopenLog:
err := fh.Close()
if err != nil {
log.Fatal(err)
}
fh, err = os.OpenFile(conf.AccessLog, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o600)
if err != nil {
log.Fatalf("Couldn't reopen log file: %v", err)
}
}
}
}
logger.errorChannel <- fh.Close()
}()
return logger
}
// logResponse appends an access-log entry for a proxied response; a nil
// response is replaced by the empty placeholder so an entry is still written.
func (logger *ProxyLogger) logResponse(resp *http.Response, ctx *goproxy.ProxyCtx) {
	if resp == nil {
		resp = emptyResp
	}
	logger.writeLogEntry(&LogData{
		action: AppendLog,
		resp:   resp,
		user:   getAuthenticatedUserName(ctx),
		err:    ctx.Error,
		time:   time.Now(),
	})
}
// writeLogEntry queues a record for the background writer goroutine; it
// blocks while that goroutine is busy (the channel is unbuffered).
func (logger *ProxyLogger) writeLogEntry(data *LogData) {
	logger.logChannel <- data
}
// log records one completed request/response exchange taken from the proxy
// context as an append entry.
func (logger *ProxyLogger) log(ctx *goproxy.ProxyCtx) {
	logger.writeLogEntry(&LogData{
		action: AppendLog,
		req:    ctx.Req,
		resp:   ctx.Resp,
		user:   getAuthenticatedUserName(ctx),
		err:    ctx.Error,
		time:   time.Now(),
	})
}
// close shuts down the background writer goroutine and returns the error (if
// any) from closing the underlying log file.
func (logger *ProxyLogger) close() error {
	close(logger.logChannel)
	return <-logger.errorChannel
}
// reopen asks the writer goroutine to close and reopen the log file, e.g.
// after external log rotation.
func (logger *ProxyLogger) reopen() {
	logger.writeLogEntry(&LogData{action: ReopenLog})
}
| |
<|file_name|>demo_motion_correction_nonrigid.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 21 15:53:15 2016
@author: agiovann
"""
from __future__ import division
from __future__ import print_function
#%%
from builtins import zip
from builtins import str
from builtins import map
from builtins import range
from past.utils import old_div
import cv2
try:
cv2.setNumThreads(1)
except:
print('Open CV is naturally single threaded')
try:
if __IPYTHON__:
print((1))
# this is used for debugging purposes only. allows to reload classes when changed
get_ipython().magic('load_ext autoreload')
get_ipython().magic('autoreload 2')
except NameError:
print('Not launched under iPython')
import caiman as cm
import numpy as np
import time
import pylab as pl
import psutil
import sys
import os
from ipyparallel import Client
from skimage.external.tifffile import TiffFile
# , motion_correction_piecewise
from caiman.motion_correction import tile_and_correct
#%% in parallel
def tile_and_correct_wrapper(params):
    """Motion-correct one chunk of frames from a TIFF (parallel-map helper).

    All arguments are packed into a single tuple so the function can be used
    directly with ``dview.map_sync``/``map``:
    (img_name, out_fname, idxs, shape_mov, template, strides, overlaps,
     max_shifts, add_to_movie, max_deviation_rigid, upsample_factor_grid,
     newoverlaps, newstrides, shifts_opencv)

    Returns ``(shift_info, idxs, mean_corrected_image)`` and, when
    ``out_fname`` is not None, also writes the corrected frames into that
    memory-mapped file at columns ``idxs``.
    """
    # Imports are local because this function executes in worker processes.
    from skimage.external.tifffile import imread
    import numpy as np
    import cv2
    try:
        cv2.setNumThreads(1)
    except:
        1 # 'Open CV is naturally single threaded'
    from caiman.motion_correction import tile_and_correct
    img_name, out_fname, idxs, shape_mov, template, strides, overlaps, max_shifts,\
        add_to_movie, max_deviation_rigid, upsample_factor_grid, newoverlaps, newstrides, shifts_opencv = params
    # Read only this chunk's frames from the TIFF.
    imgs = imread(img_name, key=idxs)
    mc = np.zeros(imgs.shape, dtype=np.float32)
    shift_info = []
    for count, img in enumerate(imgs):
        if count % 10 == 0:
            print(count)
        mc[count], total_shift, start_step, xy_grid = tile_and_correct(img, template, strides, overlaps, max_shifts, add_to_movie=add_to_movie, newoverlaps=newoverlaps, newstrides=newstrides,
                                                                       upsample_factor_grid=upsample_factor_grid, upsample_factor_fft=10, show_movie=False, max_deviation_rigid=max_deviation_rigid, shifts_opencv=shifts_opencv)
        shift_info.append([total_shift, start_step, xy_grid])
    if out_fname is not None:
        # Write results straight into the shared memmap (Fortran order,
        # pixels x frames) so workers don't ship large arrays back.
        outv = np.memmap(out_fname, mode='r+', dtype=np.float32,
                         shape=shape_mov, order='F')
        outv[:, idxs] = np.reshape(
            mc.astype(np.float32), (len(imgs), -1), order='F').T
    return shift_info, idxs, np.nanmean(mc, 0)
#%%
def motion_correction_piecewise(fname, splits, strides, overlaps, add_to_movie=0, template=None, max_shifts=(12, 12), max_deviation_rigid=3, newoverlaps=None, newstrides=None,
                                upsample_factor_grid=4, order='F', dview=None, save_movie=True, base_name='none', num_splits=None, shifts_opencv=False):
    '''Split a TIFF movie in time and motion-correct the chunks, optionally in parallel.

    Parameters
    ----------
    fname : str
        Path of the TIFF movie to correct.
    splits : int or sequence of index arrays
        Number of temporal chunks, or an explicit list of frame-index groups
        (in the latter case the corrected movie is NOT saved).
    strides, overlaps
        Patch geometry forwarded to ``tile_and_correct``.
    template : ndarray
        Reference image; required (there is no auto-template path here).
    dview : ipyparallel view or None
        Used for parallel mapping; serial ``map`` when None.
    num_splits : int, optional
        When given, only this many randomly chosen chunks are processed and
        the movie is not saved (used for template-refinement iterations).

    Returns
    -------
    (fname_tot, res)
        Name of the memory-mapped output file (or None when not saved) and
        the list of per-chunk results from ``tile_and_correct_wrapper``.
    '''
    # Read movie geometry without loading the data.
    with TiffFile(fname) as tf:
        d1, d2 = tf[0].shape
        T = len(tf)
    if type(splits) is int:
        idxs = np.array_split(list(range(T)), splits)
    else:
        idxs = splits
        save_movie = False
    if template is None:
        raise Exception('Not implemented')
    shape_mov = (d1 * d2, T)
    dims = d1, d2
    if num_splits is not None:
        # Random subset of chunks only; output cannot be meaningfully saved.
        idxs = np.array(idxs)[np.random.randint(0, len(idxs), num_splits)]
        save_movie = False
        print('**** MOVIE NOT SAVED BECAUSE num_splits is not None ****')
    if save_movie:
        if base_name is None:
            base_name = fname[:-4]
        # Encode the movie geometry in the memmap file name (caiman convention).
        fname_tot = base_name + '_d1_' + str(dims[0]) + '_d2_' + str(dims[1]) + '_d3_' + str(
            1 if len(dims) == 2 else dims[2]) + '_order_' + str(order) + '_frames_' + str(T) + '_.mmap'
        fname_tot = os.path.join(os.path.split(fname)[0], fname_tot)
        np.memmap(fname_tot, mode='w+', dtype=np.float32,
                  shape=shape_mov, order=order)
    else:
        fname_tot = None
    # One parameter tuple per chunk for tile_and_correct_wrapper.
    pars = []
    for idx in idxs:
        pars.append([fname, fname_tot, idx, shape_mov, template, strides, overlaps, max_shifts, np.array(
            add_to_movie, dtype=np.float32), max_deviation_rigid, upsample_factor_grid, newoverlaps, newstrides, shifts_opencv])
    t1 = time.time()
    if dview is not None:
        res = dview.map_sync(tile_and_correct_wrapper, pars)
    else:
        res = list(map(tile_and_correct_wrapper, pars))
    print((time.time() - t1))
    return fname_tot, res
#%%
# backend='SLURM'
backend = 'local'
if backend == 'SLURM':
n_processes = np.int(os.environ.get('SLURM_NPROCS'))
else:
# roughly number of cores on your machine minus 1
n_processes = np.maximum(np.int(psutil.cpu_count()), 1)
print(('using ' + str(n_processes) + ' processes'))
#%% start cluster for efficient computation
# Set up an ipyparallel cluster (or none at all when single_thread is True).
single_thread = False
if single_thread:
    dview = None
else:
    try:
        # `c` only exists if a previous cell created it; a NameError lands in
        # the except branch below.
        c.close()
    except:
        print('C was not existing, creating one')
    print("Stopping cluster to avoid unnencessary use of memory....")
    sys.stdout.flush()
    if backend == 'SLURM':
        try:
            cm.stop_server(is_slurm=True)
        except:
            print('Nothing to stop')
        slurm_script = '/mnt/xfs1/home/agiovann/SOFTWARE/Constrained_NMF/SLURM/slurmStart.sh'
        cm.start_server(slurm_script=slurm_script)
        pdir, profile = os.environ['IPPPDIR'], os.environ['IPPPROFILE']
        c = Client(ipython_dir=pdir, profile=profile)
    else:
        # Local cluster: restart the ipyparallel server and connect to it.
        cm.stop_server()
        cm.start_server()
        c = Client()
    print(('Using ' + str(len(c)) + ' processes'))
    # Direct view over all engines; used by map_sync later on.
    dview = c[:len(c)]
#%% set parameters and create template by rigid motion correction
t1 = time.time()
#fname = 'k56_20160608_RSM_125um_41mW_zoom2p2_00001_00034.tif'
#fname = 'Sue_1000.tif'
fname = 'Sue_2000.tif'
max_shifts = (12, 12)
# splits = 56 # for parallelization split the movies in num_splits chuncks across time
#num_splits_to_process = 28
#fname = 'M_FLUO_t_1000.tif'
#max_shifts = (10,10)
splits = 56 # for parallelization split the movies in num_splits chuncks across time
num_splits_to_process = 28
#fname = 'M_FLUO_4.tif'
m = cm.load(fname, subindices=slice(0, 500, None))
template = cm.motion_correction.bin_median(m[100:400].copy().motion_correct(
max_shifts[0], max_shifts[1], template=None)[0])
print(time.time() - t1)
#%
# pl.imshow(template)
#%
shifts_opencv = False
new_templ = template
add_to_movie = -np.min(template)
save_movie = False
num_iter = 1
for iter_ in range(num_iter):
print(iter_)
old_templ = new_templ.copy()
if iter_ == num_iter - 1:
save_movie = True
print('saving!')
num_splits_to_process = None
# templ_to_save = old_templ
fname_tot, res = motion_correction_piecewise(fname, splits, None, None,
add_to_movie=add_to_movie, template=old_templ, max_shifts=max_shifts, max_deviation_rigid=0,
newoverlaps=None, newstrides=None,
upsample_factor_grid=4, order='F', dview=dview, save_movie=save_movie, base_name=fname[:-4] + '_rig_', num_splits=num_splits_to_process, shifts_opencv=shifts_opencv)
new_templ = np.nanmedian(np.dstack([r[-1] for r in res]), -1)
print((old_div(np.linalg.norm(new_templ - old_templ), np.linalg.norm(old_templ))))
t2 = time.time() - t1
print(t2)
pl.imshow(new_templ, cmap='gray', vmax=np.percentile(new_templ, 95))
#%%
import scipy
np.save(fname[:-4] + '_templ_rigid.npy', new_templ)
#scipy.io.savemat('/mnt/xfs1/home/agiovann/dropbox/Python_progress/' + str(np.shape(m)[-1])+'_templ_rigid.mat',{'template':new_templ})
#%%
template = new_templ
#%%
mr = cm.load(fname_tot)
#%% online does not seem to work!
#overlaps = (16,16)
# if template.shape == (512,512):
# strides = (128,128)# 512 512
# #strides = (48,48)# 128 64
# elif template.shape == (64,128):
# strides = (48,48)# 512 512
# else:
# raise Exception('Unknown size, set manually')
#upsample_factor_grid = 4
#
#T = m.shape[0]
#idxs_outer = np.array_split(range(T),T/1000)
# for iddx in idxs_outer:
# num_fr = len(iddx)
# splits = np.array_split(iddx,num_fr/n_processes)
# print (splits[0][0]),(splits[-1][-1])
# fname_tot, res = motion_correction_piecewise(fname,splits, strides, overlaps,\
# add_to_movie=add_to_movie, template = template, max_shifts = (12,12),max_deviation_rigid = 3,\
# upsample_factor_grid = upsample_factor_grid,dview = dview)
#%%
# for 512 512 this seems good
t1 = time.time()
if template.shape == (512, 512):
strides = (128, 128) # 512 512
overlaps = (32, 32)
# strides = (16,16)# 512 512
newoverlaps = None
newstrides = None
# strides = (48,48)# 128 64
elif template.shape == (64, 128):
strides = (32, 32)
overlaps = (16, 16)
newoverlaps = None
newstrides = None
else:
raise Exception('Unknown size, set manually')
splits = 56
num_splits_to_process = 28
upsample_factor_grid = 4
max_deviation_rigid = 3
new_templ = template
add_to_movie = -np.min(m)
num_iter = 2
save_movie = False
for iter_ in range(num_iter):
print(iter_)
old_templ = new_templ.copy()
if iter_ == num_iter - 1:
save_movie = True
num_splits_to_process = None
print('saving!')
fname_tot, res = motion_correction_piecewise(fname, splits, strides, overlaps,
add_to_movie=add_to_movie, template=old_templ, max_shifts=max_shifts, max_deviation_rigid=max_deviation_rigid,
newoverlaps=newoverlaps, newstrides=newstrides,
upsample_factor_grid=upsample_factor_grid, order='F', dview=dview, save_movie=save_movie, base_name=fname[:-4] + '_els_opencv_', num_splits=num_splits_to_process, shifts_opencv=shifts_opencv)
new_templ = np.nanmedian(np.dstack([r[-1] for r in res]), -1)
# print((old_div(np.linalg.norm(new_templ-old_templ),np.linalg.norm(old_templ))))
# pl.imshow(new_templ,cmap = 'gray',vmax = np.percentile(new_templ,99))
# pl.pause(.1)
t2 = time.time() - t1
print(t2)
mc = cm.load(fname_tot)
#%%
pl.imshow(new_templ, cmap='gray', vmax=np.percentile(new_templ, 95))
#%%
np.save(fname[:-4] + '_templ_pw_rigid.npy', new_templ)
#scipy.io.savemat('/mnt/xfs1/home/agiovann/dropbox/Python_progress/' + str(np.shape(m)[-1])+'_templ_pw_rigid.mat',{'template':templ_to_save})
#%%
#%%
def compute_metrics_motion_correction(fname, final_size_x, final_size_y, swap_dim=False, pyr_scale=.5, levels=3, winsize=100, iterations=15, poly_n=5, poly_sigma=1.2 / 5, flags=0,
                                      play_flow=False, resize_fact_flow=.2, template=None):
    """Compute registration-quality metrics for a motion-corrected movie.

    The movie is cropped to (final_size_x, final_size_y) to discard border
    artifacts introduced by registration shifts, then the function computes:
    a local-correlation image, gradient-based smoothness of the mean image and
    of the correlation image, per-frame Pearson correlation with the template,
    and the residual Farneback optical flow against the template. Everything
    is saved to ``<fname minus extension>_metrics.npz``.

    Parameters
    ----------
    fname : str
        Path of the movie (any format ``caiman.load`` understands).
    final_size_x, final_size_y : int
        Target size after symmetric border cropping.
    swap_dim : bool
        Forwarded to ``local_correlations``. Defaults to False so the several
        call sites in this script that omit it keep working (previously this
        parameter had no default and those calls raised TypeError).
    pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags
        Parameters forwarded to ``cv2.calcOpticalFlowFarneback``.
    play_flow : bool
        If True, visualize each frame's flow field with pylab.
    resize_fact_flow : float
        Temporal down-sampling factor applied before the optical-flow pass.
    template : ndarray or None
        Reference image; when None the binned median of the movie is used.

    Returns
    -------
    (tmpl, correlations, flows, norms, smoothness)
    """
    # cv2.OPTFLOW_FARNEBACK_GAUSSIAN
    # scipy.stats.pearsonr is used below; a plain `import scipy` does not
    # guarantee the scipy.stats submodule is loaded.
    import scipy.stats
    vmin, vmax = -1, 1
    m = cm.load(fname)
    # Symmetric border crop. int(...) instead of np.int(...): the np.int
    # alias was removed in NumPy 1.24.
    max_shft_x = int(np.ceil((np.shape(m)[1] - final_size_x) / 2))
    max_shft_y = int(np.ceil((np.shape(m)[2] - final_size_y) / 2))
    max_shft_x_1 = - ((np.shape(m)[1] - max_shft_x) - (final_size_x))
    max_shft_y_1 = - ((np.shape(m)[2] - max_shft_y) - (final_size_y))
    # A crop of 0 must become None: m[:, x:-0] would select nothing.
    if max_shft_x_1 == 0:
        max_shft_x_1 = None
    if max_shft_y_1 == 0:
        max_shft_y_1 = None
    m = m[:, max_shft_x:max_shft_x_1, max_shft_y:max_shft_y_1]
    print('Local correlations..')
    img_corr = m.local_correlations(eight_neighbours=True, swap_dim=swap_dim)
    print(m.shape)
    if template is None:
        tmpl = cm.motion_correction.bin_median(m)
    else:
        tmpl = template
    print('Compute Smoothness.. ')
    # Smoothness = L2 norm of the spatial gradient; sharper mean images
    # (better registration) give larger values.
    smoothness = np.sqrt(
        np.sum(np.sum(np.array(np.gradient(np.mean(m, 0)))**2, 0)))
    smoothness_corr = np.sqrt(
        np.sum(np.sum(np.array(np.gradient(img_corr))**2, 0)))
    print('Compute correlations.. ')
    correlations = []
    count = 0
    for fr in m:
        if count % 100 == 0:
            print(count)
        count += 1
        correlations.append(scipy.stats.pearsonr(
            fr.flatten(), tmpl.flatten())[0])
    print('Compute optical flow .. ')
    # Down-sample in time before the (expensive) optical-flow computation.
    m = m.resize(1, 1, resize_fact_flow)
    norms = []
    flows = []
    count = 0
    for fr in m:
        if count % 100 == 0:
            print(count)
        count += 1
        flow = cv2.calcOpticalFlowFarneback(
            tmpl, fr, None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
        if play_flow:
            pl.subplot(1, 3, 1)
            pl.cla()
            pl.imshow(fr, vmin=0, vmax=300, cmap='gray')
            pl.title('movie')
            pl.subplot(1, 3, 3)
            pl.cla()
            pl.imshow(flow[:, :, 1], vmin=vmin, vmax=vmax)
            pl.title('y_flow')
            pl.subplot(1, 3, 2)
            pl.cla()
            pl.imshow(flow[:, :, 0], vmin=vmin, vmax=vmax)
            pl.title('x_flow')
            pl.pause(.05)
        # Norm of the residual flow: ideally ~0 for perfect registration.
        n = np.linalg.norm(flow)
        flows.append(flow)
        norms.append(n)
    np.savez(fname[:-4] + '_metrics', flows=flows, norms=norms, correlations=correlations,
             smoothness=smoothness, tmpl=tmpl, smoothness_corr=smoothness_corr, img_corr=img_corr)
    return tmpl, correlations, flows, norms, smoothness
#%% run comparisons MLK
# `glob` is used throughout the comparison sections below but was never
# imported at the top of the file; import it here before first use to avoid a
# NameError.
import glob

m_res = glob.glob('MKL*hdf5')
final_size = (512 - 24, 512 - 24)
winsize = 100
swap_dim = False
resize_fact_flow = .2
for mv in m_res:
    tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
        mv, final_size[0], final_size[1], swap_dim, winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% run comparisons NORMCORRE
m_fluos = glob.glob('M_FLUO*.mmap') + glob.glob('M_FLUO*.tif')
final_size = (64 - 20, 128 - 20)
winsize = 32
resize_fact_flow = 1
for mv in m_fluos:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#% run comparisons resonant
m_res = glob.glob('Sue*mmap') + glob.glob('Sue*.tif')
final_size = (512 - 24, 512 - 24)
winsize = 100
swap_dim = False
resize_fact_flow = .2
for mv in m_res:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], swap_dim, winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% run comparisons SIMA
m_fluos = glob.glob('plane*.tif') + glob.glob('row*.tif')
final_size = (64 - 20, 128 - 20)
winsize = 32
resize_fact_flow = 1
for mv in m_fluos:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#% run comparisons resonant
m_res = glob.glob('Sue*.tif')
final_size = (512 - 24, 512 - 24)
winsize = 100
resize_fact_flow = .2
for mv in m_res:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% run comparisons SUITE2P
for mvs in glob.glob('Sue*2000*16*.mat'):
print(mvs)
cm.movie(scipy.io.loadmat(mvs)['data'].transpose(
[2, 0, 1])).save(mvs[:-3] + '.hdf5')
#%%
m_fluos = glob.glob('M_FLUO*.hdf5')
final_size = (64 - 20, 128 - 20)
winsize = 32
resize_fact_flow = 1
for mv in m_fluos:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#% run comparisons resonant
m_res = glob.glob('Sue_2000*16*.hdf5')
final_size = (512 - 24, 512 - 24)
winsize = 100
resize_fact_flow = .2
for mv in m_res:
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
mv, final_size[0], final_size[1], winsize=winsize, play_flow=False, resize_fact_flow=resize_fact_flow)
#%% plot the results
files_img = [u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/NORM_CORRE_OPENCV/Sue_2000_els_opencv__d1_512_d2_512_d3_1_order_F_frames_2000_._metrics.npz',
u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/NORMCORRE_EFF/Sue_2000_els__d1_512_d2_512_d3_1_order_F_frames_2000_._metrics.npz',
# u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/MLK/Sue_2000_MLK_metrics.npz',
# u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/SIMA_RESULTS/Sue_1000_T.tifrow1_example_sima_Trow1_example_sima_metrics.npz',
# u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/SUITE_2P_RES/Sue_2000_t_NB_16.._metrics.npz',
u'/mnt/xfs1/home/agiovann/DataForPublications/Piecewise-Rigid-Analysis-paper/MLK/MKL16T._metrics.npz']
# for fl in glob.glob('*.npz'):
for fl in files_img:
with np.load(fl) as ld:
print(ld.keys())
pl.figure()
print(fl + ':' + str(np.mean(ld['norms'])) + '+/-' + str(np.std(ld['norms'])) + ' ; ' + str(np.mean(ld['correlations'])
) + '+/-' + str(np.std(ld['correlations'])) + ' ; ' + str(ld['smoothness']) + ' ; ' + str(ld['smoothness_corr']))
pl.subplot(1, 2, 1)
try:
mean_img = np.mean(cm.load(fl[:-12] + 'mmap'), 0)[12:-12, 12:-12]
except:
try:
mean_img = np.mean(
cm.load(fl[:-12] + '.tif'), 0)[12:-12, 12:-12]
except:
mean_img = np.mean(
cm.load(fl[:-12] + 'hdf5'), 0)[12:-12, 12:-12]
# lq,hq = np.nanpercentile(mean_img,[.1,99.9])
lq, hq = 13.3, 318.01
pl.imshow(mean_img, vmin=lq, vmax=hq)
pl.colorbar()
# pl.plot(ld['correlations'])
pl.subplot(1, 2, 2)
pl.imshow(ld['img_corr'], vmin=0, vmax=.5)
pl.colorbar()
#%%
for fl in glob.glob('Mf*.npz'):
with np.load(fl) as ld:
print(ld.keys())
pl.figure()
print(fl + ':' + str(np.mean(ld['norms'])) + '+/-' + str(np.std(ld['norms'])) + ' ; ' + str(np.mean(ld['correlations'])
) + '+/-' + str(np.std(ld['correlations'])) + ' ; ' + str(ld['smoothness']) + ' ; ' + str(ld['smoothness_corr']))
#%%
#%
#total_shifts = []
#start_steps = []
#xy_grids = []
#mc = np.zeros(m.shape)
# for count,img in enumerate(np.array(m)):
# if count % 10 == 0:
# print(count)
# mc[count],total_shift,start_step,xy_grid = tile_and_correct(img, template, strides, overlaps,(12,12), newoverlaps = None, \
# newstrides = newstrides, upsample_factor_grid=upsample_factor_grid,\
# upsample_factor_fft=10,show_movie=False,max_deviation_rigid=2,add_to_movie=add_to_movie)
#
# total_shifts.append(total_shift)
# start_steps.append(start_step)
# xy_grids.append(xy_grid)
#mc = cm.load('M_FLUO_4_d1_64_d2_128_d3_1_order_F_frames_4620_.mmap')
#mc = cm.load('M_FLUO_t_d1_64_d2_128_d3_1_order_F_frames_6764_.mmap')
#%%
mc.resize(1, 1, .1).play(gain=10., fr=30, offset=100, magnification=1.)
#%%
m.resize(1, 1, .2).play(gain=10, fr=30, offset=0, magnification=1.)
#%%
cm.concatenate([mr.resize(1, 1, .5), mc.resize(1, 1, .5)], axis=1).play(
gain=10, fr=100, offset=300, magnification=1.)
#%%
import h5py
with h5py.File('sueann_pw_rigid_movie.mat') as f:
mef = np.array(f['M2'])
mef = cm.movie(mef.transpose([0, 2, 1]))
#%%
cm.concatenate([mef.resize(1, 1, .15), mc.resize(1, 1, .15)], axis=1).play(
gain=30, fr=40, offset=300, magnification=1.)
#%%
(mef - mc).resize(1, 1, .1).play(gain=50, fr=20, offset=0, magnification=1.)
#%%
(mc - mef).resize(1, 1, .1).play(gain=50, fr=20, offset=0, magnification=1.)
#%%
T, d1, d2 = np.shape(m)
shape_mov = (d1 * d2, m.shape[0])
Y = np.memmap('M_FLUO_4_d1_64_d2_128_d3_1_order_F_frames_4620_.mmap',
mode='r', dtype=np.float32, shape=shape_mov, order='F')
mc = cm.movie(np.reshape(Y, (d2, d1, T), order='F').transpose([2, 1, 0]))
mc.resize(1, 1, .25).play(gain=10., fr=50)
#%%
total_shifts = [r[0][0][0] for r in res]
pl.plot(np.reshape(np.array(total_shifts), (len(total_shifts), -1)))
#%%
#m_raw = cm.motion_correction.bin_median(m,exclude_nans=True)
#m_rig = cm.motion_correction.bin_median(mr,exclude_nans=True)
#m_el = cm.motion_correction.bin_median(mc,exclude_nans=True)
m_raw = np.nanmean(m, 0)
m_rig = np.nanmean(mr, 0)
m_el = np.nanmean(mc, 0)
m_ef = np.nanmean(mef, 0)
#%%
import scipy
r_raw = []
r_rig = []
r_el = []
r_ef = []
max_shft_x, max_shft_y = max_shifts
for fr_id in range(m.shape[0]):
fr = m[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_raw.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_raw.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
fr = mr[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_rig.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_rig.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
fr = mc[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_el.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_el.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
if 1:
fr = mef[fr_id].copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
templ_ = m_ef.copy()[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y]
r_ef.append(scipy.stats.pearsonr(fr.flatten(), templ_.flatten())[0])
r_raw = np.array(r_raw)
r_rig = np.array(r_rig)
r_el = np.array(r_el)
r_ef = np.array(r_ef)
#%%
#r_ef = scipy.io.loadmat('sueann.mat')['cM2'].squeeze()
#r_efr = scipy.io.loadmat('sueann.mat')['cY'].squeeze()
# pl.close()
#%%
pl.plot(r_raw)
pl.plot(r_rig)
pl.plot(r_el)
# pl.plot(r_ef)
#%%
pl.scatter(r_el, r_ef)
pl.plot([0, 1], [0, 1], 'r--')
#%%
pl.plot(old_div((r_ef - r_el), np.abs(r_el)))
#%%
import pylab as pl
vmax = -100
max_shft = 3
#%
pl.subplot(3, 3, 1)
pl.imshow(np.nanmean(m, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('raw')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(3, 3, 2)
pl.title('rigid mean')
pl.imshow(np.nanmean(mr, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(3, 3, 3)
pl.imshow(np.nanmean(mc, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('pw-rigid mean')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(3, 3, 5)
pl.scatter(r_raw, r_rig)
pl.plot([0, 1], [0, 1], 'r--')
pl.xlabel('raw')
pl.ylabel('rigid')
pl.xlim([0, 1])
pl.ylim([0, 1])
pl.subplot(3, 3, 6)
pl.scatter(r_rig, r_el)
pl.plot([0, 1], [0, 1], 'r--')
pl.ylabel('pw-rigid')
pl.xlabel('rigid')
pl.xlim([0, 1])
pl.ylim([0, 1])
if 0:
pl.subplot(2, 3, 3)
pl.scatter(r_el, r_ef)
pl.plot([0, 1], [0, 1], 'r--')
pl.ylabel('pw-rigid')
pl.xlabel('pw-rigid eft')
pl.xlim([0, 1])
pl.ylim([0, 1])
pl.subplot(2, 3, 6)
pl.imshow(np.nanmean(mef, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('pw-rigid eft mean')
pl.axis('off')
#%%
pl.plot(r_ef)
#%%
mc = cm.movie(mc)
mc[np.isnan(mc)] = 0
#%% play movie
(mc + add_to_movie).resize(1, 1, .25).play(gain=10., fr=50)
#%% compute correlation images
ccimage = m.local_correlations(eight_neighbours=True, swap_dim=False)
ccimage_rig = mr.local_correlations(eight_neighbours=True, swap_dim=False)
ccimage_els = mc.local_correlations(eight_neighbours=True, swap_dim=False)
ccimage_ef = mef.local_correlations(eight_neighbours=True, swap_dim=False)
#%% check correlation images
pl.subplot(2, 2, 1)
pl.imshow(ccimage, vmin=0, vmax=0.4, interpolation='none')
pl.subplot(2, 2, 2)
pl.imshow(ccimage_rig, vmin=0, vmax=0.4, interpolation='none')
pl.subplot(2, 2, 3)
pl.imshow(ccimage_els, vmin=0, vmax=0.4, interpolation='none')
pl.subplot(2, 2, 4)
pl.imshow(ccimage_ef, vmin=0, vmax=0.4, interpolation='none')
#%%
all_mags = []
all_mags_eig = []
for chunk in res:
for frame in chunk[0]:
shifts, pos, init = frame
x_sh = np.zeros(np.add(init[-1], 1))
y_sh = np.zeros(np.add(init[-1], 1))
for nt, sh in zip(init, shifts):
x_sh[nt] = sh[0]
y_sh[nt] = sh[1]
jac_xx = x_sh[1:, :] - x_sh[:-1, :]
jac_yx = y_sh[1:, :] - y_sh[:-1, :]
jac_xy = x_sh[:, 1:] - x_sh[:, :-1]
jac_yy = y_sh[:, 1:] - y_sh[:, :-1]
mag_norm = np.sqrt(jac_xx[:, :-1]**2 + jac_yx[:, :-1]
** 2 + jac_xy[:-1, :]**2 + jac_yy[:-1, :]**2)
all_mags.append(mag_norm)
# pl.cla()
# pl.imshow(mag_norm,vmin=0,vmax =1,interpolation = 'none')
# pl.pause(.1)
#%%
mam = cm.movie(np.dstack(all_mags)).transpose([2, 0, 1])
#mam.play(magnification=10,gain = 5.)
#%%
pl.imshow(np.max(mam, 0), interpolation='none')
#%%
m = cm.load('rig_sue__d1_512_d2_512_d3_1_order_F_frames_3000_.mmap')
m1 = cm.load('els_sue__d1_512_d2_512_d3_1_order_F_frames_3000_.mmap')
m0 = cm.load('k56_20160608_RSM_125um_41mW_zoom2p2_00001_00034.tif')
tmpl = cm.motion_correction.bin_median(m)
tmpl1 = cm.motion_correction.bin_median(m1)
tmpl0 = cm.motion_correction.bin_median(m0)
#%%
vmin, vmax = -1, 1
count = 0
pyr_scale = .5
levels = 3
winsize = 100
iterations = 15
poly_n = 5
poly_sigma = old_div(1.2, 5)
flags = 0 # cv2.OPTFLOW_FARNEBACK_GAUSSIAN
norms = []
flows = []<|fim▁hole|> flow1 = cv2.calcOpticalFlowFarneback(tmpl1[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y], fr1[max_shft_x:-
max_shft_x, max_shft_y:-max_shft_y], None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
flow = cv2.calcOpticalFlowFarneback(tmpl[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y], fr[max_shft_x:-
max_shft_x, max_shft_y:-max_shft_y], None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
flow0 = cv2.calcOpticalFlowFarneback(tmpl0[max_shft_x:-max_shft_x, max_shft_y:-max_shft_y], fr0[max_shft_x:-
max_shft_x, max_shft_y:-max_shft_y], None, pyr_scale, levels, winsize, iterations, poly_n, poly_sigma, flags)
#
# pl.subplot(2,3,1)
# pl.cla()
# pl.imshow(flow1[:,:,1],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,2)
# pl.cla()
# pl.imshow(flow[:,:,1],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,3)
# pl.cla()
# pl.imshow(flow0[:,:,1],vmin=vmin,vmax=vmax)
#
# pl.subplot(2,3,4)
# pl.cla()
# pl.imshow(flow1[:,:,0],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,5)
# pl.cla()
# pl.imshow(flow[:,:,0],vmin=vmin,vmax=vmax)
# pl.subplot(2,3,6)
# pl.cla()
# pl.imshow(flow0[:,:,0],vmin=vmin,vmax=vmax)
# pl.pause(.1)
n1, n, n0 = np.linalg.norm(flow1), np.linalg.norm(
flow), np.linalg.norm(flow0)
flows.append([flow1, flow, flow0])
norms.append([n1, n, n0])
#%%
flm1_x = cm.movie(np.dstack([fl[0][:, :, 0]
for fl in flows])).transpose([2, 0, 1])
flm_x = cm.movie(np.dstack([fl[1][:, :, 0]
for fl in flows])).transpose([2, 0, 1])
flm0_x = cm.movie(np.dstack([fl[2][:, :, 0]
for fl in flows])).transpose([2, 0, 1])
flm1_y = cm.movie(np.dstack([fl[0][:, :, 1]
for fl in flows])).transpose([2, 0, 1])
flm_y = cm.movie(np.dstack([fl[1][:, :, 1]
for fl in flows])).transpose([2, 0, 1])
flm0_y = cm.movie(np.dstack([fl[2][:, :, 1]
for fl in flows])).transpose([2, 0, 1])
#%%
pl.figure()
pl.subplot(2, 1, 1)
pl.plot(norms)
pl.subplot(2, 1, 2)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_el)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_rig)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_raw)
#%%
#%% compare to optical flow
pl.figure()
vmin = -.5
vmax = .5
cmap = 'hot'
pl.subplot(2, 3, 1)
pl.imshow(np.mean(np.abs(flm1_x), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.title('PW-RIGID')
pl.ylabel('optical flow x')
pl.colorbar()
pl.subplot(2, 3, 2)
pl.title('RIGID')
pl.imshow(np.mean(np.abs(flm_x), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.colorbar()
pl.subplot(2, 3, 3)
pl.imshow(np.mean(np.abs(flm0_x), 0), vmin=vmin * 4, vmax=vmax * 4, cmap=cmap)
pl.title('RAW')
pl.colorbar()
pl.subplot(2, 3, 4)
pl.imshow(np.mean(np.abs(flm1_y), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.ylabel('optical flow y')
pl.colorbar()
pl.subplot(2, 3, 5)
pl.imshow(np.mean(np.abs(flm_y), 0), vmin=vmin, vmax=vmax, cmap=cmap)
pl.colorbar()
pl.subplot(2, 3, 6)
pl.imshow(np.mean(np.abs(flm0_y), 0), vmin=vmin * 4, vmax=vmax * 4, cmap=cmap)
pl.colorbar()
#%%
fl_rig = [n[1] / 1000 for n in norms]
fl_raw = [n[2] / 1000 for n in norms]
fl_el = [n[0] / 1000 for n in norms]
#%%
font = {'family': 'Myriad Pro',
'weight': 'regular',
'size': 15}
pl.rc('font', **font)
vmax = -100
max_shft = 3
pl.subplot(4, 3, 1)
pl.imshow(np.nanmean(m, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('raw')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(4, 3, 2)
pl.title('rigid mean')
pl.imshow(np.nanmean(mr, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(4, 3, 3)
pl.imshow(np.nanmean(mc, 0)[max_shft:-max_shft, max_shft:-
max_shft], cmap='gray', vmax=vmax, interpolation='none')
pl.title('pw-rigid mean')
pl.axis('off')
pl.xlim([0, 100])
pl.ylim([220, 320])
pl.axis('off')
pl.subplot(4, 3, 5)
pl.scatter(r_raw, r_rig, s=50, c='red')
pl.axis('tight')
pl.plot([0, 1], [0, 1], 'k--')
pl.xlabel('raw')
pl.ylabel('rigid')
pl.xlim([0.2, .45])
pl.ylim([.2, .45])
pl.locator_params(nbins=4)
pl.subplot(4, 3, 6)
pl.scatter(r_rig, r_el, s=50, c='red')
pl.plot([0, 1], [0, 1], 'k--')
pl.ylabel('pw-rigid')
pl.xlabel('rigid')
pl.xlim([0.3, .45])
pl.ylim([.3, .45])
pl.locator_params(nbins=4)
pl.subplot(4, 3, 4)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_el)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_rig)
pl.plot(np.arange(0, 3000 * .2, 0.2), r_raw)
pl.xlim([220, 320])
pl.ylabel('correlation')
pl.locator_params(nbins=4)
pl.subplot(4, 3, 7)
pl.plot(norms)
pl.xlim([220, 320])
pl.ylabel('norm of optical flow')
pl.xlabel('frames')
pl.locator_params(nbins=4)
pl.subplot(4, 3, 8)
pl.scatter(fl_raw, fl_rig, s=50, c='red')
pl.axis('tight')
pl.plot([0, 3000], [0, 3000], 'k--')
pl.xlabel('raw')
pl.ylabel('rigid')
pl.xlim([0, 3])
pl.ylim([0, 3])
pl.locator_params(nbins=4)
pl.subplot(4, 3, 9)
pl.scatter(fl_rig, fl_el, s=50, c='red')
pl.plot([0, 1000], [0, 1000], 'k--')
pl.ylabel('pw-rigid')
pl.xlabel('rigid')
pl.xlim([0, 1])
pl.ylim([0, 1])
pl.locator_params(nbins=4)
ofl_mod_rig = np.mean(np.sqrt(flm_x**2 + flm_y**2), 0)
ofl_mod_el = np.mean(np.sqrt(flm1_x**2 + flm1_y**2), 0)
pl.subplot(4, 3, 10)
pl.imshow(ofl_mod_el, cmap='hot', vmin=0, vmax=1, interpolation='none')
pl.axis('off')
pl.colorbar()
pl.subplot(4, 3, 11)
pl.imshow(ofl_mod_rig, cmap='hot', vmin=0, vmax=1, interpolation='none')
pl.axis('off')
# pl.xlim([0,100])
# pl.ylim([220,320])
pl.axis('off')
pl.subplot(4, 3, 12)
pl.imshow(ofl_mod_el, cmap='hot', vmin=0, vmax=1, interpolation='none')
pl.axis('off')
# pl.xlim([0,100])
# pl.ylim([220,320])
pl.axis('off')
# font = {'family' : 'Myriad Pro',
# 'weight' : 'regular',
# 'size' : 15}
#
#pl.rc('font', **font)
pl.rcParams['pdf.fonttype'] = 42
#%% test against SIMA
import sima
import sima.motion
from sima.motion import HiddenMarkov2D
#fname_gr = 'M_FLUO_t.tif'
#fname_gr = 'Sue_1000.tif'
#fname_gr = 'Sue_2000.tif'
fname_gr = 'Sue_1000_T.tif'
fname_gr = 'Sue_1000_T.tifrow1_example_sima_T.tif'
sequences = [sima.Sequence.create('TIFF', fname_gr)]
dataset = sima.ImagingDataset(sequences, fname_gr)
#%%
import time
t1 = time.time()
granularity = 'row'
gran_n = 1
mc_approach = sima.motion.HiddenMarkov2D(granularity=(
granularity, gran_n), max_displacement=max_shifts, verbose=True, n_processes=14)
new_dataset = mc_approach.correct(dataset, None)
t2 = time.time() - t1
print(t2)
#%
new_dataset.export_frames(
[[[fname_gr[:-4] + granularity + str(gran_n) + '_example_sima.tif']]], fmt='TIFF16')
#%%
m_s = cm.load(granularity + str(gran_n) + '_example_sima.tif')
m_s_row = cm.load('example_sima.tif')
#%%
def compute_jacobians(res):
    """Compute per-patch deformation statistics from piecewise-rigid shifts.

    Parameters
    ----------
    res : iterable
        Each element (chunk) holds at ``chunk[0]`` a list of per-frame
        results ``(shifts, pos, init)``: ``init`` is a list of patch grid
        coordinates (tuples; the last one gives the grid extent) and
        ``shifts`` the matching list of ``(x, y)`` shifts per patch.

    Returns
    -------
    all_mags : list of numpy.ndarray
        Per-frame Euclidean norm of the local shift-field Jacobian.
    all_mags_eig : list of numpy.ndarray
        Per-frame largest absolute eigenvalue (spectral radius) of the
        local 2x2 shift Jacobian.
    """
    all_mags = []
    all_mags_eig = []
    for chunk in res:
        for frame in chunk[0]:
            shifts, _, init = frame
            grid_shape = np.add(init[-1], 1)
            x_sh = np.zeros(grid_shape)
            y_sh = np.zeros(grid_shape)
            for nt, sh in zip(init, shifts):
                x_sh[nt] = sh[0]
                y_sh[nt] = sh[1]
            # Finite differences of the two shift fields along both grid axes.
            jac_xx = x_sh[1:, :] - x_sh[:-1, :]
            jac_yx = y_sh[1:, :] - y_sh[:-1, :]
            jac_xy = x_sh[:, 1:] - x_sh[:, :-1]
            jac_yy = y_sh[:, 1:] - y_sh[:, :-1]
            # Trim so that all four difference arrays share the same shape.
            jxx = jac_xx[:, :-1]
            jyx = jac_yx[:, :-1]
            jxy = jac_xy[:-1, :]
            jyy = jac_yy[:-1, :]
            mag_norm = np.sqrt(jxx ** 2 + jyx ** 2 + jxy ** 2 + jyy ** 2)
            # Per-pixel 2x2 Jacobian [[jxx, jxy], [jyx, jyy]]; use its
            # spectral radius as a scalar deformation measure.  The original
            # loop zipped jac_yy twice (dropping jac_yx), unpacked
            # scipy.linalg.eig incorrectly and referenced the undefined
            # names `lsl`/`mag_eig`, so it crashed before returning anything.
            jac = np.stack(
                [np.stack([jxx, jxy], axis=-1),
                 np.stack([jyx, jyy], axis=-1)],
                axis=-2)
            mag_eig = np.abs(np.linalg.eigvals(jac)).max(axis=-1)
            all_mags_eig.append(mag_eig)
            all_mags.append(mag_norm)
    return all_mags, all_mags_eig
# %%
#m = cm.load('M_FLUO_t_1000.tif')
#tmpl, correlations, flows_rig, norms = compute_metrics_motion_correction('M_FLUO_t_1000_rig__d1_64_d2_128_d3_1_order_F_frames_1000_.mmap',10,10,winsize=32, play_flow=False, resize_fact_flow=1)
#tmpl, correlations, flows_els, norms = compute_metrics_motion_correction('M_FLUO_t_1000_els__d1_64_d2_128_d3_1_order_F_frames_1000_.mmap',10,10,winsize=32, play_flow=False, resize_fact_flow=1)
#tmpl, correlations, flows_orig, norms = compute_metrics_motion_correction('M_FLUO_t_1000.tif',10,10,winsize=32, play_flow=False, resize_fact_flow=1)
#mfl_orig = cm.movie(np.concatenate([np.sqrt(np.sum(ff**2,-1))[np.newaxis,:,:] for ff in flows_orig],axis=0))
#mfl_rig = cm.movie(np.concatenate([np.sqrt(np.sum(ff**2,-1))[np.newaxis,:,:] for ff in flows_rig],axis=0))
#mfl_els = cm.movie(np.concatenate([np.sqrt(np.sum(ff**2,-1))[np.newaxis,:,:] for ff in flows_els],axis=0))
# %%
#cm.concatenate([mfl_orig/5.,mfl_rig,mfl_els],axis = 1).zproject(vmax = .5)
# %%
#cm.concatenate([m[:,10:-10,10:-10]/500,mfl_orig,mfl_rig,mfl_els],axis = 1).play(magnification = 5,gain = 5)
#%% TEST OPT FLOW
nmf = 'M_FLUO_t_shifted_flow.tif'
m = cm.load('M_FLUO_t_1000_els__d1_64_d2_128_d3_1_order_F_frames_1000_.mmap')
#shfts = [(a,b) for a,b in zip(np.random.randint(-2,3,m.shape[0]),np.random.randint(-2,3,m.shape[0]))]
shfts = [(a, b) for a, b in zip(np.random.randn(
m.shape[0]), np.random.randn(m.shape[0]))]
msh = m.copy().apply_shifts(shfts)
msh[:, 10:-10, 10:-10].save(nmf)
template = np.nanmean(m[:, 10:-10, 10:-10], 0)
tmpl, correlations, flows_orig, norms, smoothness = compute_metrics_motion_correction(
'M_FLUO_t_shifted_flow.tif', template.shape[0], template.shape[1], winsize=32, play_flow=False, resize_fact_flow=1, template=template)
with np.load('M_FLUO_t_shifted_flow_metrics.npz') as ld:
flows = ld['flows']
ff_1 = [np.nanmean(f[:, :, 1]) for f in flows]
ff_0 = [np.nanmean(f[:, :, 0]) for f in flows]
pl.subplot(2, 1, 1)
pl.plot(np.array(shfts)[:, 1])
pl.plot(np.array(ff_0))
pl.legend(['shifts', 'optical flow'])
pl.xlim([400, 600])
pl.ylabel('x shifts')
pl.subplot(2, 1, 2)
pl.plot(np.array(shfts)[:, 0])
pl.plot(np.array(ff_1))
pl.xlim([400, 600])
pl.xlabel('frames (15 Hz)')
pl.ylabel('y shifts')<|fim▁end|>
|
for fr, fr1, fr0 in zip(m.resize(1, 1, .2), m1.resize(1, 1, .2), m0.resize(1, 1, .2)):
count += 1
print(count)
|
<|file_name|>template.py<|end_file_name|><|fim▁begin|>from ..baseapi import BaseApi
class Template(BaseApi):
    """Access to the MailChimp ``/templates`` endpoints."""

    def __init__(self, *args, **kwargs):
        """Initialize the endpoint name used to build request URLs."""
        # The fim-scrambled original ran the attribute assignments before
        # calling the base initializer; base init must come first so it
        # cannot clobber them.
        super(Template, self).__init__(*args, **kwargs)
        self.endpoint = 'templates'
        self.list_id = None

    def all(self):
        """Return a list of available templates."""
        return self._mc_client._get(url=self.endpoint)

    def get(self, template_id):
        """Return a specific template."""
        return self._mc_client._get(url=self._build_path(template_id))

    def update(self, template_id, data):
        """Update a specific template with the given payload."""
        return self._mc_client._patch(url=self._build_path(template_id), data=data)

    def delete(self, template_id):
        """Remove a specific template."""
        return self._mc_client._delete(url=self._build_path(template_id))
|
<|file_name|>(6 kyu) Multiples of 3 or 5.rs<|end_file_name|><|fim▁begin|>// #1
// fn solution(num: i32) -> i32 {
// let mut result = 0;
// for i in 1..num {
// result += if 0 == i % 3 || 0 == i % 5 {i} else {0}
// }
// result<|fim▁hole|> (1..num).filter(|i| 0 == i % 3 || 0 == i % 5).sum()
}<|fim▁end|>
|
// }
// #2
fn solution(num: i32) -> i32 {
|
<|file_name|>todo.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from '@angular/core';
import { Todo } from '../../shared/models/todo.model';
<|fim▁hole|> * The todo component
*/
@Component({
selector: 'todo',
templateUrl: './todo.component.html'
})
export class TodoComponent {
/**
* The entry todo from the parent list
*/
@Input() todo: Todo;
/**
* Local reference of TodoStore
*/
todoStore: TodoStore;
constructor(todoStore: TodoStore) {
this.todoStore = todoStore;
}
remove(todo: Todo){
this.todoStore.remove(todo);
}
toggleCompletion(todo: Todo) {
this.todoStore.toggleCompletion(todo);
}
editTodo(todo: Todo) {
todo.editing = true;
}
stopEditing(todo: Todo, editedTitle: string) {
todo.title = editedTitle;
todo.editing = false;
}
cancelEditingTodo(todo: Todo) {
todo.editing = false;
}
updateEditingTodo(todo: Todo, editedTitle: string) {
editedTitle = editedTitle.trim();
todo.editing = false;
if (editedTitle.length === 0) {
return this.todoStore.remove(todo);
}
todo.title = editedTitle;
this.todoStore.update();
}
}<|fim▁end|>
|
import { TodoStore } from '../../shared/services/todo.store';
/**
|
<|file_name|>oversized_pancake_flipper.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h>
using namespace std;
typedef long long int ll;
typedef pair<int, int> pii;
// Read one test case (pancake string `s`, flipper size `k`) and return the
// minimum number of k-length flips needed to turn every '-' into '+', or
// "IMPOSSIBLE".  Greedy left-to-right: the leftmost '-' must be the start
// of some flip, so flip exactly there; if the k-window would run past the
// end of the string no solution exists.
string sub() {
    string s;
    int k;
    cin >> s >> k;
    int cnt = 0;
    for (int i = 0; i < (int)s.size(); i++) {
        if (s[i] == '+')
            continue;
        if (i + k - 1 >= (int)s.size())
            return "IMPOSSIBLE";
        cnt++;
        for (int z = i; z < i + k; z++)
            s[z] = (s[z] == '+') ? '-' : '+';
    }
    return to_string(cnt);
}

int main() {
    int t;
    cin >> t;
    for (int i = 0; i < t; i++)
        cout << "Case #" << i + 1 << ": " << sub() << endl;
}
|
<|file_name|>driver.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::session::Session;
use rustc::session::config::{self, Input, OutputFilenames};
use rustc::session::search_paths::PathKind;
use rustc::ast_map;
use rustc::lint;
use rustc::metadata;
use rustc::metadata::creader::CrateReader;
use rustc::middle::{stability, ty, reachable};
use rustc::middle::dependency_format;
use rustc::middle;
use rustc::plugin::registry::Registry;
use rustc::plugin;
use rustc::util::common::time;
use rustc_borrowck as borrowck;
use rustc_resolve as resolve;
use rustc_trans::back::link;
use rustc_trans::back::write;
use rustc_trans::trans;
use rustc_typeck as typeck;
use rustc_privacy;
use super::Compilation;
use serialize::json;
use std::env;
use std::ffi::{OsString, OsStr};
use std::fs;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use syntax::ast;
use syntax::attr;
use syntax::attr::AttrMetaMethods;
use syntax::diagnostics;
use syntax::parse;
use syntax::parse::token;
use syntax;
/// Drive a full compilation: parse, expand, analyse, translate to LLVM and
/// link, invoking the `CompileController` callbacks between phases.
///
/// Each `controller_entry_point!` expansion builds the `CompileState` for a
/// phase, hands it to the user callback, aborts if errors accumulated, and
/// returns early when the controller requests `Compilation::Stop`.
pub fn compile_input(sess: Session,
                     cfg: ast::CrateConfig,
                     input: &Input,
                     outdir: &Option<PathBuf>,
                     output: &Option<PathBuf>,
                     addl_plugins: Option<Vec<String>>,
                     control: CompileController) {
    macro_rules! controller_entry_point{($point: ident, $tsess: expr, $make_state: expr) => ({
        let state = $make_state;
        (control.$point.callback)(state);

        $tsess.abort_if_errors();
        if control.$point.stop == Compilation::Stop {
            return;
        }
    })}

    // We need nested scopes here, because the intermediate results can keep
    // large chunks of memory alive and we want to free them as soon as
    // possible to keep the peak memory usage low
    let (sess, result) = {
        let (outputs, expanded_crate, id) = {
            let krate = phase_1_parse_input(&sess, cfg, input);

            controller_entry_point!(after_parse,
                                    sess,
                                    CompileState::state_after_parse(input,
                                                                    &sess,
                                                                    outdir,
                                                                    &krate));

            let outputs = build_output_filenames(input,
                                                 outdir,
                                                 output,
                                                 &krate.attrs,
                                                 &sess);
            let id = link::find_crate_name(Some(&sess),
                                           &krate.attrs,
                                           input);
            // Expansion returns None when compilation should stop (e.g.
            // after handling `-W help`).
            let expanded_crate
                = match phase_2_configure_and_expand(&sess,
                                                    krate,
                                                    &id[..],
                                                    addl_plugins) {
                    None => return,
                    Some(k) => k
                };

            (outputs, expanded_crate, id)
        };

        controller_entry_point!(after_expand,
                                sess,
                                CompileState::state_after_expand(input,
                                                                 &sess,
                                                                 outdir,
                                                                 &expanded_crate,
                                                                 &id[..]));

        let mut forest = ast_map::Forest::new(expanded_crate);
        let arenas = ty::CtxtArenas::new();
        let ast_map = assign_node_ids_and_map(&sess, &mut forest);

        write_out_deps(&sess, input, &outputs, &id[..]);

        controller_entry_point!(after_write_deps,
                                sess,
                                CompileState::state_after_write_deps(input,
                                                                     &sess,
                                                                     outdir,
                                                                     &ast_map,
                                                                     &ast_map.krate(),
                                                                     &id[..]))
;

        phase_3_run_analysis_passes(sess,
                                    ast_map,
                                    &arenas,
                                    id,
                                    control.make_glob_map,
                                    |tcx, analysis| {

            {
                let state = CompileState::state_after_analysis(input,
                                                               &tcx.sess,
                                                               outdir,
                                                               tcx.map.krate(),
                                                               &analysis,
                                                               tcx);
                (control.after_analysis.callback)(state);

                tcx.sess.abort_if_errors();
                if control.after_analysis.stop == Compilation::Stop {
                    // Err propagates the early stop out of the closure.
                    return Err(());
                }
            }

            if log_enabled!(::log::INFO) {
                println!("Pre-trans");
                tcx.print_debug_stats();
            }
            let trans = phase_4_translate_to_llvm(tcx, analysis);

            if log_enabled!(::log::INFO) {
                println!("Post-trans");
                tcx.print_debug_stats();
            }

            // Discard interned strings as they are no longer required.
            token::get_ident_interner().clear();

            Ok((outputs, trans))
        })
    };

    let (outputs, trans) = if let Ok(out) = result {
        out
    } else {
        return;
    };

    phase_5_run_llvm_passes(&sess, &trans, &outputs);

    controller_entry_point!(after_llvm,
                            sess,
                            CompileState::state_after_llvm(input,
                                                           &sess,
                                                           outdir,
                                                           &trans));

    phase_6_link_output(&sess, &trans, &outputs);
}
/// The name used for source code that doesn't originate in a file
/// (e.g. source from stdin or a string)
pub fn anon_src() -> String {
    String::from("<anon>")
}
/// Human-readable name of a compilation input: the file path for file
/// inputs, or the `<anon>` placeholder for string/stdin inputs.
pub fn source_name(input: &Input) -> String {
    match *input {
        Input::Str(_) => anon_src(),
        // FIXME (#9639): This needs to handle non-utf8 paths
        Input::File(ref ifile) => ifile.to_str().unwrap().to_string(),
    }
}
/// CompileController is used to customise compilation, it allows compilation to
/// be stopped and/or to call arbitrary code at various points in compilation.
/// It also allows for various flags to be set to influence what information gets
/// collected during compilation.
///
/// This is a somewhat higher level controller than a Session - the Session
/// controls what happens in each phase, whereas the CompileController controls
/// whether a phase is run at all and whether other code (from outside the
/// the compiler) is run between phases.
///
/// Note that if compilation is set to stop and a callback is provided for a
/// given entry point, the callback is called before compilation is stopped.
///
/// Expect more entry points to be added in the future.
pub struct CompileController<'a> {
    /// One controller per compiler phase; each can observe the state after
    /// its phase and optionally stop compilation there.
    pub after_parse: PhaseController<'a>,
    pub after_expand: PhaseController<'a>,
    pub after_write_deps: PhaseController<'a>,
    pub after_analysis: PhaseController<'a>,
    pub after_llvm: PhaseController<'a>,

    /// Whether name resolution should also record a map of glob imports.
    pub make_glob_map: resolve::MakeGlobMap,
}
impl<'a> CompileController<'a> {
    /// A controller that runs every phase to completion with no-op
    /// callbacks and no glob map — plain, uncustomised compilation.
    pub fn basic() -> CompileController<'a> {
        CompileController {
            after_parse: PhaseController::basic(),
            after_expand: PhaseController::basic(),
            after_write_deps: PhaseController::basic(),
            after_analysis: PhaseController::basic(),
            after_llvm: PhaseController::basic(),
            make_glob_map: resolve::MakeGlobMap::No,
        }
    }
}
/// Controls a single compilation phase: whether to stop afterwards and a
/// callback invoked with that phase's `CompileState`.
pub struct PhaseController<'a> {
    pub stop: Compilation,
    pub callback: Box<Fn(CompileState) -> () + 'a>,
}
impl<'a> PhaseController<'a> {
    /// A phase controller that does nothing and lets compilation continue.
    pub fn basic() -> PhaseController<'a> {
        PhaseController {
            stop: Compilation::Continue,
            callback: box |_| {},
        }
    }
}
/// State that is passed to a callback. What state is available depends on when
/// during compilation the callback is made. See the various constructor methods
/// (`state_*`) in the impl to see which data is provided for any given entry point.
pub struct CompileState<'a, 'ast: 'a, 'tcx: 'a> {
    // `input` and `session` are always populated; the optional fields are
    // filled in progressively as compilation advances — which ones are
    // `Some` depends on the entry point (see the `state_after_*`
    // constructors below).
    pub input: &'a Input,
    pub session: &'a Session,
    pub cfg: Option<&'a ast::CrateConfig>,
    pub krate: Option<&'a ast::Crate>,
    pub crate_name: Option<&'a str>,
    pub output_filenames: Option<&'a OutputFilenames>,
    pub out_dir: Option<&'a Path>,
    pub expanded_crate: Option<&'a ast::Crate>,
    pub ast_map: Option<&'a ast_map::Map<'ast>>,
    pub analysis: Option<&'a ty::CrateAnalysis>,
    pub tcx: Option<&'a ty::ctxt<'tcx>>,
    pub trans: Option<&'a trans::CrateTranslation>,
}
impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> {
    /// Base state: only `input`, `session` and `out_dir` are populated;
    /// every other field is `None`.
    fn empty(input: &'a Input,
             session: &'a Session,
             out_dir: &'a Option<PathBuf>)
             -> CompileState<'a, 'ast, 'tcx> {
        CompileState {
            input: input,
            session: session,
            out_dir: out_dir.as_ref().map(|s| &**s),
            cfg: None,
            krate: None,
            crate_name: None,
            output_filenames: None,
            expanded_crate: None,
            ast_map: None,
            analysis: None,
            tcx: None,
            trans: None,
        }
    }

    /// State for the `after_parse` callback: the freshly parsed crate.
    fn state_after_parse(input: &'a Input,
                         session: &'a Session,
                         out_dir: &'a Option<PathBuf>,
                         krate: &'a ast::Crate)
                         -> CompileState<'a, 'ast, 'tcx> {
        CompileState {
            krate: Some(krate),
            .. CompileState::empty(input, session, out_dir)
        }
    }

    /// State for the `after_expand` callback: the macro-expanded crate and
    /// its resolved crate name.
    fn state_after_expand(input: &'a Input,
                          session: &'a Session,
                          out_dir: &'a Option<PathBuf>,
                          expanded_crate: &'a ast::Crate,
                          crate_name: &'a str)
                          -> CompileState<'a, 'ast, 'tcx> {
        CompileState {
            crate_name: Some(crate_name),
            expanded_crate: Some(expanded_crate),
            .. CompileState::empty(input, session, out_dir)
        }
    }

    /// State for the `after_write_deps` callback: additionally carries the
    /// AST map built after node-id assignment.
    fn state_after_write_deps(input: &'a Input,
                              session: &'a Session,
                              out_dir: &'a Option<PathBuf>,
                              ast_map: &'a ast_map::Map<'ast>,
                              expanded_crate: &'a ast::Crate,
                              crate_name: &'a str)
                              -> CompileState<'a, 'ast, 'tcx> {
        CompileState {
            crate_name: Some(crate_name),
            ast_map: Some(ast_map),
            expanded_crate: Some(expanded_crate),
            .. CompileState::empty(input, session, out_dir)
        }
    }

    /// State for the `after_analysis` callback: analysis results and the
    /// type context are available.
    fn state_after_analysis(input: &'a Input,
                            session: &'a Session,
                            out_dir: &'a Option<PathBuf>,
                            expanded_crate: &'a ast::Crate,
                            analysis: &'a ty::CrateAnalysis,
                            tcx: &'a ty::ctxt<'tcx>)
                            -> CompileState<'a, 'ast, 'tcx> {
        CompileState {
            analysis: Some(analysis),
            tcx: Some(tcx),
            expanded_crate: Some(expanded_crate),
            .. CompileState::empty(input, session, out_dir)
        }
    }

    /// State for the `after_llvm` callback: the crate translation produced
    /// by the LLVM passes.
    fn state_after_llvm(input: &'a Input,
                        session: &'a Session,
                        out_dir: &'a Option<PathBuf>,
                        trans: &'a trans::CrateTranslation)
                        -> CompileState<'a, 'ast, 'tcx> {
        CompileState {
            trans: Some(trans),
            .. CompileState::empty(input, session, out_dir)
        }
    }
}
/// Phase 1: parse `input` into an AST crate.
///
/// Also dumps the unexpanded AST as JSON (`-Z ast-json-noexpand`) and
/// renders a requested span (`--show-span`) when those options are set.
pub fn phase_1_parse_input(sess: &Session, cfg: ast::CrateConfig, input: &Input)
                           -> ast::Crate {
    // These may be left in an incoherent state after a previous compile.
    // `clear_tables` and `get_ident_interner().clear()` can be used to free
    // memory, but they do not restore the initial state.
    syntax::ext::mtwt::reset_tables();
    token::reset_ident_interner();

    let krate = time(sess.time_passes(), "parsing", (), |_| {
        match *input {
            Input::File(ref file) => {
                parse::parse_crate_from_file(&(*file), cfg.clone(), &sess.parse_sess)
            }
            Input::Str(ref src) => {
                parse::parse_crate_from_source_str(anon_src().to_string(),
                                                   src.to_string(),
                                                   cfg.clone(),
                                                   &sess.parse_sess)
            }
        }
    });

    if sess.opts.debugging_opts.ast_json_noexpand {
        println!("{}", json::as_json(&krate));
    }

    if let Some(ref s) = sess.opts.show_span {
        syntax::show_span::run(sess.diagnostic(), s, &krate);
    }

    krate
}
// For continuing compilation after a parsed crate has been
// modified
/// Run the "early phases" of the compiler: initial `cfg` processing,
/// loading compiler plugins (including those from `addl_plugins`),
/// syntax expansion, secondary `cfg` expansion, synthesis of a test
/// harness if one is to be provided and injection of a dependency on the
/// standard library and prelude.
///
/// Returns `None` if we're aborting after handling -W help.
pub fn phase_2_configure_and_expand(sess: &Session,
mut krate: ast::Crate,
crate_name: &str,
addl_plugins: Option<Vec<String>>)
-> Option<ast::Crate> {
let time_passes = sess.time_passes();
// strip before anything else because crate metadata may use #[cfg_attr]
// and so macros can depend on configuration variables, such as
//
// #[macro_use] #[cfg(foo)]
// mod bar { macro_rules! baz!(() => {{}}) }
//
// baz! should not use this definition unless foo is enabled.
krate = time(time_passes, "configuration 1", krate, |krate|
syntax::config::strip_unconfigured_items(sess.diagnostic(), krate));
*sess.crate_types.borrow_mut() =
collect_crate_types(sess, &krate.attrs);
*sess.crate_metadata.borrow_mut() =
collect_crate_metadata(sess, &krate.attrs);
time(time_passes, "recursion limit", (), |_| {
middle::recursion_limit::update_recursion_limit(sess, &krate);
});
time(time_passes, "gated macro checking", (), |_| {
let features =
syntax::feature_gate::check_crate_macros(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate);
// these need to be set "early" so that expansion sees `quote` if enabled.
*sess.features.borrow_mut() = features;
sess.abort_if_errors();
});
krate = time(time_passes, "crate injection", krate, |krate|
syntax::std_inject::maybe_inject_crates_ref(krate,
sess.opts.alt_std_name.clone()));
let macros = time(time_passes, "macro loading", (), |_|
metadata::macro_import::read_macro_defs(sess, &krate));
let mut addl_plugins = Some(addl_plugins);
let registrars = time(time_passes, "plugin loading", (), |_|
plugin::load::load_plugins(sess, &krate, addl_plugins.take().unwrap()));
let mut registry = Registry::new(sess, &krate);
time(time_passes, "plugin registration", registrars, |registrars| {
if sess.features.borrow().rustc_diagnostic_macros {
registry.register_macro("__diagnostic_used",
diagnostics::plugin::expand_diagnostic_used);
registry.register_macro("__register_diagnostic",
diagnostics::plugin::expand_register_diagnostic);
registry.register_macro("__build_diagnostic_array",
diagnostics::plugin::expand_build_diagnostic_array);
}
for registrar in registrars {
registry.args_hidden = Some(registrar.args);
(registrar.fun)(&mut registry);
}
});
let Registry { syntax_exts, lint_passes, lint_groups,
llvm_passes, attributes, .. } = registry;
{
let mut ls = sess.lint_store.borrow_mut();
for pass in lint_passes {
ls.register_pass(Some(sess), true, pass);
}
for (name, to) in lint_groups {
ls.register_group(Some(sess), true, name, to);
}
*sess.plugin_llvm_passes.borrow_mut() = llvm_passes;
*sess.plugin_attributes.borrow_mut() = attributes.clone();
}
// Lint plugins are registered; now we can process command line flags.
if sess.opts.describe_lints {
super::describe_lints(&*sess.lint_store.borrow(), true);
return None;
}
sess.lint_store.borrow_mut().process_command_line(sess);
// Abort if there are errors from lint processing or a plugin registrar.
sess.abort_if_errors();
krate = time(time_passes, "expansion", (krate, macros, syntax_exts),
|(krate, macros, syntax_exts)| {
// Windows dlls do not have rpaths, so they don't know how to find their
// dependencies. It's up to us to tell the system where to find all the
// dependent dlls. Note that this uses cfg!(windows) as opposed to
// targ_cfg because syntax extensions are always loaded for the host
// compiler, not for the target.
let mut _old_path = OsString::new();
if cfg!(windows) {
_old_path = env::var_os("PATH").unwrap_or(_old_path);
let mut new_path = sess.host_filesearch(PathKind::All)
.get_dylib_search_paths();
new_path.extend(env::split_paths(&_old_path));
env::set_var("PATH", &env::join_paths(new_path).unwrap());
}
let features = sess.features.borrow();
let cfg = syntax::ext::expand::ExpansionConfig {
crate_name: crate_name.to_string(),
features: Some(&features),
recursion_limit: sess.recursion_limit.get(),
trace_mac: sess.opts.debugging_opts.trace_macros,
};
let ret = syntax::ext::expand::expand_crate(&sess.parse_sess,
cfg,
macros,
syntax_exts,
krate);
if cfg!(windows) {
env::set_var("PATH", &_old_path);
}
ret
}
);
// Needs to go *after* expansion to be able to check the results
// of macro expansion. This runs before #[cfg] to try to catch as
// much as possible (e.g. help the programmer avoid platform
// specific differences)
time(time_passes, "complete gated feature checking 1", (), |_| {
let features =
syntax::feature_gate::check_crate(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate, &attributes,
sess.opts.unstable_features);
*sess.features.borrow_mut() = features;
sess.abort_if_errors();
});
// JBC: make CFG processing part of expansion to avoid this problem:
// strip again, in case expansion added anything with a #[cfg].
krate = time(time_passes, "configuration 2", krate, |krate|
syntax::config::strip_unconfigured_items(sess.diagnostic(), krate));
krate = time(time_passes, "maybe building test harness", krate, |krate|
syntax::test::modify_for_testing(&sess.parse_sess,
&sess.opts.cfg,
krate,
sess.diagnostic()));<|fim▁hole|>
krate = time(time_passes, "prelude injection", krate, |krate|
syntax::std_inject::maybe_inject_prelude(krate));
time(time_passes, "checking that all macro invocations are gone", &krate, |krate|
syntax::ext::expand::check_for_macros(&sess.parse_sess, krate));
// One final feature gating of the true AST that gets compiled
// later, to make sure we've got everything (e.g. configuration
// can insert new attributes via `cfg_attr`)
time(time_passes, "complete gated feature checking 2", (), |_| {
let features =
syntax::feature_gate::check_crate(sess.codemap(),
&sess.parse_sess.span_diagnostic,
&krate, &attributes,
sess.opts.unstable_features);
*sess.features.borrow_mut() = features;
sess.abort_if_errors();
});
Some(krate)
}
pub fn assign_node_ids_and_map<'ast>(sess: &Session,
forest: &'ast mut ast_map::Forest)
-> ast_map::Map<'ast> {
struct NodeIdAssigner<'a> {
sess: &'a Session
}
impl<'a> ast_map::FoldOps for NodeIdAssigner<'a> {
fn new_id(&self, old_id: ast::NodeId) -> ast::NodeId {
assert_eq!(old_id, ast::DUMMY_NODE_ID);
self.sess.next_node_id()
}
}
let map = time(sess.time_passes(), "assigning node ids and indexing ast", forest, |forest|
ast_map::map_crate(forest, NodeIdAssigner { sess: sess }));
if sess.opts.debugging_opts.ast_json {
println!("{}", json::as_json(map.krate()));
}
map
}
/// Run the resolution, typechecking, region checking and other
/// miscellaneous analysis passes on the crate. Return various
/// structures carrying the results of the analysis.
pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: Session,
ast_map: ast_map::Map<'tcx>,
arenas: &'tcx ty::CtxtArenas<'tcx>,
name: String,
make_glob_map: resolve::MakeGlobMap,
f: F)
-> (Session, R)
where F: FnOnce(&ty::ctxt<'tcx>,
ty::CrateAnalysis) -> R
{
let time_passes = sess.time_passes();
let krate = ast_map.krate();
time(time_passes, "external crate/lib resolution", (), |_|
CrateReader::new(&sess).read_crates(krate));
let lang_items = time(time_passes, "language item collection", (), |_|
middle::lang_items::collect_language_items(krate, &sess));
let resolve::CrateMap {
def_map,
freevars,
export_map,
trait_map,
external_exports,
glob_map,
} =
time(time_passes, "resolution", (),
|_| resolve::resolve_crate(&sess, &ast_map, make_glob_map));
// Discard MTWT tables that aren't required past resolution.
syntax::ext::mtwt::clear_tables();
let named_region_map = time(time_passes, "lifetime resolution", (),
|_| middle::resolve_lifetime::krate(&sess, krate, &def_map));
time(time_passes, "looking for entry point", (),
|_| middle::entry::find_entry_point(&sess, &ast_map));
sess.plugin_registrar_fn.set(
time(time_passes, "looking for plugin registrar", (), |_|
plugin::build::find_plugin_registrar(
sess.diagnostic(), krate)));
let region_map = time(time_passes, "region resolution", (), |_|
middle::region::resolve_crate(&sess, krate));
time(time_passes, "loop checking", (), |_|
middle::check_loop::check_crate(&sess, krate));
time(time_passes, "static item recursion checking", (), |_|
middle::check_static_recursion::check_crate(&sess, krate, &def_map, &ast_map));
ty::with_ctxt(sess,
arenas,
def_map,
named_region_map,
ast_map,
freevars,
region_map,
lang_items,
stability::Index::new(krate),
|tcx| {
// passes are timed inside typeck
typeck::check_crate(tcx, trait_map);
time(time_passes, "const checking", (), |_|
middle::check_const::check_crate(tcx));
let (exported_items, public_items) =
time(time_passes, "privacy checking", (), |_|
rustc_privacy::check_crate(tcx, &export_map, external_exports));
// Do not move this check past lint
time(time_passes, "stability index", (), |_|
tcx.stability.borrow_mut().build(tcx, krate, &public_items));
time(time_passes, "intrinsic checking", (), |_|
middle::intrinsicck::check_crate(tcx));
time(time_passes, "effect checking", (), |_|
middle::effect::check_crate(tcx));
time(time_passes, "match checking", (), |_|
middle::check_match::check_crate(tcx));
time(time_passes, "liveness checking", (), |_|
middle::liveness::check_crate(tcx));
time(time_passes, "borrow checking", (), |_|
borrowck::check_crate(tcx));
time(time_passes, "rvalue checking", (), |_|
middle::check_rvalues::check_crate(tcx, krate));
// Avoid overwhelming user with errors if type checking failed.
// I'm not sure how helpful this is, to be honest, but it avoids a
// lot of annoying errors in the compile-fail tests (basically,
// lint warnings and so on -- kindck used to do this abort, but
// kindck is gone now). -nmatsakis
tcx.sess.abort_if_errors();
let reachable_map =
time(time_passes, "reachability checking", (), |_|
reachable::find_reachable(tcx, &exported_items));
time(time_passes, "death checking", (), |_| {
middle::dead::check_crate(tcx,
&exported_items,
&reachable_map)
});
let ref lib_features_used =
time(time_passes, "stability checking", (), |_|
stability::check_unstable_api_usage(tcx));
time(time_passes, "unused lib feature checking", (), |_|
stability::check_unused_or_stable_features(
&tcx.sess, lib_features_used));
time(time_passes, "lint checking", (), |_|
lint::check_crate(tcx, &exported_items));
// The above three passes generate errors w/o aborting
tcx.sess.abort_if_errors();
f(tcx, ty::CrateAnalysis {
export_map: export_map,
exported_items: exported_items,
public_items: public_items,
reachable: reachable_map,
name: name,
glob_map: glob_map,
})
})
}
/// Run the translation phase to LLVM, after which the AST and analysis can
/// be discarded.
pub fn phase_4_translate_to_llvm(tcx: &ty::ctxt, analysis: ty::CrateAnalysis)
-> trans::CrateTranslation {
let time_passes = tcx.sess.time_passes();
time(time_passes, "resolving dependency formats", (), |_|
dependency_format::calculate(tcx));
// Option dance to work around the lack of stack once closures.
time(time_passes, "translation", analysis, |analysis|
trans::trans_crate(tcx, analysis))
}
/// Run LLVM itself, producing a bitcode file, assembly file or object file
/// as a side effect.
pub fn phase_5_run_llvm_passes(sess: &Session,
trans: &trans::CrateTranslation,
outputs: &OutputFilenames) {
if sess.opts.cg.no_integrated_as {
let output_type = config::OutputTypeAssembly;
time(sess.time_passes(), "LLVM passes", (), |_|
write::run_passes(sess, trans, &[output_type], outputs));
write::run_assembler(sess, outputs);
// Remove assembly source, unless --save-temps was specified
if !sess.opts.cg.save_temps {
fs::remove_file(&outputs.temp_path(config::OutputTypeAssembly)).unwrap();
}
} else {
time(sess.time_passes(), "LLVM passes", (), |_|
write::run_passes(sess,
trans,
&sess.opts.output_types,
outputs));
}
sess.abort_if_errors();
}
/// Run the linker on any artifacts that resulted from the LLVM run.
/// This should produce either a finished executable or library.
pub fn phase_6_link_output(sess: &Session,
trans: &trans::CrateTranslation,
outputs: &OutputFilenames) {
time(sess.time_passes(), "linking", (), |_|
link::link_binary(sess,
trans,
outputs,
&trans.link.crate_name));
}
fn escape_dep_filename(filename: &str) -> String {
// Apparently clang and gcc *only* escape spaces:
// http://llvm.org/klaus/clang/commit/9d50634cfc268ecc9a7250226dd5ca0e945240d4
filename.replace(" ", "\\ ")
}
fn write_out_deps(sess: &Session,
input: &Input,
outputs: &OutputFilenames,
id: &str) {
let mut out_filenames = Vec::new();
for output_type in &sess.opts.output_types {
let file = outputs.path(*output_type);
match *output_type {
config::OutputTypeExe => {
for output in sess.crate_types.borrow().iter() {
let p = link::filename_for_input(sess, *output,
id, &file);
out_filenames.push(p);
}
}
_ => { out_filenames.push(file); }
}
}
// Write out dependency rules to the dep-info file if requested with
// --dep-info
let deps_filename = match sess.opts.write_dependency_info {
// Use filename from --dep-file argument if given
(true, Some(ref filename)) => filename.clone(),
// Use default filename: crate source filename with extension replaced
// by ".d"
(true, None) => match *input {
Input::File(..) => outputs.with_extension("d"),
Input::Str(..) => {
sess.warn("can not write --dep-info without a filename \
when compiling stdin.");
return
},
},
_ => return,
};
let result = (|| -> io::Result<()> {
// Build a list of files used to compile the output and
// write Makefile-compatible dependency rules
let files: Vec<String> = sess.codemap().files.borrow()
.iter()
.filter(|fmap| fmap.is_real_file())
.filter(|fmap| !fmap.is_imported())
.map(|fmap| escape_dep_filename(&fmap.name))
.collect();
let mut file = try!(fs::File::create(&deps_filename));
for path in &out_filenames {
try!(write!(&mut file,
"{}: {}\n\n", path.display(), files.connect(" ")));
}
Ok(())
})();
match result {
Ok(()) => {}
Err(e) => {
sess.fatal(&format!("error writing dependencies to `{}`: {}",
deps_filename.display(), e));
}
}
}
pub fn collect_crate_types(session: &Session,
attrs: &[ast::Attribute]) -> Vec<config::CrateType> {
// Unconditionally collect crate types from attributes to make them used
let attr_types: Vec<config::CrateType> = attrs.iter().filter_map(|a| {
if a.check_name("crate_type") {
match a.value_str() {
Some(ref n) if *n == "rlib" => {
Some(config::CrateTypeRlib)
}
Some(ref n) if *n == "dylib" => {
Some(config::CrateTypeDylib)
}
Some(ref n) if *n == "lib" => {
Some(config::default_lib_output())
}
Some(ref n) if *n == "staticlib" => {
Some(config::CrateTypeStaticlib)
}
Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable),
Some(_) => {
session.add_lint(lint::builtin::UNKNOWN_CRATE_TYPES,
ast::CRATE_NODE_ID,
a.span,
"invalid `crate_type` \
value".to_string());
None
}
_ => {
session.span_err(a.span, "`crate_type` requires a value");
session.note("for example: `#![crate_type=\"lib\"]`");
None
}
}
} else {
None
}
}).collect();
// If we're generating a test executable, then ignore all other output
// styles at all other locations
if session.opts.test {
return vec!(config::CrateTypeExecutable)
}
// Only check command line flags if present. If no types are specified by
// command line, then reuse the empty `base` Vec to hold the types that
// will be found in crate attributes.
let mut base = session.opts.crate_types.clone();
if base.is_empty() {
base.extend(attr_types);
if base.is_empty() {
base.push(link::default_output_for_target(session));
}
base.sort();
base.dedup();
}
base.into_iter().filter(|crate_type| {
let res = !link::invalid_output_for_target(session, *crate_type);
if !res {
session.warn(&format!("dropping unsupported crate type `{}` \
for target `{}`",
*crate_type, session.opts.target_triple));
}
res
}).collect()
}
pub fn collect_crate_metadata(session: &Session,
_attrs: &[ast::Attribute]) -> Vec<String> {
session.opts.cg.metadata.clone()
}
pub fn build_output_filenames(input: &Input,
odir: &Option<PathBuf>,
ofile: &Option<PathBuf>,
attrs: &[ast::Attribute],
sess: &Session)
-> OutputFilenames {
match *ofile {
None => {
// "-" as input file will cause the parser to read from stdin so we
// have to make up a name
// We want to toss everything after the final '.'
let dirpath = match *odir {
Some(ref d) => d.clone(),
None => PathBuf::new()
};
// If a crate name is present, we use it as the link name
let stem = sess.opts.crate_name.clone().or_else(|| {
attr::find_crate_name(attrs).map(|n| n.to_string())
}).unwrap_or(input.filestem());
OutputFilenames {
out_directory: dirpath,
out_filestem: stem,
single_output_file: None,
extra: sess.opts.cg.extra_filename.clone(),
}
}
Some(ref out_file) => {
let ofile = if sess.opts.output_types.len() > 1 {
sess.warn("ignoring specified output filename because multiple \
outputs were requested");
None
} else {
Some(out_file.clone())
};
if *odir != None {
sess.warn("ignoring --out-dir flag due to -o flag.");
}
let cur_dir = Path::new("");
OutputFilenames {
out_directory: out_file.parent().unwrap_or(cur_dir).to_path_buf(),
out_filestem: out_file.file_stem().unwrap_or(OsStr::new(""))
.to_str().unwrap().to_string(),
single_output_file: ofile,
extra: sess.opts.cg.extra_filename.clone(),
}
}
}
}<|fim▁end|>
| |
<|file_name|>noUndecoratedClassWithNgFieldsRule.ts<|end_file_name|><|fim▁begin|>import * as Lint from 'tslint';
import * as ts from 'typescript';
const RULE_FAILURE = `Undecorated class defines fields with Angular decorators. Undecorated ` +
`classes with Angular fields cannot be extended in Ivy since no definition is generated. ` +
`Add a "@Directive" decorator to fix this.`;
/**
* Rule that doesn't allow undecorated class declarations with fields using Angular
* decorators.
*/
export class Rule extends Lint.Rules.TypedRule {
applyWithProgram(sourceFile: ts.SourceFile, program: ts.Program): Lint.RuleFailure[] {
return this.applyWithWalker(
new Walker(sourceFile, this.getOptions(), program.getTypeChecker()));
}
}
class Walker extends Lint.RuleWalker {
constructor(
sourceFile: ts.SourceFile, options: Lint.IOptions, private _typeChecker: ts.TypeChecker) {
super(sourceFile, options);
}
visitClassDeclaration(node: ts.ClassDeclaration) {
if (this._hasAngularDecorator(node)) {
return;
}
for (let member of node.members) {
if (member.decorators && this._hasAngularDecorator(member)) {
this.addFailureAtNode(node, RULE_FAILURE);
return;
}
}
}
/** Checks if the specified node has an Angular decorator. */
private _hasAngularDecorator(node: ts.Node): boolean {
return !!node.decorators && node.decorators.some(d => {
if (!ts.isCallExpression(d.expression) ||
!ts.isIdentifier(d.expression.expression)) {
return false;
}
<|fim▁hole|> }
/** Gets the module import of the given identifier if imported. */
private _getModuleImportOfIdentifier(node: ts.Identifier): string | null {
const symbol = this._typeChecker.getSymbolAtLocation(node);
if (!symbol || !symbol.declarations || !symbol.declarations.length) {
return null;
}
const decl = symbol.declarations[0];
if (!ts.isImportSpecifier(decl)) {
return null;
}
const importDecl = decl.parent.parent.parent;
const moduleSpecifier = importDecl.moduleSpecifier;
return ts.isStringLiteral(moduleSpecifier) ? moduleSpecifier.text : null;
}
}<|fim▁end|>
|
const moduleImport = this._getModuleImportOfIdentifier(d.expression.expression);
return moduleImport ? moduleImport.startsWith('@angular/core') : false;
});
|
<|file_name|>ejemplo04model.js<|end_file_name|><|fim▁begin|>var mongoose = require("mongoose");
// definicion del esquema
var Schema = mongoose.Schema;
var LibroSchema = new Schema({
titulo: String,
autor: String,
campos_biblioteca: {
ejemplares: Number,
ultima_reserva: Date
}
})<|fim▁hole|> Libro: mongoose.model("Libro", LibroSchema),
Reserva: mongoose.model("Libro", LibroSchema)
}<|fim▁end|>
|
var ReservaSchema = new Schema({});
module.exports = {
|
<|file_name|>list.py<|end_file_name|><|fim▁begin|># Copyright 2012 Rackspace
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from cliff.lister import Lister
from service_registry_cli.utils import BaseListCommand, get_client
class ListCommand(BaseListCommand, Lister):
"""
Return a list of the configuration values.
"""
log = logging.getLogger(__name__)
def get_parser(self, prog_name):
parser = super(ListCommand, self).get_parser(prog_name=prog_name)
parser.add_argument('--namespace', dest='namespace')
return parser
def take_action(self, parsed_args):<|fim▁hole|>
marker = parsed_args.marker if parsed_args.marker else None
limit = parsed_args.limit if parsed_args.limit else None
kwargs = {'marker': marker, 'limit': limit}
if parsed_args.namespace:
kwargs['namespace'] = parsed_args.namespace
result = client.configuration.list_for_namespace(**kwargs)
else:
result = client.configuration.list(**kwargs)
values = result['values']
metadata = result['metadata']
parsed_args.returned_metadata = metadata
result = [(value['id'], value['value'])
for value in values]
return (('ID', 'Value'), result)<|fim▁end|>
|
client = get_client(parsed_args)
|
<|file_name|>test.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
declare var test: number;
|
<|file_name|>change-tracker.js<|end_file_name|><|fim▁begin|>/*
* Keeps track of items being created or deleted in a list
* - emits events about changes when .poll is called
* - events are: delete, create
*
* Usage:
*
* var tracker = changeTracker.create(updateItems, items);
*
* - updateItems is a function to fetch the current state of the items you want
* to watch. It should return a list of objects with a unique 'name'.
*
* - items is the current list, as given by running updateItems now
*
* tracker.on("create", createListener);
* tracker.on("delete", deleteListener);
* tracker.poll();
*
* When calling poll, updateItems is called, the result is compared to the old
* list, and events are emitted.
*
*/
var EventEmitter = require("events").EventEmitter;
var when = require("when");
function create(updateItems, items) {
var instance = Object.create(this);<|fim▁hole|>}
function eq(item1) {
return function (item2) { return item1.name === item2.name; };
}
function notIn(coll) {
return function (item) { return !coll.some(eq(item)); };
}
function poll() {
var d = when.defer();
this.updateItems(function (err, after) {
if (err) { return d.reject(err); }
var before = this.items;
var created = after.filter(notIn(before));
var deleted = before.filter(notIn(after));
created.forEach(this.emit.bind(this, "create"));
deleted.forEach(this.emit.bind(this, "delete"));
this.items = after;
d.resolve();
}.bind(this));
return d.promise;
}
module.exports = new EventEmitter();
module.exports.create = create;
module.exports.poll = poll;<|fim▁end|>
|
instance.updateItems = updateItems;
instance.items = items;
return instance;
|
<|file_name|>inquest_server.rs<|end_file_name|><|fim▁begin|>extern crate docopt;
extern crate grpc;
extern crate inquest;
extern crate rustc_serialize;
use std::collections::{BinaryHeap, HashMap, BTreeMap};
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
use std::sync::{Arc, Mutex, RwLock};
use docopt::Docopt;
use grpc::error::GrpcError;
use grpc::result::GrpcResult;
use inquest::pb::proddle::{CancelProbeRequest, GetBucketKeysRequest, GetProbesRequest, SearchRequest, SendProbeResultsRequest, ScheduleProbeRequest};
use inquest::pb::proddle::{CancelProbeReply, GetBucketKeysReply, GetProbesReply, SearchReply, SendProbeResultsReply, ScheduleProbeReply};
use inquest::pb::proddle::{Probe, Protocol};
use inquest::pb::proddle_grpc::{ProbeCache, ProbeCacheServer, Scheduler, SchedulerServer};
use inquest::writer::{FileWriter, PrintWriter, Writer};
const USAGE: &'static str = "
Inquest Server
Usage:
inquest_server (-h | --help)
inquest_server (--print | --file <directory> [--max-filesize=<max-filesize>]) [--bucket-count=<bucket-count>]
Options:
--directory=<directory> Directory to write result files.
-h --help Display this screen.
--bucket-count=<bucket-count> Number of buckets [default: 1000];
--max-filesize=<max-filesize> Maxiumum filesize for results files (in MB) [default: 5];
--print Print results to stdout;
--file Write results out to files.
";
#[derive(Debug, RustcDecodable)]
struct Args {
arg_directory: String,
flag_bucket_count: u64,
flag_max_filesize: u32,
flag_print: bool,
flag_file: bool,
}
fn main() {
let args: Args = Docopt::new(USAGE)
.and_then(|d| d.decode())
.unwrap_or_else(|e| e.exit());
//initialize writer
let writer = if args.flag_print {
Arc::new(Mutex::new(Box::new(PrintWriter::new()) as Box<Writer + Send>))
} else if args.flag_file {
Arc::new(Mutex::new(Box::new(FileWriter::new(&args.arg_directory, 1024 * 1024 * args.flag_max_filesize)) as Box<Writer + Send>))
} else {
panic!("Unable to start inquest_server without writer type. Please specify '--print' or '--file' in arguments.");
};
//intialize server variables
let probe_map = Arc::new(RwLock::new(BTreeMap::new()));
{
//add buckets to probe_map
let mut counter = 0;
let delta = u64::max_value() / args.flag_bucket_count;
let mut probe_map = probe_map.write().unwrap();
for _ in 0..args.flag_bucket_count {
probe_map.insert(counter, HashMap::new());
counter += delta;
}
}
let _probe_cache_server = ProbeCacheServer::new(52890, ProbeCacheImpl::new(probe_map.clone(), writer.clone()));
let _scheduler_server = SchedulerServer::new(12289, SchedulerImpl::new(probe_map.clone(), writer.clone()));
loop {
std::thread::park();
}<|fim▁hole|>
struct ProbeCacheImpl {
probe_map: Arc<RwLock<BTreeMap<u64, HashMap<String, HashMap<Protocol, Vec<Probe>>>>>>, //map<domain_hash, map<domain, vec<probe>>>
writer: Arc<Mutex<Box<Writer + Send>>>,
}
impl ProbeCacheImpl {
fn new(probe_map: Arc<RwLock<BTreeMap<u64, HashMap<String, HashMap<Protocol, Vec<Probe>>>>>>, writer: Arc<Mutex<Box<Writer + Send>>>) -> ProbeCacheImpl {
ProbeCacheImpl {
probe_map: probe_map,
writer: writer,
}
}
}
impl ProbeCache for ProbeCacheImpl {
fn GetBucketKeys(&self, _: GetBucketKeysRequest) -> GrpcResult<GetBucketKeysReply> {
let bucket_keys;
{
let probe_map = self.probe_map.read().unwrap();
bucket_keys = probe_map.keys().cloned().collect();
}
Ok(inquest::create_get_bucket_keys_reply(bucket_keys))
}
fn GetProbes(&self, request: GetProbesRequest) -> GrpcResult<GetProbesReply> {
//compute local bucket hashes
let mut bucket_hashes = HashMap::new();
{
let probe_map = self.probe_map.read().unwrap();
for (bucket_key, domain_map) in probe_map.iter() {
let mut hasher = DefaultHasher::new();
//add all probe ids to binary heap
let mut probe_ids = BinaryHeap::new();
for protocol_map in domain_map.values() {
for probes in protocol_map.values() {
for probe in probes {
probe_ids.push(probe.get_probe_id());
}
}
}
//loo over probe_ids in order
while let Some(probe_id) = probe_ids.pop() {
probe_id.hash(&mut hasher);
}
bucket_hashes.insert(bucket_key.to_owned(), hasher.finish());
}
}
//compare hashes
let mut bucket_probes = HashMap::new();
for bucket_hash in request.get_bucket_hash() {
match bucket_hashes.get(&bucket_hash.get_bucket_key()) {
Some(local_bucket_hash) => {
//check if bucket hashes differ
if &bucket_hash.get_hash() != local_bucket_hash {
let reply_probes = bucket_probes.entry(bucket_hash.get_bucket_key()).or_insert(Vec::new());
//add all local probes
let probe_map = self.probe_map.read().unwrap();
for domain_map in probe_map.get(&bucket_hash.get_bucket_key()) {
for protocol_map in domain_map.values() {
for probes in protocol_map.values() {
for probe in probes {
reply_probes.push(probe.clone());
}
}
}
}
}
},
None => continue,
}
}
Ok(inquest::create_get_probes_reply(bucket_probes))
}
fn SendProbeResults(&self, request: SendProbeResultsRequest) -> GrpcResult<SendProbeResultsReply> {
let mut writer = self.writer.lock().unwrap();
for probe_result in request.get_probe_result() {
let _ = writer.write_probe_result(probe_result);
}
Ok(inquest::create_send_probe_results_reply())
}
}
struct SchedulerImpl {
probe_map: Arc<RwLock<BTreeMap<u64, HashMap<String, HashMap<Protocol, Vec<Probe>>>>>>, //map<domain_hash, map<domain, vec<probe>>>
writer: Arc<Mutex<Box<Writer + Send>>>,
}
impl SchedulerImpl {
fn new(probe_map: Arc<RwLock<BTreeMap<u64, HashMap<String, HashMap<Protocol, Vec<Probe>>>>>>, writer: Arc<Mutex<Box<Writer + Send>>>) -> SchedulerImpl {
SchedulerImpl {
probe_map: probe_map,
writer: writer,
}
}
}
impl Scheduler for SchedulerImpl {
fn CancelProbe(&self, request: CancelProbeRequest) -> GrpcResult<CancelProbeReply> {
let key = inquest::compute_domain_hash(request.get_domain());
let mut probe_map = self.probe_map.write().unwrap();
//determine correct bucket key
let mut bucket_key = 0;
for map_key in probe_map.keys() {
if *map_key > key {
break;
}
bucket_key = *map_key;
}
let mut domain_map = probe_map.get_mut(&bucket_key).unwrap();
let remove_domain;
{
let mut protocol_map = match domain_map.get_mut(request.get_domain()) {
Some(protocol_map) => protocol_map,
None => return Err(GrpcError::Other("domain doesn't exist")),
};
//loop over protocols
for protocol in request.get_protocol() {
let remove_protocol;
{
let mut probes = match protocol_map.get_mut(protocol) {
Some(probes) => probes,
None => continue,
};
match protocol {
&Protocol::HTTP => {
if request.has_url_suffix() {
let mut index = -1;
for (i, p) in probes.iter().enumerate() {
if p.get_url_suffix() == request.get_url_suffix() {
index = i as i16;
break;
}
}
if index != -1 {
probes.remove(index as usize);
}
} else {
probes.clear();
}
},
_ => probes.clear(),
}
remove_protocol = probes.len() == 0;
}
//remove protocol if there are no probes scheduled
if remove_protocol {
protocol_map.remove(protocol);
}
}
remove_domain = protocol_map.len() == 0;
}
//remove domain if there are no probes scheduled
if remove_domain {
domain_map.remove(request.get_domain());
}
Ok(inquest::create_cancel_probe_reply())
}
fn Search(&self, request: SearchRequest) -> GrpcResult<SearchReply> {
let key = inquest::compute_domain_hash(request.get_domain());
let probe_map = self.probe_map.read().unwrap();
//determine correct bucket key
let mut bucket_key = 0;
for map_key in probe_map.keys() {
if *map_key > key {
break;
}
bucket_key = *map_key;
}
//find map containing protocols pertaining to the given domain
let domain_map = probe_map.get(&bucket_key).unwrap();
let protocol_map = match domain_map.get(request.get_domain()) {
Some(protocol_map) => protocol_map,
None => return Err(GrpcError::Other("domain does not exist")),
};
//loop over protocol arguments and return respective probes
let mut search_probes: Vec<Probe> = Vec::new();
for protocol in request.get_protocol() {
match protocol_map.get(protocol) {
Some(probes) => {
for p in probes {
search_probes.push(p.clone());
}
},
None => continue,
}
}
Ok(inquest::create_search_reply(search_probes))
}
fn ScheduleProbe(&self, request: ScheduleProbeRequest) -> GrpcResult<ScheduleProbeReply> {
for probe in request.get_probe() {
let key = inquest::compute_domain_hash(probe.get_domain());
let probe_id = inquest::compute_probe_hash(probe);
let mut probe_map = self.probe_map.write().unwrap();
//determine correct bucket key
let mut bucket_key = 0;
for map_key in probe_map.keys() {
if *map_key > key {
break;
}
bucket_key = *map_key;
}
//see if probe already exists
let mut domain_map = probe_map.get_mut(&bucket_key).unwrap();
let mut protocol_map = domain_map.entry(probe.get_domain().to_owned()).or_insert(HashMap::new());
let mut probes = protocol_map.entry(probe.get_protocol()).or_insert(Vec::new());
let mut found = false;
for p in probes.iter() {
if p.get_probe_id() == probe_id {
found = true;
break;
}
}
if !found {
//add probe
let mut probe = probe.clone();
probe.set_probe_id(probe_id);
{
let mut writer = self.writer.lock().unwrap();
let _ = writer.write_probe(&probe);
}
probes.push(probe);
}
}
Ok(inquest::create_schedule_probe_reply())
}
}<|fim▁end|>
|
}
|
<|file_name|>submitServiceDescription.py<|end_file_name|><|fim▁begin|>import urllib, urllib2, sys, httplib
url = "/MELA/REST_WS"
HOST_IP="109.231.126.217:8180"
#HOST_IP="localhost:8180"<|fim▁hole|> connection = httplib.HTTPConnection(HOST_IP)
description_file = open("./costTest.xml", "r")
body_content = description_file.read()
headers={
'Content-Type':'application/xml; charset=utf-8',
'Accept':'application/json, multipart/related'
}
connection.request('PUT', url+'/service', body=body_content,headers=headers,)
result = connection.getresponse()
print result.read()<|fim▁end|>
|
if __name__=='__main__':
|
<|file_name|>search_near_title.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Copyright (c) 2014, Kersten Doering <[email protected]>, Bjoern Gruening <[email protected]>
"""
#Kersten Doering 16.06.2014
#check http://xapian.org/docs/queryparser.html for syntax and functions
import xappy
searchConn = xappy.SearchConnection("xapian/xapian2015")
searchConn.reopen()
#########################
querystring1 = "scaffolds"
querystring2 = "finger"
#in the following example, "pancreatic cancer" and "Erlotinib" are not allowed to have more than 4 other words between them"
#"title" and "text" are searched with Xapian
#"pancreatic cancer" is split into two terms and connected with the other query using "NEAR"
terms = querystring1.split(' ')
querystring1 = " NEAR/3 ".join(terms)#not more than 2 words are allowed to be between "pancreatic" and "cancer"
#NEAR searches without considering the word order, while in case of ADJ the word order is fixed
title = querystring1 + " NEAR/5 " + querystring2#adjusting the limit of words between the terms changes the results
#same query can be done for the field "text" which is the PubMed abstract and both query fields can be connected with logical OR - look at search_title_or_text.py or search_not_title_or_text.py
#notice that this becomes a phrase search now for the single terms
title_q = searchConn.query_field('title', title)
<|fim▁hole|>
#save all machting documents in "results" (starting with rank 0 - check help documentation of function "search")
results = searchConn.search(title_q, 0, searchConn.get_doccount())
print "number of matches: ", results.matches_estimated
### debug: ###
#print "Rank\tPubMed-ID\tTitle (query term highlighted)"
#for index,result in enumerate(results):
# if "<b>" in results.get_hit(index).highlight('title')[0]:
# print index, "\t", result.id, "\t", results.get_hit(index).highlight('title')[0]
# else:
# print resuld.id, "does not contain a highlighted term"
## if index > 5:
## break
#HTML output:
#open HTML file
outfile = open("Xapian_query_results_NEAR.html","w")
#document header
start_string = """
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<html><head>
<meta http-equiv="content-type" content="text/html; charset=windows-1252">
<title>Xapian_query_results_NEAR</title>
</head>
<body>
<table border="1" width="100%">
<tbody><tr>
<th>Rank</th>
<th>PubMed-ID</th>
<th>Title (query term highlighted)</th>
</tr>
"""
#string for finishing HTML document
end_string = """
</tbody></table>
</body></html>
"""
#write header
outfile.write(start_string)
print "### save results in Xapian_query_results_NEAR.html ###"
#write the first 1000 PubMed-IDs and titles with term "pancreatic" or stem "pancreat"
for index,result in enumerate(results):
outfile.write("<tr><td>" + str(index) + "</td><td>" + result.id + "</td><td>" + results.get_hit(index).highlight('title')[0] +"</td></tr>")
# if index == 999:
# break
#write string for finishing HTML document
outfile.write(end_string)
#close file connection
outfile.close()
#close connection to Xapian database
#searchConn.close()<|fim▁end|>
|
print "search query: ", title_q
|
<|file_name|>introspection.py<|end_file_name|><|fim▁begin|># coding=utf-8
import logging; logger = logging.getLogger("robots.introspection")
import threading
introspection = None
# disable introspection for now
if False:
try:
import Pyro4
import Pyro4.errors
uri = "PYRONAME:robots.introspection" # uses name server
try:
introspection = Pyro4.Proxy(uri)
introspection.initiate(str(0)) # 0 is the action ID of the main process
logger.info("Connection to the introspection server established.")
except Pyro4.errors.CommunicationError:
logger.warning("Introspection server not running. No introspection.")
introspection = None
except Pyro4.errors.NamingError:
logger.warning("Introspection server not running (no name server). No introspection.")
introspection = None
<|fim▁hole|><|fim▁end|>
|
except ImportError:
pass
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os, io
from setuptools import setup
from SVNOnline.SVNOnline import __version__
here = os.path.abspath(os.path.dirname(__file__))
README = io.open(os.path.join(here, 'README.rst'), encoding='UTF-8').read()
CHANGES = io.open(os.path.join(here, 'CHANGES.rst'), encoding='UTF-8').read()
setup(name='SVNOnline',
version=__version__,
description='A svn online client.',
keywords=('svn', 'svn client', 'svn online'),
long_description=README + '\n\n\n' + CHANGES,<|fim▁hole|> 'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
author='sintrb',
author_email='[email protected]',
license='Apache',
packages=['SVNOnline'],
scripts=['SVNOnline/SVNOnline', 'SVNOnline/SVNOnline.bat'],
include_package_data=True,
install_requires=['svn==0.3.36'],
zip_safe=False)<|fim▁end|>
|
url='https://github.com/sintrb/SVNOnline',
classifiers=[
'Intended Audience :: Developers',
|
<|file_name|>check_new_bsd_license.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
bsd = '''
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Willow Garage, Inc. nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
skip_check_tag = "Willow Garage BSD License not applicable"
nerrors = 0
import os
autofix = False
if "ECTO_LICENSE_AUTOFIX" in os.environ:
autofix = True
files = sys.argv[1:]
commentchars = { '.cpp' : '//',
'.hpp' : '//',
'.py' : '#',
'.cmake' : '#',
'.txt' : '#'
}
for filename in files:
txt = open(filename).read()
thiserror = False
result = filename + "..."
if skip_check_tag in txt:
result += "ok"
else:
for l in bsd.split('\n'):
if l not in txt:
result += "missing: " + l + "\n"
thiserror = True
if thiserror:
nerrors += 1
else:
result += "ok"
if thiserror and autofix:
newf = open(filename, "w")
for k, v in commentchars.iteritems():
if filename.endswith(k):
cmt = v
if txt.startswith('#!'):
hashbang, rest = txt.split('\n', 1)
print >>newf, hashbang
else:
rest = txt
print >>newf, cmt, bsd.replace('\n', '\n' + cmt + ' ')
print >>newf, rest
newf.close()<|fim▁hole|>sys.exit(nerrors)<|fim▁end|>
|
result += filename + "AUTOFIXED"
print result
|
<|file_name|>PositionBoTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2004-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kpme.pm.position;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.kuali.kpme.core.api.groupkey.HrGroupKey;
import org.kuali.kpme.core.groupkey.HrGroupKeyBoTest;
import org.kuali.kpme.core.kfs.coa.businessobject.Account;
import org.kuali.kpme.pm.api.classification.qual.ClassificationQualification;
import org.kuali.kpme.pm.api.position.Position;
import org.kuali.kpme.pm.api.position.PositionDuty;
import org.kuali.kpme.pm.api.position.PositionQualification;
import org.kuali.kpme.pm.api.position.PstnFlag;
import org.kuali.kpme.pm.api.position.funding.PositionFunding;
import org.kuali.kpme.pm.api.positiondepartment.PositionDepartment;
import org.kuali.kpme.pm.api.positionresponsibility.PositionResponsibility;
import org.kuali.kpme.pm.classification.ClassificationBo;
import org.kuali.kpme.pm.classification.qual.ClassificationQualificationBoTest;
import org.kuali.kpme.pm.position.funding.PositionFundingBoTest;
import org.kuali.kpme.pm.positionresponsibility.PositionResponsibilityBoTest;
import org.kuali.rice.krad.service.BusinessObjectService;
public class PositionBoTest {
    // Named Position fixtures shared by every test in this class, keyed by
    // fixture name (currently only "TST-PSTN", built in the static initializer).
    private static Map<String, Position> testPositionBos;
    // Builder used by the static initializer to assemble the "TST-PSTN" fixture.
    public static Position.Builder builder = Position.Builder.create("1", "ISU-IA");
    // Mocked KRAD service, configured per-test in setup().
    private BusinessObjectService mockBusinessObjectService;
    static {
        testPositionBos = new HashMap<String, Position>();
        // Scalar attributes of the "TST-PSTN" position fixture.
        builder.setActive(true);
        builder.setBenefitsEligible("N");
        builder.setCategory("category");
        builder.setClassificationTitle("classTitle");
        builder.setContract("contract");
        builder.setContractType("contractType");
        builder.setCreateTime(DateTime.now());
        builder.setDescription("desc");
        builder.setGroupKeyCode("ISU-IA");
        builder.setGroupKey(HrGroupKey.Builder.create(HrGroupKeyBoTest.getTestHrGroupKey("ISU-IA")));
        builder.setHrPositionId("KPME_TEST_00001");
        builder.setId(builder.getHrPositionId());
        builder.setLeaveEligible("leaveEligible");
        builder.setMaxPoolHeadCount(0);
        builder.setObjectId("0804716a-cbb7-11e3-9cd3-51a754ad6a0a");
        builder.setPayGrade("XH");
        builder.setPayStep("YY");
        builder.setPositionNumber("1");
        builder.setPositionClass("positionClass");
        builder.setAppointmentType("appt");
        builder.setReportsToWorkingTitle("rptToWorkTitle");
        builder.setPrimaryDepartment("dept1");
        builder.setProcess("process");
        builder.setPmPositionClassId("KPME_TEST_2000");
        // One-element child collections, each borrowing its fixture from the
        // corresponding *BoTest helper and re-keyed to this position's id.
        List<PositionResponsibility.Builder> positionResponsilityList = new ArrayList<PositionResponsibility.Builder>();
        PositionResponsibility.Builder responsibilityBuilder = PositionResponsibility.Builder.create(PositionResponsibilityBoTest.getPositionResponsibility("TST-PSTNRESPOPT"));
        responsibilityBuilder.setHrPositionId(builder.getHrPositionId());
        positionResponsilityList.add(responsibilityBuilder);
        builder.setPositionResponsibilityList(positionResponsilityList);
        List<PositionDepartment.Builder> positionDeptList = new ArrayList<PositionDepartment.Builder>();
        PositionDepartment.Builder deptBuilder = PositionDepartment.Builder.create(PositionDataBoTest.getPositionDepartment("TST-PSTNDEPT"));
        deptBuilder.setHrPositionId(builder.getHrPositionId());
        positionDeptList.add(deptBuilder);
        builder.setDepartmentList(positionDeptList);
        List<PstnFlag.Builder> pstnFlagList = new ArrayList<PstnFlag.Builder>();
        PstnFlag.Builder flagBuilder = PstnFlag.Builder.create(PstnFlagBoTest.getPstnFlag("TST-PSTNFLAG"));
        flagBuilder.setHrPositionId(builder.getHrPositionId());
        pstnFlagList.add(flagBuilder);
        builder.setFlagList(pstnFlagList);
        List<PositionDuty.Builder> positionDutyList = new ArrayList<PositionDuty.Builder>();
        PositionDuty.Builder dutyBuilder = PositionDuty.Builder.create(PositionDutyBoTest.getPositionDutyBo("TST-PSTNDUTY"));
        dutyBuilder.setHrPositionId(builder.getHrPositionId());
        positionDutyList.add(dutyBuilder);
        builder.setDutyList(positionDutyList);
        List<PositionFunding.Builder> positionFundingList = new ArrayList<PositionFunding.Builder>();
        PositionFunding.Builder fundingBuilder = PositionFundingBoTest.getPositionFunding("9999") == null ? null : PositionFunding.Builder.create(PositionFundingBoTest.getPositionFunding("9999"));
        fundingBuilder.setHrPositionId(builder.getHrPositionId());
        // Account number must match the mocked Account created in setup().
        fundingBuilder.setAccount("KPME_TEST_ACCOUNT");
        positionFundingList.add(fundingBuilder);
        builder.setFundingList(positionFundingList);
        List<PositionQualification.Builder> positionQualificationList = new ArrayList<PositionQualification.Builder>();
        PositionQualification.Builder qualificationBuilder = PositionQualification.Builder.create(PositionQualificationBoTest.getPositionQualificationBo("TST-PSTNQLFCTN"));
        qualificationBuilder.setHrPositionId(builder.getHrPositionId());
        positionQualificationList.add(qualificationBuilder);
        builder.setQualificationList(positionQualificationList);
        List<ClassificationQualification.Builder> classificationQualificationList = new ArrayList<ClassificationQualification.Builder>();
        ClassificationQualification.Builder classQualBuilder = ClassificationQualification.Builder.create(ClassificationQualificationBoTest.getClassificationQualificationBo("TST-CLASSFCTNQLFCTN"));
        classQualBuilder.setPmPositionClassId("KPME_TEST_2000");
        classificationQualificationList.add(classQualBuilder);
        builder.setRequiredQualList(classificationQualificationList);
        // Freeze the builder into the immutable fixture.
        testPositionBos.put("TST-PSTN", builder.build());
    }
@Test
public void testNotEqualsWithGroup() {
Position immutable = PositionBoTest.getPosition("TST-PSTN");
PositionBo bo = PositionBo.from(immutable);
Assert.assertFalse(bo.equals(immutable));
Assert.assertFalse(immutable.equals(bo));
// this is simply to prevent invocations of refresh reference
ClassificationBo classificationBo = new ClassificationBo();
bo.getRequiredQualList().get(0).setOwner(classificationBo);
//bo.getFundingList().get(0).setBusinessObjectService(mockBusinessObjectService);
Position im2 = PositionBo.to(bo);
PositionBo bo2 = PositionBo.from(im2);<|fim▁hole|> //bo2.getFundingList().get(0).setBusinessObjectService(mockBusinessObjectService);
Position im3 = PositionBo.to(bo2);
Assert.assertEquals(im2, im3);
}
public static Position getPosition(String Position) {
Position position = testPositionBos.get(Position);
return position;
}
    @Before
    public void setup() throws Exception {
        // Minimal active KFS Account whose number matches the funding
        // fixture built in the static initializer ("KPME_TEST_ACCOUNT").
        Account account = new Account();
        account.setAccountNumber("KPME_TEST_ACCOUNT");
        account.setChartOfAccountsCode("MC");
        account.setActive(true);
        // Primary-key criteria the code under test is expected to query with.
        Map<String, String> fields = new HashMap<String, String>();
        fields.put("accountNumber", "KPME_TEST_ACCOUNT");
        fields.put("active", "true");
        mockBusinessObjectService = mock(BusinessObjectService.class);
        {
            // Any findByPrimaryKey(Account.class, fields) lookup returns the stub.
            when(mockBusinessObjectService.findByPrimaryKey(Account.class, fields)).thenReturn(account);
        }
    }
}<|fim▁end|>
|
// this is simply to prevent invocations of refresh reference
bo2.getRequiredQualList().get(0).setOwner(classificationBo);
|
<|file_name|>rcu.rs<|end_file_name|><|fim▁begin|>/*******************************************************************************
*
* kit/kernel/sync/rcu.rs
*
* vim:ft=rust:ts=4:sw=4:et:tw=80
*
* Copyright (C) 2015-2021, Devyn Cairns
* Redistribution of this file is permitted under the terms of the simplified
* BSD license. See LICENSE for more information.
*
******************************************************************************/
use alloc::sync::Arc;
use core::sync::atomic::{
AtomicPtr,
Ordering::*,
fence,
};
use core::fmt;
use core::mem::ManuallyDrop;
/// Read-copy-update cell: `read()` hands out cheap `Arc` clones of the
/// current value while writers atomically swap in replacements.
pub struct Rcu<T> {
    /// Pointer produced by `Arc::into_raw`; the cell owns exactly one
    /// strong count of whatever it currently points at.
    state: AtomicPtr<T>,
}
impl<T> Rcu<T> {
    /// Build a cell around `value`, taking over one of its strong counts.
    pub fn new(value: Arc<T>) -> Rcu<T> {
        let rcu = Rcu { state: AtomicPtr::new(Arc::into_raw(value) as *mut T) };
        // Publish the pointer before the Rcu itself can be shared with
        // other threads.
        fence(Release);
        rcu
    }
    /// Snapshot the current value as a caller-owned `Arc` clone.
    pub fn read(&self) -> Arc<T> {
        unsafe {
            let ptr = self.state.load(Acquire);
            // Rebuild the Arc without taking ownership: ManuallyDrop stops
            // us from decrementing the strong count the cell still owns.
            // NOTE(review): between this load and the clone below, a writer
            // dropping the last count could free the value -- there is no
            // grace period; confirm what synchronization callers provide.
            let stored_arc = ManuallyDrop::new(Arc::from_raw(ptr));
            (*stored_arc).clone()
        }
    }
    /// Write a new value without verifying the existing value.
    pub fn put(&self, value: Arc<T>) {
        unsafe {
            // Swap in the new pointer (the cell takes over `value`'s strong
            // count) and release the count held on the previous value.
            let old = self.state.swap(Arc::into_raw(value) as *mut T, AcqRel);
            let old_arc = Arc::from_raw(old);
            drop(old_arc);
        }
    }
pub fn update(
&self,
original: &Arc<T>,
value: Arc<T>,
) -> Result<(), Arc<T>> {
unsafe {
let raw_ptr = Arc::into_raw(value);
let original_ptr = Arc::as_ptr(original);
self.state
.compare_exchange(<|fim▁hole|> original_ptr as *mut T,
raw_ptr as *mut T,
AcqRel,
Relaxed,
)
.map(|arc| drop(Arc::from_raw(arc)))
// Give it back on error.
.map_err(|_| Arc::from_raw(raw_ptr))
}
}
    /// Retry loop: read the current value, let `mapper` derive a replacement,
    /// and CAS it in. `None` from `mapper` aborts and returns `None`.
    pub fn update_with<F>(&self, mut mapper: F) -> Option<Arc<T>>
    where
        F: FnMut(&Arc<T>) -> Option<Arc<T>>,
    {
        loop {
            let original = self.read();
            if let Some(new) = mapper(&original) {
                if self.update(&original, new.clone()).is_ok() {
                    return Some(new);
                } else {
                    // Lost the race to another writer: retry with a fresh
                    // snapshot (mapper may run more than once).
                    continue;
                }
            } else {
                return None;
            }
        }
    }
}
impl<T> From<Arc<T>> for Rcu<T> {
    /// Wrap an existing `Arc` without an extra allocation.
    fn from(arc: Arc<T>) -> Rcu<T> {
        Rcu::new(arc)
    }
}
impl<T> From<T> for Rcu<T> {
    /// Convenience: allocate the `Arc` around a bare value.
    fn from(value: T) -> Rcu<T> {
        Rcu::new(Arc::new(value))
    }
}
impl<T: fmt::Debug> fmt::Debug for Rcu<T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Snapshot once so the printed address and contents agree.
        let value = self.read();
        write!(f, "Rcu({0:p} = {0:?})", value)
    }
}
impl<T> fmt::Pointer for Rcu<T> {
    /// Format the address of the currently stored value.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let value = self.read();
        write!(f, "{:p}", value)
    }
}
#[test]
fn rcu_new_read_update() {
    let rcu = Rcu::new((5usize, 6usize).into());
    // One strong count held by the cell + one by the temporary from read().
    assert_eq!(Arc::strong_count(&rcu.read()), 2);
    assert_eq!(Arc::strong_count(&rcu.read()), 2);
    assert_eq!(*rcu.read(), (5, 6));
    let original = rcu.read();
    let new = (7, 8).into();
    assert!(rcu.update(&original, new).is_ok());
    // After the successful CAS the cell released its count on the old value;
    // only `original` keeps it alive now.
    assert_eq!(Arc::strong_count(&original), 1);
    assert_eq!(Arc::strong_count(&rcu.read()), 2);
    drop(original);
    assert_eq!(*rcu.read(), (7, 8));
}
| |
<|file_name|>step7_quote.rs<|end_file_name|><|fim▁begin|>#![feature(exit_status)]
extern crate mal;
use std::collections::HashMap;
use std::env as stdenv;
use mal::types::{MalVal, MalRet, MalError, err_str};
use mal::types::{symbol, _nil, string, list, vector, hash_map, malfunc};
use mal::types::MalError::{ErrString, ErrMalVal};
use mal::types::MalType::{Nil, False, Sym, List, Vector, Hash_Map, Func, MalFunc};
use mal::{readline, reader, core};
use mal::env::{env_set, env_get, env_new, env_bind, env_root, Env};
// read
/// READ step of the REPL: parse one mal form from the raw input line.
fn read(input: String) -> MalRet {
    // Parameter renamed from `str`, which shadowed the primitive type name.
    reader::read_str(input)
}
// eval
/// A "pair" in mal terms: any non-empty list or vector.
fn is_pair(x: MalVal) -> bool {
    match *x {
        List(ref elems, _) | Vector(ref elems, _) => !elems.is_empty(),
        _ => false,
    }
}
// Expand a quasiquote form into ordinary list-building calls:
//   non-pair x                  -> (quote x)
//   (unquote x)                 -> x
//   ((splice-unquote x) rest..) -> (concat x (quasiquote rest))
//   (a rest..)                  -> (cons (quasiquote a) (quasiquote rest))
fn quasiquote(ast: MalVal) -> MalVal {
    if !is_pair(ast.clone()) {
        return list(vec![symbol("quote"), ast])
    }
    match *ast.clone() {
        List(ref args,_) | Vector(ref args,_) => {
            let ref a0 = args[0];
            // (unquote x): substitute x directly.
            match **a0 {
                Sym(ref s) if *s == "unquote" => return args[1].clone(),
                _ => (),
            }
            // Head is itself a pair: check for (splice-unquote x).
            if is_pair(a0.clone()) {
                match **a0 {
                    List(ref a0args,_) | Vector(ref a0args,_) => {
                        match *a0args[0] {
                            Sym(ref s) if *s == "splice-unquote" => {
                                return list(vec![symbol("concat"),
                                                 a0args[1].clone(),
                                                 quasiquote(list(args[1..].to_vec()))])
                            },
                            _ => (),
                        }
                    },
                    _ => (),
                }
            }
            // Default: cons the expanded head onto the expanded tail.
            let rest = list(args[1..].to_vec());
            return list(vec![symbol("cons"),
                             quasiquote(a0.clone()),
                             quasiquote(rest)])
        },
        _ => _nil(), // should never reach
    }
}
// Evaluate the non-apply cases: symbols resolve in `env`; lists/vectors and
// hash-map values are evaluated element-wise; all other values are returned
// unchanged.
fn eval_ast(ast: MalVal, env: Env) -> MalRet {
    match *ast {
        Sym(_) => env_get(&env, &ast),
        List(ref a,_) | Vector(ref a,_) => {
            let mut ast_vec : Vec<MalVal> = vec![];
            for mv in a.iter() {
                let mv2 = mv.clone();
                ast_vec.push(try!(eval(mv2, env.clone())));
            }
            // Preserve the concrete collection type of the input.
            Ok(match *ast { List(_,_) => list(ast_vec),
                            _ => vector(ast_vec) })
        }
        Hash_Map(ref hm,_) => {
            // Keys stay as-is; only the values are evaluated.
            let mut new_hm: HashMap<String,MalVal> = HashMap::new();
            for (key, value) in hm.iter() {
                new_hm.insert(key.to_string(),
                              try!(eval(value.clone(), env.clone())));
            }
            Ok(hash_map(new_hm))
        }
        _ => Ok(ast.clone()),
    }
}
// Evaluate `ast` in `env`. The labeled loop implements tail-call
// optimization: special forms whose result is evaluated in tail position
// rebind `ast`/`env` and `continue 'tco` instead of recursing.
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
    'tco: loop {
        //println!("eval: {}, {}", ast, env.borrow());
        //println!("eval: {}", ast);
        // Anything that is not a list is handled by eval_ast.
        match *ast {
            List(_,_) => (), // continue
            _ => return eval_ast(ast, env),
        }
        // apply list
        match *ast {
            List(_,_) => (), // continue
            _ => return Ok(ast),
        }
        let tmp = ast;
        // Split into the element vector and the leading symbol; a
        // non-symbol head falls through to the function-call arm below.
        let (args, a0sym) = match *tmp {
            List(ref args,_) => {
                if args.len() == 0 {
                    return Ok(tmp.clone());
                }
                let ref a0 = *args[0];
                match *a0 {
                    Sym(ref a0sym) => (args, &a0sym[..]),
                    _ => (args, "__<fn*>__"),
                }
            },
            _ => return err_str("Expected list"),
        };
        match a0sym {
            // (def! sym expr): evaluate expr, bind it in the current env.
            "def!" => {
                let a1 = (*args)[1].clone();
                let a2 = (*args)[2].clone();
                let r = try!(eval(a2, env.clone()));
                match *a1 {
                    Sym(_) => {
                        env_set(&env.clone(), a1, r.clone());
                        return Ok(r);
                    },
                    _ => return err_str("def! of non-symbol"),
                }
            },
            // (let* (sym expr ...) body): pairwise bindings in a child env.
            "let*" => {
                let let_env = env_new(Some(env.clone()));
                let a1 = (*args)[1].clone();
                let a2 = (*args)[2].clone();
                match *a1 {
                    List(ref binds,_) | Vector(ref binds,_) => {
                        let mut it = binds.iter();
                        while it.len() >= 2 {
                            let b = it.next().unwrap();
                            let exp = it.next().unwrap();
                            match **b {
                                Sym(_) => {
                                    let r = try!(eval(exp.clone(), let_env.clone()));
                                    env_set(&let_env, b.clone(), r);
                                },
                                _ => return err_str("let* with non-symbol binding"),
                            }
                        }
                    },
                    _ => return err_str("let* with non-list bindings"),
                }
                // Body is evaluated in the child env -- tail position.
                ast = a2;
                env = let_env.clone();
                continue 'tco;
            },
            // (quote expr): return expr unevaluated.
            "quote" => return Ok((*args)[1].clone()),
            // (quasiquote expr): expand, then evaluate the expansion (TCO).
            "quasiquote" => {
                let a1 = (*args)[1].clone();
                ast = quasiquote(a1);
                continue 'tco;
            },
            // (do e1 .. en): evaluate all but the last, last in tail position.
            "do" => {
                let el = list(args[1..args.len()-1].to_vec());
                try!(eval_ast(el, env.clone()));
                ast = args[args.len() - 1].clone();
                continue 'tco;
            },
            // (if cond then [else]): only nil/false select the else branch.
            "if" => {
                let a1 = (*args)[1].clone();
                let c = try!(eval(a1, env.clone()));
                match *c {
                    False | Nil => {
                        if args.len() >= 4 {
                            ast = args[3].clone();
                            continue 'tco;
                        } else {
                            return Ok(_nil());
                        }
                    },
                    _ => {
                        ast = args[2].clone();
                        continue 'tco;
                    },
                }
            },
            // (fn* (params..) body): closure capturing the current env.
            "fn*" => {
                let a1 = args[1].clone();
                let a2 = args[2].clone();
                return Ok(malfunc(eval, a2, env, a1, _nil()));
            },
            // (eval expr): evaluate expr, then evaluate the result in the
            // root (REPL) environment.
            "eval" => {
                let a1 = (*args)[1].clone();
                ast = try!(eval(a1, env.clone()));
                env = env_root(&env);
                continue 'tco;
            },
            _ => { // function call
                let el = try!(eval_ast(tmp.clone(), env.clone()));
                let args = match *el {
                    List(ref args,_) => args,
                    _ => return err_str("Invalid apply"),
                };
                return match *args.clone()[0] {
                    // Native function: call directly.
                    Func(f,_) => f(args[1..].to_vec()),
                    // mal-defined function: bind params in a child env and
                    // loop on the body instead of recursing (TCO).
                    MalFunc(ref mf,_) => {
                        let mfc = mf.clone();
                        let alst = list(args[1..].to_vec());
                        let new_env = env_new(Some(mfc.env.clone()));
                        match env_bind(&new_env, mfc.params, alst) {
                            Ok(_) => {
                                ast = mfc.exp;
                                env = new_env;
                                continue 'tco;
                            },
                            Err(e) => err_str(&e),
                        }
                    },
                    _ => err_str("attempt to call non-function"),
                }
            },
        }
    }
}
// print
// PRINT step: render the value readably (print_readably = true).
fn print(exp: MalVal) -> String {
    exp.pr_str(true)
}
fn rep(str: &str, env: Env) -> Result<String,MalError> {<|fim▁hole|> let ast = try!(read(str.to_string()));
//println!("read: {}", ast);
let exp = try!(eval(ast, env));
Ok(print(exp))
}
fn main() {
    // core.rs: defined using rust
    let repl_env = env_new(None);
    for (k, v) in core::ns().into_iter() {
        env_set(&repl_env, symbol(&k), v);
    }
    // see eval() for definition of "eval"
    env_set(&repl_env, symbol("*ARGV*"), list(vec![]));
    // core.mal: defined using the language itself
    let _ = rep("(def! not (fn* (a) (if a false true)))", repl_env.clone());
    let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", repl_env.clone());
    // Invoked with command line arguments: argv[1] is a script to load,
    // the remaining args are exposed to it as *ARGV*.
    let args = stdenv::args();
    if args.len() > 1 {
        let mv_args = args.skip(2)
            .map(|a| string(a))
            .collect::<Vec<MalVal>>();
        env_set(&repl_env, symbol("*ARGV*"), list(mv_args));
        let lf = format!("(load-file \"{}\")",
                         stdenv::args().skip(1).next().unwrap());
        // Script mode: exit status reflects load success.
        return match rep(&lf, repl_env.clone()) {
            Ok(_) => stdenv::set_exit_status(0),
            Err(str) => {
                println!("Error: {:?}", str);
                stdenv::set_exit_status(1);
            }
        };
    }
    // repl loop -- runs until readline returns None (EOF).
    loop {
        let line = readline::mal_readline("user> ");
        match line { None => break, _ => () }
        match rep(&line.unwrap(), repl_env.clone()) {
            Ok(str) => println!("{}", str),
            Err(ErrMalVal(_)) => (), // Blank line
            Err(ErrString(s)) => println!("Error: {}", s),
        }
    }
}
| |
<|file_name|>segmentation.py<|end_file_name|><|fim▁begin|>import os
import re
import string
from itertools import chain
from .detector_morse import Detector
from .detector_morse import slurp
# from .penn_treebank_tokenizer import word_tokenize
import nlup
from pug.nlp.constant import DATA_PATH
from pug.nlp.util import generate_files
# regex namespace only conflicts with regex kwarg in Tokenizer constructur
from pug.nlp.regex import CRE_TOKEN, RE_NONWORD
def list_ngrams(token_list, n=1, join=' '):
    """Return a list of n-tuples, one for each possible sequence of n items in the token_list

    Arguments:
      token_list (list): sequence of tokens to window into n-grams
      n (int): number of tokens per n-gram
      join (bool or str): if str, then join ngram tuples on it before returning
        True is equivalent to join=' '
        default = True

    See: http://stackoverflow.com/a/30609050/623735

    >>> list_ngrams('goodbye cruel world'.split(), join=False)
    [('goodbye',), ('cruel',), ('world',)]
    >>> list_ngrams('goodbye cruel world'.split(), 2, join=False)
    [('goodbye', 'cruel'), ('cruel', 'world')]
    """
    join = ' ' if join is True else join
    # Python 2 joins on any basestring (str or unicode); Python 3 has only str.
    try:
        string_types = basestring  # noqa: F821 -- Python 2
    except NameError:
        string_types = str  # Python 3
    if isinstance(join, string_types):
        return [join.join(ng) for ng in list_ngrams(token_list, n=n, join=False)]
    # Wrap in list() so the result is a real list on Python 3 too, where
    # zip() returns a lazy iterator (the docstring examples promise a list).
    return list(zip(*[token_list[i:] for i in range(n)]))
def list_ngram_range(token_list, *args, **kwargs):
    """Return a list of n-tuples for every n-gram size up to n

    Positional args are interpreted as (n,) or (m, n[, join]).
    NOTE(review): the lower bound `m` is parsed but ignored -- the range
    below always starts at 1-grams (`range(0, n)`); confirm whether
    `range(m, n)` was intended before relying on a nonzero `m`.

    Arguments:
      join (bool or str): if str, then join ngram tuples on it before returning
        True is equivalent to join=' '
        default = True

    >>> list_ngram_range('goodbye cruel world'.split(), 0, 2, join=False)
    [('goodbye',), ('cruel',), ('world',), ('goodbye', 'cruel'), ('cruel', 'world')]
    >>> list_ngram_range('goodbye cruel world'.split(), 2, join=False)
    [('goodbye',), ('cruel',), ('world',), ('goodbye', 'cruel'), ('cruel', 'world')]
    >>> list_ngram_range('goodbye cruel world'.split(), 0, 2, join='|')
    ['goodbye', 'cruel', 'world', 'goodbye|cruel', 'cruel|world']
    >>> list_ngram_range('goodbye cruel world'.split(), 0, 2, join=True)
    ['goodbye', 'cruel', 'world', 'goodbye cruel', 'cruel world']
    """
    # (m, n) bounds from positional args; join may arrive positionally or by keyword.
    m, n = (args if len(args) > 1 else ((0, args[0]) if args else (0, 1)))
    join = args[2] if len(args) > 2 else kwargs.pop('join', True)
    return list(chain(*(list_ngrams(token_list, i + 1, join=join) for i in range(0, n))))
"""Generate sentences from a sequence of characters (text)
Thin wrapper for Kyle Gorman's "DetectorMorse" module
Arguments:
case_sensitive (int): whether to consider case to make decisions about sentence boundaries
epochs (int): number of epochs (iterations for classifier training)
"""
if train_path:
generate_sentences.detector = Detector(slurp(train_path), epochs=epochs, nocase=not case_sensitive)
# generate_sentences.detector = SentenceDetector(text=text, nocase=not case_sensitive, epochs=epochs, classifier=classifier)
return iter(generate_sentences.detector.segments(text))
generate_sentences.detector = nlup.decorators.IO(Detector.load)(os.path.join(DATA_PATH, 'wsj_detector_morse_model.json.gz'))
def str_strip(s, strip_chars=string.punctuation + ' \t\n\r'):
return s.strip(strip_chars)
def str_lower(s):
return s.lower()
def to_ascii(s, filler='-'):
if not s:
return ''
if not isinstance(s, basestring): # e.g. np.nan
return to_ascii(repr(s))
try:
return s.encode('utf8')
except:
return ''.join(c if c < chr(128) else filler for c in s if c)
stringify = to_ascii
def passthrough(s):
return s
class Tokenizer(object):
"""Callable and iterable class that yields substrings split on spaces or other configurable delimitters.
For both __init__ and __call__, doc is the first arg.
TODO: All args and functionality of __init__() and __call__() should be the same.
FIXME: Implement the `nltk.tokenize.TokenizerI` interface
Is it at all pythonic to make a class callable and iterable?
Is it pythonic to have to instantiate a TokenizerI instance and then call that instance's `tokenize` method?
>>> abc = (chr(ord('a') + (i % 26)) for i in xrange(1000))
>>> tokenize = Tokenizer(ngrams=5)
>>> ans = list(tokenize(' '.join(abc)))
>>> ans[:7]
['a', 'b', 'c', 'd', 'e', 'f', 'g']
>>> ans[1000:1005]
['a b', 'b c', 'c d', 'd e', 'e f']
>>> ans[1999:2004]
['a b c', 'b c d', 'c d e', 'd e f', 'e f g']
>>> tokenize = Tokenizer(stem='Porter')
>>> doc = "Here're some stemmable words provided to you for your stemming pleasure."
>>> sorted(set(tokenize(doc)) - set(Tokenizer(doc, stem='Lancaster')))
[u"Here'r", u'pleasur', u'some', u'stemmabl', u'your']
>>> sorted(set(Tokenizer(doc, stem='WordNet')) - set(Tokenizer(doc, stem='Lancaster')))
["Here're", 'pleasure', 'provided', 'some', 'stemmable', 'stemming', 'your']
"""
def __init__(self, doc=None, regex=CRE_TOKEN, strip=True, nonwords=False, nonwords_set=None, nonwords_regex=RE_NONWORD,
lower=None, stem=None, ngrams=1):
# specific set of characters to strip
self.strip_chars = None
if isinstance(strip, basestring):
self.strip_chars = strip
# strip_chars takes care of the stripping config, so no need for strip function anymore
self.strip = None
elif strip is True:
self.strip_chars = '-_*`()"' + '"'
strip = strip or None
# strip whitespace, overrides strip() method
self.strip = strip if callable(strip) else (str_strip if strip else None)
self.doc = to_ascii(doc)
self.regex = regex
if isinstance(self.regex, basestring):
self.regex = re.compile(self.regex)
self.nonwords = nonwords # whether to use the default REGEX for nonwords
self.nonwords_set = nonwords_set or set()
self.nonwords_regex = nonwords_regex
self.lower = lower if callable(lower) else (str_lower if lower else None)
self.stemmer_name, self.stem = 'passthrough', passthrough # stem can be a callable Stemmer instance or just a function
self.ngrams = ngrams or 1 # ngram degree, numger of ngrams per token
if isinstance(self.nonwords_regex, basestring):
self.nonwords_regex = re.compile(self.nonwords_regex)
elif self.nonwords:
try:
self.nonwords_set = set(self.nonwords)
except TypeError:
self.nonwords_set = set(['None', 'none', 'and', 'but'])
# if a set of nonwords has been provided dont use the internal nonwords REGEX?
self.nonwords = not bool(self.nonwords)
def __call__(self, doc):
"""Lazily tokenize a new document (tokens aren't generated until the class instance is iterated)
>>> list(Tokenizer()('new string to parse'))
['new', 'string', 'to', 'parse']
"""
# tokenization doesn't happen until you try to iterate through the Tokenizer instance or class
self.doc = to_ascii(doc)
# need to return self so that this will work: Tokenizer()('doc (str) to parse even though default doc is None')
return self
# to conform to this part of the nltk.tokenize.TokenizerI interface
tokenize = __call__
def __reduce__(self):
"""Unpickling constructor and args so that pickling can be done efficiently without any bound methods, etc"""
return (Tokenizer, (None, self.regex, self.strip, self.nonwords, self.nonwords_set, self.nonwords_regex,
self.lower, self.stemmer_name, self.ngrams))
def span_tokenize(self, s):
"""Identify the tokens using integer offsets `(start_i, end_i)` rather than copying them to a new sequence
The sequence of tokens (strings) can be generated with
`s[start_i:end_i] for start_i, end_i in span_tokenize(s)`
Returns:
generator of 2-tuples of ints, like ((int, int) for token in s)
"""
return
# raise NotImplementedError("span_tokenizer interface not yet implemented, so just suck it up and use RAM to tokenize() ;)")
def tokenize_sents(self, strings):
"""NTLK.
Apply ``self.tokenize()`` to each element of ``strings``. I.e.:
return [self.tokenize(s) for s in strings]
:rtype: list(list(str))
"""
return [self.tokenize(s) for s in strings]
def span_tokenize_sents(self, strings):
"""
Apply ``self.span_tokenize()`` to each element of ``strings``. I.e.:
return iter((self.span_tokenize(s) for s in strings))
:rtype: iter(list(tuple(int, int)))
"""
for s in strings:
yield list(self.span_tokenize(s))
def __iter__(self, ngrams=None):
r"""Generate a sequence of words or tokens, using a re.match iteratively through the str
TODO:
- need two different self.lower and lemmatize transforms, 1 before and 1 after nonword detection
- each of 3 nonword filters on a separate line, setting w=None when nonword "hits"
- refactor `nonwords` arg/attr to `ignore_stopwords` to be more explicit
>>> doc = "John D. Rock\n\nObjective: \n\tSeeking a position as Software --Architect-- / _Project Lead_ that can utilize my expertise and"
>>> doc += " experiences in business application development and proven records in delivering 90's software. "
>>> doc += "\n\nSummary: \n\tSoftware Architect"
>>> doc += " who has gone through several full product-delivery life cycles from requirements gathering to deployment / production, and"
>>> doc += " skilled in all areas of software development from client-side JavaScript to database modeling. With strong experiences in:"
>>> doc += " \n\tRequirements gathering and analysis."
The python splitter will produce 2 tokens that are only punctuation ("/")
>>> len([s for s in doc.split() if s])
72
The built-in nonword REGEX ignores all-punctuation words, so there are 2 less here:
>>> len(list(Tokenizer(doc, strip=False, nonwords=False)))
70
In addition, punctuation at the end of tokens is stripped so "D. Rock" doesn't tokenize to "D." but rather "D"
>>> run_together_tokens = ''.join(list(Tokenizer(doc, strip=False, nonwords=False)))
>>> '/' in run_together_tokens or ':' in ''.join(run_together_tokens)
False
But you can turn off stripping when instantiating the object.
>>> all(t in Tokenizer(doc, strip=False, nonwords=True) for t in ('D', '_Project', 'Lead_', "90's", "product-delivery"))
True
"""
ngrams = ngrams or self.ngrams
# FIXME: Improve memory efficiency by making this ngram tokenizer an actual generator
if ngrams > 1:
original_tokens = list(self.__iter__(ngrams=1))
for tok in original_tokens:
yield tok
for i in range(2, ngrams + 1):
for tok in list_ngrams(original_tokens, n=i, join=' '):
yield tok
else:
for w in self.regex.finditer(self.doc):
if w:
w = w.group()
w = w if not self.strip_chars else str_strip(w, self.strip_chars)
w = w if not self.strip else self.strip(w)
w = w if not self.stem else self.stem(w)
w = w if not self.lemmatize else self.lemmatize(w)
w = w if not self.lower else self.lower(w)
# FIXME: nonword check before and after preprossing? (lower, lemmatize, strip, stem)
# 1. check if the default nonwords REGEX filter is requested, if so, use it.
# 2. check if a customized nonwords REGES filter is provided, if so, use it.
# 3. make sure the word isn't in the provided (or empty) set of nonwords
if w and (not self.nonwords or not re.match(r'^' + RE_NONWORD + '$', w)) and (
not self.nonwords_regex or not self.nonwords_regex.match(w)) and (
w not in self.nonwords_set):
yield w
# can these all just be left to default assignments in __init__ or as class methods assigned to global `passthrough()`
def strip(self, s):
"""Strip punctuation surrounding a token"""
return s
def stem(self, s):
"""Find the lexial root of a word, e.g. convert 'running' to 'run'"""
return s
def lemmatize(self, s):
"""Find the semantic root of a word, e.g. convert 'was' to 'be'"""
return s
def __getstate__(self):
return self.__dict__
def __setstate__(self, d):
self.__dict__.update(d)
class PassageIter(object):
    """Passage (document, sentence, line, phrase) generator for files at indicated path

    Walks all the text files it finds in the indicated path,
    segmenting sentences and yielding them one at a time.

    References:
      Radim's [word2vec tutorial](http://radimrehurek.com/2014/02/word2vec-tutorial/)
    """

    def __init__(self, path='', ext='', level=None, dirs=False, files=True,
                 sentence_segmenter=generate_sentences, word_segmenter=string.split, verbosity=0):
        # FIX: forward the caller's arguments to generate_files; the original
        # passed hard-coded defaults (ext='', level=None, ...), silently
        # ignoring whatever the caller asked for.
        self.file_generator = generate_files(path=path, ext=ext, level=level, dirs=dirs,
                                             files=files, verbosity=verbosity)

    def __iter__(self):
        # FIX: iterate the file generator directly. The original called
        # os.listdir() on a generator object and referenced an undefined
        # self.dirname attribute, so it raised on first use.
        # NOTE(review): assumes generate_files yields file paths -- confirm
        # against its definition before relying on this.
        for fname in self.file_generator:
            # `with` guarantees each file handle is closed promptly.
            with open(fname) as fin:
                for line in fin:
                    yield line.split()
|
[('goodbye',), ('cruel',), ('world',), ('goodbye', 'cruel'), ('cruel', 'world')]
>>> list_ngram_range('goodbye cruel world'.split(), 0, 2, join='|')
['goodbye', 'cruel', 'world', 'goodbye|cruel', 'cruel|world']
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from flask import render_template, flash, redirect, url_for, request
from flask_login import login_required, current_user
from ..models import User, Sensors, Sensor_data
from . import chart
from pytz import timezone
# import pygal
from pyecharts import Line
import os
from ..main.forms import SelectMultipleSensorForm
# Sensor readings are stored in UTC; charts render them converted to
# China Standard Time (Asia/Shanghai).
tzchina = timezone('Asia/Shanghai')
utc = timezone('UTC')
@chart.route('/v1.0')
@login_required
def chart1():
    """Legacy v1.0 chart endpoint: forward to the current user's sensor list."""
    return redirect(url_for('main.sensors', username=current_user.username))
@chart.route('/v2.0', methods=['GET', 'POST'])
@login_required
def chart3():
    """Render an interactive line chart for the sensors the user selects.

    GET shows the multi-select form; a valid POST plots every selected
    sensor's readings (timestamps converted from UTC to Asia/Shanghai)
    on a single pyecharts Line chart.
    """
    if Sensors.query.filter_by(author_id=current_user.id).first():
        sensors = Sensors.query.filter_by(author_id=current_user.id).order_by(Sensors.id.desc()).all()
        form = SelectMultipleSensorForm(sensors, prefix="sensorform")
        valid = 0
        # FIX: restore the form-validation guard that gates chart rendering.
        if form.validate_on_submit():
            options = form.sensor.data
            line = Line(width=800, height=400)
            for sensor in options:
                # NOTE(review): -Sensor_data.id.desc() double-negates the sort
                # direction (yielding ascending id) -- confirm this is intended.
                sensor_data = Sensor_data.query.filter_by(sensor_id=sensor).order_by(-Sensor_data.id.desc()).all()
                timestamp = []
                data = []
                for i in sensor_data:
                    timestamp.append(i.timestamp.replace(tzinfo=utc).astimezone(tzchina).strftime('%Y/%m/%d-%H:%M:%S'))
                    data.append(i.value)
                # FIX: `len(data) is 0` relied on CPython small-int identity;
                # use an explicit emptiness test instead.
                if not data:
                    no_sensor = 0
                    return render_template('no_sensor_dat.html', no_sensor=no_sensor)
                else:
                    s = Sensors.query.filter_by(id=sensor).first()
                    title = s.name
                    line.add(title, timestamp, data, is_smooth=False, is_datazoom_show=True,
                             mark_line=["average"], mark_point=["min", "max"])
            valid = 1
            return render_template('sensor_chart.html', form=form, chart=line.render_embed(), valid=valid)
        else:
            valid = 0
            return render_template('sensor_chart.html', form=form, valid=valid)
    else:
        no_sensor = 1
        return render_template('no_sensor_dat.html', no_sensor=no_sensor)
|
if form.validate_on_submit():
|
<|file_name|>memory.go<|end_file_name|><|fim▁begin|>package store
import (
"errors"
"fmt"
"runtime"
"strconv"
"strings"
"sync"
"time"
"github.com/docker/go-events"
"github.com/docker/swarmkit/api"
pb "github.com/docker/swarmkit/api"
"github.com/docker/swarmkit/manager/state"
"github.com/docker/swarmkit/manager/state/watch"
"github.com/docker/swarmkit/protobuf/ptypes"
memdb "github.com/hashicorp/go-memdb"
"golang.org/x/net/context"
)
const (
	// Names of the secondary indexes declared in the memdb table schemas.
	indexID           = "id"
	indexName         = "name"
	indexServiceID    = "serviceid"
	indexNodeID       = "nodeid"
	indexSlot         = "slot"
	indexCN           = "cn"
	indexDesiredState = "desiredstate"
	indexRole         = "role"
	indexMembership   = "membership"

	// prefix is appended to an index name to select its prefix-match variant.
	prefix = "_prefix"

	// MaxChangesPerTransaction is the number of changes after which a new
	// transaction should be started within Batch.
	MaxChangesPerTransaction = 200

	// MaxTransactionBytes is the maximum serialized transaction size.
	MaxTransactionBytes = 1.5 * 1024 * 1024
)
var (
	// ErrExist is returned by create operations if the provided ID is already
	// taken.
	ErrExist = errors.New("object already exists")

	// ErrNotExist is returned by altering operations (update, delete) if the
	// provided ID is not found.
	ErrNotExist = errors.New("object does not exist")

	// ErrNameConflict is returned by create/update if the object name is
	// already in use by another object.
	ErrNameConflict = errors.New("name conflicts with an existing object")

	// ErrInvalidFindBy is returned if an unrecognized type is passed to Find.
	ErrInvalidFindBy = errors.New("invalid find argument type")

	// ErrSequenceConflict is returned when trying to update an object
	// whose sequence information does not match the object in the store's.
	ErrSequenceConflict = errors.New("update out of sequence")

	// objectStorers holds one ObjectStoreConfig per registered object type.
	objectStorers []ObjectStoreConfig
	// schema is assembled from register() calls at package init time.
	schema = &memdb.DBSchema{
		Tables: map[string]*memdb.TableSchema{},
	}
	// errUnknownStoreAction is an internal sentinel that lets a storer
	// decline an action so the next storer can be tried.
	errUnknownStoreAction = errors.New("unknown store action")
)
// register adds an object store configuration to the package-level schema
// and storer list shared by every MemoryStore instance.
func register(os ObjectStoreConfig) {
	schema.Tables[os.Name] = os.Table
	objectStorers = append(objectStorers, os)
}
// MemoryStore is a concurrency-safe, in-memory implementation of the Store
// interface.
type MemoryStore struct {
	// updateLock must be held during an update transaction.
	updateLock sync.Mutex

	// memDB holds the actual data, laid out per the registered schema.
	memDB *memdb.MemDB
	// queue publishes change events to watchers.
	queue *watch.Queue

	// proposer, when non-nil, is used to replicate changes to other
	// cluster members before they are committed locally.
	proposer state.Proposer
}
// NewMemoryStore returns an in-memory store. The argument is an optional
// Proposer which will be used to propagate changes to other members in a
// cluster.
func NewMemoryStore(proposer state.Proposer) *MemoryStore {
	memDB, err := memdb.NewMemDB(schema)
	if err != nil {
		// This shouldn't fail: the schema is built statically by register()
		// at package init time, so an error here indicates a programming bug.
		panic(err)
	}

	return &MemoryStore{
		memDB:    memDB,
		queue:    watch.NewQueue(),
		proposer: proposer,
	}
}
// Close closes the memory store and frees its associated resources,
// in particular shutting down the watch queue so subscribers terminate.
func (s *MemoryStore) Close() error {
	return s.queue.Close()
}
// fromArgs converts a single string index argument into the byte encoding
// memdb expects: the string followed by a null terminator.
func fromArgs(args ...interface{}) ([]byte, error) {
	if len(args) != 1 {
		return nil, fmt.Errorf("must provide only a single argument")
	}

	s, ok := args[0].(string)
	if !ok {
		return nil, fmt.Errorf("argument must be a string: %#v", args[0])
	}

	// The null character terminates the key in the index encoding.
	return append([]byte(s), 0), nil
}
// prefixFromArgs encodes a single string argument for a prefix lookup:
// like fromArgs, but without the trailing null terminator.
func prefixFromArgs(args ...interface{}) ([]byte, error) {
	encoded, err := fromArgs(args...)
	if err != nil {
		return nil, err
	}

	// Drop the null terminator; the remainder matches as a prefix.
	if n := len(encoded); n > 0 {
		encoded = encoded[:n-1]
	}
	return encoded, nil
}
// ReadTx is a read transaction. Note that transaction does not imply
// any internal batching. It only means that the transaction presents a
// consistent view of the data that cannot be affected by other
// transactions.
type ReadTx interface {
	// lookup fetches one object by an arbitrary index; get fetches by ID;
	// find streams all objects matching a By selector to a callback.
	lookup(table, index, id string) Object
	get(table, id string) Object
	find(table string, by By, checkType func(By) error, appendResult func(Object)) error
}

// readTx implements ReadTx on top of a read-only memdb transaction.
type readTx struct {
	memDBTx *memdb.Txn
}
// View executes a read transaction: cb observes a consistent snapshot of
// the store through the ReadTx it receives.
func (s *MemoryStore) View(cb func(ReadTx)) {
	txn := s.memDB.Txn(false)
	cb(readTx{memDBTx: txn})
	// Read-only transactions acquire no locks; Commit just releases them.
	txn.Commit()
}
// Tx is a read/write transaction. Note that transaction does not imply
// any internal batching. The purpose of this transaction is to give the
// user a guarantee that its changes won't be visible to other transactions
// until the transaction is over.
type Tx interface {
	ReadTx
	create(table string, o Object) error
	update(table string, o Object) error
	delete(table, id string) error
}

// tx implements Tx over a read/write memdb transaction, recording every
// mutation so it can be published (and optionally proposed to the cluster)
// on commit.
type tx struct {
	readTx
	// curVersion is the cluster version at transaction start; nil when
	// applying raft/snapshot data, in which case version checks are skipped.
	curVersion *api.Version
	// changelist accumulates one event per successful mutation.
	changelist []state.Event
}
// ApplyStoreActions updates a store based on StoreAction messages, all
// within a single write transaction. On success the resulting events are
// published, followed by an EventCommit marker; on failure the transaction
// is aborted and nothing is published.
func (s *MemoryStore) ApplyStoreActions(actions []*api.StoreAction) error {
	s.updateLock.Lock()
	// FIX: release the lock via defer instead of manually on every return
	// path; the original unlocked by hand in two places, which is leak-prone
	// as the function evolves.
	defer s.updateLock.Unlock()

	memDBTx := s.memDB.Txn(true)
	tx := tx{
		readTx: readTx{
			memDBTx: memDBTx,
		},
	}

	for _, sa := range actions {
		if err := applyStoreAction(&tx, sa); err != nil {
			memDBTx.Abort()
			return err
		}
	}

	memDBTx.Commit()

	// Publish events only after a successful commit so watchers never see
	// changes that were rolled back.
	for _, c := range tx.changelist {
		s.queue.Publish(c)
	}
	if len(tx.changelist) != 0 {
		s.queue.Publish(state.EventCommit{})
	}
	return nil
}
func applyStoreAction(tx Tx, sa *api.StoreAction) error {
for _, os := range objectStorers {
err := os.ApplyStoreAction(tx, sa)
if err != errUnknownStoreAction {
return err
}
}
return errors.New("unrecognized action type")<|fim▁hole|>func (s *MemoryStore) update(proposer state.Proposer, cb func(Tx) error) error {
s.updateLock.Lock()
memDBTx := s.memDB.Txn(true)
var curVersion *api.Version
if proposer != nil {
curVersion = proposer.GetVersion()
}
var tx tx
tx.init(memDBTx, curVersion)
err := cb(&tx)
if err == nil {
if proposer == nil {
memDBTx.Commit()
} else {
var sa []*api.StoreAction
sa, err = tx.changelistStoreActions()
if err == nil {
if len(sa) != 0 {
err = proposer.ProposeValue(context.Background(), sa, func() {
memDBTx.Commit()
})
} else {
memDBTx.Commit()
}
}
}
}
if err == nil {
for _, c := range tx.changelist {
s.queue.Publish(c)
}
if len(tx.changelist) != 0 {
s.queue.Publish(state.EventCommit{})
}
} else {
memDBTx.Abort()
}
s.updateLock.Unlock()
return err
}
// updateLocal executes a read/write transaction that is committed locally
// without being proposed to the cluster (used when applying raft data or
// restoring from a snapshot).
func (s *MemoryStore) updateLocal(cb func(Tx) error) error {
	return s.update(nil, cb)
}

// Update executes a read/write transaction.
func (s *MemoryStore) Update(cb func(Tx) error) error {
	return s.update(s.proposer, cb)
}
// Batch provides a mechanism to batch updates to a store.
type Batch struct {
	tx    tx
	store *MemoryStore
	// applied counts the times Update has run successfully
	applied int
	// committed is the number of times Update had run successfully as of
	// the time pending changes were committed.
	committed int
	// transactionSizeEstimate is the running count of the size of the
	// current transaction.
	transactionSizeEstimate int
	// changelistLen is the last known length of the transaction's
	// changelist.
	changelistLen int
	// err records a failed commit; once set, further Updates are refused.
	err error
}
// Update adds a single change to a batch. Each call to Update is atomic, but
// different calls to Update may be spread across multiple transactions to
// circumvent transaction size limits.
func (batch *Batch) Update(cb func(Tx) error) error {
	if batch.err != nil {
		return batch.err
	}

	if err := cb(&batch.tx); err != nil {
		return err
	}

	batch.applied++

	// Fold any changes cb just appended into the running size estimate.
	for batch.changelistLen < len(batch.tx.changelist) {
		sa, err := newStoreAction(batch.tx.changelist[batch.changelistLen])
		if err != nil {
			return err
		}
		batch.transactionSizeEstimate += sa.Size()
		batch.changelistLen++
	}

	// Commit early once the transaction approaches the change-count or
	// byte-size ceiling (3/4 of MaxTransactionBytes leaves headroom).
	if batch.changelistLen >= MaxChangesPerTransaction || batch.transactionSizeEstimate >= (MaxTransactionBytes*3)/4 {
		if err := batch.commit(); err != nil {
			return err
		}

		// Yield the update lock
		batch.store.updateLock.Unlock()
		runtime.Gosched()
		batch.store.updateLock.Lock()

		batch.newTx()
	}

	return nil
}
// newTx starts a fresh write transaction for the batch and resets the
// per-transaction size accounting. Caller must hold the store's updateLock.
func (batch *Batch) newTx() {
	var curVersion *api.Version

	if batch.store.proposer != nil {
		curVersion = batch.store.proposer.GetVersion()
	}

	batch.tx.init(batch.store.memDB.Txn(true), curVersion)
	batch.transactionSizeEstimate = 0
	batch.changelistLen = 0
}
// commit finishes the batch's current transaction: proposing the changes to
// the cluster when a proposer is configured, committing locally otherwise.
// On success it publishes the accumulated events plus an EventCommit marker
// and records how many Updates are now durable.
func (batch *Batch) commit() error {
	if batch.store.proposer != nil {
		var sa []*api.StoreAction
		sa, batch.err = batch.tx.changelistStoreActions()

		if batch.err == nil {
			if len(sa) != 0 {
				// Commit only once the cluster accepts the proposal.
				batch.err = batch.store.proposer.ProposeValue(context.Background(), sa, func() {
					batch.tx.memDBTx.Commit()
				})
			} else {
				batch.tx.memDBTx.Commit()
			}
		}
	} else {
		batch.tx.memDBTx.Commit()
	}

	if batch.err != nil {
		batch.tx.memDBTx.Abort()
		return batch.err
	}

	batch.committed = batch.applied

	for _, c := range batch.tx.changelist {
		batch.store.queue.Publish(c)
	}
	if len(batch.tx.changelist) != 0 {
		batch.store.queue.Publish(state.EventCommit{})
	}

	return nil
}
// Batch performs one or more transactions that allow reads and writes
// It invokes a callback that is passed a Batch object. The callback may
// call batch.Update for each change it wants to make as part of the
// batch. The changes in the batch may be split over multiple
// transactions if necessary to keep transactions below the size limit.
// Batch holds a lock over the state, but will yield this lock every
// time it creates a new transaction to allow other writers to proceed.
// Thus, unrelated changes to the state may occur between calls to
// batch.Update.
//
// This method allows the caller to iterate over a data set and apply
// changes in sequence without holding the store write lock for an
// excessive time, or producing a transaction that exceeds the maximum
// size.
//
// Batch returns the number of calls to batch.Update whose changes were
// successfully committed to the store.
func (s *MemoryStore) Batch(cb func(*Batch) error) (int, error) {
	s.updateLock.Lock()

	batch := Batch{
		store: s,
	}
	batch.newTx()

	if err := cb(&batch); err != nil {
		// Abort only the in-flight transaction; changes committed by
		// earlier commit() calls within this batch remain applied.
		batch.tx.memDBTx.Abort()
		s.updateLock.Unlock()
		return batch.committed, err
	}

	err := batch.commit()
	s.updateLock.Unlock()
	return batch.committed, err
}
// init resets tx for a new transaction, clearing any changelist left over
// from a previous use of the same value.
func (tx *tx) init(memDBTx *memdb.Txn, curVersion *api.Version) {
	tx.changelist = nil
	tx.curVersion = curVersion
	tx.memDBTx = memDBTx
}
// newStoreAction converts a change event into a serializable StoreAction by
// asking each registered object storer in turn; a storer that does not
// recognize the event returns the errUnknownStoreAction sentinel.
func newStoreAction(c state.Event) (*api.StoreAction, error) {
	for _, os := range objectStorers {
		sa, err := os.NewStoreAction(c)
		if err == nil {
			return &sa, nil
		} else if err != errUnknownStoreAction {
			return nil, err
		}
	}
	return nil, errors.New("unrecognized event type")
}
// changelistStoreActions serializes the transaction's changelist into
// StoreAction messages suitable for proposing to the cluster.
func (tx tx) changelistStoreActions() ([]*api.StoreAction, error) {
	var actions []*api.StoreAction

	for i := range tx.changelist {
		sa, err := newStoreAction(tx.changelist[i])
		if err != nil {
			return nil, err
		}
		actions = append(actions, sa)
	}

	return actions, nil
}
// lookup is an internal typed wrapper around memdb. Both lookup errors and
// misses are reported as a nil result.
func (tx readTx) lookup(table, index, id string) Object {
	obj, err := tx.memDBTx.First(table, index, id)
	if err != nil || obj == nil {
		return nil
	}
	return obj.(Object)
}
// create adds a new object to the store.
// Returns ErrExist if the ID is already taken.
func (tx *tx) create(table string, o Object) error {
	if tx.lookup(table, indexID, o.ID()) != nil {
		return ErrExist
	}

	// FIX: renamed the local from `copy` to `clone` -- it shadowed the
	// builtin copy(). Work on a clone so the caller's object is only
	// touched on success.
	clone := o.Copy()
	meta := clone.Meta()
	if err := touchMeta(&meta, tx.curVersion); err != nil {
		return err
	}
	clone.SetMeta(meta)

	err := tx.memDBTx.Insert(table, clone)
	if err == nil {
		tx.changelist = append(tx.changelist, clone.EventCreate())
		// Reflect the assigned version/timestamps back on the caller's object.
		o.SetMeta(meta)
	}
	return err
}
// update replaces an existing object in the store.
// Returns ErrNotExist if the object doesn't exist, and ErrSequenceConflict
// when the transaction tracks versions and the stored version differs from
// the caller's (optimistic concurrency check).
func (tx *tx) update(table string, o Object) error {
	oldN := tx.lookup(table, indexID, o.ID())
	if oldN == nil {
		return ErrNotExist
	}

	if tx.curVersion != nil {
		// FIX: dropped the redundant oldN.(Object) assertion -- lookup
		// already returns Object.
		if oldN.Meta().Version != o.Meta().Version {
			return ErrSequenceConflict
		}
	}

	// FIX: renamed the local from `copy` to `clone` -- it shadowed the
	// builtin copy(). Work on a clone so the caller's object is only
	// touched on success.
	clone := o.Copy()
	meta := clone.Meta()
	if err := touchMeta(&meta, tx.curVersion); err != nil {
		return err
	}
	clone.SetMeta(meta)

	err := tx.memDBTx.Insert(table, clone)
	if err == nil {
		tx.changelist = append(tx.changelist, clone.EventUpdate())
		// Reflect the bumped version/timestamps back on the caller's object.
		o.SetMeta(meta)
	}
	return err
}
// delete removes the object with the given ID from table.
// Returns ErrNotExist if the object doesn't exist.
func (tx *tx) delete(table, id string) error {
	obj := tx.lookup(table, indexID, id)
	if obj == nil {
		return ErrNotExist
	}

	if err := tx.memDBTx.Delete(table, obj); err != nil {
		return err
	}
	tx.changelist = append(tx.changelist, obj.EventDelete())
	return nil
}
// get looks up an object by ID, returning a copy so the caller cannot
// mutate stored state. Returns nil if the object doesn't exist.
func (tx readTx) get(table, id string) Object {
	if o := tx.lookup(table, indexID, id); o != nil {
		return o.Copy()
	}
	return nil
}
// findIterators returns a slice of iterators. The union of items from these
// iterators provides the result of the query.
func (tx readTx) findIterators(table string, by By, checkType func(By) error) ([]memdb.ResultIterator, error) {
	// Generic combinators skip per-type validation; every concrete selector
	// must pass the table's type check.
	switch by.(type) {
	case byAll, orCombinator: // generic types
	default: // all other types
		if err := checkType(by); err != nil {
			return nil, err
		}
	}

	// single wraps one memdb index lookup in a one-element iterator slice.
	// FIX(decomposition): the original repeated this Get/err/wrap boilerplate
	// eleven times, once per selector type.
	single := func(index string, args ...interface{}) ([]memdb.ResultIterator, error) {
		it, err := tx.memDBTx.Get(table, index, args...)
		if err != nil {
			return nil, err
		}
		return []memdb.ResultIterator{it}, nil
	}

	switch v := by.(type) {
	case byAll:
		return single(indexID)
	case orCombinator:
		// Union: recursively collect the iterators for every branch.
		var iters []memdb.ResultIterator
		for _, subBy := range v.bys {
			it, err := tx.findIterators(table, subBy, checkType)
			if err != nil {
				return nil, err
			}
			iters = append(iters, it...)
		}
		return iters, nil
	case byName:
		return single(indexName, strings.ToLower(string(v)))
	case byCN:
		return single(indexCN, string(v))
	case byIDPrefix:
		return single(indexID+prefix, string(v))
	case byNamePrefix:
		return single(indexName+prefix, strings.ToLower(string(v)))
	case byNode:
		return single(indexNodeID, string(v))
	case byService:
		return single(indexServiceID, string(v))
	case bySlot:
		// Slot keys are encoded as "<serviceID>\x00<slot>".
		return single(indexSlot, v.serviceID+"\x00"+strconv.FormatUint(uint64(v.slot), 10))
	case byDesiredState:
		return single(indexDesiredState, strconv.FormatInt(int64(v), 10))
	case byRole:
		return single(indexRole, strconv.FormatInt(int64(v), 10))
	case byMembership:
		return single(indexMembership, strconv.FormatInt(int64(v), 10))
	default:
		return nil, ErrInvalidFindBy
	}
}
// find selects a set of objects and calls a callback for each matching object.
func (tx readTx) find(table string, by By, checkType func(By) error, appendResult func(Object)) error {
	// fromResultIterators drains every iterator, deduplicating by object ID
	// since an orCombinator may yield the same object from several indexes.
	fromResultIterators := func(its ...memdb.ResultIterator) {
		ids := make(map[string]struct{})
		for _, it := range its {
			for {
				obj := it.Next()
				if obj == nil {
					break
				}
				o := obj.(Object)
				id := o.ID()
				if _, exists := ids[id]; !exists {
					// Hand out a copy so callers can't mutate stored state.
					appendResult(o.Copy())
					ids[id] = struct{}{}
				}
			}
		}
	}

	iters, err := tx.findIterators(table, by, checkType)
	if err != nil {
		return err
	}

	fromResultIterators(iters...)
	return nil
}
// Save serializes the data in the store by letting each registered object
// storer contribute its objects to a single snapshot message.
func (s *MemoryStore) Save(tx ReadTx) (*pb.StoreSnapshot, error) {
	var snapshot pb.StoreSnapshot
	for _, os := range objectStorers {
		if err := os.Save(tx, &snapshot); err != nil {
			return nil, err
		}
	}

	return &snapshot, nil
}
// Restore sets the contents of the store to the serialized data in the
// argument. The restore runs as a local (non-proposed) transaction since
// snapshot data has already been agreed upon by the cluster.
func (s *MemoryStore) Restore(snapshot *pb.StoreSnapshot) error {
	return s.updateLocal(func(tx Tx) error {
		for _, os := range objectStorers {
			if err := os.Restore(tx, snapshot); err != nil {
				return err
			}
		}
		return nil
	})
}
// WatchQueue returns the publish/subscribe queue on which store change
// events are published.
func (s *MemoryStore) WatchQueue() *watch.Queue {
	return s.queue
}
// ViewAndWatch calls a callback which can observe the state of this
// MemoryStore. It also returns a channel that will return further events from
// this point so the snapshot can be kept up to date. The watch channel must be
// released with watch.StopWatch when it is no longer needed. The channel is
// guaranteed to get all events after the moment of the snapshot, and only
// those events.
func ViewAndWatch(store *MemoryStore, cb func(ReadTx) error, specifiers ...state.Event) (watch chan events.Event, cancel func(), err error) {
	// Using Update to lock the store and guarantee consistency between
	// the watcher and the state seen by the callback. snapshotReadTx
	// exposes this Tx as a ReadTx so the callback can't modify it.
	err = store.Update(func(tx Tx) error {
		if err := cb(tx); err != nil {
			return err
		}
		watch, cancel = state.Watch(store.WatchQueue(), specifiers...)
		return nil
	})
	// If the callback failed after the watch was registered, tear the
	// watch down so the caller never sees a half-initialized result.
	if watch != nil && err != nil {
		cancel()
		cancel = nil
		watch = nil
	}
	return
}
// touchMeta updates an object's timestamps when necessary and bumps the version
// if provided.
func touchMeta(meta *api.Meta, version *api.Version) error {
	// Skip meta update if version is not defined as it means we're applying
	// from raft or restoring from a snapshot.
	if version == nil {
		return nil
	}

	now, err := ptypes.TimestampProto(time.Now())
	if err != nil {
		return err
	}

	meta.Version = *version

	// Set CreatedAt only on first touch; later touches just refresh UpdatedAt.
	if meta.CreatedAt == nil {
		meta.CreatedAt = now
	}
	meta.UpdatedAt = now

	return nil
}
|
}
|
<|file_name|>test-viewport.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {AmpDocSingle} from '../../src/service/ampdoc-impl';
import {
Viewport,
ViewportBindingDef,
ViewportBindingNatural_,
ViewportBindingNaturalIosEmbed_,
parseViewportMeta,
stringifyViewportMeta,
updateViewportMetaString,
} from '../../src/service/viewport-impl';
import {getStyle} from '../../src/style';
import {installPlatformService} from '../../src/service/platform-impl';
import {installTimerService} from '../../src/service/timer-impl';
import {installViewerService} from '../../src/service/viewer-impl';
import {loadPromise} from '../../src/event-helper';
import {setParentWindow} from '../../src/service';
import {toggleExperiment} from '../../src/experiments';
import {vsyncFor} from '../../src/vsync';
import * as sinon from 'sinon';
describe('Viewport', () => {
let sandbox;
let clock;
let viewport;
let binding;
let viewer;
let viewerMock;
let windowApi;
let ampdoc;
let viewerViewportHandler;
let updatedPaddingTop;
let viewportSize;
let vsyncTasks;
beforeEach(() => {
sandbox = sinon.sandbox.create();
clock = sandbox.useFakeTimers();
viewerViewportHandler = undefined;
viewer = {
isEmbedded: () => false,
getPaddingTop: () => 19,
onViewportEvent: handler => {
viewerViewportHandler = handler;
},
requestFullOverlay: () => {},
cancelFullOverlay: () => {},
postScroll: sandbox.spy(),
};
viewerMock = sandbox.mock(viewer);
windowApi = {
document: {
documentElement: {
style: {},
classList: {
add: function() {},
},
},
},
location: {},
navigator: window.navigator,
setTimeout: window.setTimeout,
clearTimeout: window.clearTimeout,
requestAnimationFrame: fn => window.setTimeout(fn, 16),
};
ampdoc = new AmpDocSingle(windowApi);
installTimerService(windowApi);
installPlatformService(windowApi);
installViewerService(windowApi);
binding = new ViewportBindingDef();
viewportSize = {width: 111, height: 222};
binding.getSize = () => {
return {width: viewportSize.width, height: viewportSize.height};
};
binding.getScrollTop = () => 17;
binding.getScrollLeft = () => 0;
updatedPaddingTop = undefined;
binding.updatePaddingTop = paddingTop => updatedPaddingTop = paddingTop;
viewport = new Viewport(ampdoc, binding, viewer);
viewport.fixedLayer_ = {update: () => {
return {then: callback => callback()};
}};
viewport.getSize();
// Use window since Animation by default will use window.
const vsync = vsyncFor(window);
vsyncTasks = [];
sandbox.stub(vsync, 'canAnimate').returns(true);
sandbox.stub(vsync, 'createAnimTask', (unusedContextNode, task) => {
return () => {
vsyncTasks.push(task);
};
});
});
afterEach(() => {
expect(vsyncTasks.length).to.equal(0);
viewerMock.verify();
sandbox.restore();
});
function runVsync() {
const tasks = vsyncTasks.slice(0);
vsyncTasks = [];
tasks.forEach(function(task) {
const state = {};
if (task.measure) {
task.measure(state);
}
task.mutate(state);
});
}
it('should pass through size and scroll', () => {
expect(viewport.getPaddingTop()).to.equal(19);
expect(updatedPaddingTop).to.equal(19);
expect(viewport.getSize().width).to.equal(111);
expect(viewport.getSize().height).to.equal(222);
expect(viewport.getTop()).to.equal(17);
expect(viewport.getRect().left).to.equal(0);
expect(viewport.getRect().top).to.equal(17);
expect(viewport.getRect().width).to.equal(111);
expect(viewport.getRect().height).to.equal(222);
});
it('should cache result for getRect()', () => {
assert.strictEqual(viewport.getRect(), viewport.getRect());
});
it('should invalidate getRect() cache after scroll', () => {
expect(viewport.getRect().top).to.equal(17);
// Scroll vertically.
binding.getScrollTop = () => 44;
viewport.scroll_();
expect(viewport.getRect().top).to.equal(44);
});
it('should invalidate getRect() cache after resize', () => {
expect(viewport.getRect().width).to.equal(111);
// Resize horizontally.
viewportSize.width = 112;
viewport.resize_();
expect(viewport.getRect().width).to.equal(112);
});
it('should not relayout on height resize', () => {
let changeEvent = null;
viewport.onChanged(event => {
changeEvent = event;
});
viewportSize.height = 223;
viewport.resize_();
expect(changeEvent).to.not.equal(null);
expect(changeEvent.relayoutAll).to.equal(false);
expect(changeEvent.velocity).to.equal(0);
});
it('should relayout on width resize', () => {
let changeEvent = null;
viewport.onChanged(event => {
changeEvent = event;
});
viewportSize.width = 112;
viewport.resize_();
expect(changeEvent).to.not.equal(null);
expect(changeEvent.relayoutAll).to.equal(true);
expect(changeEvent.velocity).to.equal(0);
});
it('should defer change event until fixed layer is complete', () => {
let changeEvent = null;
viewport.onChanged(event => {
changeEvent = event;
});
let fixedResolver;
const fixedPromise = new Promise(resolve => fixedResolver = resolve);
viewport.fixedLayer_ = {update: () => fixedPromise};
viewportSize.width = 112;
viewport.resize_();
expect(changeEvent).to.be.null;
fixedResolver();
return fixedPromise.then(() => {
expect(changeEvent).to.not.be.null;
});
});
it('should update padding when changed only', () => {
// Shouldn't call updatePaddingTop since it hasn't changed.
let bindingMock = sandbox.mock(binding);
viewerViewportHandler({paddingTop: 19});
bindingMock.verify();
// Should call updatePaddingTop.
bindingMock = sandbox.mock(binding);
viewport.fixedLayer_ = {updatePaddingTop: () => {}};
bindingMock.expects('updatePaddingTop').withArgs(0, true, 19).once();
viewerViewportHandler({paddingTop: 0});
bindingMock.verify();
});
it('should update padding for fixed layer', () => {
// Should call updatePaddingTop.
const bindingMock = sandbox.mock(binding);
bindingMock.expects('updatePaddingTop').withArgs(0, true, 19).once();
viewport.fixedLayer_ = {updatePaddingTop: () => {}};
const fixedLayerMock = sandbox.mock(viewport.fixedLayer_);
fixedLayerMock.expects('updatePaddingTop').withArgs(0).once();
viewerViewportHandler({paddingTop: 0});
bindingMock.verify();
fixedLayerMock.verify();
});
it('should update viewport when entering lightbox mode', () => {
viewport.vsync_ = {mutate: callback => callback()};
viewerMock.expects('requestFullOverlay').once();
const disableTouchZoomStub = sandbox.stub(viewport, 'disableTouchZoom');
const hideFixedLayerStub = sandbox.stub(viewport, 'hideFixedLayer');
const bindingMock = sandbox.mock(binding);
bindingMock.expects('updateLightboxMode').withArgs(true).once();
viewport.enterLightboxMode();
bindingMock.verify();
expect(disableTouchZoomStub.callCount).to.equal(1);
expect(hideFixedLayerStub.callCount).to.equal(1);
});
it('should update viewport when leaving lightbox mode', () => {
viewport.vsync_ = {mutate: callback => callback()};
viewerMock.expects('cancelFullOverlay').once();
const restoreOriginalTouchZoomStub = sandbox.stub(viewport,
'restoreOriginalTouchZoom');
const showFixedLayerStub = sandbox.stub(viewport, 'showFixedLayer');
const bindingMock = sandbox.mock(binding);
bindingMock.expects('updateLightboxMode').withArgs(false).once();
viewport.leaveLightboxMode();
bindingMock.verify();
expect(restoreOriginalTouchZoomStub.callCount).to.equal(1);
expect(showFixedLayerStub.callCount).to.equal(1);
});
it('should call binding.updateViewerViewport', () => {
const bindingMock = sandbox.mock(binding);
bindingMock.expects('updateViewerViewport').once();
viewerViewportHandler({paddingTop: 19});
bindingMock.verify();
});
it('should send scroll events', () => {
// 0 -> 6 -> 12 -> 16 -> 18
// scroll-10 scroll-20 scroll-30 2nd anim frame scroll-40
// when there's no scroll
expect(viewport.scrollAnimationFrameThrottled_).to.be.false;
expect(viewer.postScroll.callCount).to.equal(0);
// scroll to 10
viewport.getScrollTop = () => 10;
viewport.sendScrollMessage_();
expect(viewport.scrollAnimationFrameThrottled_).to.be.true;
expect(viewer.postScroll.callCount).to.equal(0);
// 6 ticks later, still during first animation frame
clock.tick(6);
expect(viewport.scrollAnimationFrameThrottled_).to.be.true;
// scroll to 20
viewport.getScrollTop = () => 20;
viewport.sendScrollMessage_();
expect(viewport.scrollAnimationFrameThrottled_).to.be.true;
expect(viewer.postScroll.callCount).to.equal(0);
// 6 ticks later, still during first animation frame
clock.tick(6);
expect(viewport.scrollAnimationFrameThrottled_).to.be.true;
// scroll to 30
viewport.getScrollTop = () => 30;
viewport.sendScrollMessage_();
expect(viewport.scrollAnimationFrameThrottled_).to.be.true;
expect(viewer.postScroll.callCount).to.equal(0);
// 6 ticks later, second animation frame starts
clock.tick(6);
expect(viewport.scrollAnimationFrameThrottled_).to.be.false;
expect(viewer.postScroll.callCount).to.equal(1);
expect(viewer.postScroll.withArgs(30).calledOnce).to.be.true;
// scroll to 40
viewport.getScrollTop = () => 40;
viewport.sendScrollMessage_();
expect(viewport.scrollAnimationFrameThrottled_).to.be.true;
expect(viewer.postScroll.callCount).to.equal(1);
});
// Verifies the two scroll cadences: viewer.postScroll is rAF-throttled
// (~16ms ticks), while the onChanged event (throttledScroll_) is deferred
// until scrolling settles (36-tick delay) and carries a velocity estimate.
it('should defer scroll events', () => {
  let changeEvent = null;
  let eventCount = 0;
  viewport.onChanged(event => {
    changeEvent = event;
    eventCount++;
  });
  // when there's no scroll
  expect(viewport.scrollTracking_).to.be.false;
  // expect(changeEvent).to.equal(null);
  expect(viewer.postScroll.callCount).to.equal(0);
  // time 0: scroll to 34
  // raf for viewer.postScroll, delay 36 ticks till raf for throttledScroll_
  binding.getScrollTop = () => 34;
  viewport.scroll_();
  expect(viewport.scrollTracking_).to.be.true;
  // Repeated scroll_() calls within one frame must coalesce (no extra events).
  viewport.scroll_();
  viewport.scroll_();
  expect(changeEvent).to.equal(null);
  expect(viewport.scrollTracking_).to.be.true;
  clock.tick(8);
  expect(changeEvent).to.equal(null);
  clock.tick(8);
  // time 16: scroll to 35
  // call viewer.postScroll, raf for viewer.postScroll
  expect(changeEvent).to.equal(null);
  expect(viewer.postScroll.callCount).to.equal(1);
  binding.getScrollTop = () => 35;
  viewport.scroll_();
  clock.tick(16);
  // time 32: scroll to 35
  // call viewer.postScroll, raf for viewer.postScroll
  viewport.scroll_();
  expect(changeEvent).to.equal(null);
  expect(viewport.scrollTracking_).to.be.true;
  expect(viewer.postScroll.callCount).to.equal(2);
  // time 36:
  // raf for throttledScroll_
  clock.tick(16);
  // time 48: scroll to 35
  // call viewer.postScroll, call throttledScroll_
  // raf for viewer.postScroll
  // delay 36 ticks till raf for throttledScroll_
  expect(viewport.scrollTracking_).to.be.false;
  viewport.scroll_();
  expect(changeEvent).to.not.equal(null);
  expect(changeEvent.relayoutAll).to.equal(false);
  // velocity ~= (35 - 34) / 48ms
  expect(changeEvent.velocity).to.be.closeTo(0.020833, 1e-4);
  expect(eventCount).to.equal(1);
  expect(viewport.scrollTracking_).to.be.true;
  expect(viewer.postScroll.callCount).to.equal(3);
  changeEvent = null;
  clock.tick(16);
  // time 64:
  // call viewer.postScroll
  expect(viewer.postScroll.callCount).to.equal(4);
  clock.tick(20);
  // time 84:
  // raf for throttledScroll_
  clock.tick(16);
  // time 100:
  // call throttledScroll_
  // Position unchanged since last event, so velocity must settle to 0.
  expect(changeEvent).to.not.equal(null);
  expect(changeEvent.relayoutAll).to.equal(false);
  expect(viewport.scrollTracking_).to.be.false;
  expect(changeEvent.velocity).to.be.equal(0);
  expect(eventCount).to.equal(2);
});
// Delegation tests: Viewport forwards scroll reads/writes to the binding,
// feeds the cached scroll position into getLayoutRect, and applies the
// pan-y / make-body-block experiments at construction time.
it('should update scroll pos and reset cache', () => {
  const bindingMock = sandbox.mock(binding);
  bindingMock.expects('setScrollTop').withArgs(117).once();
  viewport.setScrollTop(117);
  // Cached scrollTop_ is invalidated so the next read hits the binding.
  expect(viewport./*OK*/scrollTop_).to.be.null;
});
it('should change scrollTop for scrollIntoView and respect padding', () => {
  const element = document.createElement('div');
  const bindingMock = sandbox.mock(binding);
  bindingMock.expects('getLayoutRect').withArgs(element)
      .returns({top: 111}).once();
  // Target position = element top minus the viewer padding (19px).
  bindingMock.expects('setScrollTop').withArgs(111 - /* padding */ 19).once();
  viewport.scrollIntoView(element);
  bindingMock.verify();
});
it('should change scrollTop for animateScrollIntoView and respect ' +
    'padding', () => {
  const element = document.createElement('div');
  const bindingMock = sandbox.mock(binding);
  bindingMock.expects('getLayoutRect').withArgs(element)
      .returns({top: 111}).once();
  bindingMock.expects('setScrollTop').withArgs(111 - /* padding */ 19).once();
  const duration = 1000;
  // Drive the animation to completion with fake timers + manual vsync.
  const promise = viewport.animateScrollIntoView(element, 1000).then(() => {
    bindingMock.verify();
  });
  clock.tick(duration);
  runVsync();
  return promise;
});
it('should not change scrollTop for animateScrollIntoView', () => {
  const element = document.createElement('div');
  const bindingMock = sandbox.mock(binding);
  bindingMock.expects('getLayoutRect').withArgs(element)
      .returns({top: 111}).once();
  viewport.paddingTop_ = 0;
  // Already at the target position: no setScrollTop call is expected.
  sandbox.stub(viewport, 'getScrollTop').returns(111);
  bindingMock.expects('setScrollTop').withArgs(111).never();
  const duration = 1000;
  const promise = viewport.animateScrollIntoView(element, 1000).then(() => {
    bindingMock.verify();
  });
  clock.tick(duration);
  runVsync();
  return promise;
});
it('should send cached scroll pos to getLayoutRect', () => {
  const element = document.createElement('div');
  const bindingMock = sandbox.mock(binding);
  viewport.scrollTop_ = 111;
  viewport.scrollLeft_ = 222;
  bindingMock.expects('getLayoutRect').withArgs(element, 222, 111)
      .returns('sentinel').once();
  expect(viewport.getLayoutRect(element)).to.equal('sentinel');
});
it('should deletegate scrollWidth', () => {
  const bindingMock = sandbox.mock(binding);
  bindingMock.expects('getScrollWidth').withArgs().returns(111).once();
  expect(viewport.getScrollWidth()).to.equal(111);
});
it('should deletegate scrollHeight', () => {
  const bindingMock = sandbox.mock(binding);
  bindingMock.expects('getScrollHeight').withArgs().returns(117).once();
  expect(viewport.getScrollHeight()).to.equal(117);
});
it('should not set pan-y w/o experiment', () => {
  // TODO(dvoytenko, #4894): Cleanup the experiment.
  viewer.isEmbedded = () => true;
  toggleExperiment(windowApi, 'pan-y', false);
  viewport = new Viewport(ampdoc, binding, viewer);
  expect(windowApi.document.documentElement.style['touch-action'])
      .to.not.exist;
});
it('should not set pan-y when not embedded', () => {
  // TODO(dvoytenko, #4894): Cleanup the experiment.
  viewer.isEmbedded = () => false;
  toggleExperiment(windowApi, 'pan-y', true);
  viewport = new Viewport(ampdoc, binding, viewer);
  expect(windowApi.document.documentElement.style['touch-action'])
      .to.not.exist;
});
it('should set pan-y with experiment', () => {
  // TODO(dvoytenko, #4894): Cleanup the experiment.
  // pan-y is applied only when BOTH embedded and the experiment is on.
  viewer.isEmbedded = () => true;
  toggleExperiment(windowApi, 'pan-y', true);
  viewport = new Viewport(ampdoc, binding, viewer);
  expect(windowApi.document.documentElement.style['touch-action'])
      .to.equal('pan-y');
});
it('should add class to HTML element with make-body-block experiment', () => {
  viewer.isEmbedded = () => true;
  toggleExperiment(windowApi, 'make-body-block', true);
  const docElement = windowApi.document.documentElement;
  const addStub = sandbox.stub(docElement.classList, 'add');
  viewport = new Viewport(ampdoc, binding, viewer);
  expect(addStub).to.be.calledWith('-amp-make-body-block');
});
// Rect computation for elements inside a child (friendly) iframe: the child
// element's own rect must be offset by the iframe's layout rect in the
// parent document, using the parent's cached scroll position.
describe('for child window', () => {
  let viewport;
  let bindingMock;
  let iframe;
  let iframeWin;
  let ampdoc;

  beforeEach(() => {
    ampdoc = new AmpDocSingle(window);
    viewport = new Viewport(ampdoc, binding, viewer);
    bindingMock = sandbox.mock(binding);
    iframe = document.createElement('iframe');
    const html = '<div id="one"></div>';
    let promise;
    // srcdoc is not supported everywhere; fall back to document.write.
    if ('srcdoc' in iframe) {
      iframe.srcdoc = html;
      promise = loadPromise(iframe);
      document.body.appendChild(iframe);
    } else {
      iframe.src = 'about:blank';
      document.body.appendChild(iframe);
      const childDoc = iframe.contentWindow.document;
      childDoc.open();
      childDoc.write(html);
      childDoc.close();
      promise = Promise.resolve();
    }
    return promise.then(() => {
      iframeWin = iframe.contentWindow;
      setParentWindow(iframeWin, window);
    });
  });

  afterEach(() => {
    if (iframe.parentElement) {
      iframe.parentElement.removeChild(iframe);
    }
    bindingMock.verify();
  });

  it('should calculate child window element rect via parent', () => {
    viewport.scrollLeft_ = 0;
    viewport.scrollTop_ = 0;
    const element = iframeWin.document.createElement('div');
    iframeWin.document.body.appendChild(element);
    bindingMock.expects('getLayoutRect')
        .withExactArgs(element, 0, 0)
        .returns({left: 20, top: 10}).once();
    bindingMock.expects('getLayoutRect')
        .withExactArgs(iframe, 0, 0)
        .returns({left: 211, top: 111}).once();
    // Child rect is translated by the iframe's rect in the parent.
    const rect = viewport.getLayoutRect(element);
    expect(rect.left).to.equal(211 + 20);
    expect(rect.top).to.equal(111 + 10);
  });

  it('should offset child window element with parent scroll pos', () => {
    viewport.scrollLeft_ = 200;
    viewport.scrollTop_ = 100;
    const element = iframeWin.document.createElement('div');
    iframeWin.document.body.appendChild(element);
    // The element itself is measured with zero scroll; only the iframe's
    // rect lookup receives the parent's cached scroll position.
    bindingMock.expects('getLayoutRect')
        .withExactArgs(element, 0, 0)
        .returns({left: 20, top: 10}).once();
    bindingMock.expects('getLayoutRect')
        .withExactArgs(iframe, 200, 100)
        .returns({left: 211, top: 111}).once();
    const rect = viewport.getLayoutRect(element);
    expect(rect.left).to.equal(211 + 20);
    expect(rect.top).to.equal(111 + 10);
  });
});
});
// Tests for the viewport <meta> helpers: parsing/serializing the content
// attribute, merging updates into it, and the TouchZoom enable/disable
// logic that rewrites the meta tag.
describe('Viewport META', () => {

  // parseViewportMeta: content string -> key/value map.
  describe('parseViewportMeta', () => {
    it('should accept null or empty strings', () => {
      expect(parseViewportMeta(null)).to.be.empty;
    });
    it('should parse single key-value', () => {
      expect(parseViewportMeta('width=device-width')).to.deep.equal({
        'width': 'device-width',
      });
    });
    it('should parse two key-values', () => {
      expect(parseViewportMeta('width=device-width,minimum-scale=1')).to.deep
          .equal({
            'width': 'device-width',
            'minimum-scale': '1',
          });
    });
    it('should parse empty value', () => {
      // Keys without "=value" (e.g. minimal-ui) map to the empty string.
      expect(parseViewportMeta('width=device-width,minimal-ui')).to.deep.equal({
        'width': 'device-width',
        'minimal-ui': '',
      });
      expect(parseViewportMeta('minimal-ui,width=device-width')).to.deep.equal({
        'width': 'device-width',
        'minimal-ui': '',
      });
    });
    it('should return last dupe', () => {
      expect(parseViewportMeta('width=100,width=200')).to.deep.equal({
        'width': '200',
      });
    });
    it('should ignore extra delims', () => {
      expect(parseViewportMeta(',,,width=device-width,,,,minimum-scale=1,,,'))
          .to.deep.equal({
            'width': 'device-width',
            'minimum-scale': '1',
          });
    });
    it('should support semicolon', () => {
      expect(parseViewportMeta('width=device-width;minimum-scale=1'))
          .to.deep.equal({
            'width': 'device-width',
            'minimum-scale': '1',
          });
    });
    it('should support mix of comma and semicolon', () => {
      expect(parseViewportMeta('width=device-width,minimum-scale=1;test=3;'))
          .to.deep.equal({
            'width': 'device-width',
            'minimum-scale': '1',
            'test': '3',
          });
    });
    it('should ignore extra mix delims', () => {
      expect(parseViewportMeta(',,;;,width=device-width;;,minimum-scale=1,,;'))
          .to.deep.equal({
            'width': 'device-width',
            'minimum-scale': '1',
          });
    });
  });

  // stringifyViewportMeta: key/value map -> content string. Key order is
  // not guaranteed, so tests accept either ordering.
  describe('stringifyViewportMeta', () => {
    it('should stringify empty', () => {
      expect(stringifyViewportMeta({})).to.equal('');
    });
    it('should stringify single key-value', () => {
      expect(stringifyViewportMeta({'width': 'device-width'}))
          .to.equal('width=device-width');
    });
    it('should stringify two key-values', () => {
      const res = stringifyViewportMeta({
        'width': 'device-width',
        'minimum-scale': '1',
      });
      expect(res == 'width=device-width,minimum-scale=1' ||
          res == 'minimum-scale=1,width=device-width')
          .to.be.true;
    });
    it('should stringify empty values', () => {
      const res = stringifyViewportMeta({
        'width': 'device-width',
        'minimal-ui': '',
      });
      expect(res == 'width=device-width,minimal-ui' ||
          res == 'minimal-ui,width=device-width')
          .to.be.true;
    });
  });

  // updateViewportMetaString: merge updates into an existing content string;
  // `undefined` values delete keys; unchanged input is returned as-is.
  describe('updateViewportMetaString', () => {
    it('should do nothing with empty values', () => {
      expect(updateViewportMetaString(
          '', {})).to.equal('');
      expect(updateViewportMetaString(
          'width=device-width', {})).to.equal('width=device-width');
    });
    it('should add a new value', () => {
      expect(updateViewportMetaString(
          '', {'minimum-scale': '1'})).to.equal('minimum-scale=1');
      expect(parseViewportMeta(updateViewportMetaString(
          'width=device-width', {'minimum-scale': '1'})))
          .to.deep.equal({
            'width': 'device-width',
            'minimum-scale': '1',
          });
    });
    it('should replace the existing value', () => {
      expect(parseViewportMeta(updateViewportMetaString(
          'width=device-width,minimum-scale=2', {'minimum-scale': '1'})))
          .to.deep.equal({
            'width': 'device-width',
            'minimum-scale': '1',
          });
    });
    it('should delete the existing value', () => {
      expect(parseViewportMeta(updateViewportMetaString(
          'width=device-width,minimum-scale=1', {'minimum-scale': undefined})))
          .to.deep.equal({
            'width': 'device-width',
          });
    });
    it('should ignore delete for a non-existing value', () => {
      expect(parseViewportMeta(updateViewportMetaString(
          'width=device-width', {'minimum-scale': undefined})))
          .to.deep.equal({
            'width': 'device-width',
          });
    });
    it('should do nothing if values did not change', () => {
      expect(updateViewportMetaString(
          'width=device-width,minimum-scale=1', {'minimum-scale': '1'}))
          .to.equal('width=device-width,minimum-scale=1');
    });
  });

  // TouchZoom: disabling/restoring pinch-zoom by rewriting the viewport
  // meta's content attribute. The meta tag is faked with a spied setter so
  // every write can be counted.
  describe('TouchZoom', () => {
    let sandbox;
    let clock;
    let viewport;
    let binding;
    let viewer;
    let viewerMock;
    let windowApi;
    let ampdoc;
    let originalViewportMetaString, viewportMetaString;
    let viewportMeta;
    let viewportMetaSetter;

    beforeEach(() => {
      sandbox = sinon.sandbox.create();
      clock = sandbox.useFakeTimers();
      viewer = {
        isEmbedded: () => false,
        getPaddingTop: () => 0,
        onViewportEvent: () => {},
        isIframed: () => false,
      };
      viewerMock = sandbox.mock(viewer);

      originalViewportMetaString = 'width=device-width,minimum-scale=1';
      viewportMetaString = originalViewportMetaString;
      viewportMeta = Object.create(null);
      viewportMetaSetter = sandbox.spy();
      // Spy on writes to meta.content while keeping read/write semantics.
      Object.defineProperty(viewportMeta, 'content', {
        get: () => viewportMetaString,
        set: value => {
          viewportMetaSetter(value);
          viewportMetaString = value;
        },
      });
      windowApi = {
        document: {
          documentElement: {
            style: {},
            classList: {
              add: function() {},
            },
          },
          querySelector: selector => {
            if (selector == 'meta[name=viewport]') {
              return viewportMeta;
            }
            return undefined;
          },
        },
        navigator: window.navigator,
        setTimeout: window.setTimeout,
        clearTimeout: window.clearTimeout,
        location: {},
      };
      ampdoc = new AmpDocSingle(windowApi);
      installTimerService(windowApi);
      installPlatformService(windowApi);
      installViewerService(windowApi);
      binding = new ViewportBindingDef();
      viewport = new Viewport(ampdoc, binding, viewer);
    });

    afterEach(() => {
      sandbox.restore();
    });

    it('should initialize original viewport meta', () => {
      viewport.getViewportMeta_();
      expect(viewport.originalViewportMetaString_).to.equal(viewportMetaString);
      expect(viewportMetaSetter.callCount).to.equal(0);
    });

    it('should disable TouchZoom', () => {
      viewport.disableTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(1);
      expect(viewportMetaString).to.have.string('maximum-scale=1');
      expect(viewportMetaString).to.have.string('user-scalable=no');
    });

    it('should ignore disable TouchZoom if already disabled', () => {
      viewportMetaString = 'width=device-width,minimum-scale=1,' +
          'maximum-scale=1,user-scalable=no';
      viewport.disableTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(0);
    });

    it('should ignore disable TouchZoom if embedded', () => {
      viewerMock.expects('isIframed').returns(true).atLeast(1);
      viewport.disableTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(0);
    });

    it('should restore TouchZoom', () => {
      viewport.disableTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(1);
      expect(viewportMetaString).to.have.string('maximum-scale=1');
      expect(viewportMetaString).to.have.string('user-scalable=no');
      viewport.restoreOriginalTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(2);
      expect(viewportMetaString).to.equal(originalViewportMetaString);
    });

    it('should reset TouchZoom; zooming state unknown', () => {
      // Reset disables zoom, then restores the original meta after a delay.
      viewport.resetTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(1);
      expect(viewportMetaString).to.have.string('maximum-scale=1');
      expect(viewportMetaString).to.have.string('user-scalable=no');
      clock.tick(1000);
      expect(viewportMetaSetter.callCount).to.equal(2);
      expect(viewportMetaString).to.equal(originalViewportMetaString);
    });

    it('should ignore reset TouchZoom if not currently zoomed', () => {
      // clientHeight == innerHeight implies no active pinch zoom.
      windowApi.document.documentElement.clientHeight = 500;
      windowApi.innerHeight = 500;
      viewport.resetTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(0);
    });

    it('should proceed with reset TouchZoom if currently zoomed', () => {
      windowApi.document.documentElement.clientHeight = 500;
      windowApi.innerHeight = 300;
      viewport.resetTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(1);
    });

    it('should ignore reset TouchZoom if embedded', () => {
      viewerMock.expects('isIframed').returns(true).atLeast(1);
      viewport.resetTouchZoom();
      expect(viewportMetaSetter.callCount).to.equal(0);
    });
  });
});
// Tests for ViewportBindingNatural_: the binding used in a normal
// (non-iOS-embed) document, backed by window/document.scrollingElement.
describe('ViewportBindingNatural', () => {
  let sandbox;
  let windowMock;
  let binding;
  let windowApi;
  let documentElement;
  let documentBody;
  let windowEventHandlers;
  let viewer;
  let viewerMock;

  beforeEach(() => {
    sandbox = sinon.sandbox.create();
    const WindowApi = function() {};
    windowEventHandlers = {};
    // Capture window event subscriptions by type for later assertions.
    WindowApi.prototype.addEventListener = function(eventType, handler) {
      windowEventHandlers[eventType] = handler;
    };
    windowApi = new WindowApi();

    documentElement = {
      style: {},
    };
    documentBody = {
      style: {},
    };
    windowApi.document = {
      documentElement,
      body: documentBody,
      defaultView: windowApi,
    };
    windowApi.navigator = {userAgent: ''};
    windowMock = sandbox.mock(windowApi);
    installPlatformService(windowApi);
    viewer = {
      isEmbedded: () => false,
      getPaddingTop: () => 19,
      onViewportEvent: () => {},
      requestFullOverlay: () => {},
      cancelFullOverlay: () => {},
      postScroll: sandbox.spy(),
    };
    viewerMock = sandbox.mock(viewer);
    binding = new ViewportBindingNatural_(windowApi, viewer);
  });

  afterEach(() => {
    windowMock.verify();
    viewerMock.verify();
    sandbox.restore();
  });

  it('should setup overflow:visible on body', () => {
    expect(documentBody.style.overflow).to.equal('visible');
  });

  it('should NOT require fixed layer transferring', () => {
    expect(binding.requiresFixedLayerTransfer()).to.be.false;
  });

  it('should subscribe to scroll and resize events', () => {
    expect(windowEventHandlers['scroll']).to.not.equal(undefined);
    expect(windowEventHandlers['resize']).to.not.equal(undefined);
  });

  it('should update padding', () => {
    windowApi.document = {
      documentElement: {style: {}},
    };
    binding.updatePaddingTop(31);
    expect(windowApi.document.documentElement.style.paddingTop).to
        .equal('31px');
  });

  it('should calculate size', () => {
    // Prefer window.innerWidth/innerHeight; fall back to the
    // documentElement client size when they are absent.
    windowApi.innerWidth = 111;
    windowApi.innerHeight = 222;
    windowApi.document = {
      documentElement: {
        clientWidth: 333,
        clientHeight: 444,
      },
    };
    let size = binding.getSize();
    expect(size.width).to.equal(111);
    expect(size.height).to.equal(222);

    delete windowApi.innerWidth;
    delete windowApi.innerHeight;
    size = binding.getSize();
    expect(size.width).to.equal(333);
    expect(size.height).to.equal(444);
  });

  it('should calculate scrollTop from scrollElement', () => {
    windowApi.pageYOffset = 11;
    windowApi.document = {
      scrollingElement: {
        scrollTop: 17,
      },
    };
    expect(binding.getScrollTop()).to.equal(17);
  });

  it('should calculate scrollWidth from scrollElement', () => {
    windowApi.pageYOffset = 11;
    windowApi.document = {
      scrollingElement: {
        scrollWidth: 117,
      },
    };
    expect(binding.getScrollWidth()).to.equal(117);
  });

  it('should calculate scrollHeight from scrollElement', () => {
    windowApi.pageYOffset = 11;
    windowApi.document = {
      scrollingElement: {
        scrollHeight: 119,
      },
    };
    expect(binding.getScrollHeight()).to.equal(119);
  });

  it('should update scrollTop on scrollElement', () => {
    windowApi.pageYOffset = 11;
    windowApi.document = {
      scrollingElement: {
        scrollTop: 17,
      },
    };
    binding.setScrollTop(21);
    expect(windowApi.document.scrollingElement./*OK*/scrollTop).to.equal(21);
  });

  it('should fallback scrollTop to pageYOffset', () => {
    // scrollingElement present but without scrollTop: use pageYOffset.
    windowApi.pageYOffset = 11;
    windowApi.document = {scrollingElement: {}};
    expect(binding.getScrollTop()).to.equal(11);
  });

  it('should offset client rect for layout', () => {
    windowApi.pageXOffset = 100;
    windowApi.pageYOffset = 200;
    windowApi.document = {scrollingElement: {}};
    const el = {
      getBoundingClientRect: () => {
        return {left: 11.5, top: 12.5, width: 13.5, height: 14.5};
      },
    };
    const rect = binding.getLayoutRect(el);
    expect(rect.left).to.equal(112);  // round(100 + 11.5)
    expect(rect.top).to.equal(213);  // round(200 + 12.5)
    expect(rect.width).to.equal(14);  // round(13.5)
    expect(rect.height).to.equal(15);  // round(14.5)
  });

  it('should offset client rect for layout and position passed in', () => {
    // Explicit scroll positions passed to getLayoutRect take precedence
    // over the window's own pageXOffset/pageYOffset.
    windowApi.pageXOffset = 1000;
    windowApi.pageYOffset = 2000;
    windowApi.document = {scrollingElement: {}};
    const el = {
      getBoundingClientRect: () => {
        return {left: 11.5, top: 12.5, width: 13.5, height: 14.5};
      },
    };
    const rect = binding.getLayoutRect(el, 100, 200);
    expect(rect.left).to.equal(112);  // round(100 + 11.5)
    expect(rect.top).to.equal(213);  // round(200 + 12.5)
    expect(rect.width).to.equal(14);  // round(13.5)
    expect(rect.height).to.equal(15);  // round(14.5)
  });
});
// Tests for ViewportBindingNaturalIosEmbed_: the iOS embed binding, which
// makes BODY the scrolling element and inserts three hidden marker children
// (-amp-scrollpos, -amp-scrollmove, -amp-endpos) to read and set the scroll
// position via their bounding rects.
describe('ViewportBindingNaturalIosEmbed', () => {
  let sandbox;
  let windowMock;
  let binding;
  let windowApi;
  let windowEventHandlers;
  let bodyEventListeners;
  let bodyChildren;

  beforeEach(() => {
    sandbox = sinon.sandbox.create();
    const WindowApi = function() {};
    windowEventHandlers = {};
    bodyEventListeners = {};
    bodyChildren = [];
    WindowApi.prototype.addEventListener = function(eventType, handler) {
      windowEventHandlers[eventType] = handler;
    };
    windowApi = new WindowApi();
    windowApi.innerWidth = 555;
    windowApi.document = {
      readyState: 'complete',
      documentElement: {style: {}},
      body: {
        scrollWidth: 777,
        scrollHeight: 999,
        style: {},
        // Collect appended marker elements for inspection in tests.
        appendChild: child => {
          bodyChildren.push(child);
        },
        addEventListener: (eventType, handler) => {
          bodyEventListeners[eventType] = handler;
        },
      },
      createElement: tagName => {
        return {
          tagName,
          id: '',
          style: {},
          scrollIntoView: sandbox.spy(),
        };
      },
    };
    windowMock = sandbox.mock(windowApi);
    binding = new ViewportBindingNaturalIosEmbed_(windowApi);
    return Promise.resolve();
  });

  afterEach(() => {
    windowMock.verify();
    sandbox.restore();
  });

  it('should require fixed layer transferring', () => {
    expect(binding.requiresFixedLayerTransfer()).to.be.true;
  });

  it('should subscribe to resize events on window, scroll on body', () => {
    // In this mode BODY scrolls, not the window.
    expect(windowEventHandlers['resize']).to.not.equal(undefined);
    expect(windowEventHandlers['scroll']).to.equal(undefined);
    expect(bodyEventListeners['scroll']).to.not.equal(undefined);
  });

  it('should always have scrollWidth equal window.innerWidth', () => {
    expect(binding.getScrollWidth()).to.equal(555);
  });

  it('should setup document for embed scrolling', () => {
    const documentElement = windowApi.document.documentElement;
    const body = windowApi.document.body;
    expect(documentElement.style.overflowY).to.equal('auto');
    expect(documentElement.style.webkitOverflowScrolling).to.equal('touch');
    expect(body.style.overflowX).to.equal('hidden');
    expect(body.style.overflowY).to.equal('auto');
    expect(body.style.webkitOverflowScrolling).to.equal('touch');
    expect(body.style.position).to.equal('absolute');
    expect(body.style.top).to.equal(0);
    expect(body.style.left).to.equal(0);
    expect(body.style.right).to.equal(0);
    expect(body.style.bottom).to.equal(0);

    // Three hidden marker children: scrollpos (top), scrollmove (scroll
    // setter), endpos (content end, for scrollHeight).
    expect(bodyChildren.length).to.equal(3);

    expect(bodyChildren[0].id).to.equal('-amp-scrollpos');
    expect(bodyChildren[0].style.position).to.equal('absolute');
    expect(bodyChildren[0].style.top).to.equal(0);
    expect(bodyChildren[0].style.left).to.equal(0);
    expect(bodyChildren[0].style.width).to.equal(0);
    expect(bodyChildren[0].style.height).to.equal(0);
    expect(bodyChildren[0].style.visibility).to.equal('hidden');

    expect(bodyChildren[1].id).to.equal('-amp-scrollmove');
    expect(bodyChildren[1].style.position).to.equal('absolute');
    expect(bodyChildren[1].style.top).to.equal(0);
    expect(bodyChildren[1].style.left).to.equal(0);
    expect(bodyChildren[1].style.width).to.equal(0);
    expect(bodyChildren[1].style.height).to.equal(0);
    expect(bodyChildren[1].style.visibility).to.equal('hidden');

    expect(bodyChildren[2].id).to.equal('-amp-endpos');
    expect(bodyChildren[2].style.position).to.be.undefined;
    expect(bodyChildren[2].style.top).to.be.undefined;
    expect(bodyChildren[2].style.width).to.equal(0);
    expect(bodyChildren[2].style.height).to.equal(0);
    expect(bodyChildren[2].style.visibility).to.equal('hidden');
  });

  it('should update border on BODY', () => {
    // Padding is implemented as a transparent top border on BODY.
    windowApi.document = {
      body: {style: {}},
    };
    binding.updatePaddingTop(31);
    expect(windowApi.document.body.style.borderTop).to
        .equal('31px solid transparent');
  });

  it('should update border in lightbox mode', () => {
    windowApi.document = {
      body: {style: {}},
    };
    binding.updatePaddingTop(31);
    expect(windowApi.document.body.style.borderTop).to
        .equal('31px solid transparent');
    expect(windowApi.document.body.style.borderTopStyle).to.be.undefined;

    // Lightbox hides the border; leaving lightbox restores only the top.
    binding.updateLightboxMode(true);
    expect(windowApi.document.body.style.borderTopStyle).to.equal('none');

    binding.updateLightboxMode(false);
    expect(windowApi.document.body.style.borderTopStyle).to.equal('solid');
    expect(windowApi.document.body.style.borderBottomStyle).to.not.equal(
        'solid');
    expect(windowApi.document.body.style.borderLeftStyle).to.not.equal('solid');
    expect(windowApi.document.body.style.borderRightStyle).to.not.equal(
        'solid');
  });

  it('should calculate size', () => {
    windowApi.innerWidth = 111;
    windowApi.innerHeight = 222;
    const size = binding.getSize();
    expect(size.width).to.equal(111);
    expect(size.height).to.equal(222);
  });

  it('should calculate scrollTop from scrollpos element', () => {
    bodyChildren[0].getBoundingClientRect = () => {
      return {top: -17, left: -11};
    };
    binding.onScrolled_();
    expect(binding.getScrollTop()).to.equal(17);
  });

  it('should calculate scrollTop from scrollpos element with padding', () => {
    bodyChildren[0].getBoundingClientRect = () => {
      return {top: 0, left: -11};
    };
    binding.updatePaddingTop(10);
    binding.onScrolled_();
    // scrollTop = - BCR.top + paddingTop
    expect(binding.getScrollTop()).to.equal(10);
  });

  it('should calculate scrollHeight from scrollpos/endpos elements', () => {
    bodyChildren[0].getBoundingClientRect = () => {
      return {top: -17, left: -11};
    };
    bodyChildren[2].getBoundingClientRect = () => {
      return {top: 100, left: -11};
    };
    // scrollHeight = endpos.top - scrollpos.top
    expect(binding.getScrollHeight()).to.equal(117);
  });

  it('should offset client rect for layout', () => {
    bodyChildren[0].getBoundingClientRect = () => {
      return {top: -200, left: -100};
    };
    binding.onScrolled_();
    const el = {
      getBoundingClientRect: () => {
        return {left: 11.5, top: 12.5, width: 13.5, height: 14.5};
      },
    };
    const rect = binding.getLayoutRect(el);
    expect(rect.left).to.equal(112);  // round(100 + 11.5)
    expect(rect.top).to.equal(213);  // round(200 + 12.5)
    expect(rect.width).to.equal(14);  // round(13.5)
    expect(rect.height).to.equal(15);  // round(14.5)
  });

  it('should set scroll position via moving element', () => {
    // setScrollTop translates the scrollmove marker and scrollIntoView's it.
    const moveEl = bodyChildren[1];
    binding.setScrollTop(10);
    expect(getStyle(moveEl, 'transform')).to.equal('translateY(10px)');
    expect(moveEl.scrollIntoView.callCount).to.equal(1);
    expect(moveEl.scrollIntoView.firstCall.args[0]).to.equal(true);
  });

  it('should set scroll position via moving element with padding', () => {
    binding.updatePaddingTop(19);
    const moveEl = bodyChildren[1];
    binding.setScrollTop(10);
    // transform = scrollTop - paddingTop
    expect(getStyle(moveEl, 'transform')).to.equal('translateY(-9px)');
    expect(moveEl.scrollIntoView.callCount).to.equal(1);
    expect(moveEl.scrollIntoView.firstCall.args[0]).to.equal(true);
  });

  it('should adjust scroll position when scrolled to 0', () => {
    // At exactly 0, the binding nudges to 1px to keep iOS momentum alive.
    const posEl = bodyChildren[0];
    posEl.getBoundingClientRect = () => {return {top: 0, left: 0};};
    const moveEl = bodyChildren[1];
    const event = {preventDefault: sandbox.spy()};
    binding.adjustScrollPos_(event);
    expect(getStyle(moveEl, 'transform')).to.equal('translateY(1px)');
    expect(moveEl.scrollIntoView.callCount).to.equal(1);
    expect(moveEl.scrollIntoView.firstCall.args[0]).to.equal(true);
    expect(event.preventDefault.callCount).to.equal(1);
  });

  it('should adjust scroll position when scrolled to 0 w/padding', () => {
    binding.updatePaddingTop(10);
    const posEl = bodyChildren[0];
    posEl.getBoundingClientRect = () => {return {top: 10, left: 0};};
    const moveEl = bodyChildren[1];
    const event = {preventDefault: sandbox.spy()};
    binding.adjustScrollPos_(event);
    // transform = 1 - updatePadding
    expect(getStyle(moveEl, 'transform')).to.equal('translateY(-9px)');
    expect(moveEl.scrollIntoView.callCount).to.equal(1);
    expect(moveEl.scrollIntoView.firstCall.args[0]).to.equal(true);
    expect(event.preventDefault.callCount).to.equal(1);
  });

  it('should adjust scroll position when scrolled to 0; w/o event', () => {
    const posEl = bodyChildren[0];
    posEl.getBoundingClientRect = () => {return {top: 0, left: 0};};
    const moveEl = bodyChildren[1];
    binding.adjustScrollPos_();
    expect(moveEl.scrollIntoView.callCount).to.equal(1);
  });

  it('should NOT adjust scroll position when scrolled away from 0', () => {
    const posEl = bodyChildren[0];
    posEl.getBoundingClientRect = () => {return {top: -10, left: 0};};
    const moveEl = bodyChildren[1];
    const event = {preventDefault: sandbox.spy()};
    binding.adjustScrollPos_(event);
    expect(moveEl.scrollIntoView.callCount).to.equal(0);
    expect(event.preventDefault.callCount).to.equal(0);
  });

  it('should NOT adjust scroll position when overscrolled', () => {
    const posEl = bodyChildren[0];
    posEl.getBoundingClientRect = () => {return {top: 10, left: 0};};
    const moveEl = bodyChildren[1];
    const event = {preventDefault: sandbox.spy()};
    binding.adjustScrollPos_(event);
    expect(moveEl.scrollIntoView.callCount).to.equal(0);
    expect(event.preventDefault.callCount).to.equal(0);
  });
});
| |
<|file_name|>page.tsx<|end_file_name|><|fim▁begin|>/*
* SonarQube
* Copyright (C) 2009-2019 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import * as React from 'react';
import Helmet from 'react-helmet';
import { graphql } from 'gatsby';
import HeaderList from '../components/HeaderList';
import { MarkdownRemark, MarkdownRemarkConnection, MarkdownHeading } from '../@types/graphql-types';
/** Props supplied by Gatsby's page renderer (query result + router location). */
interface Props {
  data: {
    // All markdown pages; fetched by the page query below.
    allMarkdownRemark: Pick<MarkdownRemarkConnection, 'edges'>;
    // The single markdown document rendered by this page.
    markdownRemark: Pick<MarkdownRemark, 'html' | 'headings' | 'frontmatter'>;
  };
  location: Location;
}
/**
 * Template for a single documentation page. Renders the markdown HTML after
 * a series of string transformations (anchors, dynamic links, image paths)
 * and wires up collapsible sections in the live DOM after mount.
 */
export default class Page extends React.PureComponent<Props> {
  baseUrl = '';

  componentDidMount() {
    if (window) {
      this.baseUrl = window.location.origin + '/';
    }
    // Turn each `.collapse` container into a toggleable section: its leading
    // <h2> is rewritten into an <a href="#"> that toggles the `close` class.
    const collapsables = document.getElementsByClassName('collapse');
    for (let i = 0; i < collapsables.length; i++) {
      collapsables[i].classList.add('close');
      const firstChild = collapsables[i].firstElementChild;
      if (firstChild) {
        firstChild.outerHTML = firstChild.outerHTML
          .replace(/<h2/gi, '<a href="#"')
          .replace(/<\/h2>/gi, '</a>');
        // NOTE(review): assigning outerHTML replaces the node in the DOM, so
        // `firstChild` now references the detached element; this listener may
        // be bound to the detached node — verify toggling works as intended.
        firstChild.addEventListener('click', (event: Event & { currentTarget: HTMLElement }) => {
          event.preventDefault();
          if (event.currentTarget.parentElement) {
            event.currentTarget.parentElement.classList.toggle('close');
          }
        });
      }
    }
  }

  render() {
    const page = this.props.data.markdownRemark;
    const version = process.env.GATSBY_DOCS_VERSION || '';
    const mainTitle = 'SonarQube Docs';
    const pageTitle = page.frontmatter && page.frontmatter.title;
    let htmlPageContent = page.html || '';
    // Post-process the rendered markdown HTML before injecting it.
    const realHeadingsList = removeExtraHeadings(htmlPageContent, page.headings || []);
    htmlPageContent = removeTableOfContents(htmlPageContent);
    htmlPageContent = createAnchorForHeadings(htmlPageContent, realHeadingsList);
    htmlPageContent = replaceDynamicLinks(htmlPageContent);
    htmlPageContent = replaceImageLinks(htmlPageContent);
    return (
      <>
        <Helmet title={pageTitle ? `${pageTitle} | ${mainTitle}` : mainTitle}>
          <html lang="en" />
          <link href={`/${version}/favicon.ico`} rel="icon" />
          {/* Canonical URL always points at the "latest" docs version. */}
          <link
            href={this.baseUrl + this.props.location.pathname.replace(version, 'latest')}
            rel="canonical"
          />
          <script type="text/javascript">{`
            (function(window,document) {
              (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
              (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
              m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
              })(window, document,'script','https://www.google-analytics.com/analytics.js','ga');
              ga('create', 'UA-1880045-11' , 'auto');
              ga('send', 'pageview');
            })(window,document);
          `}</script>
        </Helmet>
        <HeaderList headers={realHeadingsList} />
        <h1>{pageTitle || mainTitle}</h1>
        <div className="markdown-content" dangerouslySetInnerHTML={{ __html: htmlPageContent }} />
      </>
    );
  }
}
// Page query: fetches all page slugs plus the html/headings/frontmatter of
// the page matching $slug. The result is injected as `props.data`.
export const query = graphql`
  query($slug: String!) {
    allMarkdownRemark {
      edges {
        node {
          html
          fields {
            slug
          }
        }
      }
    }
    markdownRemark(fields: { slug: { eq: $slug } }) {
      html
      headings {
        depth
        value
      }
      frontmatter {
        title
      }
    }
  }
`;
/**
 * Prefix image sources with the docs version folder: "/images/x.png" becomes
 * "/<version>/images/x.png". Returns the content unchanged when no version
 * is configured (GATSBY_DOCS_VERSION unset or empty).
 */
function replaceImageLinks(content: string) {
  const version = process.env.GATSBY_DOCS_VERSION || '';
  if (version === '') {
    return content;
  }
  return content.replace(/<img src="\/images\/(.*?)"/gim, `<img src="/${version}/images/$1"`);
}
/**
 * Rewrite links in the rendered markdown HTML:
 * - external (http/https) links open in a new tab; rel="noopener noreferrer"
 *   is added so the target page cannot reach back via window.opener
 *   (reverse-tabnabbing protection for target="_blank" links);
 * - links pointing inside the app (the #sonarqube/#sonarcloud/
 *   #sonarqube-admin conditional-content markers) are reduced to their text.
 */
function replaceDynamicLinks(content: string) {
  // Make outside link open in a new tab
  content = content.replace(
    /<a href="http(.*?)">(.*?)<\/a>/gim,
    '<a href="http$1" target="_blank" rel="noopener noreferrer">$2</a>'
  );

  // Render only the text part of links going inside the app
  return content.replace(
    /<a href="(.*)\/#(?:sonarqube|sonarcloud|sonarqube-admin)#.*?">(.*?)<\/a>/gim,
    '$2'
  );
}
/**
 * For the sidebar table of content, we do not want headers for sonarcloud,
 * collapsable container title, of table of contents headers.
 *
 * Fix: the heading text is interpolated into a RegExp, so regex
 * metacharacters must be escaped — a heading like "C++" previously threw
 * a SyntaxError ("nothing to repeat") from the RegExp constructor.
 */
function removeExtraHeadings(content: string, headings: MarkdownHeading[]) {
  // Escape regex metacharacters in arbitrary heading text.
  const escapeRegExp = (s: string) => s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
  return headings
    .filter(heading => content.indexOf(`<div class="collapse"><h2>${heading.value}</h2>`) < 0)
    .filter(heading => !heading.value || !heading.value.match(/Table of content/i))
    .filter(heading => {
      const regex = new RegExp(
        `<!-- sonarcloud -->[\\s\\S]*<h2>${escapeRegExp(`${heading.value}`)}<\\/h2>[\\s\\S]*<!-- /sonarcloud -->`,
        'gim'
      );
      return !content.match(regex);
    });
}
function createAnchorForHeadings(content: string, headings: MarkdownHeading[]) {
let counter = 1;
headings.forEach(h => {
if (h.depth === 2) {
content = content.replace(
`<h${h.depth}>${h.value}</h${h.depth}>`,
`<h${h.depth} id="header-${counter}">${h.value}</h${h.depth}>`
);
counter++;
}<|fim▁hole|> return content;
}
// Strip any "Table Of Contents" heading (h1–h9) from the rendered page body;
// the sidebar provides its own outline.
function removeTableOfContents(content: string) {
  return content.replace(/<h[1-9]>Table Of Contents<\/h[1-9]>/i, '');
}<|fim▁end|>
|
});
|
<|file_name|>batch-history.component.ts<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Component, ElementRef, Injector, OnDestroy, OnInit} from '@angular/core';
import {CommonUtil} from '@common/util/common.util';
import {MomentDatePipe} from '@common/pipe/moment.date.pipe';
import {AbstractPopupComponent} from '@common/component/abstract-popup.component';
import {IngestionHistory, IngestionStatus} from '@domain/datasource/datasource';
import {DatasourceService} from '../../../../../../datasource/service/datasource.service';
@Component({
selector: 'batch-history',
templateUrl: './batch-history.component.html',
providers: [MomentDatePipe]
})
export class BatchHistoryComponent extends AbstractPopupComponent implements OnInit, OnDestroy {
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
private datasourceId: string;
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=<|fim▁hole|> | Public Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
public batchFl: boolean = false;
public histories: IngestionHistory[];
public convertMilliseconds: (ms: number) => string = CommonUtil.convertMilliseconds;
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Constructor
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
  // Constructor: inject the datasource service plus the base-class
  // dependencies (host element and injector).
  constructor(protected datasourceService: DatasourceService,
              protected element: ElementRef,
              protected injector: Injector) {
    super(element, injector);
  }
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Override Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
  // Angular lifecycle: component initialisation.
  public ngOnInit() {
    // base-class initialisation first
    super.ngOnInit();
    this.initView()
  }
  // Angular lifecycle: component teardown.
  public ngOnDestroy() {
    // base-class cleanup (subscriptions, etc.)
    super.ngOnDestroy();
  }
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Public Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
  /**
   * Open the popup and load the batch ingestion history of a datasource.
   * @param {string} datasourceId
   */
  public init(datasourceId: string) {
    // show the popup
    this.batchFl = true;
    // remember the datasource id for later paging requests
    this.datasourceId = datasourceId;
    // fetch the first page of histories
    this.getBatchHistory(datasourceId);
  }
/**
* get batch history status
* @param {IngestionHistory} history
* @returns {string}
*/
public getStatus(history: IngestionHistory): string {
const status = history.status;
let result = 'Fail';
switch (status) {
case IngestionStatus.SUCCESS:
result = 'Success';
break;
case IngestionStatus.FAILED:
result = 'Fail';
break;
case IngestionStatus.PASS:
result = 'Pass';
break;
case IngestionStatus.RUNNING:
result = 'Running';
break;
}
return result;
}
/**
* get status icon
* @param {IngestionHistory} history
* @returns {string}
*/
public getIconStatus(history: IngestionHistory): string {
const status = history.status;
let result = 'ddp-preparing';
switch (status) {
case IngestionStatus.SUCCESS:
result = 'ddp-success';
break;
case IngestionStatus.FAILED:
result = 'ddp-fail';
break;
case IngestionStatus.PASS:
result = 'ddp-preparing';
break;
case IngestionStatus.RUNNING:
result = 'ddp-preparing';
break;
}
return result;
}
  /**
   * Whether more pages remain to be fetched.
   * @returns {boolean} true while the current page is not the last one
   */
  public checkMoreContents(): boolean {
    return (this.pageResult.number < this.pageResult.totalPages - 1);
  }
  /**
   * Load the next page of batch histories and append it to the list.
   */
  public moreList() {
    // advance to the next page
    this.pageResult.number += 1;
    // re-query with the updated page number
    this.getBatchHistory(this.datasourceId);
  }
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Protected Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
| Private Method
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
  /**
   * Reset paging state: first page, 20 entries per page.
   */
  private initView() {
    this.pageResult.number = 0;
    this.pageResult.size = 20;
  }
  /**
   * Fetch one page of batch ingestion histories for the datasource.
   * The first page replaces the list; later pages are appended.
   * @param {string} datasourceId
   */
  private getBatchHistory(datasourceId: string) {
    // show the loading indicator
    this.loadingShow();
    const params = {
      page: this.pageResult.number,
      size: this.pageResult.size
    };
    this.datasourceService.getBatchHistories(datasourceId, params)
      .then((histories) => {
        // keep the paging metadata returned by the server
        this.pageResult = histories.page;
        // first page: start from an empty list
        if (histories.page.number === 0) {
          this.histories = [];
        }
        // NOTE(review): when a page > 0 comes back without '_embedded',
        // this resets the whole list instead of keeping what was already
        // loaded — confirm whether that is intended.
        this.histories = histories['_embedded'] ? this.histories.concat(histories['_embedded'].ingestionHistories) : [];
        // hide the loading indicator
        this.loadingHide();
      })
      .catch(() => {
        // hide the loading indicator even on failure
        this.loadingHide();
      });
  }
}<|fim▁end|>
|
| Protected Variables
|-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=*/
/*-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
|
<|file_name|>guard-module.ts<|end_file_name|><|fim▁begin|>import {join} from 'path';
import {ng} from '../../../utils/process';
import {expectFileToMatch} from '../../../utils/fs';
export default function() {
const modulePath = join('src', 'app', 'app.module.ts');
<|fim▁hole|> return ng('generate', 'guard', 'test-guard', '--module', 'app.module.ts')
.then(() => expectFileToMatch(modulePath,
/import { TestGuardGuard } from '.\/test-guard.guard'/))
.then(() => expectFileToMatch(modulePath,
/providers:\s*\[TestGuardGuard\]/m))
.then(() => process.chdir(join('src', 'app')))
.then(() => ng('generate', 'guard', 'test-guard2', '--module', 'app.module.ts'))
.then(() => process.chdir('../..'))
.then(() => expectFileToMatch(modulePath,
/import { TestGuard2Guard } from '.\/test-guard2.guard'/));
// TODO: Enable once schematic is updated for rxjs 6
// .then(() => ng('build'));
}<|fim▁end|>
| |
<|file_name|>Switch.tsx<|end_file_name|><|fim▁begin|>import classNames from 'classnames'
import ReactSwitch from 'react-switch'
type Props = {
ariaLabel: string
checked?: boolean<|fim▁hole|>
return (
<div className='switch'>
<ReactSwitch
aria-label={ariaLabel}
checked={checked}
checkedIcon={false}
className={switchClass}
height={22}
onChange={onChange}
uncheckedIcon={false}
width={37}
/>
</div>
)
}<|fim▁end|>
|
onChange: any
}
export const Switch = ({ ariaLabel, checked, onChange }: Props) => {
const switchClass = classNames(checked ? 'is-on' : 'is-off')
|
<|file_name|>Client.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Yann GUIBET <[email protected]>
# See LICENSE for details.
import sys, os
from gevent import select, monkey, spawn, Greenlet, GreenletExit, sleep, socket
from base64 import b64encode
from hashlib import md5
from struct import pack, unpack
from zlib import adler32
from Proto import Proto
from Index import Index
from Config import *
class Client(Proto):
    def __init__(self, vpn):
        # Keep a reference to the owning VPN so protocol handlers can reach it.
        self.vpn = vpn
def close(self):
try:
self.sock.close()
except:
pass
    def error(self, exp):
        # Error handler: tear the connection down. ``exp`` is the triggering
        # exception (currently unused beyond signalling the failure).
        self.close()
<|fim▁hole|> try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.connect((host, port))
self.handshake(pubkey)
except Exception as e:
self.error(e)
raise
    def handshake(self, pubkey):
        """Perform the key-exchange handshake with the peer.

        Sends our identity and IV, receives the peer's IV (obtained against
        ``pubkey``), then initialises the stream cipher with both IVs.
        """
        self.send_id()
        myiv = self.send_iv()
        iv = self.get_iv(pubkey)
        self.init_cipher(pubkey, myiv, iv)
    def recv_file(self):
        """Receive one framed file payload over the secure channel.

        Wire format: 0x01 flag byte, 4-byte big-endian payload size,
        4-byte adler32 checksum of the raw size field, ``size`` payload
        bytes, then a 16-byte MD5 digest of the payload. Sends 0xFF to the
        peer and raises on any integrity failure.
        """
        if self.srecvall(1) != "\x01":
            self.ssend("\xFF")
            raise Exception, "Bad Flags (0x01 expected)"
        size = self.srecvall(4)
        checksum = self.srecvall(4)
        # adler32 protects the size field itself before we trust it
        if adler32(size) != unpack('!I',checksum)[0]:
            self.ssend("\xFF")
            raise Exception, "Bad checksum"
        size = unpack('!I', size)[0]
        buffer = self.srecvall(size)
        hash = self.srecvall(16)
        # MD5 protects the payload
        if md5(buffer).digest() != hash:
            self.ssend("\xFF")
            raise Exception, "Bad md5 ..."
        return buffer
    def get_file(self, id, name):
        """Request file ``id`` from the peer and store it in the inbox.

        Underscores are prepended to ``name`` until the target path is
        free, so an existing file is never overwritten.
        """
        path = os.path.join(inbox, name)
        while os.path.exists(path):
            name = "_"+name
            path = os.path.join(inbox, name)
            #raise Exception, "%s already exist ..." % path
        # 0x02 = "send me this file", followed by the 4-byte file id
        self.ssend("\x02"+pack('!I',id))
        buff = self.recv_file()
        with open(path, "wb") as f:
            f.write(buff)
def get_index(self, id):
index = Index(id)
buffer = index.get_xml().encode('utf-8')
hash = md5(buffer).digest()
self.ssend('\x03'+hash)
flag = self.srecvall(1)
if flag == "\x04":
buffer = self.recv_file()
index.set_xml(buffer)
elif flag == "\x05":
pass
else:
raise Exception, "Protocol Error"<|fim▁end|>
|
def connect(self, host, port, pubkey):
|
<|file_name|>test_amazon.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
import unittest
import tempfile
import time
<|fim▁hole|>sys.path.append(os.path.join(os.path.dirname(__file__), "..", "..", "ms3"))
from ms3.testing import MS3Server
from s3ffs_server import s3ffsServer, wait_until
class AmazonTestCase(unittest.TestCase):
    """End-to-end checks for an s3ffs mount backed by the MS3 test server."""

    def setUp(self):
        # Fresh mountpoint per test so runs cannot interfere with each other.
        self.local = tempfile.mkdtemp()
        self.s3ffs = None
        # In case there occurs an exception during setUp(), unittest
        # doesn't call tearDown(), hence we need to make sure we don't
        # leave any server processes running.
        try:
            self.s3ffs = s3ffsServer("s3ffs-us", mountpoint=self.local).start()
        except Exception:
            self.tearDown()
            raise

    def tearDown(self):
        # Stop the server first so the mountpoint can be removed cleanly.
        if self.s3ffs:
            self.s3ffs.stop()
        shutil.rmtree(self.local, True)

    def test_mounted(self):
        # The fuse filesystem must actually be mounted at the temp dir.
        self.assertTrue(os.path.ismount(self.local))

    def test_single_file(self):
        content = "Hello, world!"
        path = os.path.join(self.local, "file.txt")
        with open(path, "w") as f:
            f.write(content)
        wait_until(os.path.exists, path)
        # BUG FIX: the original `assertTrue(open(path).read(), content)`
        # never compared the file content — assertTrue's second argument is
        # only the failure message — and leaked the file handle. Compare
        # explicitly and close the handle deterministically.
        with open(path) as f:
            self.assertEqual(f.read(), content)
if __name__ == "__main__":
    # Allow running this module directly: python test_amazon.py
    unittest.main()
| |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
setup(name='monsql',
version='0.1.7',
packages = find_packages(),
author='firstprayer',
author_email='[email protected]',
description='MonSQL - Mongodb-style way for using mysql.',<|fim▁hole|>)<|fim▁end|>
|
url='https://github.com/firstprayer/monsql.git',
install_requires=[
'MySQL-python'
],
|
<|file_name|>angular-protractor.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Angular Protractor 1.5.0
// Project: https://github.com/angular/protractor
// Definitions by: Bill Armstrong <https://github.com/BillArmstrong>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
/// <reference path="../selenium-webdriver/selenium-webdriver.d.ts" />
declare module protractor {
//region Wrapped webdriver Items
class ActionSequence extends webdriver.ActionSequence {}
class Builder extends webdriver.Builder {}
class Capabilities extends webdriver.Capabilities {}
class Command extends webdriver.Command {}
class EventEmitter extends webdriver.EventEmitter {}
class Session extends webdriver.Session {}
class WebDriver extends webdriver.WebDriver {}
class WebElement extends webdriver.WebElement {}
class WebElementPromise extends webdriver.WebElementPromise { }
var Browser: webdriver.IBrowser;
var Button: webdriver.IButton;
var Capability: webdriver.ICapability;
var CommandName: webdriver.ICommandName;
var Key: webdriver.IKey;
module error {
class Error extends webdriver.error.Error {}
var ErrorCode: webdriver.error.IErrorCode;
}
module logging {
class Preferences extends webdriver.logging.Preferences { }
class Entry extends webdriver.logging.Entry { }
var Type: webdriver.logging.IType;
var Level: webdriver.logging.ILevelValues;
function getLevel(nameOrValue: string): webdriver.logging.ILevel;
function getLevel(nameOrValue: number): webdriver.logging.ILevel;
}
module promise {
class Thenable<T> extends webdriver.promise.Thenable<T> { }
class Promise<T> extends webdriver.promise.Promise<T> { }
class Deferred<T> extends webdriver.promise.Deferred<T> { }
class ControlFlow extends webdriver.promise.ControlFlow { }
class CancellationError extends webdriver.promise.CancellationError { }
/**
* Given an array of promises, will return a promise that will be fulfilled
* with the fulfillment values of the input array's values. If any of the
* input array's promises are rejected, the returned promise will be rejected
* with the same reason.
*
* @param {!Array.<(T|!webdriver.promise.Promise.<T>)>} arr An array of
* promises to wait on.
* @return {!webdriver.promise.Promise.<!Array.<T>>} A promise that is
* fulfilled with an array containing the fulfilled values of the
* input array, or rejected with the same reason as the first
* rejected value.
* @template T
*/
function all(arr: webdriver.promise.Promise<any>[]): webdriver.promise.Promise<any[]>;
/**
* Invokes the appropriate callback function as soon as a promised
* {@code value} is resolved. This function is similar to
* {@link webdriver.promise.when}, except it does not return a new promise.
* @param {*} value The value to observe.
* @param {Function} callback The function to call when the value is
* resolved successfully.
* @param {Function=} opt_errback The function to call when the value is
* rejected.
*/
function asap(value: any, callback: Function, opt_errback?: Function): void;
/**
* @return {!webdriver.promise.ControlFlow} The currently active control flow.
*/
function controlFlow(): webdriver.promise.ControlFlow;
/**
* Creates a new control flow. The provided callback will be invoked as the
* first task within the new flow, with the flow as its sole argument. Returns
* a promise that resolves to the callback result.
* @param {function(!webdriver.promise.ControlFlow)} callback The entry point
* to the newly created flow.
* @return {!webdriver.promise.Promise} A promise that resolves to the callback
* result.
*/
function createFlow<R>(callback: (flow: webdriver.promise.ControlFlow) => R): webdriver.promise.Promise<R>;
/**
* Determines whether a {@code value} should be treated as a promise.
* Any object whose "then" property is a function will be considered a promise.
*
* @param {*} value The value to test.
* @return {boolean} Whether the value is a promise.
*/
function isPromise(value: any): boolean;
/**
* Tests is a function is a generator.
* @param {!Function} fn The function to test.
* @return {boolean} Whether the function is a generator.
*/
function isGenerator(fn: Function): boolean;
/**
* Creates a promise that will be resolved at a set time in the future.
* @param {number} ms The amount of time, in milliseconds, to wait before
* resolving the promise.
* @return {!webdriver.promise.Promise} The promise.
*/
function delayed(ms: number): webdriver.promise.Promise<void>;
/**
* Calls a function for each element in an array, and if the function returns
* true adds the element to a new array.
*
* <p>If the return value of the filter function is a promise, this function
* will wait for it to be fulfilled before determining whether to insert the
* element into the new array.
*
* <p>If the filter function throws or returns a rejected promise, the promise
* returned by this function will be rejected with the same reason. Only the
* first failure will be reported; all subsequent errors will be silently
* ignored.
*
* @param {!(Array.<TYPE>|webdriver.promise.Promise.<!Array.<TYPE>>)} arr The
* array to iterator over, or a promise that will resolve to said array.
* @param {function(this: SELF, TYPE, number, !Array.<TYPE>): (
* boolean|webdriver.promise.Promise.<boolean>)} fn The function
* to call for each element in the array.
* @param {SELF=} opt_self The object to be used as the value of 'this' within
* {@code fn}.
* @template TYPE, SELF
*/
function filter<T>(arr: T[], fn: (element: T, index: number, array: T[]) => any, opt_self?: any): webdriver.promise.Promise<T[]>;
function filter<T>(arr: webdriver.promise.Promise<T[]>, fn: (element: T, index: number, array: T[]) => any, opt_self?: any): webdriver.promise.Promise<T[]>
<|fim▁hole|> * @return {!webdriver.promise.Deferred} The new deferred object.
*/
function defer<T>(): webdriver.promise.Deferred<T>;
/**
* Creates a promise that has been resolved with the given value.
* @param {*=} opt_value The resolved value.
* @return {!webdriver.promise.Promise} The resolved promise.
*/
function fulfilled<T>(opt_value?: T): webdriver.promise.Promise<T>;
/**
* Calls a function for each element in an array and inserts the result into a
* new array, which is used as the fulfillment value of the promise returned
* by this function.
*
* <p>If the return value of the mapping function is a promise, this function
* will wait for it to be fulfilled before inserting it into the new array.
*
* <p>If the mapping function throws or returns a rejected promise, the
* promise returned by this function will be rejected with the same reason.
* Only the first failure will be reported; all subsequent errors will be
* silently ignored.
*
* @param {!(Array.<TYPE>|webdriver.promise.Promise.<!Array.<TYPE>>)} arr The
* array to iterator over, or a promise that will resolve to said array.
* @param {function(this: SELF, TYPE, number, !Array.<TYPE>): ?} fn The
* function to call for each element in the array. This function should
* expect three arguments (the element, the index, and the array itself.
* @param {SELF=} opt_self The object to be used as the value of 'this' within
* {@code fn}.
* @template TYPE, SELF
*/
function map<T>(arr: T[], fn: (element: T, index: number, array: T[]) => any, opt_self?: any): webdriver.promise.Promise<T[]>
function map<T>(arr: webdriver.promise.Promise<T[]>, fn: (element: T, index: number, array: T[]) => any, opt_self?: any): webdriver.promise.Promise<T[]>
/**
* Creates a promise that has been rejected with the given reason.
* @param {*=} opt_reason The rejection reason; may be any value, but is
* usually an Error or a string.
* @return {!webdriver.promise.Promise} The rejected promise.
*/
function rejected(opt_reason?: any): webdriver.promise.Promise<void>;
/**
* Wraps a function that is assumed to be a node-style callback as its final
* argument. This callback takes two arguments: an error value (which will be
* null if the call succeeded), and the success value as the second argument.
* If the call fails, the returned promise will be rejected, otherwise it will
* be resolved with the result.
* @param {!Function} fn The function to wrap.
* @return {!webdriver.promise.Promise} A promise that will be resolved with the
* result of the provided function's callback.
*/
function checkedNodeCall<T>(fn: Function, ...var_args: any[]): webdriver.promise.Promise<T>;
/**
* Consumes a {@code GeneratorFunction}. Each time the generator yields a
* promise, this function will wait for it to be fulfilled before feeding the
* fulfilled value back into {@code next}. Likewise, if a yielded promise is
* rejected, the rejection error will be passed to {@code throw}.
*
* <p>Example 1: the Fibonacci Sequence.
* <pre><code>
* webdriver.promise.consume(function* fibonacci() {
* var n1 = 1, n2 = 1;
* for (var i = 0; i < 4; ++i) {
* var tmp = yield n1 + n2;
* n1 = n2;
* n2 = tmp;
* }
* return n1 + n2;
* }).then(function(result) {
* console.log(result); // 13
* });
* </code></pre>
*
* <p>Example 2: a generator that throws.
* <pre><code>
* webdriver.promise.consume(function* () {
* yield webdriver.promise.delayed(250).then(function() {
* throw Error('boom');
* });
* }).thenCatch(function(e) {
* console.log(e.toString()); // Error: boom
* });
* </code></pre>
*
* @param {!Function} generatorFn The generator function to execute.
* @param {Object=} opt_self The object to use as "this" when invoking the
* initial generator.
* @param {...*} var_args Any arguments to pass to the initial generator.
* @return {!webdriver.promise.Promise.<?>} A promise that will resolve to the
* generator's final result.
* @throws {TypeError} If the given function is not a generator.
*/
function consume<T>(generatorFn: Function, opt_self?: any, ...var_args: any[]): webdriver.promise.Promise<T>;
/**
* Registers an observer on a promised {@code value}, returning a new promise
* that will be resolved when the value is. If {@code value} is not a promise,
* then the return promise will be immediately resolved.
* @param {*} value The value to observe.
* @param {Function=} opt_callback The function to call when the value is
* resolved successfully.
* @param {Function=} opt_errback The function to call when the value is
* rejected.
* @return {!webdriver.promise.Promise} A new promise.
*/
function when<T, R>(value: T, opt_callback?: (value: T) => any, opt_errback?: (error: any) => any): webdriver.promise.Promise<R>;
function when<T, R>(value: webdriver.promise.Promise<T>, opt_callback?: (value: T) => any, opt_errback?: (error: any) => any): webdriver.promise.Promise<R>;
/**
* Returns a promise that will be resolved with the input value in a
* fully-resolved state. If the value is an array, each element will be fully
* resolved. Likewise, if the value is an object, all keys will be fully
* resolved. In both cases, all nested arrays and objects will also be
* fully resolved. All fields are resolved in place; the returned promise will
* resolve on {@code value} and not a copy.
*
* Warning: This function makes no checks against objects that contain
* cyclical references:
*
* var value = {};
* value['self'] = value;
* webdriver.promise.fullyResolved(value); // Stack overflow.
*
* @param {*} value The value to fully resolve.
* @return {!webdriver.promise.Promise} A promise for a fully resolved version
* of the input value.
*/
function fullyResolved<T>(value: any): webdriver.promise.Promise<T>;
/**
* Changes the default flow to use when no others are active.
* @param {!webdriver.promise.ControlFlow} flow The new default flow.
* @throws {Error} If the default flow is not currently active.
*/
function setDefaultFlow(flow: webdriver.promise.ControlFlow): void;
}
module stacktrace {
class Frame extends webdriver.stacktrace.Frame { }
class Snapshot extends webdriver.stacktrace.Snapshot { }
/**
* Formats an error's stack trace.
* @param {!(Error|goog.testing.JsUnitException)} error The error to format.
* @return {!(Error|goog.testing.JsUnitException)} The formatted error.
*/
function format(error: any): any;
/**
* Gets the native stack trace if available otherwise follows the call chain.
* The generated trace will exclude all frames up to and including the call to
* this function.
* @return {!Array.<!webdriver.stacktrace.Frame>} The frames of the stack trace.
*/
function get(): webdriver.stacktrace.Frame[];
/**
* Whether the current browser supports stack traces.
*
* @constructor {boolean}
* @const
*/
var BROWSER_SUPPORTED: boolean;
}
module until {
class Condition<T> extends webdriver.until.Condition<T> { }
/**
* Creates a condition that will wait until the input driver is able to switch
* to the designated frame. The target frame may be specified as:
* <ol>
* <li>A numeric index into {@code window.frames} for the currently selected
* frame.
* <li>A {@link webdriver.WebElement}, which must reference a FRAME or IFRAME
* element on the current page.
* <li>A locator which may be used to first locate a FRAME or IFRAME on the
* current page before attempting to switch to it.
* </ol>
*
* <p>Upon successful resolution of this condition, the driver will be left
* focused on the new frame.
*
* @param {!(number|webdriver.WebElement|
* webdriver.Locator|webdriver.By.Hash|
* function(!webdriver.WebDriver): !webdriver.WebElement)} frame
* The frame identifier.
* @return {!until.Condition.<boolean>} A new condition.
*/
function ableToSwitchToFrame(frame: number): webdriver.until.Condition<boolean>;
function ableToSwitchToFrame(frame: webdriver.IWebElement): webdriver.until.Condition<boolean>;
function ableToSwitchToFrame(frame: webdriver.Locator): webdriver.until.Condition<boolean>;
function ableToSwitchToFrame(frame: (webdriver: webdriver.WebDriver) => webdriver.IWebElement): webdriver.until.Condition<boolean>;
function ableToSwitchToFrame(frame: any): webdriver.until.Condition<boolean>;
/**
* Creates a condition that waits for an alert to be opened. Upon success, the
* returned promise will be fulfilled with the handle for the opened alert.
*
* @return {!until.Condition.<!webdriver.Alert>} The new condition.
*/
function alertIsPresent(): webdriver.until.Condition<webdriver.Alert>;
/**
* Creates a condition that will wait for the given element to be disabled.
*
* @param {!webdriver.WebElement} element The element to test.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#isEnabled
*/
function elementIsDisabled(element: webdriver.IWebElement): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the given element to be enabled.
*
* @param {!webdriver.WebElement} element The element to test.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#isEnabled
*/
function elementIsEnabled(element: webdriver.IWebElement): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the given element to be deselected.
*
* @param {!webdriver.WebElement} element The element to test.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#isSelected
*/
function elementIsNotSelected(element: webdriver.IWebElement): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the given element to be in the DOM,
* yet not visible to the user.
*
* @param {!webdriver.WebElement} element The element to test.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#isDisplayed
*/
function elementIsNotVisible(element: webdriver.IWebElement): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the given element to be selected.
* @param {!webdriver.WebElement} element The element to test.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#isSelected
*/
function elementIsSelected(element: webdriver.IWebElement): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the given element to become visible.
*
* @param {!webdriver.WebElement} element The element to test.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#isDisplayed
*/
function elementIsVisible(element: webdriver.IWebElement): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will loop until an element is
* {@link webdriver.WebDriver#findElement found} with the given locator.
*
* @param {!(webdriver.Locator|webdriver.By.Hash|Function)} locator The locator
* to use.
* @return {!until.Condition.<!webdriver.WebElement>} The new condition.
*/
function elementLocated(locator: webdriver.Locator): webdriver.until.Condition<webdriver.IWebElement>;
function elementLocated(locator: any): webdriver.until.Condition<webdriver.IWebElement>;
/**
* Creates a condition that will wait for the given element's
* {@link webdriver.WebDriver#getText visible text} to contain the given
* substring.
*
* @param {!webdriver.WebElement} element The element to test.
* @param {string} substr The substring to search for.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#getText
*/
function elementTextContains(element: webdriver.IWebElement, substr: string): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the given element's
* {@link webdriver.WebDriver#getText visible text} to match the given
* {@code text} exactly.
*
* @param {!webdriver.WebElement} element The element to test.
* @param {string} text The expected text.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#getText
*/
function elementTextIs(element: webdriver.IWebElement, text: string): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the given element's
* {@link webdriver.WebDriver#getText visible text} to match a regular
* expression.
*
* @param {!webdriver.WebElement} element The element to test.
* @param {!RegExp} regex The regular expression to test against.
* @return {!until.Condition.<boolean>} The new condition.
* @see webdriver.WebDriver#getText
*/
function elementTextMatches(element: webdriver.IWebElement, regex: RegExp): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will loop until at least get element is
* {@link webdriver.WebDriver#findElement found} with the given locator.
*
* @param {!(webdriver.Locator|webdriver.By.Hash|Function)} locator The locator
* to use.
* @return {!until.Condition.<!Array.<!webdriver.WebElement>>} The new
* condition.
*/
function elementsLocated(locator: webdriver.Locator): webdriver.until.Condition<webdriver.IWebElement[]>;
function elementsLocated(locator: any): webdriver.until.Condition<webdriver.IWebElement[]>;
/**
* Creates a condition that will wait for the given element to become stale. An
* element is considered stale once it is removed from the DOM, or a new page
* has loaded.
*
* @param {!webdriver.WebElement} element The element that should become stale.
* @return {!until.Condition.<boolean>} The new condition.
*/
function stalenessOf(element: webdriver.IWebElement): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the current page's title to contain
* the given substring.
*
* @param {string} substr The substring that should be present in the page
* title.
* @return {!until.Condition.<boolean>} The new condition.
*/
function titleContains(substr: string): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the current page's title to match the
* given value.
*
* @param {string} title The expected page title.
* @return {!until.Condition.<boolean>} The new condition.
*/
function titleIs(title: string): webdriver.until.Condition<boolean>;
/**
* Creates a condition that will wait for the current page's title to match the
* given regular expression.
*
* @param {!RegExp} regex The regular expression to test against.
* @return {!until.Condition.<boolean>} The new condition.
*/
function titleMatches(regex: RegExp): webdriver.until.Condition<boolean>;
}
//endregion
/**
* Use as: element(locator)
*
* The ElementFinder can be treated as a WebElement for most purposes, in
* particular, you may perform actions (i.e. click, getText) on them as you
* would a WebElement. ElementFinders extend Promise, and once an action
* is performed on an ElementFinder, the latest result from the chain can be
* accessed using then. Unlike a WebElement, an ElementFinder will wait for
* angular to settle before performing finds or actions.
*
* ElementFinder can be used to build a chain of locators that is used to find
* an element. An ElementFinder does not actually attempt to find the element
* until an action is called, which means they can be set up in helper files
* before the page is available.
*
* @param {webdriver.Locator} locator An element locator.
* @return {ElementFinder}
*/
interface Element {
(locator: webdriver.Locator): ElementFinder;
/**
* ElementArrayFinder is used for operations on an array of elements (as opposed
* to a single element).
*
* @param {webdriver.Locator} locator An element locator.
* @return {ElementArrayFinder}
*/
all(locator: webdriver.Locator): ElementArrayFinder;
}
interface ElementFinder extends webdriver.IWebElement, webdriver.promise.IThenable<any> {
    /**
     * Calls to element may be chained to find elements within a parent.
     *
     * @alias element(locator).element(locator)
     * @view
     * <div class="parent">
     *   <div class="child">
     *     Child text
     *     <div>{{person.phone}}</div>
     *   </div>
     * </div>
     *
     * @example
     * // Chain 2 element calls.
     * var child = element(by.css('.parent')).
     *     element(by.css('.child'));
     * expect(child.getText()).toBe('Child text\n555-123-4567');
     *
     * // Chain 3 element calls.
     * var triple = element(by.css('.parent')).
     *     element(by.css('.child')).
     *     element(by.binding('person.phone'));
     * expect(triple.getText()).toBe('555-123-4567');
     *
     * @param {webdriver.Locator} subLocator
     * @return {ElementFinder}
     */
    element(subLocator: webdriver.Locator): ElementFinder;
    /**
     * Calls to element may be chained to find an array of elements within a parent.
     *
     * @alias element(locator).all(locator)
     * @view
     * <div class="parent">
     *   <ul>
     *     <li class="one">First</li>
     *     <li class="two">Second</li>
     *     <li class="three">Third</li>
     *   </ul>
     * </div>
     *
     * @example
     * var items = element(by.css('.parent')).all(by.tagName('li'))
     *
     * @param {webdriver.Locator} subLocator
     * @return {ElementArrayFinder}
     */
    all(subLocator: webdriver.Locator): ElementArrayFinder;
    /**
     * Shortcut for querying the document directly with css.
     *
     * @alias $(cssSelector)
     * @view
     * <div class="count">
     *   <span class="one">First</span>
     *   <span class="two">Second</span>
     * </div>
     *
     * @example
     * var item = $('.count .two');
     * expect(item.getText()).toBe('Second');
     *
     * @param {string} selector A css selector
     * @return {ElementFinder} which identifies the located
     *     {@link webdriver.WebElement}
     */
    $(selector: string): ElementFinder;
    /**
     * Shortcut for querying the document directly with css.
     *
     * @alias $$(cssSelector)
     * @view
     * <div class="count">
     *   <span class="one">First</span>
     *   <span class="two">Second</span>
     * </div>
     *
     * @example
     * // The following protractor expressions are equivalent.
     * var list = element.all(by.css('.count span'));
     * expect(list.count()).toBe(2);
     *
     * list = $$('.count span');
     * expect(list.count()).toBe(2);
     * expect(list.get(0).getText()).toBe('First');
     * expect(list.get(1).getText()).toBe('Second');
     *
     * @param {string} selector a css selector
     * @return {ElementArrayFinder} which identifies the
     *     array of the located {@link webdriver.WebElement}s.
     */
    $$(selector: string): ElementArrayFinder;
    /**
     * Determine whether the element is present on the page.
     *
     * @view
     * <span>{{person.name}}</span>
     *
     * @example
     * // Element exists.
     * expect(element(by.binding('person.name')).isPresent()).toBe(true);
     *
     * // Element not present.
     * expect(element(by.binding('notPresent')).isPresent()).toBe(false);
     *
     * @return {ElementFinder} which resolves to whether
     *     the element is present on the page.
     */
    isPresent(): webdriver.promise.Promise<boolean>;
    /**
     * Override for WebElement.prototype.isElementPresent so that protractor waits
     * for Angular to settle before making the check.
     *
     * @see ElementFinder.isPresent
     *
     * @param {webdriver.Locator} subLocator Locator for element to look for.
     * @return {ElementFinder} which resolves to whether
     *     the element is present on the page.
     */
    isElementPresent(subLocator: webdriver.Locator): webdriver.promise.Promise<boolean>;
    /**
     * @see ElementArrayFinder.prototype.locator
     *
     * @return {webdriver.Locator}
     */
    locator(): webdriver.Locator;
    /**
     * Returns the WebElement represented by this ElementFinder.
     * Throws the WebDriver error if the element doesn't exist.
     *
     * @example
     * The following three expressions are equivalent.
     * element(by.css('.parent')).getWebElement();
     * browser.waitForAngular(); browser.driver.findElement(by.css('.parent'));
     * browser.findElement(by.css('.parent'));
     *
     * @alias element(locator).getWebElement()
     * @return {webdriver.WebElement}
     */
    getWebElement(): webdriver.WebElement;
    /**
     * Evaluates the input as if it were on the scope of the current element.
     * @see ElementArrayFinder.evaluate
     *
     * @param {string} expression
     *
     * @return {ElementFinder} which resolves to the evaluated expression.
     */
    evaluate(expression: string): ElementFinder;
    /**
     * @see ElementArrayFinder.prototype.allowAnimations.
     * @param {string} value
     *
     * NOTE(review): ElementArrayFinder.allowAnimations takes a boolean; the
     * string type here looks like an error in these typings — confirm against
     * the protractor API before relying on it.
     *
     * @return {ElementFinder} which resolves to whether animation is allowed.
     */
    allowAnimations(value: string): ElementFinder;
    /**
     * Create a shallow copy of ElementFinder.
     *
     * @return {!ElementFinder} A shallow copy of this.
     */
    clone(): ElementFinder;
}
interface ElementArrayFinder extends webdriver.promise.IThenable<ElementFinder[]> {
    /**
     * Returns the elements as an array of WebElements.
     */
    getWebElements(): webdriver.WebElement[];
    /**
     * Get an element within the ElementArrayFinder by index. The index starts at 0.
     * Negative indices are wrapped (i.e. -i means ith element from last)
     * This does not actually retrieve the underlying element.
     *
     * @alias element.all(locator).get(index)
     * @view
     * <ul class="items">
     *   <li>First</li>
     *   <li>Second</li>
     *   <li>Third</li>
     * </ul>
     *
     * @example
     * var list = element.all(by.css('.items li'));
     * expect(list.get(0).getText()).toBe('First');
     * expect(list.get(1).getText()).toBe('Second');
     *
     * @param {number} index Element index.
     * @return {ElementFinder} finder representing element at the given index.
     */
    get(index: number): ElementFinder;
    /**
     * Get the first matching element for the ElementArrayFinder. This does not
     * actually retrieve the underlying element.
     *
     * @alias element.all(locator).first()
     * @view
     * <ul class="items">
     *   <li>First</li>
     *   <li>Second</li>
     *   <li>Third</li>
     * </ul>
     *
     * @example
     * var first = element.all(by.css('.items li')).first();
     * expect(first.getText()).toBe('First');
     *
     * @return {ElementFinder} finder representing the first matching element
     */
    first(): ElementFinder;
    /**
     * Get the last matching element for the ElementArrayFinder. This does not
     * actually retrieve the underlying element.
     *
     * @alias element.all(locator).last()
     * @view
     * <ul class="items">
     *   <li>First</li>
     *   <li>Second</li>
     *   <li>Third</li>
     * </ul>
     *
     * @example
     * var last = element.all(by.css('.items li')).last();
     * expect(last.getText()).toBe('Third');
     *
     * @return {ElementFinder} finder representing the last matching element
     */
    last(): ElementFinder;
    /**
     * Count the number of elements represented by the ElementArrayFinder.
     *
     * @alias element.all(locator).count()
     * @view
     * <ul class="items">
     *   <li>First</li>
     *   <li>Second</li>
     *   <li>Third</li>
     * </ul>
     *
     * @example
     * var list = element.all(by.css('.items li'));
     * expect(list.count()).toBe(3);
     *
     * @return {!webdriver.promise.Promise} A promise which resolves to the
     *     number of elements matching the locator.
     */
    count(): webdriver.promise.Promise<number>;
    /**
     * Calls the input function on each ElementFinder represented by the ElementArrayFinder.
     *
     * @alias element.all(locator).each(eachFunction)
     * @view
     * <ul class="items">
     *   <li>First</li>
     *   <li>Second</li>
     *   <li>Third</li>
     * </ul>
     *
     * @example
     * element.all(by.css('.items li')).each(function(element) {
     *   // Will print First, Second, Third.
     *   element.getText().then(console.log);
     * });
     *
     * @param {function(ElementFinder)} fn Input function
     */
    each(fn: (element: ElementFinder, index: number) => void): void;
    /**
     * Apply a map function to each element within the ElementArrayFinder. The
     * callback receives the ElementFinder as the first argument and the index as
     * a second arg.
     *
     * @alias element.all(locator).map(mapFunction)
     * @view
     * <ul class="items">
     *   <li class="one">First</li>
     *   <li class="two">Second</li>
     *   <li class="three">Third</li>
     * </ul>
     *
     * @example
     * var items = element.all(by.css('.items li')).map(function(elm, index) {
     *   return {
     *     index: index,
     *     text: elm.getText(),
     *     class: elm.getAttribute('class')
     *   };
     * });
     * expect(items).toEqual([
     *   {index: 0, text: 'First', class: 'one'},
     *   {index: 1, text: 'Second', class: 'two'},
     *   {index: 2, text: 'Third', class: 'three'}
     * ]);
     *
     * @param {function(ElementFinder, number)} mapFn Map function that
     *     will be applied to each element.
     * @return {!webdriver.promise.Promise} A promise that resolves to an array
     *     of values returned by the map function.
     */
    map<T>(mapFn: (element: ElementFinder, index: number) => T): webdriver.promise.Promise<T[]>;
    /**
     * Apply a filter function to each element within the ElementArrayFinder. Returns
     * a new ElementArrayFinder with all elements that pass the filter function. The
     * filter function receives the ElementFinder as the first argument
     * and the index as a second arg.
     * This does not actually retrieve the underlying list of elements, so it can
     * be used in page objects.
     *
     * @alias element.all(locator).filter(filterFn)
     * @view
     * <ul class="items">
     *   <li class="one">First</li>
     *   <li class="two">Second</li>
     *   <li class="three">Third</li>
     * </ul>
     *
     * @example
     * element.all(by.css('.items li')).filter(function(elem, index) {
     *   return elem.getText().then(function(text) {
     *     return text === 'Third';
     *   });
     * }).then(function(filteredElements) {
     *   filteredElements[0].click();
     * });
     *
     * @param {function(ElementFinder, number): webdriver.WebElement.Promise} filterFn
     *     Filter function that will test if an element should be returned.
     *     filterFn can either return a boolean or a promise that resolves to a boolean.
     * @return {!ElementArrayFinder} A ElementArrayFinder that represents an array
     *     of element that satisfy the filter function.
     */
    filter(filterFn: (element: ElementFinder, index: number) => any): ElementArrayFinder;
    /**
     * Apply a reduce function against an accumulator and every element found
     * using the locator (from left-to-right). The reduce function has to reduce
     * every element into a single value (the accumulator). Returns promise of
     * the accumulator. The reduce function receives the accumulator, current
     * ElementFinder, the index, and the entire array of ElementFinders,
     * respectively.
     *
     * @alias element.all(locator).reduce(reduceFn)
     * @view
     * <ul class="items">
     *   <li class="one">First</li>
     *   <li class="two">Second</li>
     *   <li class="three">Third</li>
     * </ul>
     *
     * @example
     * var value = element.all(by.css('.items li')).reduce(function(acc, elem) {
     *   return elem.getText().then(function(text) {
     *     return acc + text + ' ';
     *   });
     * });
     *
     * expect(value).toEqual('First Second Third ');
     *
     * @param {function(number, ElementFinder, number, Array.<ElementFinder>)}
     *     reduceFn Reduce function that reduces every element into a single value.
     * @param {*} initialValue Initial value of the accumulator.
     * @return {!webdriver.promise.Promise} A promise that resolves to the final
     *     value of the accumulator.
     */
    reduce<T>(reduceFn: (acc: T, element: ElementFinder, index: number, arr: ElementFinder[]) => webdriver.promise.Promise<T>, initialValue: T): webdriver.promise.Promise<T>;
    reduce<T>(reduceFn: (acc: T, element: ElementFinder, index: number, arr: ElementFinder[]) => T, initialValue: T): webdriver.promise.Promise<T>;
    /**
     * Represents the ElementArrayFinder as an array of ElementFinders.
     *
     * @return {Array.<ElementFinder>} Return a promise, which resolves to a list
     *     of ElementFinders specified by the locator.
     */
    asElementFinders_(): webdriver.promise.Promise<ElementFinder[]>;
    /**
     * Create a shallow copy of ElementArrayFinder.
     *
     * @return {!ElementArrayFinder} A shallow copy of this.
     */
    clone(): ElementArrayFinder;
    /**
     * Calls to ElementArrayFinder may be chained to find an array of elements
     * using the current elements in this ElementArrayFinder as the starting point.
     * This function returns a new ElementArrayFinder which would contain the
     * children elements found (and could also be empty).
     *
     * @alias element.all(locator).all(locator)
     * @view
     * <div id='id1' class="parent">
     *   <ul>
     *     <li class="foo">1a</li>
     *     <li class="baz">1b</li>
     *   </ul>
     * </div>
     * <div id='id2' class="parent">
     *   <ul>
     *     <li class="foo">2a</li>
     *     <li class="bar">2b</li>
     *   </ul>
     * </div>
     *
     * @example
     * var foo = element.all(by.css('.parent')).all(by.css('.foo'))
     * expect(foo.getText()).toEqual(['1a', '2a'])
     * var baz = element.all(by.css('.parent')).all(by.css('.baz'))
     * expect(baz.getText()).toEqual(['1b'])
     * var nonexistent = element.all(by.css('.parent')).all(by.css('.NONEXISTENT'))
     * expect(nonexistent.getText()).toEqual([''])
     *
     * @param {webdriver.Locator} subLocator
     * @return {ElementArrayFinder}
     */
    all(locator: webdriver.Locator): ElementArrayFinder;
    /**
     * Shorthand function for finding arrays of elements by css.
     *
     * @type {function(string): ElementArrayFinder}
     */
    $$(selector: string): ElementArrayFinder;
    /**
     * Returns an ElementFinder representation of ElementArrayFinder. It ensures
     * that the ElementArrayFinder resolves to one and only one underlying element.
     *
     * @return {ElementFinder} An ElementFinder representation
     * @private
     */
    toElementFinder_(): ElementFinder;
    /**
     * Returns the most relevant locator.
     *
     * @example
     * $('#ID1').locator() // returns by.css('#ID1')
     * $('#ID1').$('#ID2').locator() // returns by.css('#ID2')
     * $$('#ID1').filter(filterFn).get(0).click().locator() // returns by.css('#ID1')
     *
     * @return {webdriver.Locator}
     */
    locator(): webdriver.Locator;
    /**
     * Evaluates the input as if it were on the scope of the current underlying
     * elements.
     *
     * @view
     * <span id="foo">{{variableInScope}}</span>
     *
     * @example
     * var value = element(by.id('foo')).evaluate('variableInScope');
     *
     * @param {string} expression
     *
     * @return {ElementArrayFinder} which resolves to the
     *     evaluated expression for each underlying element.
     *     The result will be resolved as in
     *     {@link webdriver.WebDriver.executeScript}. In summary - primitives will
     *     be resolved as is, functions will be converted to string, and elements
     *     will be returned as a WebElement.
     */
    evaluate(expression: string): ElementArrayFinder;
    /**
     * Determine if animation is allowed on the current underlying elements.
     * @param {string} value
     *
     * @example
     * // Turns off ng-animate animations for all elements in the <body>
     * element(by.css('body')).allowAnimations(false);
     *
     * @return {ElementArrayFinder} which resolves to whether animation is allowed.
     */
    allowAnimations(value: boolean): ElementArrayFinder;
    /**
     * Schedules a command to click on this element.
     * @return {!webdriver.promise.Promise} A promise that will be resolved when
     *     the click command has completed.
     */
    click(): webdriver.promise.Promise<void>;
    /**
     * Schedules a command to type a sequence on the DOM element represented by this
     * instance.
     * <p/>
     * Modifier keys (SHIFT, CONTROL, ALT, META) are stateful; once a modifier is
     * processed in the keysequence, that key state is toggled until one of the
     * following occurs:
     * <ul>
     * <li>The modifier key is encountered again in the sequence. At this point the
     * state of the key is toggled (along with the appropriate keyup/down events).
     * </li>
     * <li>The {@code webdriver.Key.NULL} key is encountered in the sequence. When
     * this key is encountered, all modifier keys currently in the down state are
     * released (with accompanying keyup events). The NULL key can be used to
     * simulate common keyboard shortcuts:
     * <code>
     *     element.sendKeys("text was",
     *                      webdriver.Key.CONTROL, "a", webdriver.Key.NULL,
     *                      "now text is");
     *     // Alternatively:
     *     element.sendKeys("text was",
     *                      webdriver.Key.chord(webdriver.Key.CONTROL, "a"),
     *                      "now text is");
     * </code></li>
     * <li>The end of the keysequence is encountered. When there are no more keys
     * to type, all depressed modifier keys are released (with accompanying keyup
     * events).
     * </li>
     * </ul>
     * <strong>Note:</strong> On browsers where native keyboard events are not yet
     * supported (e.g. Firefox on OS X), key events will be synthesized. Special
     * punctuation keys will be synthesized according to a standard QWERTY en-us
     * keyboard layout.
     *
     * @param {...string} var_args The sequence of keys to
     *     type. All arguments will be joined into a single sequence (var_args is
     *     permitted for convenience).
     * @return {!webdriver.promise.Promise} A promise that will be resolved when all
     *     keys have been typed.
     */
    sendKeys(...var_args: string[]): webdriver.promise.Promise<void>;
    /**
     * Schedules a command to query for the tag/node name of this element.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with the
     *     element's tag name.
     */
    getTagName(): webdriver.promise.Promise<string[]>;
    /**
     * Schedules a command to query for the computed style of the element
     * represented by this instance. If the element inherits the named style from
     * its parent, the parent will be queried for its value. Where possible, color
     * values will be converted to their hex representation (e.g. #00ff00 instead of
     * rgb(0, 255, 0)).
     * <p/>
     * <em>Warning:</em> the value returned will be as the browser interprets it, so
     * it may be tricky to form a proper assertion.
     *
     * @param {string} cssStyleProperty The name of the CSS style property to look
     *     up.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with the
     *     requested CSS value.
     */
    getCssValue(cssStyleProperty: string): webdriver.promise.Promise<string[]>;
    /**
     * Schedules a command to query for the value of the given attribute of the
     * element. Will return the current value even if it has been modified after the
     * page has been loaded. More exactly, this method will return the value of the
     * given attribute, unless that attribute is not present, in which case the
     * value of the property with the same name is returned. If neither value is
     * set, null is returned. The "style" attribute is converted as best can be to a
     * text representation with a trailing semi-colon. The following are deemed to
     * be "boolean" attributes and will be returned as thus:
     *
     * <p>async, autofocus, autoplay, checked, compact, complete, controls, declare,
     * defaultchecked, defaultselected, defer, disabled, draggable, ended,
     * formnovalidate, hidden, indeterminate, iscontenteditable, ismap, itemscope,
     * loop, multiple, muted, nohref, noresize, noshade, novalidate, nowrap, open,
     * paused, pubdate, readonly, required, reversed, scoped, seamless, seeking,
     * selected, spellcheck, truespeed, willvalidate
     *
     * <p>Finally, the following commonly mis-capitalized attribute/property names
     * are evaluated as expected:
     * <ul>
     *   <li>"class"
     *   <li>"readonly"
     * </ul>
     * @param {string} attributeName The name of the attribute to query.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with the
     *     attribute's value.
     */
    getAttribute(attributeName: string): webdriver.promise.Promise<string[]>;
    /**
     * Get the visible (i.e. not hidden by CSS) innerText of this element, including
     * sub-elements, without any leading or trailing whitespace.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with the
     *     element's visible text.
     */
    getText(): webdriver.promise.Promise<string[]>;
    /**
     * Schedules a command to compute the size of this element's bounding box, in
     * pixels.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with the
     *     element's size as a {@code {width:number, height:number}} object.
     */
    getSize(): webdriver.promise.Promise<webdriver.ISize[]>;
    /**
     * Schedules a command to compute the location of this element in page space.
     * @return {!webdriver.promise.Promise} A promise that will be resolved to the
     *     element's location as a {@code {x:number, y:number}} object.
     */
    getLocation(): webdriver.promise.Promise<webdriver.ILocation[]>;
    /**
     * Schedules a command to query whether the DOM element represented by this
     * instance is enabled, as dictated by the {@code disabled} attribute.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with
     *     whether this element is currently enabled.
     */
    isEnabled(): webdriver.promise.Promise<boolean[]>;
    /**
     * Schedules a command to query whether this element is selected.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with
     *     whether this element is currently selected.
     */
    isSelected(): webdriver.promise.Promise<boolean[]>;
    /**
     * Schedules a command to submit the form containing this element (or this
     * element if it is a FORM element). This command is a no-op if the element is
     * not contained in a form.
     * @return {!webdriver.promise.Promise} A promise that will be resolved when
     *     the form has been submitted.
     */
    submit(): webdriver.promise.Promise<void>;
    /**
     * Schedules a command to clear the {@code value} of this element. This command
     * has no effect if the underlying DOM element is neither a text INPUT element
     * nor a TEXTAREA element.
     * @return {!webdriver.promise.Promise} A promise that will be resolved when
     *     the element has been cleared.
     */
    clear(): webdriver.promise.Promise<void>;
    /**
     * Schedules a command to test whether this element is currently displayed.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with
     *     whether this element is currently visible on the page.
     */
    isDisplayed(): webdriver.promise.Promise<boolean[]>;
    /**
     * Schedules a command to retrieve the outer HTML of this element.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with
     *     the element's outer HTML.
     */
    getOuterHtml(): webdriver.promise.Promise<string[]>;
    /**
     * @return {!webdriver.promise.Promise.<webdriver.WebElement.Id>} A promise
     *     that resolves to this element's JSON representation as defined by the
     *     WebDriver wire protocol.
     * @see http://code.google.com/p/selenium/wiki/JsonWireProtocol
     */
    getId(): webdriver.promise.Promise<webdriver.IWebElementId[]>
    /**
     * Schedules a command to retrieve the inner HTML of this element.
     * @return {!webdriver.promise.Promise} A promise that will be resolved with the
     *     element's inner HTML.
     */
    getInnerHtml(): webdriver.promise.Promise<string[]>;
}
/**
 * A locator (e.g. for an ng-repeat row) that can be narrowed to a single
 * column, either by column index or by the column's binding name.
 */
interface LocatorWithColumn extends webdriver.Locator {
    column(index: number): webdriver.Locator;
    column(name: string): webdriver.Locator;
}
/**
 * Locator returned by by.repeater(). row(index) narrows the locator to a
 * single repeated row; the result can be narrowed further by column.
 */
interface RepeaterLocator extends LocatorWithColumn {
    row(index: number): LocatorWithColumn;
}
interface IProtractorLocatorStrategy extends webdriver.ILocatorStrategy {
    /**
     * Add a locator to this instance of ProtractorBy. This locator can then be
     * used with element(by.locatorName(args)).
     *
     * @view
     * <button ng-click="doAddition()">Go!</button>
     *
     * @example
     * // Add the custom locator.
     * by.addLocator('buttonTextSimple',
     *     function(buttonText, opt_parentElement, opt_rootSelector) {
     *   // This function will be serialized as a string and will execute in the
     *   // browser. The first argument is the text for the button. The second
     *   // argument is the parent element, if any.
     *   var using = opt_parentElement,
     *       buttons = using.querySelectorAll('button');
     *
     *   // Return an array of buttons with the text.
     *   return Array.prototype.filter.call(buttons, function(button) {
     *     return button.textContent === buttonText;
     *   });
     * });
     *
     * // Use the custom locator.
     * element(by.buttonTextSimple('Go!')).click();
     *
     * @alias by.addLocator(locatorName, functionOrScript)
     * @param {string} name The name of the new locator.
     * @param {Function|string} script A script to be run in the context of
     *     the browser. This script will be passed an array of arguments
     *     that contains any args passed into the locator followed by the
     *     element scoping the search and the css selector for the root angular
     *     element. It should return an array of elements.
     */
    addLocator(name: string, script: string): void;
    addLocator(name: string, script: Function): void;
    /**
     * Find an element by binding.
     *
     * @view
     * <span>{{person.name}}</span>
     * <span ng-bind="person.email"></span>
     *
     * @example
     * var span1 = element(by.binding('person.name'));
     * expect(span1.getText()).toBe('Foo');
     *
     * var span2 = element(by.binding('person.email'));
     * expect(span2.getText()).toBe('[email protected]');
     *
     * @param {string} bindingDescriptor
     * @return {{findElementsOverride: findElementsOverride, toString: Function|string}}
     */
    binding(bindingDescriptor: string): webdriver.Locator;
    /**
     * Find an element by exact binding.
     *
     * @view
     * <span>{{ person.name }}</span>
     * <span ng-bind="person-email"></span>
     * <span>{{person_phone|uppercase}}</span>
     *
     * @example
     * expect(element(by.exactBinding('person.name')).isPresent()).toBe(true);
     * expect(element(by.exactBinding('person-email')).isPresent()).toBe(true);
     * expect(element(by.exactBinding('person')).isPresent()).toBe(false);
     * expect(element(by.exactBinding('person_phone')).isPresent()).toBe(true);
     * expect(element(by.exactBinding('person_phone|uppercase')).isPresent()).toBe(true);
     * expect(element(by.exactBinding('phone')).isPresent()).toBe(false);
     *
     * @param {string} bindingDescriptor
     * @return {{findElementsOverride: findElementsOverride, toString: Function|string}}
     */
    exactBinding(bindingDescriptor: string): webdriver.Locator;
    /**
     * Find an element by ng-model expression.
     *
     * @alias by.model(modelName)
     * @view
     * <input type="text" ng-model="person.name"/>
     *
     * @example
     * var input = element(by.model('person.name'));
     * input.sendKeys('123');
     * expect(input.getAttribute('value')).toBe('Foo123');
     *
     * @param {string} model ng-model expression.
     */
    model(model: string): webdriver.Locator;
    /**
     * Find a button by text.
     *
     * @view
     * <button>Save</button>
     *
     * @example
     * element(by.buttonText('Save'));
     *
     * @param {string} searchText
     * @return {{findElementsOverride: findElementsOverride, toString: Function|string}}
     */
    buttonText(searchText: string): webdriver.Locator;
    /**
     * Find a button by partial text.
     *
     * @view
     * <button>Save my file</button>
     *
     * @example
     * element(by.partialButtonText('Save'));
     *
     * @param {string} searchText
     * @return {{findElementsOverride: findElementsOverride, toString: Function|string}}
     */
    partialButtonText(searchText: string): webdriver.Locator;
    /**
     * Find elements inside an ng-repeat.
     *
     * @view
     * <div ng-repeat="cat in pets">
     *   <span>{{cat.name}}</span>
     *   <span>{{cat.age}}</span>
     * </div>
     *
     * <div class="book-img" ng-repeat-start="book in library">
     *   <span>{{$index}}</span>
     * </div>
     * <div class="book-info" ng-repeat-end>
     *   <h4>{{book.name}}</h4>
     *   <p>{{book.blurb}}</p>
     * </div>
     *
     * @example
     * // Returns the DIV for the second cat.
     * var secondCat = element(by.repeater('cat in pets').row(1));
     *
     * // Returns the SPAN for the first cat's name.
     * var firstCatName = element(by.repeater('cat in pets').
     *     row(0).column('{{cat.name}}'));
     *
     * // Returns a promise that resolves to an array of WebElements from a column
     * var ages = element.all(
     *     by.repeater('cat in pets').column('{{cat.age}}'));
     *
     * // Returns a promise that resolves to an array of WebElements containing
     * // all top level elements repeated by the repeater. For 2 pets rows resolves
     * // to an array of 2 elements.
     * var rows = element.all(by.repeater('cat in pets'));
     *
     * // Returns a promise that resolves to an array of WebElements containing all
     * // the elements with a binding to the book's name.
     * var divs = element.all(by.repeater('book in library').column('book.name'));
     *
     * // Returns a promise that resolves to an array of WebElements containing
     * // the DIVs for the second book.
     * var bookInfo = element.all(by.repeater('book in library').row(1));
     *
     * // Returns the H4 for the first book's name.
     * var firstBookName = element(by.repeater('book in library').
     *     row(0).column('{{book.name}}'));
     *
     * // Returns a promise that resolves to an array of WebElements containing
     * // all top level elements repeated by the repeater. For 2 books divs
     * // resolves to an array of 4 elements.
     * var divs = element.all(by.repeater('book in library'));
     */
    repeater(repeatDescriptor: string): RepeaterLocator;
    /**
     * Find elements by CSS which contain a certain string.
     *
     * @view
     * <ul>
     *   <li class="pet">Dog</li>
     *   <li class="pet">Cat</li>
     * </ul>
     *
     * @example
     * // Returns the DIV for the dog, but not cat.
     * var dog = element(by.cssContainingText('.pet', 'Dog'));
     */
    cssContainingText(cssSelector: string, searchText: string): webdriver.Locator;
    /**
     * Find an element by ng-options expression.
     *
     * @alias by.options(optionsDescriptor)
     * @view
     * <select ng-model="color" ng-options="c for c in colors">
     *   <option value="0" selected="selected">red</option>
     *   <option value="1">green</option>
     * </select>
     *
     * @example
     * var allOptions = element.all(by.options('c for c in colors'));
     * expect(allOptions.count()).toEqual(2);
     * var firstOption = allOptions.first();
     * expect(firstOption.getText()).toEqual('red');
     *
     * @param {string} optionsDescriptor ng-options expression.
     */
    options(optionsDescriptor: string): webdriver.Locator;
}
/** Module-level ProtractorBy instance (same strategy object exposed as the global `by`/`By`). */
var By: IProtractorLocatorStrategy;
interface Protractor extends webdriver.WebDriver {
    /**
     * The wrapped webdriver instance. Use this to interact with pages that do
     * not contain Angular (such as a log-in screen).
     *
     * @type {webdriver.WebDriver}
     */
    driver: webdriver.WebDriver;
    /**
     * Helper function for finding elements.
     *
     * @type {function(webdriver.Locator): ElementFinder}
     */
    element(locator: webdriver.Locator): ElementFinder;
    /**
     * Shorthand function for finding elements by css.
     *
     * @type {function(string): ElementFinder}
     */
    $(selector: string): ElementFinder;
    /**
     * Shorthand function for finding arrays of elements by css.
     *
     * @type {function(string): ElementArrayFinder}
     */
    $$(selector: string): ElementArrayFinder;
    /**
     * All get methods will be resolved against this base URL. Relative URLs are =
     * resolved the way anchor tags resolve.
     *
     * @type {string}
     */
    baseUrl: string;
    /**
     * The css selector for an element on which to find Angular. This is usually
     * 'body' but if your ng-app is on a subsection of the page it may be
     * a subelement.
     *
     * @type {string}
     */
    rootEl: string;
    /**
     * If true, Protractor will not attempt to synchronize with the page before
     * performing actions. This can be harmful because Protractor will not wait
     * until $timeouts and $http calls have been processed, which can cause
     * tests to become flaky. This should be used only when necessary, such as
     * when a page continuously polls an API using $timeout.
     *
     * @type {boolean}
     */
    ignoreSynchronization: boolean;
    /**
     * Timeout in milliseconds to wait for pages to load when calling `get`.
     *
     * @type {number}
     */
    getPageTimeout: number;
    /**
     * An object that holds custom test parameters.
     *
     * @type {Object}
     */
    params: any;
    /**
     * The reset URL to use between page loads.
     *
     * @type {string}
     */
    resetUrl: string;
    /**
     * Instruct webdriver to wait until Angular has finished rendering and has
     * no outstanding $http calls before continuing.
     *
     * @return {!webdriver.promise.Promise} A promise that will resolve to the
     *    scripts return value.
     */
    waitForAngular(): webdriver.promise.Promise<void>;
    /**
     * Add a module to load before Angular whenever Protractor.get is called.
     * Modules will be registered after existing modules already on the page,
     * so any module registered here will override preexisting modules with the same
     * name.
     *
     * @example
     * browser.addMockModule('modName', function() {
     *   angular.module('modName', []).value('foo', 'bar');
     * });
     *
     * @param {!string} name The name of the module to load or override.
     * @param {!string|Function} script The JavaScript to load the module.
     * @param {...*} varArgs Any additional arguments will be provided to
     *     the script and may be referenced using the `arguments` object.
     */
    addMockModule(name: string, script: string, ...varArgs: any[]): void;
    addMockModule(name: string, script: Function, ...varArgs: any[]): void;
    /**
     * Clear the list of registered mock modules.
     */
    clearMockModules(): void;
    /**
     * Remove a registered mock module.
     *
     * @example
     * browser.removeMockModule('modName');
     *
     * @param {!string} name The name of the module to remove.
     */
    removeMockModule(name: string): void;
    /**
     * @see webdriver.WebDriver.get
     *
     * Navigate to the given destination and loads mock modules before
     * Angular. Assumes that the page being loaded uses Angular.
     * If you need to access a page which does not have Angular on load, use
     * the wrapped webdriver directly.
     *
     * @param {string} destination Destination URL.
     * @param {number=} opt_timeout Number of milliseconds to wait for Angular to
     *     start.
     */
    get(destination: string, opt_timeout?: number): webdriver.promise.Promise<void>;
    /**
     * See webdriver.WebDriver.refresh
     *
     * Makes a full reload of the current page and loads mock modules before
     * Angular. Assumes that the page being loaded uses Angular.
     * If you need to access a page which does not have Angular on load, use
     * the wrapped webdriver directly.
     *
     * @param {number=} opt_timeout Number of seconds to wait for Angular to start.
     */
    refresh(opt_timeout?: number): webdriver.promise.Promise<void>;
    /**
     * Browse to another page using in-page navigation.
     *
     * @param {string} url In page URL using the same syntax as $location.url()
     * @returns {!webdriver.promise.Promise} A promise that will resolve once
     *    page has been changed.
     */
    setLocation(url: string): webdriver.promise.Promise<void>;
    /**
     * Returns the current absolute url from AngularJS.
     */
    getLocationAbsUrl(): webdriver.promise.Promise<string>;
    /**
     * Pauses the test and injects some helper functions into the browser, so that
     * debugging may be done in the browser console.
     *
     * This should be used under node in debug mode, i.e. with
     * protractor debug <configuration.js>
     *
     * @example
     * While in the debugger, commands can be scheduled through webdriver by
     * entering the repl:
     *   debug> repl
     *   Press Ctrl + C to leave rdebug repl
     *   > ptor.findElement(protractor.By.input('user').sendKeys('Laura'));
     *   > ptor.debugger();
     *   debug> c
     *
     * This will run the sendKeys command as the next task, then re-enter the
     * debugger.
     */
    debugger(): void;
    /**
     * Beta (unstable) pause function for debugging webdriver tests. Use
     * browser.pause() in your test to enter the protractor debugger from that
     * point in the control flow.
     * Does not require changes to the command line (no need to add 'debug').
     *
     * @example
     * element(by.id('foo')).click();
     * browser.pause();
     * // Execution will stop before the next click action.
     * element(by.id('bar')).click();
     *
     * @param {number=} opt_debugPort Optional port to use for the debugging process
     */
    pause(opt_debugPort?: number): void;
}
// Interface for the global browser object.
interface IBrowser extends Protractor {
/**
* Fork another instance of protractor for use in interactive tests.
*
* @param {boolean} opt_useSameUrl Whether to navigate to current url on creation
* @param {boolean} opt_copyMockModules Whether to apply same mock modules on creation
* @return {Protractor} a protractor instance.
*/
forkNewDriverInstance(opt_useSameUrl?: boolean, opt_copyMockModules?: boolean): Protractor;
}
/**
* Create a new instance of Protractor by wrapping a webdriver instance.
*
* @param {webdriver.WebDriver} webdriver The configured webdriver instance.
* @param {string=} opt_baseUrl A URL to prepend to relative gets.
* @return {Protractor}
*/
function wrapDriver(webdriver: webdriver.WebDriver, opt_baseUrl?: string, opt_rootElement?: string): Protractor;
}
interface cssSelectorHelper {
(cssLocator: string): protractor.ElementFinder;
}
interface cssArraySelectorHelper {
(cssLocator: string): protractor.ElementArrayFinder;
}
declare var browser: protractor.IBrowser;
declare var by: protractor.IProtractorLocatorStrategy;
declare var By: protractor.IProtractorLocatorStrategy;
declare var element: protractor.Element;
declare var $: cssSelectorHelper;
declare var $$: cssArraySelectorHelper;
declare module 'protractor' {
export = protractor;
}<|fim▁end|>
|
/**
* Creates a new deferred object.
|
<|file_name|>class_method.rs<|end_file_name|><|fim▁begin|>// Copyright 2016-17 Alexander Reece
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::collections::BTreeMap;
use std::ops::Deref;
use inflections::Inflect;
use specs;
use common::{Field, DomainMapper};
#[derive(Debug, Clone)]
pub struct ClassMethod {
method: &'static specs::ClassMethod,
fields: Vec<Field>,
field_names: BTreeMap<&'static str, usize>,
field_vars: BTreeMap<String, usize>,
constant_case: String,
pascal_case: String,
snake_case: String,<|fim▁hole|> has_usable_fields: bool,
}
impl ClassMethod {
pub fn new(spec: &'static specs::Spec, method: &'static specs::ClassMethod) -> Self {
let domain_mapper = DomainMapper::from_spec(spec);
let fields = method.fields()
.map(|field| {
let domain = domain_mapper.map(field.domain());
Field::from_field(field, domain)
})
.collect::<Vec<_>>();
let has_lifetimes = fields.iter()
.filter(|field| !field.is_reserved())
.any(|field| !field.ty().is_copy());
let constant_case = method.name().to_constant_case();
let pascal_case = method.name().to_pascal_case();
let snake_case = method.name().to_snake_case();
let has_usable_fields = method.fields().any(|f| !f.is_reserved());
let field_names = method.fields()
.enumerate()
.map(|(index, field)| (field.name(), index))
.collect();
let field_vars = fields.iter()
.enumerate()
.map(|(index, field)| ((&**field.var_name()).clone(), index))
.collect();
ClassMethod {
method: method,
fields: fields,
field_names: field_names,
field_vars: field_vars,
constant_case: constant_case,
pascal_case: pascal_case,
snake_case: snake_case,
has_lifetimes: has_lifetimes,
has_usable_fields: has_usable_fields,
}
}
pub fn method(&self) -> &'static specs::ClassMethod {
self.method
}
pub fn name(&self) -> &'static str {
self.method.name()
}
pub fn field(&self, name: &str) -> Option<&Field> {
self.field_names.get(name)
.map(|index| &self.fields[*index])
}
pub fn field_by_var(&self, var_name: &str) -> Option<&Field> {
self.field_vars.get(var_name)
.map(|index| &self.fields[*index])
}
pub fn fields(&self) -> &[Field] {
&self.fields
}
pub fn constant_case(&self) -> &str {
&self.constant_case
}
pub fn snake_case(&self) -> &str {
&self.snake_case
}
pub fn pascal_case(&self) -> &str {
&self.pascal_case
}
pub fn has_lifetimes(&self) -> bool {
self.has_lifetimes
}
pub fn has_usable_fields(&self) -> bool {
self.has_usable_fields
}
}
impl Deref for ClassMethod {
type Target = specs::ClassMethod;
fn deref(&self) -> &Self::Target {
self.method
}
}<|fim▁end|>
|
has_lifetimes: bool,
|
<|file_name|>encryption.rs<|end_file_name|><|fim▁begin|>mod builder_key;
mod message;
mod origin_key;
mod service_key;
mod user_key;
pub use builder_key::{generate_builder_encryption_key,
BuilderSecretEncryptionKey,
BUILDER_KEY_NAME};
pub use message::{AnonymousBox,
SignedBox};
pub use origin_key::{generate_origin_encryption_key_pair,
OriginPublicEncryptionKey,
OriginSecretEncryptionKey};
pub use service_key::{generate_service_encryption_key_pair,
ServicePublicEncryptionKey,
ServiceSecretEncryptionKey};
pub use user_key::{generate_user_encryption_key_pair,
UserPublicEncryptionKey,
UserSecretEncryptionKey};
/// The suffix on the end of a public encryption key file
const PUBLIC_KEY_SUFFIX: &str = "pub";<|fim▁hole|>/// Format version identifier for secret encryption keys.
const SECRET_BOX_KEY_VERSION: &str = "BOX-SEC-1";
/// Private module to re-export the various sodiumoxide concepts we
/// use, to ensure everyone is using them consistently.
mod primitives {
pub use sodiumoxide::crypto::{box_::{curve25519xsalsa20poly1305::{gen_nonce,
Nonce,
PublicKey,
SecretKey},
gen_keypair,
open,
seal},
sealedbox};
}<|fim▁end|>
|
/// The suffix on the end of a secret encryption key file
const SECRET_BOX_KEY_SUFFIX: &str = "box.key";
/// Format version identifier for public encryption keys.
const PUBLIC_BOX_KEY_VERSION: &str = "BOX-PUB-1";
|
<|file_name|>evaluate.go<|end_file_name|><|fim▁begin|>package main
import (
"github.com/robertkrimen/otto"
)
func evaluateScript(src string, payload []FinalInput) (string, error) {<|fim▁hole|> javaScript.Set("eval", func(call otto.FunctionCall) otto.Value {
evalFunc = call.Argument(0)
return otto.UndefinedValue()
})
javaScript.Run(src)
arg, err := javaScript.ToValue(payload)
if err != nil {
return "", err
}
ret, err := evalFunc.Call(otto.NullValue(), arg)
if err != nil {
return "", err
}
return ret.ToString()
}<|fim▁end|>
|
javaScript := otto.New()
var evalFunc otto.Value
|
<|file_name|>GreeterPrivate.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2013 Canonical, Ltd.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|> * You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Author: Michael Terry <[email protected]>
*/
#include "Greeter.h"
#include "GreeterPrivate.h"
#include <QFuture>
#include <QFutureInterface>
#include <QFutureWatcher>
#include <QQueue>
#include <QtConcurrent>
#include <QVector>
#include <security/pam_appl.h>
namespace QLightDM
{
class GreeterImpl : public QObject
{
Q_OBJECT
struct AppData
{
GreeterImpl *impl;
pam_handle *handle;
};
typedef QFutureInterface<QString> ResponseFuture;
public:
explicit GreeterImpl(Greeter *parent, GreeterPrivate *greeterPrivate)
: QObject(parent),
greeter(parent),
greeterPrivate(greeterPrivate),
pamHandle(nullptr)
{
qRegisterMetaType<QLightDM::GreeterImpl::ResponseFuture>("QLightDM::GreeterImpl::ResponseFuture");
connect(&futureWatcher, &QFutureWatcher<int>::finished, this, &GreeterImpl::finishPam);
connect(this, SIGNAL(showMessage(pam_handle *, QString, QLightDM::Greeter::MessageType)),
this, SLOT(handleMessage(pam_handle *, QString, QLightDM::Greeter::MessageType)));
// This next connect is how we pass ResponseFutures between threads
connect(this, SIGNAL(showPrompt(pam_handle *, QString, QLightDM::Greeter::PromptType, QLightDM::GreeterImpl::ResponseFuture)),
this, SLOT(handlePrompt(pam_handle *, QString, QLightDM::Greeter::PromptType, QLightDM::GreeterImpl::ResponseFuture)),
Qt::BlockingQueuedConnection);
}
~GreeterImpl()
{
cancelPam();
}
void start(QString username)
{
// Clear out any existing PAM interactions first
cancelPam();
if (pamHandle != nullptr) {
// While we were cancelling pam above, we processed Qt events.
// Which may have allowed someone to call start() on us again.
// In which case, we'll bail on our current start() call.
// This isn't racy because it's all in the same thread.
return;
}
AppData *appData = new AppData();
appData->impl = this;
// Now actually start a new conversation with PAM
pam_conv conversation;
conversation.conv = converseWithPam;
conversation.appdata_ptr = static_cast<void*>(appData);
if (pam_start("lightdm", username.toUtf8(), &conversation, &pamHandle) == PAM_SUCCESS) {
appData->handle = pamHandle;
futureWatcher.setFuture(QtConcurrent::mapped(QList<pam_handle*>() << pamHandle, authenticateWithPam));
} else {
delete appData;
greeterPrivate->authenticated = false;
Q_EMIT greeter->showMessage(QStringLiteral("Internal error: could not start PAM authentication"), QLightDM::Greeter::MessageTypeError);
Q_EMIT greeter->authenticationComplete();
}
}
static int authenticateWithPam(pam_handle* const& pamHandle)
{
int pamStatus = pam_authenticate(pamHandle, 0);
if (pamStatus == PAM_SUCCESS) {
pamStatus = pam_acct_mgmt(pamHandle, 0);
}
if (pamStatus == PAM_NEW_AUTHTOK_REQD) {
pamStatus = pam_chauthtok(pamHandle, PAM_CHANGE_EXPIRED_AUTHTOK);
}
if (pamStatus == PAM_SUCCESS) {
pam_setcred(pamHandle, PAM_REINITIALIZE_CRED);
}
return pamStatus;
}
static int converseWithPam(int num_msg, const pam_message** msg,
pam_response** resp, void* appdata_ptr)
{
if (num_msg <= 0)
return PAM_CONV_ERR;
auto* tmp_response = static_cast<pam_response*>(calloc(num_msg, sizeof(pam_response)));
if (!tmp_response)
return PAM_CONV_ERR;
AppData *appData = static_cast<AppData*>(appdata_ptr);
GreeterImpl *impl = appData->impl;
pam_handle *handle = appData->handle;
int count;
QVector<ResponseFuture> responses;
for (count = 0; count < num_msg; ++count)
{
switch (msg[count]->msg_style)
{
case PAM_PROMPT_ECHO_ON:
{
QString message(msg[count]->msg);
responses.append(ResponseFuture());
responses.last().reportStarted();
Q_EMIT impl->showPrompt(handle, message, Greeter::PromptTypeQuestion, responses.last());
break;
}
case PAM_PROMPT_ECHO_OFF:
{
QString message(msg[count]->msg);
responses.append(ResponseFuture());
responses.last().reportStarted();
Q_EMIT impl->showPrompt(handle, message, Greeter::PromptTypeSecret, responses.last());
break;
}
case PAM_TEXT_INFO:
{
QString message(msg[count]->msg);
Q_EMIT impl->showMessage(handle, message, Greeter::MessageTypeInfo);
break;
}
default:
{
QString message(msg[count]->msg);
Q_EMIT impl->showMessage(handle, message, Greeter::MessageTypeError);
break;
}
}
}
int i = 0;
bool raise_error = false;
for (auto &response : responses)
{
pam_response* resp_item = &tmp_response[i++];
resp_item->resp_retcode = 0;
resp_item->resp = strdup(response.future().result().toUtf8());
if (!resp_item->resp)
{
raise_error = true;
break;
}
}
delete appData;
if (raise_error)
{
for (int i = 0; i < count; ++i)
free(tmp_response[i].resp);
free(tmp_response);
return PAM_CONV_ERR;
}
else
{
*resp = tmp_response;
return PAM_SUCCESS;
}
}
public Q_SLOTS:
bool respond(QString response)
{
if (!futures.isEmpty()) {
futures.dequeue().reportFinished(&response);
return true;
} else {
return false;
}
}
void cancelPam()
{
if (pamHandle != nullptr) {
QFuture<int> pamFuture = futureWatcher.future();
pam_handle *handle = pamHandle;
pamHandle = nullptr; // to disable normal finishPam() handling
pamFuture.cancel();
// Note the empty loop, we just want to clear the futures queue.
// Any further prompts from the pam thread will be immediately
// responded to/dismissed in handlePrompt().
while (respond(QString()));
// Now let signal/slot handling happen so the thread can finish
while (!pamFuture.isFinished()) {
QCoreApplication::processEvents();
}
pam_end(handle, PAM_CONV_ERR);
}
}
Q_SIGNALS:
void showMessage(pam_handle *handle, QString text, QLightDM::Greeter::MessageType type);
void showPrompt(pam_handle *handle, QString text, QLightDM::Greeter::PromptType type, QLightDM::GreeterImpl::ResponseFuture response);
private Q_SLOTS:
void finishPam()
{
if (pamHandle == nullptr) {
return;
}
int pamStatus = futureWatcher.result();
pam_end(pamHandle, pamStatus);
pamHandle = nullptr;
greeterPrivate->authenticated = (pamStatus == PAM_SUCCESS);
Q_EMIT greeter->authenticationComplete();
}
void handleMessage(pam_handle *handle, QString text, QLightDM::Greeter::MessageType type)
{
if (handle != pamHandle)
return;
Q_EMIT greeter->showMessage(text, type);
}
void handlePrompt(pam_handle *handle, QString text, QLightDM::Greeter::PromptType type, QLightDM::GreeterImpl::ResponseFuture future)
{
if (handle != pamHandle) {
future.reportResult(QString());
future.reportFinished();
return;
}
futures.enqueue(future);
Q_EMIT greeter->showPrompt(text, type);
}
private:
Greeter *greeter;
GreeterPrivate *greeterPrivate;
pam_handle* pamHandle;
QFutureWatcher<int> futureWatcher;
QQueue<ResponseFuture> futures;
};
GreeterPrivate::GreeterPrivate(Greeter* parent)
: authenticated(false),
authenticationUser(),
m_impl(new GreeterImpl(parent, this)),
q_ptr(parent)
{
}
GreeterPrivate::~GreeterPrivate()
{
delete m_impl;
}
void GreeterPrivate::handleAuthenticate()
{
m_impl->start(authenticationUser);
}
void GreeterPrivate::handleRespond(const QString &response)
{
m_impl->respond(response);
}
void GreeterPrivate::cancelAuthentication()
{
m_impl->cancelPam();
}
}
#include "GreeterPrivate.moc"<|fim▁end|>
|
* GNU General Public License for more details.
*
|
<|file_name|>ng2-header.component.ts<|end_file_name|><|fim▁begin|>import { Component, EventEmitter, Output, trigger, state, transition, style, animate, OnInit } from '@angular/core';
import { Observable } from 'rxjs/Observable';
@Component({
selector: 'ng2-header',
styleUrls: ['./ng2-header.scss'],
templateUrl: './ng2-header.html',
animations: [
trigger('isTopOfPage', [
state('true', style({
color: '#FFFFFF',
transform: 'scale(1)'
})),
state('false', style({
color: '#2b2b2b',
transform: 'scale(1.1)'
})),<|fim▁hole|>})
export class Ng2Header implements OnInit {
@Output() callback = new EventEmitter();
public isTopOfPage: boolean = true;
private didScroll: boolean = false;
constructor() {
}
ngOnInit() {
Observable.fromEvent(window, 'scroll').subscribe(() => {
if (!this.didScroll) {
this.didScroll = true;
setTimeout(() => this.scrollPage(), 250);
}
});
}
/**
* Method to handle click events
*/
public openSidebar(): void {
this.callback.emit();
}
/**
* Modify the header state depending on the offset
*/
private scrollPage(): void {
this.isTopOfPage = this.scrollY() < window.innerHeight - 100;
this.didScroll = false;
}
/**
* Get Y axis offset
* @returns {number}
*/
private scrollY(): number {
return window.pageYOffset || document.documentElement.scrollTop;
}
}<|fim▁end|>
|
transition('0 => 1', animate('100ms ease-in')),
transition('1 => 0', animate('100ms ease-out'))
])
]
|
<|file_name|>AddLink.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import styles from './AddLink.module.scss';
import { IAddLinkProps } from './IAddLinkProps';<|fim▁hole|>import {
Stack,
TextField,
Dialog,
DialogType,
DialogFooter,
PrimaryButton,
DefaultButton,
} from 'office-ui-fabric-react';
import * as tsStyles from './AddLinkStyles';
import { ITaskExternalReference } from '../../services/ITaskExternalReference';
import { ITaskDetails } from '../../services/ITaskDetails';
import { utilities } from '../../utilities';
import * as strings from 'MyTasksWebPartStrings';
export class AddLink extends React.Component<IAddLinkProps, IAddLinkState> {
private _newReferences: ITaskExternalReference = {} as ITaskExternalReference;
private _taskDetails:ITaskDetails = {} as ITaskDetails;
private _util = new utilities();
constructor(props: IAddLinkProps) {
super(props);
this.state = {
hideDialog: !this.props.displayDialog,
disableSaveButton: true,
link:'',
linkLabel:'',
};
this._taskDetails = this.props.taskDetails;
}
private _closeDialog = (ev?: React.MouseEvent<HTMLButtonElement, MouseEvent>) => {
ev.preventDefault();
this.setState({ hideDialog: true });
this.props.onDismiss(this._taskDetails);
}
private _onSave = async (ev?: React.MouseEvent<HTMLButtonElement, MouseEvent>) =>{
try {
let { link, linkLabel } = this.state;
const hasHttps = link.indexOf('https://') !== -1 ? true : false;
if (!hasHttps){
link = `https://${link}`;
}
const fileFullUrl: string =
(`${decodeURIComponent(link)}`).replace(/\./g, '%2E').replace(/\:/g, '%3A') + '?web=1';
this._newReferences[fileFullUrl] = {
alias: linkLabel ? linkLabel : link,
'@odata.type': '#microsoft.graph.plannerExternalReference',
type: await this._util.getFileType(link),
previewPriority: ' !'
};
for (const referenceKey of Object.keys(this._taskDetails.references)) {
const originalReference = this._taskDetails.references[referenceKey];
this._newReferences[referenceKey] = {
alias: originalReference.alias,
'@odata.type': '#microsoft.graph.plannerExternalReference',
type: originalReference.type,
previewPriority: ' !'
};
}
const updatedTaskDetails = await this.props.spservice.updateTaskDetailsProperty(
this.props.taskDetails.id,
'References',
this._newReferences,
this.props.taskDetails['@odata.etag']
);
this._taskDetails = updatedTaskDetails ;
// this._taskDetails.references = this._newReferences;
this.setState({ hideDialog: true });
this.props.onDismiss(this._taskDetails);
} catch (error) {
console.log(error);
}
}
private _onUrlCHange = (event: React.FormEvent<HTMLInputElement | HTMLTextAreaElement>, newValue:string) => {
this.setState({disableSaveButton : newValue.length > 0 ? false : true, link: newValue});
}
private _onChangeLabel = (event: React.FormEvent<HTMLInputElement | HTMLTextAreaElement>, newValue:string) => {
this.setState({ linkLabel: newValue});
}
public render(): React.ReactElement<IAddLinkProps> {
return (
<div>
<Dialog
hidden={this.state.hideDialog}
onDismiss={this._closeDialog}
minWidth={430}
maxWidth={430}
dialogContentProps={{
type: DialogType.normal,
title: strings.AddLinkLabel
}}
modalProps={{
isBlocking: true,
styles: tsStyles.modalStyles
// topOffsetFixed: true
}}>
<Stack gap='20'>
<TextField // prettier-ignore
label={strings.AddressLabel}
placeholder={strings.LinkWebAddressPlaceHolder}
prefix='https://'
borderless
ariaLabel='Url'
onChange={this._onUrlCHange}
styles={tsStyles.textFielUrlStyles}
/>
<TextField // prettier-ignore
label={strings.TextToDisplayLabel}
placeholder= {strings.LinkNameHerePlaceHolder}
ariaLabel={strings.AddressLabel}
borderless
onChange={this._onChangeLabel}
styles={tsStyles.textFielUrlStyles}
/>
</Stack>
<div style={{ marginTop: 45 }}>
<DialogFooter>
<PrimaryButton onClick={this._onSave} text={strings.SaveLabel} disabled={this.state.disableSaveButton} />
<DefaultButton onClick={this._closeDialog} text={strings.CancelLabel} />
</DialogFooter>
</div>
</Dialog>
</div>
);
}
}<|fim▁end|>
|
import { IAddLinkState } from './IAddLinkState';
|
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>def pytest_addoption(parser):
parser.addoption('--jenkins-docker', action='store',<|fim▁hole|><|fim▁end|>
|
default='jenkins/jenkins',
help='The Jenkins Docker container to launch')
|
<|file_name|>_tickvals.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class TickvalsValidator(_plotly_utils.basevalidators.DataArrayValidator):<|fim▁hole|> def __init__(self, plotly_name="tickvals", parent_name="mesh3d.colorbar", **kwargs):
super(TickvalsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "data"),
**kwargs
)<|fim▁end|>
| |
<|file_name|>portal_wizard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import email_split
from openerp import SUPERUSER_ID
_logger = logging.getLogger(__name__)
# welcome email sent to portal users
# (note that calling '_' has no effect except exporting those strings for translation)
WELCOME_EMAIL_SUBJECT = _("Your Odoo account at %(company)s")
WELCOME_EMAIL_BODY = _("""Dear %(name)s,
You have been given access to %(company)s's %(portal)s.
Your login account data is:
Username: %(login)s
Portal: %(portal_url)s
Database: %(db)s
You can set or change your password via the following url:
%(signup_url)s<|fim▁hole|>Odoo - Open Source Business Applications
http://www.openerp.com
""")
def extract_email(email):
""" extract the email address from a user-friendly email address """
addresses = email_split(email)
return addresses[0] if addresses else ''
class wizard(osv.osv_memory):
"""
A wizard to manage the creation/removal of portal users.
"""
_name = 'portal.wizard'
_description = 'Portal Access Management'
_columns = {
'portal_id': fields.many2one('res.groups', domain=[('is_portal', '=', True)], required=True,
string='Portal', help="The portal that users can be added in or removed from."),
'user_ids': fields.one2many('portal.wizard.user', 'wizard_id', string='Users'),
'welcome_message': fields.text(string='Invitation Message',
help="This text is included in the email sent to new users of the portal."),
}
def _default_portal(self, cr, uid, context):
portal_ids = self.pool.get('res.groups').search(cr, uid, [('is_portal', '=', True)])
return portal_ids and portal_ids[0] or False
_defaults = {
'portal_id': _default_portal,
}
def onchange_portal_id(self, cr, uid, ids, portal_id, context=None):
# for each partner, determine corresponding portal.wizard.user records
res_partner = self.pool.get('res.partner')
partner_ids = context and context.get('active_ids') or []
contact_ids = set()
user_changes = []
for partner in res_partner.browse(cr, SUPERUSER_ID, partner_ids, context):
for contact in (partner.child_ids or [partner]):
# make sure that each contact appears at most once in the list
if contact.id not in contact_ids:
contact_ids.add(contact.id)
in_portal = False
if contact.user_ids:
in_portal = portal_id in [g.id for g in contact.user_ids[0].groups_id]
user_changes.append((0, 0, {
'partner_id': contact.id,
'email': contact.email,
'in_portal': in_portal,
}))
return {'value': {'user_ids': user_changes}}
def action_apply(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
portal_user_ids = [user.id for user in wizard.user_ids]
self.pool.get('portal.wizard.user').action_apply(cr, uid, portal_user_ids, context)
return {'type': 'ir.actions.act_window_close'}
class wizard_user(osv.osv_memory):
"""
A model to configure users in the portal wizard.
"""
_name = 'portal.wizard.user'
_description = 'Portal User Config'
_columns = {
'wizard_id': fields.many2one('portal.wizard', string='Wizard', required=True, ondelete='cascade'),
'partner_id': fields.many2one('res.partner', string='Contact', required=True, readonly=True),
'email': fields.char(string='Email', size=240),
'in_portal': fields.boolean('In Portal'),
}
def get_error_messages(self, cr, uid, ids, context=None):
res_users = self.pool.get('res.users')
emails = []
error_empty = []
error_emails = []
error_user = []
ctx = dict(context or {}, active_test=False)
for wizard_user in self.browse(cr, SUPERUSER_ID, ids, context):
if wizard_user.in_portal and not self._retrieve_user(cr, SUPERUSER_ID, wizard_user, context):
email = extract_email(wizard_user.email)
if not email:
error_empty.append(wizard_user.partner_id)
elif email in emails and email not in error_emails:
error_emails.append(wizard_user.partner_id)
user = res_users.search(cr, SUPERUSER_ID, [('login', '=', email)], context=ctx)
if user:
error_user.append(wizard_user.partner_id)
emails.append(email)
error_msg = []
if error_empty:
error_msg.append("%s\n- %s" % (_("Some contacts don't have a valid email: "),
'\n- '.join(['%s' % (p.display_name,) for p in error_empty])))
if error_emails:
error_msg.append("%s\n- %s" % (_("Several contacts have the same email: "),
'\n- '.join([p.email for p in error_emails])))
if error_user:
error_msg.append("%s\n- %s" % (_("Some contacts have the same email as an existing portal user:"),
'\n- '.join(['%s <%s>' % (p.display_name, p.email) for p in error_user])))
if error_msg:
error_msg.append(_("To resolve this error, you can: \n"
"- Correct the emails of the relevant contacts\n"
"- Grant access only to contacts with unique emails"))
return error_msg
def action_apply(self, cr, uid, ids, context=None):
error_msg = self.get_error_messages(cr, uid, ids, context=context)
if error_msg:
raise osv.except_osv(_('Contacts Error'), "\n\n".join(error_msg))
for wizard_user in self.browse(cr, SUPERUSER_ID, ids, context):
portal = wizard_user.wizard_id.portal_id
user = self._retrieve_user(cr, SUPERUSER_ID, wizard_user, context)
if wizard_user.partner_id.email != wizard_user.email:
wizard_user.partner_id.write({'email': wizard_user.email})
if wizard_user.in_portal:
# create a user if necessary, and make sure it is in the portal group
if not user:
user = self._create_user(cr, SUPERUSER_ID, wizard_user, context)
if (not user.active) or (portal not in user.groups_id):
user.write({'active': True, 'groups_id': [(4, portal.id)]})
# prepare for the signup process
user.partner_id.signup_prepare()
self._send_email(cr, uid, wizard_user, context)
wizard_user.refresh()
else:
# remove the user (if it exists) from the portal group
if user and (portal in user.groups_id):
# if user belongs to portal only, deactivate it
if len(user.groups_id) <= 1:
user.write({'groups_id': [(3, portal.id)], 'active': False})
else:
user.write({'groups_id': [(3, portal.id)]})
def _retrieve_user(self, cr, uid, wizard_user, context=None):
""" retrieve the (possibly inactive) user corresponding to wizard_user.partner_id
@param wizard_user: browse record of model portal.wizard.user
@return: browse record of model res.users
"""
context = dict(context or {}, active_test=False)
res_users = self.pool.get('res.users')
domain = [('partner_id', '=', wizard_user.partner_id.id)]
user_ids = res_users.search(cr, uid, domain, context=context)
return user_ids and res_users.browse(cr, uid, user_ids[0], context=context) or False
def _create_user(self, cr, uid, wizard_user, context=None):
""" create a new user for wizard_user.partner_id
@param wizard_user: browse record of model portal.wizard.user
@return: browse record of model res.users
"""
res_users = self.pool.get('res.users')
create_context = dict(context or {}, noshortcut=True, no_reset_password=True) # to prevent shortcut creation
values = {
'email': extract_email(wizard_user.email),
'login': extract_email(wizard_user.email),
'partner_id': wizard_user.partner_id.id,
'groups_id': [(6, 0, [])],
}
user_id = res_users.create(cr, uid, values, context=create_context)
return res_users.browse(cr, uid, user_id, context)
def _send_email(self, cr, uid, wizard_user, context=None):
""" send notification email to a new portal user
@param wizard_user: browse record of model portal.wizard.user
@return: the id of the created mail.mail record
"""
res_partner = self.pool['res.partner']
this_context = context
this_user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context)
if not this_user.email:
raise osv.except_osv(_('Email Required'),
_('You must have an email address in your User Preferences to send emails.'))
# determine subject and body in the portal user's language
user = self._retrieve_user(cr, SUPERUSER_ID, wizard_user, context)
context = dict(this_context or {}, lang=user.lang)
ctx_portal_url = dict(context, signup_force_type_in_url='')
portal_url = res_partner._get_signup_url_for_action(cr, uid,
[user.partner_id.id],
context=ctx_portal_url)[user.partner_id.id]
res_partner.signup_prepare(cr, uid, [user.partner_id.id], context=context)
data = {
'company': this_user.company_id.name,
'portal': wizard_user.wizard_id.portal_id.name,
'welcome_message': wizard_user.wizard_id.welcome_message or "",
'db': cr.dbname,
'name': user.name,
'login': user.login,
'signup_url': user.signup_url,
'portal_url': portal_url,
}
mail_mail = self.pool.get('mail.mail')
mail_values = {
'email_from': this_user.email,
'email_to': user.email,
'subject': _(WELCOME_EMAIL_SUBJECT) % data,
'body_html': '<pre>%s</pre>' % (_(WELCOME_EMAIL_BODY) % data),
'state': 'outgoing',
'type': 'email',
}
mail_id = mail_mail.create(cr, uid, mail_values, context=this_context)
return mail_mail.send(cr, uid, [mail_id], context=this_context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|>
|
%(welcome_message)s
--
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from orangecontrib.recommendation.tests.coverage.base_tests \<|fim▁hole|><|fim▁end|>
|
import TestRatingModels, TestRankingModels
|
<|file_name|>Broadcasts.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016-2019 Projekt Substratum
* This file is part of Substratum.
*
* SPDX-License-Identifier: GPL-3.0-Or-Later
*/
package projekt.substratum.common;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.util.Log;
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
import projekt.substratum.Substratum;
import projekt.substratum.services.crash.AppCrashReceiver;
import projekt.substratum.services.packages.OverlayFound;
import projekt.substratum.services.packages.OverlayUpdater;
import projekt.substratum.services.packages.PackageModificationDetector;
import projekt.substratum.services.profiles.ScheduledProfileReceiver;
import projekt.substratum.services.system.InterfacerAuthorizationReceiver;
import static projekt.substratum.common.Internal.ENCRYPTION_KEY_EXTRA;
import static projekt.substratum.common.Internal.IV_ENCRYPTION_KEY_EXTRA;
import static projekt.substratum.common.Internal.MAIN_ACTIVITY_RECEIVER;
import static projekt.substratum.common.Internal.OVERLAY_REFRESH;
import static projekt.substratum.common.Internal.THEME_FRAGMENT_REFRESH;
import static projekt.substratum.common.References.ACTIVITY_FINISHER;
import static projekt.substratum.common.References.APP_CRASHED;
import static projekt.substratum.common.References.INTERFACER_PACKAGE;
import static projekt.substratum.common.References.KEY_RETRIEVAL;
import static projekt.substratum.common.References.MANAGER_REFRESH;
import static projekt.substratum.common.References.PACKAGE_ADDED;
import static projekt.substratum.common.References.PACKAGE_FULLY_REMOVED;
import static projekt.substratum.common.References.SUBSTRATUM_LOG;
import static projekt.substratum.common.References.TEMPLATE_RECEIVE_KEYS;
import static projekt.substratum.common.References.scheduledProfileReceiver;
public class Broadcasts {
/**
 * Hand the freshly negotiated encryption key pair over to the UI layer
 * via a local broadcast.
 *
 * @param context       Context
 * @param encryptionKey Encryption key
 * @param ivEncryptKey  IV encryption key
 */
private static void sendLocalizedKeyMessage(Context context,
                                            byte[] encryptionKey,
                                            byte[] ivEncryptKey) {
    Substratum.log("KeyRetrieval",
            "The system has completed the handshake for keys retrieval " +
                    "and is now passing it to the activity...");
    Intent keyDelivery = new Intent(KEY_RETRIEVAL);
    keyDelivery.putExtra(ENCRYPTION_KEY_EXTRA, encryptionKey);
    keyDelivery.putExtra(IV_ENCRYPTION_KEY_EXTRA, ivEncryptKey);
    LocalBroadcastManager.getInstance(context).sendBroadcast(keyDelivery);
}
/**
 * Close Substratum as a whole by signalling the main activity receiver.
 *
 * @param context Context
 */
public static void sendKillMessage(Context context) {
    Substratum.log("SubstratumKiller",
            "A crucial action has been conducted by the user and " +
                    "Substratum is now shutting down!");
    Intent shutdownSignal = new Intent(MAIN_ACTIVITY_RECEIVER);
    LocalBroadcastManager.getInstance(context).sendBroadcast(shutdownSignal);
}
/**
 * Notify the ThemeFragment that a theme package changed and its list
 * must be reloaded.
 *
 * @param context Context
 */
public static void sendRefreshMessage(Context context) {
    Substratum.log("ThemeFragmentRefresher",
            "A theme has been modified, sending update signal to refresh the list!");
    Intent refreshSignal = new Intent(THEME_FRAGMENT_REFRESH);
    LocalBroadcastManager.getInstance(context).sendBroadcast(refreshSignal);
}
/**
* A package was installed, refresh the Overlays tab
*
* @param context Context
*/
public static void sendOverlayRefreshMessage(Context context) {
Substratum.log("OverlayRefresher",
"A theme has been modified, sending update signal to refresh the list!");
Intent intent = new Intent(OVERLAY_REFRESH);<|fim▁hole|> * Activity finisher when a theme was updated
*
* @param context Context
* @param packageName Package of theme to close
*/
public static void sendActivityFinisherMessage(Context context,
                                               String packageName) {
    // Tell any open activity belonging to this theme package to finish itself.
    Substratum.log("ThemeInstaller",
            "A theme has been installed, sending update signal to app for further processing!");
    Intent finisher = new Intent(ACTIVITY_FINISHER);
    finisher.putExtra(Internal.THEME_PID, packageName);
    LocalBroadcastManager.getInstance(context).sendBroadcast(finisher);
}
/**
 * Prompt the ManagerFragment to rebuild its overlay listing after a
 * package was installed.
 *
 * @param context Context
 */
public static void sendRefreshManagerMessage(Context context) {
    LocalBroadcastManager.getInstance(context)
            .sendBroadcast(new Intent(MANAGER_REFRESH));
}
/**
 * Register the implicit intent broadcast receivers.
 * <p>
 * Package add/remove receivers are always registered; the crash and
 * overlay-updater receivers only on OMS systems, and the interfacer
 * authorization receiver only when the Theme Interfacer is present.
 *
 * @param context Context
 */
public static void registerBroadcastReceivers(Context context) {
    try {
        IntentFilter intentPackageAdded = new IntentFilter(PACKAGE_ADDED);
        intentPackageAdded.addDataScheme("package");
        IntentFilter intentPackageFullyRemoved = new IntentFilter(PACKAGE_FULLY_REMOVED);
        intentPackageFullyRemoved.addDataScheme("package");
        if (Systems.checkOMS(context)) {
            IntentFilter intentAppCrashed = new IntentFilter(APP_CRASHED);
            context.getApplicationContext().registerReceiver(
                    new AppCrashReceiver(), intentAppCrashed);
            context.getApplicationContext().registerReceiver(
                    new OverlayUpdater(), intentPackageAdded);
        }
        if (Systems.checkThemeInterfacer(context)) {
            IntentFilter interfacerAuthorize = new IntentFilter(
                    INTERFACER_PACKAGE + ".CALLER_AUTHORIZED");
            context.getApplicationContext().registerReceiver(
                    new InterfacerAuthorizationReceiver(), interfacerAuthorize);
        }
        context.getApplicationContext().registerReceiver(
                new OverlayFound(), intentPackageAdded);
        context.getApplicationContext().registerReceiver(
                new PackageModificationDetector(), intentPackageAdded);
        context.getApplicationContext().registerReceiver(
                new PackageModificationDetector(), intentPackageFullyRemoved);
        Substratum.log(SUBSTRATUM_LOG,
                "Successfully registered broadcast receivers for Substratum functionality!");
    } catch (Exception e) {
        // Keep the best-effort behavior, but record the cause instead of
        // silently discarding the caught exception.
        Log.e(SUBSTRATUM_LOG,
                "Failed to register broadcast receivers for Substratum functionality...", e);
    }
}
/**
 * Register the scheduled-profile receiver that listens for the screen
 * turning off.
 *
 * @param context Context
 */
public static void registerProfileScreenOffReceiver(Context context) {
    // Keep the static handle so the receiver can be unregistered later.
    scheduledProfileReceiver = new ScheduledProfileReceiver();
    IntentFilter screenOffFilter = new IntentFilter(Intent.ACTION_SCREEN_OFF);
    context.registerReceiver(scheduledProfileReceiver, screenOffFilter);
}
/**
 * Unload the profile screen off receiver.
 *
 * @param context Context
 */
public static void unregisterProfileScreenOffReceiver(Context context) {
    try {
        context.unregisterReceiver(scheduledProfileReceiver);
    } catch (Exception ignored) {
        // The receiver may never have been registered; nothing to clean up.
    }
}
/**
 * Start the key retrieval receiver to obtain the key from the theme.
 *
 * @param context Context
 */
public static void startKeyRetrievalReceiver(Context context) {
    try {
        IntentFilter intentGetKeys = new IntentFilter(TEMPLATE_RECEIVE_KEYS);
        context.getApplicationContext().registerReceiver(
                new KeyRetriever(), intentGetKeys);
        Substratum.log(SUBSTRATUM_LOG, "Successfully registered key retrieval receiver!");
    } catch (Exception e) {
        // Log the failure together with its cause rather than dropping it.
        Log.e(SUBSTRATUM_LOG, "Failed to register key retrieval receiver...", e);
    }
}
/**
 * Receives the key handshake broadcast from a theme and relays the
 * encryption material to the UI through a local broadcast.
 */
public static class KeyRetriever extends BroadcastReceiver {
    @Override
    public void onReceive(Context context,
                          Intent intent) {
        byte[] encryptionKey = intent.getByteArrayExtra(ENCRYPTION_KEY_EXTRA);
        byte[] ivEncryptKey = intent.getByteArrayExtra(IV_ENCRYPTION_KEY_EXTRA);
        sendLocalizedKeyMessage(context, encryptionKey, ivEncryptKey);
    }
}
}<|fim▁end|>
|
LocalBroadcastManager.getInstance(context).sendBroadcast(intent);
}
/**
|
<|file_name|>cube_to_png.py<|end_file_name|><|fim▁begin|>#!bpy
"""
to run:
(aAtually I have not been able to run this from command line - the thing
exits without rendering. If I add bpy.ops.render, it alwys renders layers, rather
then compositing output)
blender -b --python this_fnm.py
"""
import bpy
from math import radians
import fnmatch
import os
###################################
def delete_old_stuff():
    """Reset the scene: remove meshes, lamps, fonts, extra render layers,
    materials and textures so the script can be re-run cleanly."""
    # escape edit mode (mode_set fails when no object is active, hence poll())
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    # delete all mesh objects
    bpy.ops.object.select_by_type(type='MESH')
    bpy.ops.object.delete()
    # delete all lamps
    bpy.ops.object.select_by_type(type='LAMP')
    bpy.ops.object.delete()
    # delete all font objects
    bpy.ops.object.select_by_type(type='FONT')
    bpy.ops.object.delete()
    # delete all render layers but one.  The old fixed range(1, len(...))
    # loop kept indexing while the collection shrank, skipping layers and
    # running past the end; repeatedly removing the second layer is safe.
    render_layers = bpy.context.scene.render.layers
    while len(render_layers) > 1:
        render_layers.active_index = 1
        render_layers.remove(render_layers.active)
    # delete all materials (.values() snapshots the collection first)
    for material in bpy.data.materials.values():
        bpy.data.materials.remove(material)
    # delete all textures
    for texture in bpy.data.textures.values():
        bpy.data.textures.remove(texture)
#####################################################################
def makeGlossyTextured (object, image_loaded, material_name):
    """Build and return a node-based material that mixes an image-textured
    diffuse shader with a sharp white glossy shader."""
    mat = bpy.data.materials.new(material_name)
    # enabling nodes auto-creates linked Diffuse BSDF and Material Output nodes
    mat.use_nodes = True
    node_tree = mat.node_tree
    nodes, links = node_tree.nodes, node_tree.links
    # UV map node driving the texture lookup
    uv_map_node = nodes.new('ShaderNodeUVMap')
    uv_map_node.uv_map = object.data.uv_textures.active.name
    # image texture node
    tex_node = nodes.new(type='ShaderNodeTexImage')
    tex_node.image = image_loaded
    links.new(uv_map_node.outputs['UV'], tex_node.inputs['Vector'])
    # pipe the texture colour into the pre-made Diffuse BSDF node
    diffuse = nodes.get("Diffuse BSDF")
    links.new(tex_node.outputs[0], diffuse.inputs[0])
    # white, perfectly sharp glossy shader
    glossy = nodes.new(type='ShaderNodeBsdfGlossy')
    glossy.inputs["Color"].default_value = [1.0, 1.0, 1.0, 1.0]
    glossy.inputs["Roughness"].default_value = 0.0
    # blend diffuse and glossy (inputs[0] of the mix node is the factor)
    mix = nodes.new(type='ShaderNodeMixShader')
    links.new(diffuse.outputs[0], mix.inputs[1])
    links.new(glossy.outputs[0], mix.inputs[2])
    # route the mix result into the Material Output node
    links.new(mix.outputs[0], nodes.get("Material Output").inputs[0])
    return mat
#####################################################################
def makeEmission (material_name):
    """Return a node-based material that emits a strong blueish light."""
    mat = bpy.data.materials.new(material_name)
    # enabling nodes auto-creates linked Diffuse BSDF and Material Output nodes
    mat.use_nodes = True
    nodes = mat.node_tree.nodes
    links = mat.node_tree.links
    emission = nodes.new(type='ShaderNodeEmission')
    emission.inputs["Color"].default_value = [0.335, 0.583, 0.8, 1.0]
    emission.inputs["Strength"].default_value = 20.0
    # wire the emission shader straight into the Material Output node
    links.new(emission.outputs[0], nodes.get("Material Output").inputs[0])
    return mat
#####################################################################
def set_camera(scene):
    """Aim the scene camera: slight downward tilt, fixed position."""
    cam = scene.camera
    # rotation expressed as XYZ euler angles
    cam.rotation_mode = 'XYZ'
    cam.rotation_euler[0] = radians(12)
    cam.rotation_euler[1] = 0.0
    cam.rotation_euler[2] = 0.0
    # camera position
    cam.location.x = 0.34
    cam.location.y = -1.2
    cam.location.z = 6.7
#####################################################################
def set_lights(scene):
    """Light the scene: ambient occlusion, a blue sun lamp and a large
    emission plane, both with shadow casting disabled."""
    # ambient occlusion
    scene.world.light_settings.use_ambient_occlusion = True
    scene.world.light_settings.distance = 2
    # spotlight - sun
    bpy.ops.object.lamp_add(type='SUN', location=(-2.0, 0.32, 6.5), rotation=(radians(-21), radians(-5), radians(69)))
    sun = bpy.context.object
    # the lamp object exposes no strength/colour directly; in Cycles these
    # live on the lamp's Emission shader node instead
    sun.data.node_tree.nodes['Emission'].inputs['Strength'].default_value = 5.0
    sun.data.node_tree.nodes['Emission'].inputs['Color'].default_value = (0.43, 0.78, 1.0, 1.0)
    sun.cycles_visibility.shadow = False
    # light emission plane
    bpy.ops.mesh.primitive_plane_add(location=(-10.0, 3.5, 12.0),
                                     rotation=(radians(-104), radians(-98), radians(80)))
    emission_plane = bpy.context.object
    emission_plane.scale = (3.3, -5.5, -28.3)
    emission_plane.name = "emission plane"
    emission_plane.data.materials.append(makeEmission("emission mat"))
    emission_plane.cycles_visibility.shadow = False
#####################################################################
def create_object():
bpy.ops.mesh.primitive_cube_add(location=(0.0, 0.0, 0.7), enter_editmode=True, layers= [l==0 for l in range(20)])
bpy.ops.mesh.subdivide(number_cuts=4)
obj = bpy.context.object
obj.name = "cube"
obj.location.z += obj.dimensions.z/4
obj.modifiers.new("cube_subsurf", "SUBSURF")
obj.modifiers["cube_subsurf"].subdivision_type = 'CATMULL_CLARK'
obj.modifiers["cube_subsurf"].render_levels = 4
<|fim▁hole|> p.use_smooth = True
# texture layer: Smart projection
bpy.ops.mesh.uv_texture_add()
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.uv.cube_project(cube_size=0.1*obj.dimensions.x)
bpy.ops.object.mode_set(mode='OBJECT')
return obj
#####################################################################
def compositing(scene, outdir, outfile_base_name):
    """Set up render layers and a compositor graph that layers the blurred
    floor shadow under the transparent main render, writing the result to
    ``outdir`` with the given file-name stem.

    Graph: shadow layer -> (alpha - shadow) -> set-alpha -> blur
           -> alpha-over main layer -> file output.
    """
    # split the render into a "main" layer (scene layer 0) and a
    # "shadow" layer (scene layer 1, the floor) with the shadow pass on
    render_layers = scene.render.layers
    render_layers.active.name = "main"
    render_layers.new("shadow")
    render_layers["main"].layers = [l==0 for l in range(20)]
    render_layers["shadow"].layers = [l==1 for l in range(20)]
    render_layers["shadow"].use_pass_shadow = True
    scene.layers[0] = True
    scene.layers[1] = True
    # transparent background so only the shadow/object show up
    scene.cycles.film_transparent = True
    # switch on compositor nodes and get a reference to the tree
    scene.use_nodes = True
    tree = scene.node_tree
    # the default nodes are Composite and RenderLayers; remove them so the
    # script can be run repeatedly.  Snapshot with list() — the old code
    # removed nodes while iterating the live collection, which can skip
    # entries as the collection shrinks.
    for node in list(tree.nodes):
        tree.nodes.remove(node)
    links = tree.links
    main_layer_node = tree.nodes.new('CompositorNodeRLayers')
    main_layer_node.layer = "main"
    main_layer_node.location = 200, -100
    shadow_layer_node = tree.nodes.new('CompositorNodeRLayers')
    shadow_layer_node.layer = "shadow"
    shadow_layer_node.location = -400, 100
    # note here: MixRGB, not Math; inputs[0] is the 'Fac' socket
    subtract_node = tree.nodes.new('CompositorNodeMixRGB')
    subtract_node.blend_type = "SUBTRACT" # the default is add
    subtract_node.location = -200, 200
    links.new(shadow_layer_node.outputs['Alpha'], subtract_node.inputs[1])
    links.new(shadow_layer_node.outputs['Shadow'], subtract_node.inputs[2])
    set_alpha_node = tree.nodes.new('CompositorNodeSetAlpha')
    set_alpha_node.location = 0, 200
    links.new(subtract_node.outputs['Image'], set_alpha_node.inputs['Alpha'])
    # soften the shadow edge
    blur_node = tree.nodes.new('CompositorNodeBlur')
    blur_node.filter_type = 'FAST_GAUSS'
    blur_node.size_x = 5
    blur_node.size_y = 5
    blur_node.location = 200, 200
    links.new(set_alpha_node.outputs['Image'], blur_node.inputs['Image'])
    # composite the main render over the blurred shadow
    alpha_over_node = tree.nodes.new('CompositorNodeAlphaOver')
    alpha_over_node.location = 400, 0
    # inputs[0] here is 'Fac'
    links.new(blur_node.outputs['Image'], alpha_over_node.inputs[1])
    links.new(main_layer_node.outputs['Image'], alpha_over_node.inputs[2])
    # file output node (a CompositorNodeComposite would render to screen)
    out_node = tree.nodes.new('CompositorNodeOutputFile')
    out_node.base_path = outdir
    out_node.file_slots[0].path = outfile_base_name
    out_node.location = 600,0
    links.new(alpha_over_node.outputs['Image'], out_node.inputs['Image'])
    # optional debugging aid:
    # viewer_node = tree.nodes.new('CompositorNodeViewer')
    # viewer_node.location = 600, 200
    # links.new(alpha_over_node.outputs['Image'], viewer_node.inputs['Image'])
###################################
if __name__ == '__main__':
    # Rebuild the whole scene from scratch so repeated runs stay clean.
    delete_old_stuff()
    scene = bpy.context.scene
    scene.render.engine = "CYCLES"
    scene.unit_settings.system = 'METRIC'
    image_loaded = bpy.data.images.load(os.path.abspath('Bck_v1.png'))
    set_camera(scene)
    set_lights(scene)
    # floor - that's where the shadows are cast; it lives on the second
    # scene layer so compositing() can render it separately
    bpy.ops.mesh.primitive_plane_add(location=(0.0, 0.0, 0.0), layers=[l == 1 for l in range(20)])
    floor = bpy.context.object
    floor.scale = (4, 4, 4)
    floor.name = "floor"
    floor.data.materials.append(bpy.data.materials.new('Plain Diffuse'))
    # cube (renamed from 'object', which shadowed the builtin)
    cube = create_object()
    # image as a texture on the surface + gloss, bound to the cube
    material = makeGlossyTextured(cube, image_loaded, 'Img Txtr Material')
    cube.data.materials.append(material)
    # render size follows the cube's footprint aspect ratio
    scene.render.resolution_y = 512
    scene.render.resolution_x = int(scene.render.resolution_y * (cube.dimensions.x / cube.dimensions.y))
    # compositing puts the blurred shadow under the transparent render
    compositing(scene, os.getcwd(), "test")
    # NOTE(review): reportedly does not work when run non-interactively
    bpy.ops.render.render(use_viewport=True)
|
mesh = obj.data
bpy.ops.object.editmode_toggle()
# show mesh as smooth
for p in mesh.polygons:
|
<|file_name|>bitcoin_ur_PK.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ur_PK" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About ShadowCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>ShadowCoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The BlackCoin developers
Copyright © 2014 The ShadowCoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>ایڈریس یا لیبل میں ترمیم کرنے پر ڈبل کلک کریں</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>نیا ایڈریس بنائیں</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your ShadowCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a ShadowCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified ShadowCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>چٹ</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation> پتہ</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>چٹ کے بغیر</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>پاس فریز داخل کریں</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>نیا پاس فریز</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>نیا پاس فریز دہرائیں</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>بٹوا ان لاک</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>خفیہ کشائی کر یںبٹوے کے</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>پاس فریز تبدیل کریں</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-58"/>
<source>ShadowCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+280"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+242"/>
<source>Synchronizing with network...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-308"/>
<source>&Overview</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Show information about ShadowCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+250"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-247"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Send coins to a ShadowCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Modify configuration options for ShadowCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-200"/>
<source>ShadowCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+178"/>
<source>&About ShadowCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>&File</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>ShadowCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+70"/>
<source>%n active connection(s) to ShadowCoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-284"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+288"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid ShadowCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. ShadowCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>رقم</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
        <translation>پتہ</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>چٹ کے بغیر</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid ShadowCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>ShadowCoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start ShadowCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start ShadowCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the ShadowCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the ShadowCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting ShadowCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show ShadowCoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting ShadowCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the ShadowCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the ShadowCoin-Qt help message to get a list with possible ShadowCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>ShadowCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>ShadowCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the ShadowCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the ShadowCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 SDC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>بیلنس:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 SDC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a ShadowCoin address (e.g. SXywGBZBowrppUwwNUo1GCRDTibzJi7g2M)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid ShadowCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>چٹ کے بغیر</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. SXywGBZBowrppUwwNUo1GCRDTibzJi7g2M)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a ShadowCoin address (e.g. SXywGBZBowrppUwwNUo1GCRDTibzJi7g2M)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. SXywGBZBowrppUwwNUo1GCRDTibzJi7g2M)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this ShadowCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. SXywGBZBowrppUwwNUo1GCRDTibzJi7g2M)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified ShadowCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a ShadowCoin address (e.g. SXywGBZBowrppUwwNUo1GCRDTibzJi7g2M)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter ShadowCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>رقم</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>ٹائپ</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation> پتہ</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>رقم</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>کو بھیجا</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(N / A)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
        <translation>تمام</translation>
    </message>
    <message>
        <location line="-15"/>
<source>Today</source>
<translation>آج</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>اس ہفتے</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>اس مہینے</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>پچھلے مہینے</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>اس سال</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
        <translation>حد...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>کو بھیجا</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>ٹائپ</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>چٹ</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation> پتہ</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>رقم</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>ShadowCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or shadowcoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: shadowcoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: shadowcoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 32112 or testnet: 22112)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 51736 or testnet: 51996)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong ShadowCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=shadowcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "ShadowCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>یہ مدد کا پیغام</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. ShadowCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>ShadowCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of ShadowCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart ShadowCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>غلط رقم</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>ناکافی فنڈز</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. ShadowCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>نقص</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
|
</message>
<message>
|
<|file_name|>mininode.py<|end_file_name|><|fim▁begin|># mininode.py - Sscoin P2P network half-a-node
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# This python code was modified from ArtForz' public domain half-a-node, as
# found in the mini-node branch of http://github.com/jgarzik/pynode.
#
# NodeConn: an object which manages p2p connectivity to a sscoin node
# NodeConnCB: a base class that describes the interface for receiving
# callbacks with network messages from a NodeConn
# CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....:
# data structures that should map to corresponding structures in
# sscoin/primitives
# msg_block, msg_tx, msg_headers, etc.:
# data structures that represent network messages
# ser_*, deser_*: functions that handle serialization/deserialization
import struct
import socket
import asyncore
import time
import sys
import random
from binascii import hexlify, unhexlify
from io import BytesIO
from codecs import encode
import hashlib
from threading import RLock
from threading import Thread
import logging
import copy
import sscoin_hash
BIP0031_VERSION = 60000
MY_VERSION = 70206  # current MIN_PEER_PROTO_VERSION
MY_SUBVERSION = b"/python-mininode-tester:0.0.2/"
MAX_INV_SZ = 50000
MAX_BLOCK_SIZE = 1000000
# Smallest currency unit: 1 coin expressed in satoshis.  Plain int literal
# instead of the Python-2-only `100000000L` long literal (a SyntaxError under
# Python 3, and equivalent in Python 2 where ints auto-promote).
COIN = 100000000
# Keep our own socket map for asyncore, so that we can track disconnects
# ourselves (to workaround an issue with closing an asyncore socket when
# using select)
mininode_socket_map = dict()
# One lock for synchronizing all data access between the networking thread (see
# NetworkThread below) and the thread running the test logic. For simplicity,
# NodeConn acquires this lock whenever delivering a message to a NodeConnCB,
# and whenever adding anything to the send buffer (in send_message()). This
# lock should be acquired in the thread running the test logic to synchronize
# access to any data shared with the NodeConnCB or NodeConn.
mininode_lock = RLock()
# Serialization/deserialization tools
def sha256(s):
    """Return the single SHA-256 digest of byte string *s*."""
    return hashlib.sha256(s).digest()

def hash256(s):
    """Return the Bitcoin-style double SHA-256 digest of *s*."""
    return sha256(sha256(s))

def sscoinhash(s):
    """Return the Sscoin proof-of-work hash of *s* (native extension)."""
    return sscoin_hash.getPoWHash(s)
def deser_string(f):
    """Read a CompactSize-prefixed byte string from file-like object *f*.

    The first byte holds the length directly when < 253; otherwise it
    selects a wider little-endian length field (253 -> 2 bytes,
    254 -> 4 bytes, 255 -> 8 bytes).
    """
    n = struct.unpack("<B", f.read(1))[0]
    wide = {253: ("<H", 2), 254: ("<I", 4), 255: ("<Q", 8)}
    if n in wide:
        fmt, width = wide[n]
        n = struct.unpack(fmt, f.read(width))[0]
    return f.read(n)
def ser_string(s):
    """Serialize byte string *s* with a CompactSize length prefix.

    Lengths < 253 are one byte; larger lengths use a marker byte
    (253/254/255) followed by a 2/4/8-byte little-endian length.
    The plain 0x100000000 literal replaces the Python-2-only
    0x100000000L, which is a SyntaxError under Python 3.
    """
    n = len(s)
    if n < 253:
        return struct.pack("B", n) + s
    elif n < 0x10000:
        return struct.pack("<BH", 253, n) + s
    elif n < 0x100000000:
        return struct.pack("<BI", 254, n) + s
    return struct.pack("<BQ", 255, n) + s
def deser_uint256(f):
    """Read a 256-bit integer stored as eight little-endian 32-bit words.

    `range`/plain int replace the Python-2-only `xrange`/`0L`, keeping the
    code valid (and behaviorally identical) under both Python 2 and 3.
    """
    r = 0
    for i in range(8):
        t = struct.unpack("<I", f.read(4))[0]
        r += t << (i * 32)
    return r
def ser_uint256(u):
    """Serialize integer *u* as eight little-endian 32-bit words (32 bytes).

    `range`/0xFFFFFFFF replace the Python-2-only `xrange`/0xFFFFFFFFL
    (the latter is a SyntaxError under Python 3).
    """
    rs = b""
    for i in range(8):
        rs += struct.pack("<I", u & 0xFFFFFFFF)
        u >>= 32
    return rs
def uint256_from_str(s):
    """Convert the first 32 bytes of *s* (little-endian 32-bit words) to int.

    `range`/plain int replace the Python-2-only `xrange`/`0L`.
    """
    r = 0
    t = struct.unpack("<IIIIIIII", s[:32])
    for i in range(8):
        r += t[i] << (i * 32)
    return r
def uint256_from_compact(c):
    """Expand a Bitcoin-style 'compact' (nBits) value to a full integer.

    The top byte is a base-256 exponent; the low three bytes are the
    mantissa.  0xFFFFFF replaces the Python-2-only 0xFFFFFFL literal
    (a SyntaxError under Python 3).
    """
    nbytes = (c >> 24) & 0xFF
    v = (c & 0xFFFFFF) << (8 * (nbytes - 3))
    return v
def deser_vector(f, c):
    """Read a CompactSize-prefixed vector of objects from *f*.

    *c* is a zero-argument factory; each element is created with c() and
    filled via its deserialize(f) method.  `range` replaces the
    Python-2-only `xrange`.
    """
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in range(nit):
        t = c()
        t.deserialize(f)
        r.append(t)
    return r
def ser_vector(l):
    """Serialize list *l* as a CompactSize count followed by each element's
    serialize() output.

    0x100000000 replaces the Python-2-only long literal (SyntaxError
    under Python 3).
    """
    n = len(l)
    if n < 253:
        r = struct.pack("B", n)
    elif n < 0x10000:
        r = struct.pack("<BH", 253, n)
    elif n < 0x100000000:
        r = struct.pack("<BI", 254, n)
    else:
        r = struct.pack("<BQ", 255, n)
    for i in l:
        r += i.serialize()
    return r
def deser_uint256_vector(f):
    """Read a CompactSize-prefixed vector of 256-bit integers from *f*.

    `range` replaces the Python-2-only `xrange`.
    """
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in range(nit):
        t = deser_uint256(f)
        r.append(t)
    return r
def ser_uint256_vector(l):
    """Serialize list *l* of 256-bit integers with a CompactSize prefix.

    0x100000000 replaces the Python-2-only long literal (SyntaxError
    under Python 3).
    """
    n = len(l)
    if n < 253:
        r = struct.pack("B", n)
    elif n < 0x10000:
        r = struct.pack("<BH", 253, n)
    elif n < 0x100000000:
        r = struct.pack("<BI", 254, n)
    else:
        r = struct.pack("<BQ", 255, n)
    for i in l:
        r += ser_uint256(i)
    return r
def deser_string_vector(f):
    """Read a CompactSize-prefixed vector of byte strings from *f*.

    `range` replaces the Python-2-only `xrange`.
    """
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in range(nit):
        t = deser_string(f)
        r.append(t)
    return r
def ser_string_vector(l):
    """Serialize list *l* of byte strings with a CompactSize prefix.

    0x100000000 replaces the Python-2-only long literal (SyntaxError
    under Python 3).
    """
    n = len(l)
    if n < 253:
        r = struct.pack("B", n)
    elif n < 0x10000:
        r = struct.pack("<BH", 253, n)
    elif n < 0x100000000:
        r = struct.pack("<BI", 254, n)
    else:
        r = struct.pack("<BQ", 255, n)
    for sv in l:
        r += ser_string(sv)
    return r
def deser_int_vector(f):
    """Read a CompactSize-prefixed vector of signed 32-bit ints from *f*.

    `range` replaces the Python-2-only `xrange`.
    """
    nit = struct.unpack("<B", f.read(1))[0]
    if nit == 253:
        nit = struct.unpack("<H", f.read(2))[0]
    elif nit == 254:
        nit = struct.unpack("<I", f.read(4))[0]
    elif nit == 255:
        nit = struct.unpack("<Q", f.read(8))[0]
    r = []
    for i in range(nit):
        t = struct.unpack("<i", f.read(4))[0]
        r.append(t)
    return r
def ser_int_vector(l):
    """Serialize list *l* of signed 32-bit ints with a CompactSize prefix.

    0x100000000 replaces the Python-2-only long literal (SyntaxError
    under Python 3).
    """
    n = len(l)
    if n < 253:
        r = struct.pack("B", n)
    elif n < 0x10000:
        r = struct.pack("<BH", 253, n)
    elif n < 0x100000000:
        r = struct.pack("<BI", 254, n)
    else:
        r = struct.pack("<BQ", 255, n)
    for i in l:
        r += struct.pack("<i", i)
    return r
# Deserialize from a hex string representation (eg from RPC)
def FromHex(obj, hex_string):
    """Fill *obj* from an ASCII hex string via obj.deserialize(); return *obj*."""
    raw = unhexlify(hex_string.encode('ascii'))
    obj.deserialize(BytesIO(raw))
    return obj
# Convert a binary-serializable object to hex (eg for submission via RPC)
def ToHex(obj):
    """Return obj.serialize() as an ASCII hex string."""
    encoded = hexlify(obj.serialize())
    return encoded.decode('ascii')
# Objects that map to sscoind objects, which can be serialized/deserialized
class CAddress(object):
    """A network address record: service bits + IPv4-mapped address + port."""
    def __init__(self):
        self.nServices = 1
        # 12-byte prefix that maps an IPv4 address into IPv6 space.
        self.pchReserved = b"\x00" * 10 + b"\xff" * 2
        self.ip = "0.0.0.0"
        self.port = 0
    def deserialize(self, f):
        """Read services (8 bytes LE), reserved prefix, IPv4, big-endian port."""
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.pchReserved = f.read(12)
        self.ip = socket.inet_ntoa(f.read(4))
        self.port = struct.unpack(">H", f.read(2))[0]
    def serialize(self):
        # Reconstructed: the original serializer was truncated in this copy;
        # this mirrors deserialize() field-for-field.
        r = b""
        r += struct.pack("<Q", self.nServices)
        r += self.pchReserved
        r += socket.inet_aton(self.ip)
        r += struct.pack(">H", self.port)
        return r
    def __repr__(self):
        return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices,
                                                         self.ip, self.port)
class CInv(object):
    """Inventory entry: a (type, hash) pair used in inv/getdata messages."""
    typemap = {
        0: "Error",
        1: "TX",
        2: "Block"}
    def __init__(self, t=0, h=0):
        # Plain 0 default instead of the Python-2-only 0L long literal
        # (a SyntaxError under Python 3, equivalent in Python 2).
        self.type = t
        self.hash = h
    def deserialize(self, f):
        self.type = struct.unpack("<i", f.read(4))[0]
        self.hash = deser_uint256(f)
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.type)
        r += ser_uint256(self.hash)
        return r
    def __repr__(self):
        return "CInv(type=%s hash=%064x)" \
            % (self.typemap[self.type], self.hash)
class CBlockLocator(object):
    """Block locator: protocol version plus a list of known block hashes."""
    def __init__(self):
        self.nVersion = MY_VERSION
        self.vHave = []
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vHave = deser_uint256_vector(f)
    def serialize(self):
        parts = [struct.pack("<i", self.nVersion),
                 ser_uint256_vector(self.vHave)]
        return b"".join(parts)
    def __repr__(self):
        return "CBlockLocator(nVersion=%i vHave=%s)" \
            % (self.nVersion, repr(self.vHave))
class COutPoint(object):
    """Reference to a transaction output: (txid hash, output index n)."""
    def __init__(self, hash=0, n=0):
        self.hash = hash
        self.n = n
    def deserialize(self, f):
        self.hash = deser_uint256(f)
        self.n = struct.unpack("<I", f.read(4))[0]
    def serialize(self):
        return ser_uint256(self.hash) + struct.pack("<I", self.n)
    def __repr__(self):
        return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n)
class CTxIn(object):
    """Transaction input: previous outpoint, scriptSig, sequence number."""
    def __init__(self, outpoint=None, scriptSig=b"", nSequence=0):
        self.prevout = COutPoint() if outpoint is None else outpoint
        self.scriptSig = scriptSig
        self.nSequence = nSequence
    def deserialize(self, f):
        self.prevout = COutPoint()
        self.prevout.deserialize(f)
        self.scriptSig = deser_string(f)
        self.nSequence = struct.unpack("<I", f.read(4))[0]
    def serialize(self):
        parts = [self.prevout.serialize(),
                 ser_string(self.scriptSig),
                 struct.pack("<I", self.nSequence)]
        return b"".join(parts)
    def __repr__(self):
        return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \
            % (repr(self.prevout), hexlify(self.scriptSig),
               self.nSequence)
class CTxOut(object):
    """Transaction output: value in satoshis plus a scriptPubKey."""
    def __init__(self, nValue=0, scriptPubKey=b""):
        self.nValue = nValue
        self.scriptPubKey = scriptPubKey
    def deserialize(self, f):
        self.nValue = struct.unpack("<q", f.read(8))[0]
        self.scriptPubKey = deser_string(f)
    def serialize(self):
        return struct.pack("<q", self.nValue) + ser_string(self.scriptPubKey)
    def __repr__(self):
        return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \
            % (self.nValue // COIN, self.nValue % COIN,
               hexlify(self.scriptPubKey))
class CTransaction(object):
    """A transaction: version, inputs, outputs, lock time.

    `sha256` (int) and `hash` (hex string) are lazy caches of the
    double-SHA256 txid; call rehash() after mutating the transaction.
    """
    def __init__(self, tx=None):
        if tx is None:
            self.nVersion = 1
            self.vin = []
            self.vout = []
            self.nLockTime = 0
            self.sha256 = None
            self.hash = None
        else:
            # Copy constructor: deep-copy mutable members, drop cached hashes.
            self.nVersion = tx.nVersion
            self.vin = copy.deepcopy(tx.vin)
            self.vout = copy.deepcopy(tx.vout)
            self.nLockTime = tx.nLockTime
            self.sha256 = None
            self.hash = None
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.vin = deser_vector(f, CTxIn)
        self.vout = deser_vector(f, CTxOut)
        self.nLockTime = struct.unpack("<I", f.read(4))[0]
        self.sha256 = None
        self.hash = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_vector(self.vin)
        r += ser_vector(self.vout)
        r += struct.pack("<I", self.nLockTime)
        return r
    def rehash(self):
        """Invalidate and recompute the cached txid."""
        self.sha256 = None
        self.calc_sha256()
    def calc_sha256(self):
        """Populate self.sha256 / self.hash if unset.

        Serializes and hashes once (the original recomputed both for the
        int and for the hex form).
        """
        if self.sha256 is None:
            h = hash256(self.serialize())
            self.sha256 = uint256_from_str(h)
            self.hash = encode(h[::-1], 'hex_codec').decode('ascii')
    def is_valid(self):
        """Cheap sanity check: every output value within [0, 21M coins]."""
        self.calc_sha256()
        for tout in self.vout:
            if tout.nValue < 0 or tout.nValue > 21000000 * COIN:
                return False
        return True
    def __repr__(self):
        return "CTransaction(nVersion=%i vin=%s vout=%s nLockTime=%i)" \
            % (self.nVersion, repr(self.vin), repr(self.vout), self.nLockTime)
class CBlockHeader(object):
    """An 80-byte block header with lazy proof-of-work hash caching."""
    def __init__(self, header=None):
        if header is None:
            self.set_null()
        else:
            # Copy constructor; also pre-computes the PoW hash.
            self.nVersion = header.nVersion
            self.hashPrevBlock = header.hashPrevBlock
            self.hashMerkleRoot = header.hashMerkleRoot
            self.nTime = header.nTime
            self.nBits = header.nBits
            self.nNonce = header.nNonce
            self.sha256 = header.sha256
            self.hash = header.hash
            self.calc_sha256()
    def set_null(self):
        """Reset all fields to the null-header state."""
        self.nVersion = 1
        self.hashPrevBlock = 0
        self.hashMerkleRoot = 0
        self.nTime = 0
        self.nBits = 0
        self.nNonce = 0
        self.sha256 = None
        self.hash = None
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.hashPrevBlock = deser_uint256(f)
        self.hashMerkleRoot = deser_uint256(f)
        self.nTime = struct.unpack("<I", f.read(4))[0]
        self.nBits = struct.unpack("<I", f.read(4))[0]
        self.nNonce = struct.unpack("<I", f.read(4))[0]
        self.sha256 = None
        self.hash = None
    def serialize(self):
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += ser_uint256(self.hashPrevBlock)
        r += ser_uint256(self.hashMerkleRoot)
        r += struct.pack("<I", self.nTime)
        r += struct.pack("<I", self.nBits)
        r += struct.pack("<I", self.nNonce)
        return r
    def calc_sha256(self):
        """Populate self.sha256 / self.hash from the header's PoW hash.

        Packs the fields inline (not via self.serialize()) on purpose:
        CBlock overrides serialize() to append transactions, but the PoW
        hash always covers the 80-byte header only.
        """
        if self.sha256 is None:
            r = b""
            r += struct.pack("<i", self.nVersion)
            r += ser_uint256(self.hashPrevBlock)
            r += ser_uint256(self.hashMerkleRoot)
            r += struct.pack("<I", self.nTime)
            r += struct.pack("<I", self.nBits)
            r += struct.pack("<I", self.nNonce)
            # Hash once and reuse (the original called sscoinhash twice).
            powhash = sscoinhash(r)
            self.sha256 = uint256_from_str(powhash)
            self.hash = encode(powhash[::-1], 'hex_codec').decode('ascii')
    def rehash(self):
        """Invalidate the cache, recompute, and return the new sha256."""
        self.sha256 = None
        self.calc_sha256()
        return self.sha256
    def __repr__(self):
        return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce)
class CBlock(CBlockHeader):
    """A full block: header plus transaction list."""
    def __init__(self, header=None):
        super(CBlock, self).__init__(header)
        self.vtx = []
    def deserialize(self, f):
        super(CBlock, self).deserialize(f)
        self.vtx = deser_vector(f, CTransaction)
    def serialize(self):
        r = b""
        r += super(CBlock, self).serialize()
        r += ser_vector(self.vtx)
        return r
    def calc_merkle_root(self):
        """Compute the merkle root over the txids of self.vtx.

        When a level has an odd count the last hash is paired with
        itself.  `range` replaces the Python-2-only `xrange`.
        """
        hashes = []
        for tx in self.vtx:
            tx.calc_sha256()
            hashes.append(ser_uint256(tx.sha256))
        while len(hashes) > 1:
            newhashes = []
            for i in range(0, len(hashes), 2):
                i2 = min(i + 1, len(hashes) - 1)
                newhashes.append(hash256(hashes[i] + hashes[i2]))
            hashes = newhashes
        return uint256_from_str(hashes[0])
    def is_valid(self):
        """Check PoW target, every transaction, and the merkle root."""
        self.calc_sha256()
        target = uint256_from_compact(self.nBits)
        if self.sha256 > target:
            return False
        for tx in self.vtx:
            if not tx.is_valid():
                return False
        if self.calc_merkle_root() != self.hashMerkleRoot:
            return False
        return True
    def solve(self):
        """Grind nNonce until the PoW hash meets the nBits target."""
        self.rehash()
        target = uint256_from_compact(self.nBits)
        while self.sha256 > target:
            self.nNonce += 1
            self.rehash()
    def __repr__(self):
        return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \
            % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot,
               time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx))
class CUnsignedAlert(object):
    """Payload of a network 'alert' message, before signing.

    The attribute order in deserialize()/serialize() defines the wire
    format; the two methods must remain exact mirrors of each other.
    """
    def __init__(self):
        self.nVersion = 1
        self.nRelayUntil = 0
        self.nExpiration = 0
        self.nID = 0
        self.nCancel = 0
        self.setCancel = []
        self.nMinVer = 0
        self.nMaxVer = 0
        self.setSubVer = []
        self.nPriority = 0
        self.strComment = b""
        self.strStatusBar = b""
        self.strReserved = b""
    def deserialize(self, f):
        # Reads every field in wire order; keep in sync with serialize().
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        self.nRelayUntil = struct.unpack("<q", f.read(8))[0]
        self.nExpiration = struct.unpack("<q", f.read(8))[0]
        self.nID = struct.unpack("<i", f.read(4))[0]
        self.nCancel = struct.unpack("<i", f.read(4))[0]
        self.setCancel = deser_int_vector(f)
        self.nMinVer = struct.unpack("<i", f.read(4))[0]
        self.nMaxVer = struct.unpack("<i", f.read(4))[0]
        self.setSubVer = deser_string_vector(f)
        self.nPriority = struct.unpack("<i", f.read(4))[0]
        self.strComment = deser_string(f)
        self.strStatusBar = deser_string(f)
        self.strReserved = deser_string(f)
    def serialize(self):
        # Writes every field in the same order deserialize() reads them.
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<q", self.nRelayUntil)
        r += struct.pack("<q", self.nExpiration)
        r += struct.pack("<i", self.nID)
        r += struct.pack("<i", self.nCancel)
        r += ser_int_vector(self.setCancel)
        r += struct.pack("<i", self.nMinVer)
        r += struct.pack("<i", self.nMaxVer)
        r += ser_string_vector(self.setSubVer)
        r += struct.pack("<i", self.nPriority)
        r += ser_string(self.strComment)
        r += ser_string(self.strStatusBar)
        r += ser_string(self.strReserved)
        return r
    def __repr__(self):
        return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \
            % (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID,
               self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority,
               self.strComment, self.strStatusBar, self.strReserved)
class CAlert(object):
    """Signed alert envelope: serialized alert payload plus its signature."""
    def __init__(self):
        self.vchMsg = b""
        self.vchSig = b""
    def deserialize(self, f):
        self.vchMsg = deser_string(f)
        self.vchSig = deser_string(f)
    def serialize(self):
        return ser_string(self.vchMsg) + ser_string(self.vchSig)
    def __repr__(self):
        return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \
            % (len(self.vchMsg), len(self.vchSig))
# Objects that correspond to messages on the wire
class msg_version(object):
    """'version' handshake message.

    deserialize() branches on the peer's protocol version: addrFrom,
    nNonce and strSubVer exist only for versions >= 106, and
    nStartingHeight only for versions >= 209; absent fields are set to
    None.
    """
    command = b"version"
    def __init__(self):
        self.nVersion = MY_VERSION
        self.nServices = 1
        self.nTime = int(time.time())
        self.addrTo = CAddress()
        self.addrFrom = CAddress()
        self.nNonce = random.getrandbits(64)
        self.strSubVer = MY_SUBVERSION
        self.nStartingHeight = -1
    def deserialize(self, f):
        self.nVersion = struct.unpack("<i", f.read(4))[0]
        if self.nVersion == 10300:
            # Historical remapping inherited from Bitcoin's protocol.
            self.nVersion = 300
        self.nServices = struct.unpack("<Q", f.read(8))[0]
        self.nTime = struct.unpack("<q", f.read(8))[0]
        self.addrTo = CAddress()
        self.addrTo.deserialize(f)
        if self.nVersion >= 106:
            self.addrFrom = CAddress()
            self.addrFrom.deserialize(f)
            self.nNonce = struct.unpack("<Q", f.read(8))[0]
            self.strSubVer = deser_string(f)
            if self.nVersion >= 209:
                self.nStartingHeight = struct.unpack("<i", f.read(4))[0]
            else:
                self.nStartingHeight = None
        else:
            # Pre-106 peers do not send the remaining fields.
            self.addrFrom = None
            self.nNonce = None
            self.strSubVer = None
            self.nStartingHeight = None
    def serialize(self):
        # Always emits the full modern layout (assumes all fields are set).
        r = b""
        r += struct.pack("<i", self.nVersion)
        r += struct.pack("<Q", self.nServices)
        r += struct.pack("<q", self.nTime)
        r += self.addrTo.serialize()
        r += self.addrFrom.serialize()
        r += struct.pack("<Q", self.nNonce)
        r += ser_string(self.strSubVer)
        r += struct.pack("<i", self.nStartingHeight)
        return r
    def __repr__(self):
        return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)' \
            % (self.nVersion, self.nServices, time.ctime(self.nTime),
               repr(self.addrTo), repr(self.addrFrom), self.nNonce,
               self.strSubVer, self.nStartingHeight)
class msg_verack(object):
    """Empty 'verack' message acknowledging a version handshake."""
    command = b"verack"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_verack()"
class msg_addr(object):
    """'addr' message carrying a list of CAddress records."""
    command = b"addr"
    def __init__(self):
        self.addrs = []
    def deserialize(self, f):
        self.addrs = deser_vector(f, CAddress)
    def serialize(self):
        return ser_vector(self.addrs)
    def __repr__(self):
        return "msg_addr(addrs=%s)" % (repr(self.addrs))
class msg_alert(object):
    """'alert' message wrapping a signed CAlert payload."""
    command = b"alert"
    def __init__(self):
        self.alert = CAlert()
    def deserialize(self, f):
        self.alert = CAlert()
        self.alert.deserialize(f)
    def serialize(self):
        return self.alert.serialize()
    def __repr__(self):
        return "msg_alert(alert=%s)" % (repr(self.alert), )
class msg_inv(object):
    """'inv' message announcing a list of CInv inventory entries."""
    command = b"inv"
    def __init__(self, inv=None):
        self.inv = [] if inv is None else inv
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_inv(inv=%s)" % (repr(self.inv))
class msg_getdata(object):
    """'getdata' message requesting the objects named by a list of CInv."""
    command = b"getdata"
    def __init__(self, inv=None):
        # `is not None` (identity test) instead of `!= None` (equality),
        # per PEP 8; avoids surprises with __eq__-overriding objects.
        self.inv = inv if inv is not None else []
    def deserialize(self, f):
        self.inv = deser_vector(f, CInv)
    def serialize(self):
        return ser_vector(self.inv)
    def __repr__(self):
        return "msg_getdata(inv=%s)" % (repr(self.inv))
class msg_getblocks(object):
    """'getblocks' request: a block locator plus a stop hash (0 = no limit)."""
    command = b"getblocks"
    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0  # plain int, not the Python-2-only 0L literal
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        r = b""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r
    def __repr__(self):
        return "msg_getblocks(locator=%s hashstop=%064x)" \
            % (repr(self.locator), self.hashstop)
class msg_tx(object):
    """'tx' message carrying a single transaction.

    Bug fix: the default argument was `tx=CTransaction()` — a single
    instance created once at class-definition time, so every
    default-constructed msg_tx aliased the same transaction and
    deserialize() mutated it for all of them.  A fresh CTransaction is
    now built per instance.
    """
    command = b"tx"
    def __init__(self, tx=None):
        self.tx = CTransaction() if tx is None else tx
    def deserialize(self, f):
        self.tx.deserialize(f)
    def serialize(self):
        return self.tx.serialize()
    def __repr__(self):
        return "msg_tx(tx=%s)" % (repr(self.tx))
class msg_block(object):
    """'block' message carrying a full block."""
    command = b"block"
    def __init__(self, block=None):
        self.block = CBlock() if block is None else block
    def deserialize(self, f):
        self.block.deserialize(f)
    def serialize(self):
        return self.block.serialize()
    def __repr__(self):
        return "msg_block(block=%s)" % (repr(self.block))
class msg_getaddr(object):
    """Empty 'getaddr' message requesting known peer addresses."""
    command = b"getaddr"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_getaddr()"
class msg_ping_prebip31(object):
    """Payload-less 'ping' used by peers older than BIP 31."""
    command = b"ping"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_ping() (pre-bip31)"
class msg_ping(object):
    """BIP31-style 'ping' carrying a 64-bit nonce."""
    command = b"ping"
    def __init__(self, nonce=0):
        # Plain 0 default instead of the Python-2-only 0L long literal.
        self.nonce = nonce
    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]
    def serialize(self):
        return struct.pack("<Q", self.nonce)
    def __repr__(self):
        return "msg_ping(nonce=%08x)" % self.nonce
class msg_pong(object):
    """'pong' reply echoing a ping's 64-bit nonce."""
    command = b"pong"
    def __init__(self, nonce=0):
        self.nonce = nonce
    def deserialize(self, f):
        self.nonce = struct.unpack("<Q", f.read(8))[0]
    def serialize(self):
        return struct.pack("<Q", self.nonce)
    def __repr__(self):
        return "msg_pong(nonce=%08x)" % self.nonce
class msg_mempool(object):
    """Empty 'mempool' message requesting the peer's mempool contents."""
    command = b"mempool"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_mempool()"
class msg_sendheaders(object):
    """Empty 'sendheaders' message opting in to headers announcements."""
    command = b"sendheaders"
    def __init__(self):
        pass
    def deserialize(self, f):
        # No payload on the wire.
        pass
    def serialize(self):
        return b""
    def __repr__(self):
        return "msg_sendheaders()"
# getheaders message has
# number of entries
# vector of hashes
# hash_stop (hash of last desired block header, 0 to get as many as possible)
class msg_getheaders(object):
    """'getheaders' request: block locator plus stop hash (0 = as many as possible)."""
    command = b"getheaders"
    def __init__(self):
        self.locator = CBlockLocator()
        self.hashstop = 0  # plain int, not the Python-2-only 0L literal
    def deserialize(self, f):
        self.locator = CBlockLocator()
        self.locator.deserialize(f)
        self.hashstop = deser_uint256(f)
    def serialize(self):
        r = b""
        r += self.locator.serialize()
        r += ser_uint256(self.hashstop)
        return r
    def __repr__(self):
        return "msg_getheaders(locator=%s, stop=%064x)" \
            % (repr(self.locator), self.hashstop)
# headers message has
# <count> <vector of block headers>
class msg_headers(object):
    """'headers' message: a vector of block headers."""
    command = b"headers"
    def __init__(self):
        self.headers = []
    def deserialize(self, f):
        # comment in sscoind indicates these should be deserialized as blocks
        blocks = deser_vector(f, CBlock)
        # Rebuild the list instead of appending so deserializing into a
        # reused message object does not accumulate headers from earlier
        # calls (every other message type overwrites its fields).
        self.headers = [CBlockHeader(x) for x in blocks]
    def serialize(self):
        blocks = [CBlock(x) for x in self.headers]
        return ser_vector(blocks)
    def __repr__(self):
        return "msg_headers(headers=%s)" % repr(self.headers)
class msg_reject(object):
    """'reject' message: rejected command, code, reason, optional hash."""
    command = b"reject"
    REJECT_MALFORMED = 1
    def __init__(self):
        self.message = b""
        self.code = 0
        self.reason = b""
        self.data = 0  # plain int, not the Python-2-only 0L literal
    def deserialize(self, f):
        self.message = deser_string(f)
        self.code = struct.unpack("<B", f.read(1))[0]
        self.reason = deser_string(f)
        # Only well-formed block/tx rejections carry a trailing hash.
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            self.data = deser_uint256(f)
    def serialize(self):
        r = ser_string(self.message)
        r += struct.pack("<B", self.code)
        r += ser_string(self.reason)
        if (self.code != self.REJECT_MALFORMED and
                (self.message == b"block" or self.message == b"tx")):
            r += ser_uint256(self.data)
        return r
    def __repr__(self):
        return "msg_reject: %s %d %s [%064x]" \
            % (self.message, self.code, self.reason, self.data)
# Helper function
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
    """Poll *predicate* under mininode_lock every 50 ms.

    Returns True as soon as the predicate holds, False once either the
    attempt budget or the (approximate) timeout is exhausted.
    """
    tries = 0
    waited = 0.0
    while tries < attempts and waited < timeout:
        with mininode_lock:
            if predicate():
                return True
        tries += 1
        waited += 0.05  # approximate: counts intended sleep, not wall time
        time.sleep(0.05)
    return False
# This is what a callback should look like for NodeConn
# Reimplement the on_* functions to provide handling for events
class NodeConnCB(object):
    """Base callback class for NodeConn.

    deliver() dispatches each incoming message to the matching on_<command>
    method under the global mininode_lock; subclasses override the on_*
    hooks they care about.
    """
    def __init__(self):
        self.verack_received = False
        # deliver_sleep_time is helpful for debugging race conditions in p2p
        # tests; it causes message delivery to sleep for the specified time
        # before acquiring the global lock and delivering the next message.
        self.deliver_sleep_time = None
    def set_deliver_sleep_time(self, value):
        with mininode_lock:
            self.deliver_sleep_time = value
    def get_deliver_sleep_time(self):
        with mininode_lock:
            return self.deliver_sleep_time
    # Spin until verack message is received from the node.
    # Tests may want to use this as a signal that the test can begin.
    # This can be called from the testing thread, so it needs to acquire the
    # global lock.
    def wait_for_verack(self):
        while True:
            with mininode_lock:
                if self.verack_received:
                    return
            time.sleep(0.05)
    def deliver(self, conn, message):
        # Route the message to on_<command>; any exception is reported but
        # deliberately swallowed so one bad handler does not kill the
        # network thread.
        deliver_sleep = self.get_deliver_sleep_time()
        if deliver_sleep is not None:
            time.sleep(deliver_sleep)
        with mininode_lock:
            try:
                getattr(self, 'on_' + message.command)(conn, message)
            except:
                print "ERROR delivering %s (%s)" % (repr(message),
                                                    sys.exc_info()[0])
    def on_version(self, conn, message):
        # Complete the handshake: ack modern peers and record the lowest
        # common protocol version for this connection.
        if message.nVersion >= 209:
            conn.send_message(msg_verack())
        conn.ver_send = min(MY_VERSION, message.nVersion)
        if message.nVersion < 209:
            # Pre-209 peers never send verack.
            conn.ver_recv = conn.ver_send
    def on_verack(self, conn, message):
        conn.ver_recv = conn.ver_send
        self.verack_received = True
    def on_inv(self, conn, message):
        # Default behavior: request every non-error inventory item offered.
        want = msg_getdata()
        for i in message.inv:
            if i.type != 0:
                want.inv.append(i)
        if len(want.inv):
            conn.send_message(want)
    def on_addr(self, conn, message): pass
    def on_alert(self, conn, message): pass
    def on_getdata(self, conn, message): pass
    def on_getblocks(self, conn, message): pass
    def on_tx(self, conn, message): pass
    def on_block(self, conn, message): pass
    def on_getaddr(self, conn, message): pass
    def on_headers(self, conn, message): pass
    def on_getheaders(self, conn, message): pass
    def on_ping(self, conn, message):
        # Nonce-echoing pongs exist only for post-BIP31 peers.
        if conn.ver_send > BIP0031_VERSION:
            conn.send_message(msg_pong(message.nonce))
    def on_reject(self, conn, message): pass
    def on_close(self, conn): pass
    def on_mempool(self, conn): pass
    def on_pong(self, conn, message): pass
# More useful callbacks and functions for NodeConnCB's which have a single NodeConn
class SingleNodeConnCB(NodeConnCB):
    """NodeConnCB convenience subclass bound to exactly one NodeConn."""
    def __init__(self):
        NodeConnCB.__init__(self)
        self.connection = None
        self.ping_counter = 1
        self.last_pong = msg_pong()
    def add_connection(self, conn):
        self.connection = conn
    # Wrapper for the NodeConn's send_message function
    def send_message(self, message):
        self.connection.send_message(message)
    def on_pong(self, conn, message):
        self.last_pong = message
    # Sync up with the node
    def sync_with_ping(self, timeout=30):
        """Send a ping and wait up to *timeout* seconds for its pong."""
        def received_pong():
            return (self.last_pong.nonce == self.ping_counter)
        self.send_message(msg_ping(nonce=self.ping_counter))
        # Bug fix: `timeout` was passed positionally and therefore bound to
        # wait_until's `attempts` parameter, capping the wait at
        # attempts * 50 ms instead of `timeout` seconds.
        success = wait_until(received_pong, timeout=timeout)
        self.ping_counter += 1
        return success
# The actual NodeConn class
# This class provides an interface for a p2p connection to a specified node
class NodeConn(asyncore.dispatcher):
    """A p2p connection to a single node.

    Frames outgoing messages as magic + command + length (+ checksum for
    protocol versions >= 209) + payload, deframes and deserializes incoming
    bytes via `messagemap`, and delivers each parsed message to the callback
    object supplied at construction.
    """
    # Wire command name -> message class used to deserialize its payload.
    # NOTE: this is a class-level dict shared by all NodeConn instances.
    messagemap = {
        b"version": msg_version,
        b"verack": msg_verack,
        b"addr": msg_addr,
        b"alert": msg_alert,
        b"inv": msg_inv,
        b"getdata": msg_getdata,
        b"getblocks": msg_getblocks,
        b"tx": msg_tx,
        b"block": msg_block,
        b"getaddr": msg_getaddr,
        b"ping": msg_ping,
        b"pong": msg_pong,
        b"headers": msg_headers,
        b"getheaders": msg_getheaders,
        b"reject": msg_reject,
        b"mempool": msg_mempool,
    }
    # Per-network magic prefix; every wire message must start with one of these.
    MAGIC_BYTES = {
        "mainnet": b"\xbf\x0c\x6b\xbd", # mainnet
        "testnet3": b"\xce\xe2\xca\xff", # testnet3
        "regtest": b"\xfc\xc1\xb7\xdc" # regtest
    }
    def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=1):
        asyncore.dispatcher.__init__(self, map=mininode_socket_map)
        self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport))
        self.dstaddr = dstaddr
        self.dstport = dstport
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sendbuf = b""
        self.recvbuf = b""
        # Start at protocol 209 (the threshold used below for the optional
        # message checksum) until the version handshake updates it.
        self.ver_send = 209
        self.ver_recv = 209
        self.last_sent = 0
        self.state = "connecting"
        self.network = net
        self.cb = callback
        self.disconnect = False
        # stuff version msg into sendbuf
        vt = msg_version()
        vt.nServices = services
        vt.addrTo.ip = self.dstaddr
        vt.addrTo.port = self.dstport
        vt.addrFrom.ip = "0.0.0.0"
        vt.addrFrom.port = 0
        # pushbuf=True queues the message even though state is still "connecting".
        self.send_message(vt, True)
        print 'MiniNode: Connecting to Sscoin Node IP # ' + dstaddr + ':' \
            + str(dstport)
        try:
            self.connect((dstaddr, dstport))
        except:
            self.handle_close()
        self.rpc = rpc
    def show_debug_msg(self, msg):
        # All connection tracing goes through the per-connection logger.
        self.log.debug(msg)
    def handle_connect(self):
        self.show_debug_msg("MiniNode: Connected & Listening: \n")
        self.state = "connected"
    def handle_close(self):
        # Tear down the connection, drop any buffered data, and notify the
        # callback object so tests can react to the disconnect.
        self.show_debug_msg("MiniNode: Closing Connection to %s:%d... "
                            % (self.dstaddr, self.dstport))
        self.state = "closed"
        self.recvbuf = b""
        self.sendbuf = b""
        try:
            self.close()
        except:
            pass
        self.cb.on_close(self)
    def handle_read(self):
        try:
            t = self.recv(8192)
            if len(t) > 0:
                self.recvbuf += t
                self.got_data()
        except:
            pass
    def readable(self):
        return True
    def writable(self):
        # asyncore only polls for write-readiness while data is pending.
        with mininode_lock:
            length = len(self.sendbuf)
            return (length > 0)
    def handle_write(self):
        with mininode_lock:
            try:
                sent = self.send(self.sendbuf)
            except:
                self.handle_close()
                return
            # Keep whatever the socket did not accept for the next write.
            self.sendbuf = self.sendbuf[sent:]
    def got_data(self):
        # Deframe as many complete messages as recvbuf currently holds.
        # Header layout: 4 bytes magic, 12 bytes command, 4 bytes length;
        # protocol >= 209 adds a 4-byte double-SHA256 checksum before the
        # payload. Returns early whenever the buffer is still incomplete.
        try:
            while True:
                if len(self.recvbuf) < 4:
                    return
                if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]:
                    raise ValueError("got garbage %s" % repr(self.recvbuf))
                if self.ver_recv < 209:
                    # Pre-209 framing: no checksum field.
                    if len(self.recvbuf) < 4 + 12 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = None
                    if len(self.recvbuf) < 4 + 12 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4:4+12+4+msglen]
                    self.recvbuf = self.recvbuf[4+12+4+msglen:]
                else:
                    if len(self.recvbuf) < 4 + 12 + 4 + 4:
                        return
                    command = self.recvbuf[4:4+12].split(b"\x00", 1)[0]
                    msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0]
                    checksum = self.recvbuf[4+12+4:4+12+4+4]
                    if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen:
                        return
                    msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen]
                    # Checksum is the first 4 bytes of SHA256(SHA256(payload)).
                    th = sha256(msg)
                    h = sha256(th)
                    if checksum != h[:4]:
                        raise ValueError("got bad checksum " + repr(self.recvbuf))
                    self.recvbuf = self.recvbuf[4+12+4+4+msglen:]
                if command in self.messagemap:
                    f = BytesIO(msg)
                    t = self.messagemap[command]()
                    t.deserialize(f)
                    self.got_message(t)
                else:
                    self.show_debug_msg("Unknown command: '" + command + "' " +
                                        repr(msg))
        except Exception as e:
            print 'got_data:', repr(e)
    def send_message(self, message, pushbuf=False):
        # Frame `message` and append it to sendbuf. `pushbuf` bypasses the
        # connected-state check (used to queue the initial version message).
        if self.state != "connected" and not pushbuf:
            return
        self.show_debug_msg("Send %s" % repr(message))
        command = message.command
        data = message.serialize()
        tmsg = self.MAGIC_BYTES[self.network]
        tmsg += command
        # Command field is zero-padded to exactly 12 bytes.
        tmsg += b"\x00" * (12 - len(command))
        tmsg += struct.pack("<I", len(data))
        if self.ver_send >= 209:
            th = sha256(data)
            h = sha256(th)
            tmsg += h[:4]
        tmsg += data
        with mininode_lock:
            self.sendbuf += tmsg
            self.last_sent = time.time()
    def got_message(self, message):
        if message.command == b"version":
            if message.nVersion <= BIP0031_VERSION:
                # Old peer: switch to the pre-BIP31 ping (no nonce).
                # NOTE(review): this rebinds the shared class-level
                # messagemap entry, affecting every NodeConn instance --
                # confirm that is intended.
                self.messagemap[b'ping'] = msg_ping_prebip31
        # Keepalive: ping if nothing has been sent for 30 minutes.
        if self.last_sent + 30 * 60 < time.time():
            self.send_message(self.messagemap[b'ping']())
        self.show_debug_msg("Recv %s" % repr(message))
        self.cb.deliver(self, message)
    def disconnect_node(self):
        # Flag only; the NetworkThread performs the actual close outside
        # the asyncore loop.
        self.disconnect = True
class NetworkThread(Thread):
    """Drives the shared asyncore loop until every NodeConn is closed."""
    def run(self):
        while mininode_socket_map:
            # We check for whether to disconnect outside of the asyncore
            # loop to workaround the behavior of asyncore when using
            # select
            disconnected = []
            for fd, obj in mininode_socket_map.items():
                if obj.disconnect:
                    disconnected.append(obj)
            [ obj.handle_close() for obj in disconnected ]
            # count=1 returns control here each pass so disconnect flags
            # are polled between iterations.
            asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1)
# An exception we can raise if we detect a potential disconnect
# (p2p or rpc) before the test is complete
class EarlyDisconnectError(Exception):
    """Raised when a p2p or RPC connection drops before the test finishes."""

    def __init__(self, value):
        # Keep the payload available to callers that inspect the error.
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
|
r = b""
r += struct.pack("<Q", self.nServices)
|
<|file_name|>issue-66286.rs<|end_file_name|><|fim▁begin|>// force-host
// no-prefer-dynamic
#![crate_type = "proc-macro"]
extern crate proc_macro;<|fim▁hole|>#[proc_macro_attribute]
pub fn vec_ice(_attr: TokenStream, input: TokenStream) -> TokenStream {
// This redundant convert is necessary to reproduce ICE.
input.into_iter().collect()
}<|fim▁end|>
|
use proc_macro::TokenStream;
|
<|file_name|>zz_edit_gen.go<|end_file_name|><|fim▁begin|>// Copyright 2017-2021 The Usacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by 'github.com/sacloud/usacloud/tools/gen-commands'; DO NOT EDIT
package disk
import (
"github.com/sacloud/usacloud/pkg/cmd/core"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
)
func (p *editParameter) CleanupEmptyValue(fs *pflag.FlagSet) {
}
func (p *editParameter) buildFlags(fs *pflag.FlagSet) {
fs.StringVarP(&p.Zone, "zone", "", p.Zone, "(*required) ")
fs.StringVarP(&p.Parameters, "parameters", "", p.Parameters, "Input parameters in JSON format")
fs.BoolVarP(&p.GenerateSkeleton, "generate-skeleton", "", p.GenerateSkeleton, "Output skeleton of parameters with JSON format (aliases: --skeleton)")
fs.BoolVarP(&p.Example, "example", "", p.Example, "Output example parameters with JSON format")
fs.BoolVarP(&p.AssumeYes, "assumeyes", "y", p.AssumeYes, "Assume that the answer to any question which would be asked is yes")
fs.StringVarP(&p.OutputType, "output-type", "o", p.OutputType, "Output format options: [table/json/yaml] (aliases: --out)")
fs.BoolVarP(&p.Quiet, "quiet", "q", p.Quiet, "Output IDs only")
fs.StringVarP(&p.Format, "format", "", p.Format, "Output format in Go templates (aliases: --fmt)")
fs.StringVarP(&p.Query, "query", "", p.Query, "Query for JSON output")
fs.StringVarP(&p.QueryDriver, "query-driver", "", p.QueryDriver, "Name of the driver that handles queries to JSON output options: [jmespath/jq]")
fs.StringVarP(&p.EditDisk.HostName, "host-name", "", p.EditDisk.HostName, "")
fs.StringVarP(&p.EditDisk.Password, "password", "", p.EditDisk.Password, "")
fs.StringVarP(&p.EditDisk.IPAddress, "ip-address", "", p.EditDisk.IPAddress, "")
fs.IntVarP(&p.EditDisk.NetworkMaskLen, "netmask", "", p.EditDisk.NetworkMaskLen, "(aliases: --network-mask-len)")
fs.StringVarP(&p.EditDisk.DefaultRoute, "gateway", "", p.EditDisk.DefaultRoute, "(aliases: --default-route)")
fs.BoolVarP(&p.EditDisk.DisablePWAuth, "disable-pw-auth", "", p.EditDisk.DisablePWAuth, "")
fs.BoolVarP(&p.EditDisk.EnableDHCP, "enable-dhcp", "", p.EditDisk.EnableDHCP, "")
fs.BoolVarP(&p.EditDisk.ChangePartitionUUID, "change-partition-uuid", "", p.EditDisk.ChangePartitionUUID, "")
fs.StringSliceVarP(&p.EditDisk.SSHKeys, "ssh-keys", "", p.EditDisk.SSHKeys, "")
fs.VarP(core.NewIDSliceFlag(&p.EditDisk.SSHKeyIDs, &p.EditDisk.SSHKeyIDs), "ssh-key-ids", "", "")
fs.BoolVarP(&p.EditDisk.IsSSHKeysEphemeral, "make-ssh-keys-ephemeral", "", p.EditDisk.IsSSHKeysEphemeral, "")
fs.VarP(core.NewIDSliceFlag(&p.EditDisk.NoteIDs, &p.EditDisk.NoteIDs), "note-ids", "", "")
fs.StringVarP(&p.EditDisk.NotesData, "notes", "", p.EditDisk.NotesData, "")
fs.BoolVarP(&p.EditDisk.IsNotesEphemeral, "make-notes-ephemeral", "", p.EditDisk.IsNotesEphemeral, "")
fs.BoolVarP(&p.NoWait, "no-wait", "", p.NoWait, "")
fs.SetNormalizeFunc(p.normalizeFlagName)
}
func (p *editParameter) normalizeFlagName(_ *pflag.FlagSet, name string) pflag.NormalizedName {
switch name {
case "skeleton":
name = "generate-skeleton"
case "out":
name = "output-type"
case "fmt":
name = "format"
case "network-mask-len":
name = "netmask"
case "default-route":
name = "gateway"
}
return pflag.NormalizedName(name)
}
func (p *editParameter) buildFlagsUsage(cmd *cobra.Command) {
var sets []*core.FlagSet
{
var fs *pflag.FlagSet
fs = pflag.NewFlagSet("diskedit", pflag.ContinueOnError)
fs.SortFlags = false
fs.AddFlag(cmd.LocalFlags().Lookup("host-name"))
fs.AddFlag(cmd.LocalFlags().Lookup("password"))
fs.AddFlag(cmd.LocalFlags().Lookup("ip-address"))
fs.AddFlag(cmd.LocalFlags().Lookup("netmask"))
fs.AddFlag(cmd.LocalFlags().Lookup("gateway"))
fs.AddFlag(cmd.LocalFlags().Lookup("disable-pw-auth"))
fs.AddFlag(cmd.LocalFlags().Lookup("enable-dhcp"))
fs.AddFlag(cmd.LocalFlags().Lookup("change-partition-uuid"))
fs.AddFlag(cmd.LocalFlags().Lookup("ssh-keys"))
fs.AddFlag(cmd.LocalFlags().Lookup("ssh-key-ids"))
fs.AddFlag(cmd.LocalFlags().Lookup("make-ssh-keys-ephemeral"))
fs.AddFlag(cmd.LocalFlags().Lookup("note-ids"))
fs.AddFlag(cmd.LocalFlags().Lookup("notes"))
fs.AddFlag(cmd.LocalFlags().Lookup("make-notes-ephemeral"))
sets = append(sets, &core.FlagSet{
Title: "Edit disk options",
Flags: fs,
})
}
{
var fs *pflag.FlagSet
fs = pflag.NewFlagSet("zone", pflag.ContinueOnError)
fs.SortFlags = false
fs.AddFlag(cmd.LocalFlags().Lookup("zone"))
sets = append(sets, &core.FlagSet{<|fim▁hole|> })
}
{
var fs *pflag.FlagSet
fs = pflag.NewFlagSet("wait", pflag.ContinueOnError)
fs.SortFlags = false
fs.AddFlag(cmd.LocalFlags().Lookup("no-wait"))
sets = append(sets, &core.FlagSet{
Title: "Wait options",
Flags: fs,
})
}
{
var fs *pflag.FlagSet
fs = pflag.NewFlagSet("input", pflag.ContinueOnError)
fs.SortFlags = false
fs.AddFlag(cmd.LocalFlags().Lookup("assumeyes"))
fs.AddFlag(cmd.LocalFlags().Lookup("generate-skeleton"))
fs.AddFlag(cmd.LocalFlags().Lookup("parameters"))
sets = append(sets, &core.FlagSet{
Title: "Input options",
Flags: fs,
})
}
{
var fs *pflag.FlagSet
fs = pflag.NewFlagSet("output", pflag.ContinueOnError)
fs.SortFlags = false
fs.AddFlag(cmd.LocalFlags().Lookup("format"))
fs.AddFlag(cmd.LocalFlags().Lookup("output-type"))
fs.AddFlag(cmd.LocalFlags().Lookup("query"))
fs.AddFlag(cmd.LocalFlags().Lookup("query-driver"))
fs.AddFlag(cmd.LocalFlags().Lookup("quiet"))
sets = append(sets, &core.FlagSet{
Title: "Output options",
Flags: fs,
})
}
{
var fs *pflag.FlagSet
fs = pflag.NewFlagSet("example", pflag.ContinueOnError)
fs.SortFlags = false
fs.AddFlag(cmd.LocalFlags().Lookup("example"))
sets = append(sets, &core.FlagSet{
Title: "Parameter example",
Flags: fs,
})
}
core.BuildFlagsUsage(cmd, sets)
}
func (p *editParameter) setCompletionFunc(cmd *cobra.Command) {
}
func (p *editParameter) SetupCobraCommandFlags(cmd *cobra.Command) {
p.buildFlags(cmd.Flags())
p.buildFlagsUsage(cmd)
p.setCompletionFunc(cmd)
}<|fim▁end|>
|
Title: "Zone options",
Flags: fs,
|
<|file_name|>sum-children.test.js<|end_file_name|><|fim▁begin|>'use strict';
var path = require('path')
, chai = require('chai')
, expect = chai.expect
, sumChildren = require(path.join(__dirname, '..', 'lib', 'util', 'sum-children'))
;
describe("simplifying timings lists", function () {
it("should correctly reduce a simple list", function () {
expect(sumChildren([[22, 42]])).equal(20);
});
it("should accurately sum overlapping child traces", function () {
var intervals = [];
// start with a simple interval
intervals.push([ 0, 22]);
// add another interval completely encompassed by the first
intervals.push([ 5, 10]);
// add another that starts within the first range but extends beyond
intervals.push([11, 33]);
// add a final interval that's entirely disjoint
intervals.push([35, 39]);
expect(sumChildren(intervals)).equal(37);
});
it("should accurately sum partially overlapping child traces", function () {
var intervals = [];
// start with a simple interval
intervals.push([ 0, 22]);
// add another interval completely encompassed by the first
intervals.push([ 5, 10]);
// add another that starts simultaneously with the first range but that extends beyond
<|fim▁hole|>
it("should accurately sum partially overlapping, open-ranged child traces", function () {
var intervals = [];
// start with a simple interval
intervals.push([ 0, 22]);
// add a range that starts at the exact end of the first
intervals.push([22, 33]);
expect(sumChildren(intervals)).equal(33);
});
});<|fim▁end|>
|
intervals.push([ 0, 33]);
expect(sumChildren(intervals)).equal(33);
});
|
<|file_name|>ColEmiMatDAO.java<|end_file_name|><|fim▁begin|>//Copyright (c) 2011 Municipalidad de Rosario and Coop. de Trabajo Tecso Ltda.
//This file is part of SIAT. SIAT is licensed under the terms
//of the GNU General Public License, version 3.
//See terms in COPYING file or <http://www.gnu.org/licenses/gpl.txt>
package ar.gov.rosario.siat.def.buss.dao;
import org.hibernate.Query;
import org.hibernate.classic.Session;
import ar.gov.rosario.siat.base.buss.dao.GenericDAO;
import ar.gov.rosario.siat.base.buss.dao.SiatHibernateUtil;
import ar.gov.rosario.siat.def.buss.bean.ColEmiMat;
public class ColEmiMatDAO extends GenericDAO {
    /** Binds this generic DAO to the {@code ColEmiMat} entity class. */
    public ColEmiMatDAO() {
        super(ColEmiMat.class);
    }
/**
* Obtiene una columna de una matriz de emision
* por su codigo
*/
public ColEmiMat getByCodigo(String codColumna) throws Exception {
ColEmiMat colEmiMat;
String queryString = "from ColEmiMat t where t.codColumna = :codigo";
Session session = SiatHibernateUtil.currentSession();
Query query = session.createQuery(queryString).setString("codigo", codColumna);
colEmiMat = (ColEmiMat) query.uniqueResult();
return colEmiMat; <|fim▁hole|>}<|fim▁end|>
|
}
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.test import TestCase
from models import Project<|fim▁hole|>class ProjectsTest(TestCase):
fixtures = ['test_data.json']
def test_project_listing(self):
"""
Verify that the project listing page contains all projects within the
page's context.
"""
response = self.client.get(reverse("projects:list"))
self.failUnlessEqual(response.status_code, 200)
try:
response.context['project_list']
except KeyError:
self.fail("Template context did not contain project_list object.")
for project in Project.objects.published():
self.assertTrue(project in response.context['project_list'])
    def test_verify_author_detail_pages(self):
        """
        Verify that each published project has a working detail page with the
        project in its context, and that unpublished projects return 404.
        """
        for project in Project.objects.all():
            response = self.client.get(project.get_absolute_url())
            # NOTE(review): ``published()`` is called on the instance here but
            # on the manager above -- presumably a model method; confirm both
            # exist and agree.
            if project.published():
                self.assertTrue(response.status_code == 200)
                try:
                    self.failUnlessEqual(response.context['project'], project)
                except KeyError:
                    self.fail("Template context did not contain project object.")
            else:
                self.assertTrue(response.status_code == 404)
| |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>import sys
import os
import numpy as np<|fim▁hole|>sys.path.append(os.path.join(os.getcwd(), ".."))
from run_utils import run_kmc, parse_input
from ParameterJuggler import ParameterSet
def main():
controller, path, app, cfg, n_procs = parse_input(sys.argv)
alpha_values = ParameterSet(cfg, "alpha\s*=\s*(.*)\;")
alpha_values.initialize_set(np.linspace(0.5, 2, 16))
heights = ParameterSet(cfg, "confiningSurfaceHeight\s*=\s*(.*)\;")
heights.initialize_set([20.])
diffusions = ParameterSet(cfg, "diffuse\s*=\s*(.*)\;")
diffusions.initialize_set([3])
controller.register_parameter_set(alpha_values)
controller.register_parameter_set(heights)
controller.register_parameter_set(diffusions)
controller.set_repeats(20)
controller.run(run_kmc, path, app, cfg, ask=False, n_procs=n_procs, shuffle=True)
if __name__ == "__main__":
main()<|fim▁end|>
| |
<|file_name|>test_eigb.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, absolute_import, division
import sys
sys.path.append('../')
import numpy as np
import tt
from tt.eigb import *
import time
""" This code computes many eigenvalus of the Laplacian operator """
d = 8
f = 8
A = tt.qlaplace_dd([d]*f)
#A = (-1)*A
#A = tt.eye(2,d)
n = [2] *(d * f)
r = [8] *(d * f + 1)<|fim▁hole|>r[0] = 1
r[d * f] = 8 #Number of eigenvalues sought
x = tt.rand(n, d * f, r)
#x = tt_ones(2,d)
t = time.time()
y, lam = eigb(A, x, 1e-6)
t1 = time.time()
print('Eigenvalues:', lam)
print('Time is:', t1-t)<|fim▁end|>
| |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# Wokkel documentation build configuration file, created by
# sphinx-quickstart on Mon May 7 11:15:38 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['apilinks_sphinxext']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Wokkel'
copyright = u'2003-2012, Ralph Meijer'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '18.0.0'
# The full version, including alpha/beta/rc tags.
release = '18.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'listings']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# pydoctor API base URL
apilinks_base_url = 'api/'
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'index': ['localtoc.html', 'indexsidebar.html', 'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Wokkeldoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Wokkel.tex', u'Wokkel Documentation',
u'Ralph Meijer', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'wokkel', u'Wokkel Documentation',
[u'Ralph Meijer'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Wokkel', u'Wokkel Documentation',
u'Ralph Meijer', 'Wokkel', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'<|fim▁end|>
|
# -*- coding: utf-8 -*-
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
from fir_artifacts import views
app_name='fir_artifacts'<|fim▁hole|> url(r'^(?P<artifact_id>\d+)/correlations/$', views.artifacts_correlations, name='correlations'),
url(r'^files/(?P<content_type>\d+)/upload/(?P<object_id>\d+)/$', views.upload_file, name='upload_file'),
url(r'^files/(?P<content_type>\d+)/archive/(?P<object_id>\d+)/$', views.download_archive, name='download_archive'),
url(r'^files/(?P<file_id>\d+)/remove/$', views.remove_file, name='remove_file'),
url(r'^files/(?P<file_id>\d+)/download/$', views.download, name='download_file'),
]<|fim▁end|>
|
urlpatterns = [
url(r'^(?P<artifact_id>\d+)/detach/(?P<relation_name>\w+)/(?P<relation_id>\d+)/$', views.detach_artifact, name='detach'),
|
<|file_name|>mnemonic.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <boost/algorithm/string.hpp>
#include <bitcoin/bitcoin.hpp>
#include <wallet/wallet.hpp>
using namespace bc;
using namespace libwallet;
int display_help()
{
    // Usage text, one entry per printed line.
    static const char* const kUsageLines[] = {
        "Usage:",
        "",
        "  mnemonic <<< \"[WORD1] [WORD2] ...\"",
        "  mnemonic <<< SEED",
        "",
        "Please email suggestions and questions to <[email protected]>.",
    };
    for (const char* const line : kUsageLines)
        puts(line);
    // Non-zero exit status: the caller forwards this from main on bad input.
    return -1;
}
int main(int argc, char** argv)
{
std::istreambuf_iterator<char> it(std::cin);
std::istreambuf_iterator<char> end;
std::string data(it, end);<|fim▁hole|> if (words.empty())
return display_help();
else if (words.size() == 1 &&
words[0].size() == libwallet::deterministic_wallet::seed_size)
{
const std::string seed = words[0];
string_list words = encode_mnemonic(seed);
bool first = true;
for (const std::string& word: words)
{
if (!first)
std::cout << " ";
std::cout << word;
first = false;
}
std::cout << std::endl;
return 0;
}
else
{
std::cout << decode_mnemonic(words) << std::endl;
return 0;
}
// Should never happen!
return 0;
}<|fim▁end|>
|
boost::algorithm::trim(data);
string_list words;
boost::split(words, data, boost::is_any_of("\n\t "));
|
<|file_name|>cinder_250.py<|end_file_name|><|fim▁begin|>"""
Installs and configures Cinder
"""
import os
import re
import uuid
import logging
from packstack.installer import exceptions
from packstack.installer import processors
from packstack.installer import validators
from packstack.installer import basedefs
from packstack.installer import utils
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
from packstack.installer import exceptions
from packstack.installer import output_messages
# Controller object will
# be initialized from main flow
controller = None
# Plugin name
PLUGIN_NAME = "OS-Cinder"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')
logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
global controller
controller = controllerObject
logging.debug("Adding OpenStack Cinder configuration")
paramsList = [
{"CMD_OPTION" : "cinder-host",
"USAGE" : "The IP address of the server on which to install Cinder",
"PROMPT" : "Enter the IP address of the Cinder server",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_ssh],
"DEFAULT_VALUE" : utils.get_localhost_ip(),
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_CINDER_HOST",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
{"CMD_OPTION" : "cinder-db-passwd",
"USAGE" : "The password to use for the Cinder to access DB",
"PROMPT" : "Enter the password for the Cinder DB access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_DB_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "cinder-ks-passwd",
"USAGE" : "The password to use for the Cinder to authenticate with Keystone",
"PROMPT" : "Enter the password for the Cinder Keystone access",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : uuid.uuid4().hex[:16],
"MASK_INPUT" : True,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_KS_PW",
"USE_DEFAULT" : True,
"NEED_CONFIRM" : True,
"CONDITION" : False },
{"CMD_OPTION" : "cinder-backend",
"USAGE" : ("The Cinder backend to use, valid options are: "
"lvm, gluster, nfs"),
"PROMPT" : "Enter the Cinder backend to be configured",
"OPTION_LIST" : ["lvm", "gluster", "nfs"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "lvm",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_BACKEND",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDER",
"DESCRIPTION" : "Cinder Config parameters",
"PRE_CONDITION" : "CONFIG_CINDER_INSTALL",
"PRE_CONDITION_MATCH" : "y",
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
    # True when Cinder install was requested and the backend is LVM
    # (LVM is also the default when CONFIG_CINDER_BACKEND is unset).
    def check_lvm_options(config):
        return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
                config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'lvm')
paramsList = [
{"CMD_OPTION" : "cinder-volumes-create",
"USAGE" : ("Create Cinder's volumes group. This should only be done for "
"testing on a proof-of-concept installation of Cinder. This "
"will create a file-backed volume group and is not suitable "
"for production usage."),
"PROMPT" : ("Should Cinder's volumes group be created (for proof-of-concept "
"installation)?"),
"OPTION_LIST" : ["y", "n"],
"VALIDATORS" : [validators.validate_options],
"DEFAULT_VALUE" : "y",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_VOLUMES_CREATE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDERVOLUMECREATE",
"DESCRIPTION" : "Cinder volume create Config parameters",<|fim▁hole|> "POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def check_lvm_vg_options(config):
return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'lvm' and
config.get('CONFIG_CINDER_VOLUMES_CREATE', 'y') == 'y')
paramsList = [
{"CMD_OPTION" : "cinder-volumes-size",
"USAGE" : ("Cinder's volumes group size. Note that actual volume size "
"will be extended with 3% more space for VG metadata."),
"PROMPT" : "Enter Cinder's volumes group usable size",
"OPTION_LIST" : [],
"VALIDATORS" : [validators.validate_not_empty],
"DEFAULT_VALUE" : "20G",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": False,
"CONF_NAME" : "CONFIG_CINDER_VOLUMES_SIZE",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDERVOLUMESIZE",
"DESCRIPTION" : "Cinder volume size Config parameters",
"PRE_CONDITION" : check_lvm_vg_options,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def check_gluster_options(config):
return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'gluster')
paramsList = [
{"CMD_OPTION" : "cinder-gluster-mounts",
"USAGE" : ("A single or comma separated list of gluster volume shares "
"to mount, eg: ip-address:/vol-name "),
"PROMPT" : ("Enter a single or comma separated list of gluster volume "
"shares to use with Cinder"),
"OPTION_LIST" : ["^'([\d]{1,3}\.){3}[\d]{1,3}:/.*'"],
"VALIDATORS" : [validators.validate_multi_regexp],
"PROCESSORS" : [processors.process_add_quotes_around_values],
"DEFAULT_VALUE" : "",
"MASK_INPUT" : False,
"LOOSE_VALIDATION": True,
"CONF_NAME" : "CONFIG_CINDER_GLUSTER_MOUNTS",
"USE_DEFAULT" : False,
"NEED_CONFIRM" : False,
"CONDITION" : False },
]
groupDict = { "GROUP_NAME" : "CINDERGLUSTERMOUNTS",
"DESCRIPTION" : "Cinder gluster Config parameters",
"PRE_CONDITION" : check_gluster_options,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
"POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def check_nfs_options(config):
    """Return True when Cinder is being installed with the NFS backend."""
    if config.get('CONFIG_CINDER_INSTALL', 'n') != 'y':
        return False
    return config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'nfs'
# Interactive option collecting the NFS exports to back Cinder volumes;
# shown only when check_nfs_options() holds (see PRE_CONDITION below).
paramsList = [
    {"CMD_OPTION" : "cinder-nfs-mounts",
     # Typo fix: "seprated" -> "separated" in both user-facing strings.
     "USAGE" : ("A single or comma separated list of NFS exports to mount, "
                "eg: ip-address:/export-name "),
     "PROMPT" : ("Enter a single or comma separated list of NFS exports to "
                 "use with Cinder"),
     # NOTE(review): the regexp only accepts dotted-quad IPs as the host
     # part; hostnames would be rejected -- confirm this is intended.
     "OPTION_LIST" : ["^'([\d]{1,3}\.){3}[\d]{1,3}:/.*'"],
     "VALIDATORS" : [validators.validate_multi_regexp],
     "PROCESSORS" : [processors.process_add_quotes_around_values],
     "DEFAULT_VALUE" : "",
     "MASK_INPUT" : False,
     "LOOSE_VALIDATION": True,
     "CONF_NAME" : "CONFIG_CINDER_NFS_MOUNTS",
     "USE_DEFAULT" : False,
     "NEED_CONFIRM" : False,
     "CONDITION" : False },
    ]
groupDict = { "GROUP_NAME" : "CINDERNFSMOUNTS",
              "DESCRIPTION" : "Cinder NFS Config parameters",
              "PRE_CONDITION" : check_nfs_options,
              "PRE_CONDITION_MATCH" : True,
              "POST_CONDITION" : False,
              "POST_CONDITION_MATCH" : True}
controller.addGroup(groupDict, paramsList)
def initSequences(controller):
    """Register the Cinder installation sequence when Cinder is enabled."""
    if controller.CONF['CONFIG_CINDER_INSTALL'] != 'y':
        return
    steps = [
        {'title': 'Installing dependencies for Cinder',
         'functions': [install_cinder_deps]},
        {'title': 'Adding Cinder Keystone manifest entries',
         'functions': [create_keystone_manifest]},
        {'title': 'Adding Cinder manifest entries',
         'functions': [create_manifest]},
    ]
    # The volume-group check is only meaningful for the LVM backend.
    if controller.CONF['CONFIG_CINDER_BACKEND'] == 'lvm':
        steps.append(
            {'title': 'Checking if the Cinder server has a cinder-volumes vg',
             'functions': [check_cinder_vg]})
    controller.addSequence("Installing OpenStack Cinder", [], [], steps)
def install_cinder_deps(config):
    """Install package dependencies for Cinder on the Cinder host."""
    server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
    required = []
    # Only the LVM backend needs extra packages.
    if config['CONFIG_CINDER_BACKEND'] == 'lvm':
        required.append('lvm2')
    for package in required:
        # Install only if the package is not already present.
        server.append("rpm -q %(package)s || yum install -y %(package)s"
                      % dict(package=package))
    server.execute()
def check_cinder_vg(config):
    """Verify or create the 'cinder-volumes' volume group on the Cinder host.

    If CONFIG_CINDER_VOLUMES_CREATE is not 'y', the VG must already exist or
    MissingRequirements is raised. Otherwise, a loopback-file-backed VG of
    CONFIG_CINDER_VOLUMES_SIZE (plus 3% metadata reserve) is created and
    re-attached on boot via rc.local.
    """
    cinders_volume = 'cinder-volumes'
    # Do we have a cinder-volumes vg?
    have_cinders_volume = False
    server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
    server.append('vgdisplay %s' % cinders_volume)
    try:
        server.execute()
        have_cinders_volume = True
    except exceptions.ScriptRuntimeError:
        pass
    # Configure system LVM settings (snapshot_autoextend)
    server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
    server.append('sed -i -r "s/^ *snapshot_autoextend_threshold +=.*/'
                  ' snapshot_autoextend_threshold = 80/" '
                  '/etc/lvm/lvm.conf')
    server.append('sed -i -r "s/^ *snapshot_autoextend_percent +=.*/'
                  ' snapshot_autoextend_percent = 20/" '
                  '/etc/lvm/lvm.conf')
    try:
        server.execute()
    except exceptions.ScriptRuntimeError:
        # Best effort only: a failure to tune LVM is not fatal.
        logging.info("Warning: Unable to set system LVM settings.")
    if config["CONFIG_CINDER_VOLUMES_CREATE"] != "y":
        # User opted out of VG creation: the group must already be there.
        if not have_cinders_volume:
            raise exceptions.MissingRequirements("The cinder server should"
                                                 " contain a cinder-volumes volume group")
    else:
        if have_cinders_volume:
            controller.MESSAGES.append(
                output_messages.INFO_CINDER_VOLUMES_EXISTS)
            return
        # Pick the restart command matching the host's init system.
        server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
        server.append('systemctl')
        try:
            server.execute()
            rst_cmd = 'systemctl restart openstack-cinder-volume.service'
        except exceptions.ScriptRuntimeError:
            rst_cmd = 'service openstack-cinder-volume restart'
        server.clear()
        logging.info("A new cinder volumes group will be created")
        err = "Cinder's volume group '%s' could not be created" % \
            cinders_volume
        cinders_volume_path = '/var/lib/cinder'
        server.append('mkdir -p %s' % cinders_volume_path)
        logging.debug("Volume's path: %s" % cinders_volume_path)
        # Only "<integer>G" sizes are accepted, e.g. "20G".
        match = re.match('^(?P<size>\d+)G$',
                         config['CONFIG_CINDER_VOLUMES_SIZE'].strip())
        if not match:
            msg = 'Invalid Cinder volumes VG size.'
            raise exceptions.ParamValidationError(msg)
        # Convert to MiB and add a 3% reserve for VG metadata.
        cinders_volume_size = int(match.group('size')) * 1024
        cinders_reserve = int(cinders_volume_size * 0.03)
        cinders_volume_size = cinders_volume_size + cinders_reserve
        cinders_volume_path = os.path.join(cinders_volume_path, cinders_volume)
        # Create a sparse backing file and turn it into an LVM PV/VG via a
        # loop device.
        server.append('dd if=/dev/zero of=%s bs=1 count=0 seek=%sM'
                      % (cinders_volume_path, cinders_volume_size))
        server.append('LOFI=$(losetup --show -f %s)' % cinders_volume_path)
        server.append('pvcreate $LOFI')
        server.append('vgcreate %s $LOFI' % cinders_volume)
        # Add the loop device on boot
        server.append('grep %(volume)s /etc/rc.d/rc.local || '
                      'echo "losetup -f %(path)s && '
                      'vgchange -a y %(volume)s && '
                      '%(restart_cmd)s" '
                      '>> /etc/rc.d/rc.local' %
                      {'volume': cinders_volume, 'restart_cmd': rst_cmd,
                       'path': cinders_volume_path})
        server.append('grep "#!" /etc/rc.d/rc.local || '
                      'sed -i \'1i#!/bin/sh\' /etc/rc.d/rc.local')
        server.append('chmod +x /etc/rc.d/rc.local')
        # Let's make sure it exists
        server.append('vgdisplay %s' % cinders_volume)
        try:
            server.execute()
        except exceptions.ScriptRuntimeError:
            # Release loop device if cinder's volume creation
            # fails.
            try:
                logging.debug("Release loop device, volume creation failed")
                server = utils.ScriptRunner(controller.CONF['CONFIG_CINDER_HOST'])
                server.append('losetup -d $(losetup -j %s | cut -d : -f 1)' %
                              cinders_volume_path
                              )
                server.execute()
            except:
                pass
            raise exceptions.MissingRequirements(err)
def create_keystone_manifest(config):
    """Append the Cinder Keystone entries to the Keystone host's manifest."""
    keystone_host = controller.CONF['CONFIG_KEYSTONE_HOST']
    manifestfile = "%s_keystone.pp" % keystone_host
    manifestdata = getManifestTemplate("keystone_cinder.pp")
    appendManifestFile(manifestfile, manifestdata)
def create_manifest(config):
    """Assemble and append the Cinder puppet manifest for the Cinder host."""
    manifestfile = "%s_cinder.pp" % controller.CONF['CONFIG_CINDER_HOST']
    fragments = [getManifestTemplate("cinder.pp")]
    backend = config['CONFIG_CINDER_BACKEND']
    if backend == "gluster":
        fragments.append(getManifestTemplate("cinder_gluster.pp"))
    if backend == "nfs":
        fragments.append(getManifestTemplate("cinder_nfs.pp"))
    if config['CONFIG_CEILOMETER_INSTALL'] == 'y':
        fragments.append(getManifestTemplate('cinder_ceilometer.pp'))
    # Open the firewall for iSCSI (3260) and the Cinder API (8776) to the
    # compute hosts.
    hosts = config['CONFIG_NOVA_COMPUTE_HOSTS'].split(",")
    config['FIREWALL_ALLOWED'] = ",".join(
        "'%s'" % host.strip() for host in hosts if host.strip())
    config['FIREWALL_SERVICE_NAME'] = "cinder"
    config['FIREWALL_PORTS'] = "'3260', '8776'"
    fragments.append(getManifestTemplate("firewall.pp"))
    appendManifestFile(manifestfile, "".join(fragments))
|
"PRE_CONDITION" : check_lvm_options,
"PRE_CONDITION_MATCH" : True,
"POST_CONDITION" : False,
|
<|file_name|>build.js<|end_file_name|><|fim▁begin|>'use strict';
<|fim▁hole|> .require(require.resolve('./main'), { entry: true })
.bundle({ debug: true });
};
// Test
if (!module.parent) {
go().pipe(process.stdout);
}<|fim▁end|>
|
var browserify = require('browserify');
var go = module.exports = function () {
return browserify()
|
<|file_name|>group-detail.component.ts<|end_file_name|><|fim▁begin|>import {Component, Input} from '@angular/core';
import {Title} from '@angular/platform-browser';
import {RouteParams, ROUTER_DIRECTIVES, ROUTER_PROVIDERS } from '@angular/router-deprecated';
import {TaxonImage} from './taxon-image';
import {Taxon} from './taxon';
import {Group} from './group';
import {TaxonService} from './taxon.service';
@Component({
selector: 'group-detail',
styles: [`
.done-true {
text-decoration: line-through;
color: grey;
}`
],
template: `
<div class="row">
<div class="col-xs-12">
<h1>Group {{id}} - <small>{{taxons.length}} arter</small></h1>
</div>
<div class="col-xs-12">
<table class="small">
<tr *ngFor="let taxon of taxons">
<td>
<div *ngIf="taxon.hasImage">
<a [routerLink]="['TaxonDetail', {id: taxon.slug }]">
<em>{{taxon.latin}}</em> - {{taxon.name}}
</a>
</div>
<div *ngIf="!taxon.hasImage">
<em>{{taxon.latin}}</em> - {{taxon.name}}
</div>
</td>
<td align="right" style="white-space: nowrap;">
<div *ngIf="taxon.wingSpanMin!=0">
{{taxon.wingSpanMin}}-{{taxon.wingSpanMax}} mm
</div>
</td>
<td style="padding: 2px 5px;">
<div>
<a href="http://www.lepidoptera.se/arter/{{taxon.slugSv}}">[länk]</a>
</div>
</td><|fim▁hole|> </div>
<div *ngFor="let item of taxonImages" class="col-xs-12 col-md-6 col-lg-4">
<a [routerLink]="['TaxonDetail', {id: item.slug }]">
<img src="{{item.image}}" class="img-responsive" alt="{{item.latin}} - {{item.name}} © {{item.photographer}}" />
</a>
<p class="text-center">
<small>
<em>{{item.latin}}<span *ngIf="item.unsure">?</span></em> - {{item.name}}<span *ngIf="item.unsure">?</span><br/>
<!--{{item.date}}, {{item.site}} © {{item.photographer}}-->
</small>
</p>
</div>
</div>
`,
directives: [ROUTER_DIRECTIVES],
providers: [TaxonService,Title]
})
export class GroupDetailComponent {
    // Images for all taxa in this group that have one.
    private taxonImages:TaxonImage[] = [];
    // All taxa belonging to this group.
    private taxons:Taxon[] = [];
    // Group identifier taken from the route parameters.
    id: string;
    constructor(_routeParams:RouteParams, _service: TaxonService, _title: Title){
        // Resolve the group id from the route and load its taxa and images.
        let id = _routeParams.get('id');
        this.id = id;
        this.taxonImages = _service.getTaxonImagesForGroup(id);
        this.taxons = _service.getTaxonsForGroup(id);
        // NOTE(review): the page title is hard-coded to the Coleophoridae
        // family regardless of the group id -- confirm this is intended.
        _title.setTitle('Grupp ' + id + ' - Coleophoridae - Säckmalar');
    }
}
|
</tr>
</table>
|
<|file_name|>lib_entity.py<|end_file_name|><|fim▁begin|>"""
syntax
entity ::= {id(utc millisecond), type, id_from, id_to, status, data, source, note}
data example1: search wikipedia
{ "id":1378327851001,
"type":"name-name",
"id-from":"MIT",
"id-to":"Massachusetts Institute of Technology",
"status":"auto",
"date":"2013-09-04",
"source":"wikipedia+dbpedia"
}
data example2: search dbpedia
{ "id":1378327851002,
"type":"name-uri",
"id-from":"Massachusetts Institute of Technology",
"id-to":"http://dbpedia.org/resource/Massachusetts_Institute_of_Technology",
"status":"auto",
"date":"2013-09-04",
"source":"dbpedia"
}
data example3: manual assert
{ "id":1378327851003,
"type":"name-name",
"id-from":"Mit",
"id-to":"MIT",
"status":"auto",
"date":"2013-09-04",
"source":"man"
}
data
* map_data id_from > type > record
* list_new [record]
* list_fail [id_from] -- avoid retry
* map_type_lookup type > lookup-function input: id_from; output:{type>record}
operation
* load(dir_name, entity_type)<|fim▁hole|>
notes
* persistent storage in csv file
* records is ordered by created
1. api
2. web test
3. algorithm develop
"""
class DataNamedEntity(object):
    """Persistent store of named-entity mapping records backed by CSV files."""

    ENTITY_TYPE_ORGANIZATION = "organisation"
    ENTITY_TYPE_PERSON = "person"
    ENTITY_TYPE_PLACE = "place"
    # Column order used when reading/writing the CSV storage files.
    # LIST_HEADER = ["altLabel","title","subtitle", "uri","source","status","redirects","disambiguates","matched_entity_type"]
    LIST_HEADER = ["altLabel", "title", "subtitle", "uri", "source", "status",
                   "redirects", "disambiguates", "matched_entity_type",
                   "row_type"]

    def __log__(self, msg):
        """Print msg prefixed with the concrete class of this instance.

        Bug fix: the original called print("[{}]{}", format(type(self), msg)),
        which passed msg as a format *spec* to the builtin format() and raised
        TypeError; str.format is what was intended.
        """
        print("[{}]{}".format(type(self), msg))

    def __init__(self, dir_data, entity_type):
        """Load existing records for entity_type from dir_data, if present."""
        # init config
        self.config = {
            "entity_type": entity_type,
            "fn_data": "%s/%s.csv" % (dir_data, entity_type),
            "fn_new": "%s/%s.new.csv" % (dir_data, entity_type),
        }
        # load data
        data_json = []
        if os.path.exists(self.config["fn_data"]):
            data_json = UtilCsv.csv2json(self.config["fn_data"])
            self.__log__("load {} entries from [{}]".format(
                len(data_json),
                self.config["fn_data"]))
        else:
            # Start an empty data file containing only the header row.
            # NOTE(review): the header comes from EntityPerson.LIST_HEADER
            # rather than this class's LIST_HEADER -- confirm this is intended.
            with open(self.config["fn_data"], 'w') as f:
                csvwriter = UnicodeWriter(f)
                headers = EntityPerson.LIST_HEADER
                csvwriter.writerow(headers)
        # init internal memory: label_type > label_text > record
        self.dict_name = {}
        for entry in data_json:
            # default label_type
            if not entry["label_type"]:
                entry["label_type"] = "text"
            data_person = {}
            # Bug fix: the original for-statement was missing its trailing
            # colon, which is a syntax error.
            for p in ["name", "sense", "modified"]:
                data_person[p] = entry[p]
            # NOTE(review): indexing dict_name by a not-yet-present
            # label_type raises KeyError; presumably UtilJson.add_init_dict
            # expects the parent dict to exist -- confirm against its API.
            UtilJson.add_init_dict(
                self.dict_name[entry["label_type"]],
                entry["label_text"],
                data_person
            )
        # init new row
        self.list_new_entity = []

    def add_new_data(self, entry):
        """Register a new mapping record (not yet implemented).

        Bug fix: the original def contained only comments, which is a syntax
        error in Python; a placeholder body preserves the (absent) behavior.
        """
        # TODO: implement; expected entry fields:
        # source_id, email, homepage, name, organization, country
        pass

    def write_new_data(self, filemode="w"):
        """Write the accumulated new records to the .new.csv staging file."""
        headers = DbpediaApi.LIST_HEADER
        # Bug fix: converted the Python 2 print statement to a function call
        # for consistency with __log__ above.
        print("{0} new mapping entries added ".format(len(self.list_new_entity)))
        # start the new data file, to be merged to original data
        with open(self.config["fn_new"], filemode) as f:
            csvwriter = UnicodeWriter(f)
            for entry in self.list_new_entity:
                row = UtilString.json2list(entry, headers)
                csvwriter.writerow(row)
|
pass1: remove obsoleted relation r1 where r1.id_from=r2.id_from and r1.created<r2.created
alternatively, use a hashtable with keys {type, id-from}
* find(id_from, type, recursive=False)
* add(record)
|
<|file_name|>table_edit.js<|end_file_name|><|fim▁begin|>function loadText()
{
var txtLang = document.getElementsByName("txtLang");
txtLang[0].innerHTML = "St\u00F8rrelse";
txtLang[1].innerHTML = "Egenskaber";
txtLang[2].innerHTML = "Typografi";
txtLang[3].innerHTML = "Bredde";
txtLang[4].innerHTML = "Bredde styret af indhold";
txtLang[5].innerHTML = "Tabelbredde";
txtLang[6].innerHTML = "Tilpas til vindue";
txtLang[7].innerHTML = "H\u00F8jde";
txtLang[8].innerHTML = "Bredde styret af indhold";
txtLang[9].innerHTML = "Tabelbredde";
txtLang[10].innerHTML = "Tilpas til vindue";
txtLang[11].innerHTML = "Justering";
txtLang[12].innerHTML = "Margen";
txtLang[13].innerHTML = "Venstre";
txtLang[14].innerHTML = "H\u00F8jre";
txtLang[15].innerHTML = "Top";
txtLang[16].innerHTML = "Nederst";
txtLang[17].innerHTML = "Ramme";
txtLang[18].innerHTML = "Collapse";
txtLang[19].innerHTML = "Baggrund";
txtLang[20].innerHTML = "Celle afstand";
txtLang[21].innerHTML = "Celle margen";
txtLang[22].innerHTML = "Typografi";
var optLang = document.getElementsByName("optLang");
optLang[0].text = "pixels"
optLang[1].text = "procent"
optLang[2].text = "pixels"
optLang[3].text = "procent"
optLang[4].text = "Venstre"
optLang[5].text = "Centrer"
optLang[6].text = "H\u00F8jre"
optLang[7].text = "Ingen"
optLang[8].text = "Ja"
optLang[9].text = "Nej"
document.getElementById("btnPick").value="V\u00E6lg";
document.getElementById("btnImage").value="Billede";
<|fim▁hole|> document.getElementById("btnCancel").value = "Annuller";
document.getElementById("btnApply").value = "Opdater";
document.getElementById("btnOk").value = " Ok ";
}
function getText(s)
{
    // Danish translations for strings requested by the shared dialog code;
    // unknown keys yield an empty string.
    var translations = {
        "Custom Colors": "Egne farver",
        "More Colors...": "Flere farver..."
    };
    return translations.hasOwnProperty(s) ? translations[s] : "";
}
function writeTitle()
{
    // Emit the localized <title> element for the table-properties dialog.
    var title = "Tabel egenskaber";
    document.write("<title>" + title + "</title>");
}
| |
<|file_name|>processor.py<|end_file_name|><|fim▁begin|>import re
import copy
import logging
import datetime
import objectpath
from indra.statements import *
logger = logging.getLogger(__name__)
class EidosProcessor(object):
"""This processor extracts INDRA Statements from Eidos JSON-LD output.
Parameters
----------
json_dict : dict
A JSON dictionary containing the Eidos extractions in JSON-LD format.
Attributes
----------
statements : list[indra.statements.Statement]
A list of INDRA Statements that were extracted by the processor.
"""
    def __init__(self, json_dict, grounding_ns=None):
        # Parsed JSON-LD document wrapper used for all lookups below.
        self.doc = EidosDocument(json_dict)
        # Optional whitelist of grounding namespaces to keep (e.g. ['WM']).
        self.grounding_ns = grounding_ns
        # Extracted INDRA Statements accumulate here.
        self.statements = []
def extract_causal_relations(self):
"""Extract causal relations as Statements."""
# Get the extractions that are labeled as directed and causal
relations = [e for e in self.doc.extractions if
'DirectedRelation' in e['labels'] and
'Causal' in e['labels']]
# For each relation, we try to extract an INDRA Statement and
# save it if its valid
for relation in relations:
stmt = self.get_causal_relation(relation)
if stmt is not None:
self.statements.append(stmt)
def extract_correlations(self):
events = [e for e in self.doc.extractions if
'UndirectedRelation' in e['labels'] and
'Correlation' in e['labels']]
for event in events:
# For now, just take the first source and first destination.
# Later, might deal with hypergraph representation.
arg_ids = find_args(event, 'argument')
if len(arg_ids) != 2:
logger.warning('Skipping correlation with not 2 arguments.')
# Resolve coreferences by ID
arg_ids = [self.doc.coreferences.get(arg_id, arg_id)
for arg_id in arg_ids]
# Get the actual entities
args = [self.doc.entities[arg_id] for arg_id in arg_ids]
# Make Events from the entities
members = [self.get_event(arg) for arg in args]
# Get the evidence
evidence = self.get_evidence(event)
st = Association(members, evidence=[evidence])
self.statements.append(st)
def extract_events(self):
events = [e for e in self.doc.extractions if
'Concept-Expanded' in e['labels']]
for event_entry in events:
event = self.get_event(event_entry)
evidence = self.get_evidence(event_entry)
event.evidence = [evidence]
if not event.context and evidence.context:
event.context = copy.deepcopy(evidence.context)
evidence.context = None
self.statements.append(event)
def get_event_by_id(self, event_id):
# Resolve coreferences by ID
event_id = self.doc.coreferences.get(event_id, event_id)
# Get the actual entity
event = self.doc.entities[event_id]
return self.get_event(event)
def get_event(self, event):
concept = self.get_concept(event)
states = event.get('states', [])
extracted_states = self.extract_entity_states(states)
polarity = extracted_states.get('polarity')
adjectives = extracted_states.get('adjectives')
delta = QualitativeDelta(polarity=polarity, adjectives=adjectives)
timex = extracted_states.get('time_context', None)
geo = extracted_states.get('geo_context', None)
context = WorldContext(time=timex, geo_location=geo) \
if timex or geo else None
stmt = Event(concept, delta=delta, context=context)
return stmt
    def get_causal_relation(self, relation):
        """Return an Influence Statement for a directed causal relation.

        Returns None when either the source or the destination argument is
        missing from the relation.
        """
        # For now, just take the first source and first destination.
        # Later, might deal with hypergraph representation.
        subj_id = find_arg(relation, 'source')
        obj_id = find_arg(relation, 'destination')
        if subj_id is None or obj_id is None:
            return None
        subj = self.get_event_by_id(subj_id)
        obj = self.get_event_by_id(obj_id)
        evidence = self.get_evidence(relation)
        # We also put the adjectives and polarities into annotations since
        # they could otherwise get squashed upon preassembly
        evidence.annotations['subj_polarity'] = subj.delta.polarity
        evidence.annotations['obj_polarity'] = obj.delta.polarity
        evidence.annotations['subj_adjectives'] = subj.delta.adjectives
        evidence.annotations['obj_adjectives'] = obj.delta.adjectives
        evidence.annotations['subj_context'] = subj.context.to_json() if \
            subj.context else {}
        evidence.annotations['obj_context'] = obj.context.to_json() if \
            obj.context else {}
        st = Influence(subj, obj, evidence=[evidence])
        return st
    def get_evidence(self, relation):
        """Return the Evidence object for the INDRA Statement."""
        provenance = relation.get('provenance')
        # First try looking up the full sentence through provenance
        text = None
        context = None
        if provenance:
            sentence_tag = provenance[0].get('sentence')
            if sentence_tag and '@id' in sentence_tag:
                sentence_id = sentence_tag['@id']
                sentence = self.doc.sentences.get(sentence_id)
                if sentence is not None:
                    text = _sanitize(sentence['text'])
                # Here we try to get the title of the document and set it
                # in the provenance
                doc_id = provenance[0].get('document', {}).get('@id')
                if doc_id:
                    title = self.doc.documents.get(doc_id, {}).get('title')
                    if title:
                        provenance[0]['document']['title'] = title
        annotations = {'found_by': relation.get('rule'),
                       'provenance': provenance}
        # Record the document creation time (if known) for later dating.
        if self.doc.dct is not None:
            annotations['document_creation_time'] = self.doc.dct.to_json()
        epistemics = {}
        negations = self.get_negation(relation)
        hedgings = self.get_hedging(relation)
        if hedgings:
            epistemics['hedgings'] = hedgings
        if negations:
            # This is the INDRA standard to show negation
            epistemics['negated'] = True
            # But we can also save the texts associated with the negation
            # under annotations, just in case it's needed
            annotations['negated_texts'] = negations
        # If that fails, we can still get the text of the relation
        if text is None:
            text = _sanitize(relation.get('text'))
        ev = Evidence(source_api='eidos', text=text, annotations=annotations,
                      context=context, epistemics=epistemics)
        return ev
@staticmethod
def get_negation(event):
"""Return negation attached to an event.
Example: "states": [{"@type": "State", "type": "NEGATION",
"text": "n't"}]
"""
states = event.get('states', [])
if not states:
return []
negs = [state for state in states
if state.get('type') == 'NEGATION']<|fim▁hole|> @staticmethod
    def get_hedging(event):
        """Return the texts of hedging markers attached to an event.

        Example: "states": [{"@type": "State", "type": "HEDGE",
                             "text": "could"}
        """
        states = event.get('states', [])
        if not states:
            return []
        # Keep only HEDGE-typed states and extract their surface text.
        hedgings = [state for state in states
                    if state.get('type') == 'HEDGE']
        hedging_texts = [hedging['text'] for hedging in hedgings]
        return hedging_texts
    def extract_entity_states(self, states):
        """Collect polarity, adjectives and time/geo context from states.

        Returns a dict with keys 'polarity', 'adjectives', 'time_context'
        and 'geo_context'. Only the first DEC/INC state determines polarity.
        """
        if states is None:
            return {'polarity': None, 'adjectives': []}
        polarity = None
        adjectives = []
        time_context = None
        geo_context = None
        for state in states:
            if polarity is None:
                if state['type'] == 'DEC':
                    polarity = -1
                    # Handle None entry here
                    mods = state.get('modifiers') if \
                        state.get('modifiers') else []
                    adjectives += [mod['text'] for mod in mods]
                elif state['type'] == 'INC':
                    polarity = 1
                    mods = state.get('modifiers') if \
                        state.get('modifiers') else []
                    adjectives += [mod['text'] for mod in mods]
                elif state['type'] == 'QUANT':
                    adjectives.append(state['text'])
            if state['type'] == 'TIMEX':
                time_context = self.time_context_from_ref(state)
            elif state['type'] == 'LocationExp':
                # TODO: here we take only the first geo_context occurrence.
                # Eidos sometimes provides a list of locations, it may
                # make sense to break those up into multiple statements
                # each with one location
                if not geo_context:
                    geo_context = self.geo_context_from_ref(state)
        return {'polarity': polarity, 'adjectives': adjectives,
                'time_context': time_context, 'geo_context': geo_context}
    def get_groundings(self, entity):
        """Return groundings as db_refs for an entity.

        The raw text always goes under 'TEXT'; each Eidos grounding becomes
        a list of (ontology concept, score) tuples keyed by the upper-cased
        grounding name, optionally filtered by self.grounding_ns.
        """
        def get_grounding_entries(grounding):
            # Turn one grounding block into a list of (concept, score)
            # tuples, skipping incomplete entries.
            if not grounding:
                return None
            entries = []
            values = grounding.get('values', [])
            # Values could still have been a None entry here
            if values:
                for entry in values:
                    ont_concept = entry.get('ontologyConcept')
                    value = entry.get('value')
                    if ont_concept is None or value is None:
                        continue
                    entries.append((ont_concept, value))
            return entries
        # Save raw text and Eidos scored groundings as db_refs
        db_refs = {'TEXT': entity['text']}
        groundings = entity.get('groundings')
        if not groundings:
            return db_refs
        for g in groundings:
            entries = get_grounding_entries(g)
            # Only add these groundings if there are actual values listed
            if entries:
                key = g['name'].upper()
                if self.grounding_ns is not None and \
                        key not in self.grounding_ns:
                    continue
                if key == 'UN':
                    # UN ontology paths use underscores instead of spaces.
                    db_refs[key] = [(s[0].replace(' ', '_'), s[1])
                                    for s in entries]
                elif key == 'WM_FLATTENED' or key == 'WM':
                    # Both flattened and regular WM groundings are stored
                    # under the single 'WM' key, with leading/trailing
                    # slashes stripped.
                    db_refs['WM'] = [(s[0].strip('/'), s[1])
                                     for s in entries]
                else:
                    db_refs[key] = entries
        return db_refs
def get_concept(self, entity):
"""Return Concept from an Eidos entity."""
# Use the canonical name as the name of the Concept
name = entity['canonicalName']
db_refs = self.get_groundings(entity)
concept = Concept(name, db_refs=db_refs)
return concept
def time_context_from_ref(self, timex):
"""Return a time context object given a timex reference entry."""
# If the timex has a value set, it means that it refers to a DCT or
# a TimeExpression e.g. "value": {"@id": "_:DCT_1"} and the parameters
# need to be taken from there
value = timex.get('value')
if value:
# Here we get the TimeContext directly from the stashed DCT
# dictionary
tc = self.doc.timexes.get(value['@id'])
return tc
return None
def geo_context_from_ref(self, ref):
"""Return a ref context object given a location reference entry."""
value = ref.get('value')
if value:
# Here we get the RefContext from the stashed geoloc dictionary
rc = self.doc.geolocs.get(value['@id'])
return rc
return None
def get_all_events(self):
"""Return a list of all standalone events from a list
of statements."""
events = []
for stmt in self.statements:
stmt = copy.deepcopy(stmt)
if isinstance(stmt, Influence):
for member in [stmt.subj, stmt.obj]:
member.evidence = stmt.evidence[:]
# Remove the context since it may be for the other member
for ev in member.evidence:
ev.context = None
events.append(member)
elif isinstance(stmt, Association):
for member in stmt.members:
member.evidence = stmt.evidence[:]
# Remove the context since it may be for the other member
for ev in member.evidence:
ev.context = None
events.append(member)
elif isinstance(stmt, Event):
events.append(stmt)
return events
class EidosDocument(object):
    """Index over an Eidos JSON-LD document for fast lookup.

    On construction this walks the document once and builds dictionaries of
    extractions, entities, sentences, documents, coreferences, timexes and
    geolocations, plus the document creation time (DCT) if present.
    """
    def __init__(self, json_dict):
        self.tree = objectpath.Tree(json_dict)
        self.extractions = []
        self.sentences = {}
        self.entities = {}
        self.documents = {}
        self.coreferences = {}
        self.timexes = {}
        self.geolocs = {}
        self.dct = None
        self._preprocess_extractions()
    def _preprocess_extractions(self):
        # Populate the lookup dictionaries from the JSON-LD tree.
        extractions = \
            self.tree.execute("$.extractions[(@.@type is 'Extraction')]")
        if not extractions:
            return
        # Listify for multiple reuse
        self.extractions = list(extractions)
        # Build a dictionary of entities
        entities = [e for e in self.extractions if 'Concept' in
                    e.get('labels', [])]
        self.entities = {entity['@id']: entity for entity in entities}
        # Build a dictionary of sentences and document creation times (DCTs)
        documents = self.tree.execute("$.documents[(@.@type is 'Document')]")
        self.sentences = {}
        for document in documents:
            dct = document.get('dct')
            title = document.get('title')
            self.documents[document['@id']] = {'title': title}
            # We stash the DCT here as a TimeContext object
            if dct is not None:
                self.dct = self.time_context_from_dct(dct)
                self.timexes[dct['@id']] = self.dct
            sentences = document.get('sentences', [])
            for sent in sentences:
                self.sentences[sent['@id']] = sent
                # Index time expressions and geolocations per sentence so
                # entity states can refer to them by @id later.
                timexes = sent.get('timexes')
                if timexes:
                    for timex in timexes:
                        tc = time_context_from_timex(timex)
                        self.timexes[timex['@id']] = tc
                geolocs = sent.get('geolocs')
                if geolocs:
                    for geoloc in geolocs:
                        rc = ref_context_from_geoloc(geoloc)
                        self.geolocs[geoloc['@id']] = rc
        # Build a dictionary of coreferences
        for extraction in self.extractions:
            if 'Coreference' in extraction['labels']:
                reference = find_arg(extraction, 'reference')
                anchor = find_arg(extraction, 'anchor')
                self.coreferences[reference] = anchor
    @staticmethod
    def time_context_from_dct(dct):
        """Return a time context object given a DCT entry."""
        time_text = dct.get('text')
        start = _get_time_stamp(dct.get('start'))
        end = _get_time_stamp(dct.get('end'))
        duration = _get_duration(start, end)
        tc = TimeContext(text=time_text, start=start, end=end,
                         duration=duration)
        return tc
def _sanitize(text):
"""Return sanitized Eidos text field for human readability."""
d = {'-LRB-': '(', '-RRB-': ')'}
return re.sub('|'.join(d.keys()), lambda m: d[m.group(0)], text)
def _get_time_stamp(entry):
"""Return datetime object from a timex constraint start/end entry.
Example string format to convert: 2018-01-01T00:00
"""
if not entry or entry == 'Undef':
return None
try:
dt = datetime.datetime.strptime(entry, '%Y-%m-%dT%H:%M')
except Exception as e:
logger.debug('Could not parse %s format' % entry)
return None
return dt
def _get_duration(start, end):
if not start or not end:
return None
try:
duration = int((end - start).total_seconds())
except Exception as e:
logger.debug('Failed to get duration from %s and %s' %
(str(start), str(end)))
duration = None
return duration
def ref_context_from_geoloc(geoloc):
    """Return a RefContext object built from a geoloc entry."""
    name = geoloc.get('text')
    geo_id = geoloc.get('geoID')
    # The GeoNames-style identifier is stored under the GEOID key.
    return RefContext(name=name, db_refs={'GEOID': geo_id})
def time_context_from_timex(timex):
    """Return a TimeContext object built from a timex entry.

    Only the first interval is used; when no intervals are present the
    context carries just the surface text.
    """
    intervals = timex.get('intervals')
    if intervals:
        first = intervals[0]
        start = _get_time_stamp(first.get('start'))
        end = _get_time_stamp(first.get('end'))
        duration = _get_duration(start, end)
    else:
        start = end = duration = None
    return TimeContext(text=timex.get('text'), start=start, end=end,
                       duration=duration)
def find_arg(event, arg_type):
    """Return the ID of the first argument of the given type, or None."""
    matches = find_args(event, arg_type)
    return matches[0] if matches else None
def find_args(event, arg_type):
    """Return the IDs of all arguments of the given type."""
    matching = [arg for arg in event.get('arguments', {})
                if arg['type'] == arg_type]
    return [arg['value']['@id'] for arg in matching]
|
neg_texts = [neg['text'] for neg in negs]
return neg_texts
|
<|file_name|>GitHubCommit.ts<|end_file_name|><|fim▁begin|>module OctoBoot.model {
export interface GitHubCommit {
url: string,
sha: string,
html_url: string,
comments_url: string,
commit: {
url: string,
author: {
name: string,
email: string,
date: string
},
committer: {
name: string,
email: string,
date: string<|fim▁hole|> tree: {
url: string,
sha: string
},
comment_count: number,
verification: {
verified: boolean,
reason: string,
signature: string,
payload: string
}
},
author: {
login: string,
id: number,
avatar_url: string,
gravatar_id: string,
url: string,
html_url: string,
followers_url: string,
following_url: string,
gists_url: string,
starred_url: string,
subscriptions_url: string,
organizations_url: string,
repos_url: string,
events_url: string,
received_events_url: string,
type: string,
site_admin: boolean
},
committer: {
login: string,
id: number,
avatar_url: string,
gravatar_id: string,
url: string,
html_url: string,
followers_url: string,
following_url: string,
gists_url: string,
starred_url: string,
subscriptions_url: string,
organizations_url: string,
repos_url: string,
events_url: string,
received_events_url: string,
type: string,
site_admin: boolean
},
parents: [
{
url: string,
sha: string
}
]
}
}<|fim▁end|>
|
},
message: string,
|
<|file_name|>RuleRemovalContext.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2008 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Created on Feb 6, 2008
*/
package org.drools.reteoo;
import java.io.Serializable;
import java.io.Externalizable;
import java.io.ObjectOutput;
import java.io.IOException;
import java.io.ObjectInput;
import java.util.HashMap;
import java.util.Map;
import org.drools.common.BaseNode;
/**
* This context class is used during rule removal to ensure
* network consistency.
*
* @author etirelli
*
*/
public class RuleRemovalContext
implements
Externalizable {
private Map visitedNodes;<|fim▁hole|> public RuleRemovalContext() {
this.visitedNodes = new HashMap();
}
    /**
     * Restores the visited-nodes map from the serialized stream.
     */
    public void readExternal(ObjectInput in) throws IOException,
                                            ClassNotFoundException {
        visitedNodes = (Map) in.readObject();
    }
    /**
     * Serializes the visited-nodes map to the stream.
     */
    public void writeExternal(ObjectOutput out) throws IOException {
        out.writeObject( visitedNodes );
    }
/**
* We need to track tuple source nodes that we visit
* to avoid multiple removal in case of subnetworks
*
* @param node
*/
public void visitTupleSource(LeftTupleSource node) {
this.visitedNodes.put( new Integer( node.getId() ),
node );
}
/**
* We need to track tuple source nodes that we visit
* to avoid multiple removal in case of subnetworks
*
* @param node
* @return
*/
public boolean alreadyVisited(LeftTupleSource node) {
return this.visitedNodes.containsKey( new Integer( node.getId() ) );
}
    /** Forgets all visited nodes so this context can be reused. */
    public void clear() {
        this.visitedNodes.clear();
    }
}<|fim▁end|>
| |
<|file_name|>with_asan.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env vpython
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import contextlib
import logging
import os
import subprocess
import sys
_SRC_ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), '..', '..', '..', '..'))
sys.path.append(os.path.join(_SRC_ROOT, 'third_party', 'catapult', 'devil'))
from devil import base_error
from devil.android import device_utils
from devil.android.sdk import adb_wrapper
from devil.android.sdk import version_codes
from devil.utils import logging_common
sys.path.append(os.path.join(_SRC_ROOT, 'build', 'android'))
import devil_chromium
_SCRIPT_PATH = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'asan_device_setup.sh'))
<|fim▁hole|>@contextlib.contextmanager
def _LogDevicesOnFailure(msg):
try:
yield
except base_error.BaseError:
logging.exception(msg)
logging.error('Devices visible to adb:')
for entry in adb_wrapper.AdbWrapper.Devices(desired_state=None,
long_list=True):
logging.error(' %s: %s',
entry[0].GetDeviceSerial(),
' '.join(entry[1:]))
raise
@contextlib.contextmanager
def Asan(args):
  """Installs the ASAN runtime on a device for the duration of the context.

  Runs asan_device_setup.sh on entry and reverts it on exit. On Marshmallow
  and newer devices verity is disabled first (and re-enabled on exit), since
  the setup script remounts /system.

  Args:
    args: argparse.Namespace with .adb, .device and .lib attributes.
  """
  env = os.environ.copy()
  env['ADB'] = args.adb

  # Initialized up front so the finally block can tell how far setup got.
  # Previously, a failure before these were assigned (e.g. no healthy
  # device found) raised NameError during teardown, masking the real error.
  device = None
  disable_verity = False
  try:
    with _LogDevicesOnFailure('Failed to set up the device.'):
      device = device_utils.DeviceUtils.HealthyDevices(
          device_arg=args.device)[0]
      disable_verity = device.build_version_sdk >= version_codes.MARSHMALLOW
      if disable_verity:
        device.EnableRoot()
        # TODO(crbug.com/790202): Stop logging output after diagnosing
        # issues on android-asan.
        verity_output = device.adb.DisableVerity()
        if verity_output:
          logging.info('disable-verity output:')
          for line in verity_output.splitlines():
            logging.info('  %s', line)
        device.Reboot()

      # Call EnableRoot prior to asan_device_setup.sh to ensure it doesn't
      # get tripped up by the root timeout.
      device.EnableRoot()
      setup_cmd = [_SCRIPT_PATH, '--lib', args.lib]
      if args.device:
        setup_cmd += ['--device', args.device]
      subprocess.check_call(setup_cmd, env=env)

    yield
  finally:
    # Nothing to revert if setup never got as far as finding a device.
    if device is not None:
      with _LogDevicesOnFailure('Failed to tear down the device.'):
        device.EnableRoot()
        teardown_cmd = [_SCRIPT_PATH, '--revert']
        if args.device:
          teardown_cmd += ['--device', args.device]
        subprocess.check_call(teardown_cmd, env=env)
        if disable_verity:
          # TODO(crbug.com/790202): Stop logging output after diagnosing
          # issues on android-asan.
          verity_output = device.adb.EnableVerity()
          if verity_output:
            logging.info('enable-verity output:')
            for line in verity_output.splitlines():
              logging.info('  %s', line)
          device.Reboot()
def main(raw_args):
  """Runs a command on a device with the ASAN runtime installed.

  Args:
    raw_args: Command-line arguments, excluding the program name.

  Returns:
    The command's exit code, or 0 if no command was given.
  """
  parser = argparse.ArgumentParser()
  logging_common.AddLoggingArguments(parser)
  parser.add_argument(
      '--adb', type=os.path.realpath, required=True,
      help='Path to adb binary.')
  parser.add_argument(
      '--device',
      help='Device serial.')
  parser.add_argument(
      '--lib', type=os.path.realpath, required=True,
      help='Path to asan library.')
  parser.add_argument(
      'command', nargs='*',
      help='Command to run with ASAN installed.')
  # BUG FIX: parse_args() previously ignored raw_args and always read
  # sys.argv, which broke programmatic invocations of main().
  args = parser.parse_args(raw_args)

  # TODO(crbug.com/790202): Remove this after diagnosing issues
  # with android-asan.
  if not args.quiet:
    args.verbose += 1

  logging_common.InitializeLogging(args)
  devil_chromium.Initialize(adb_path=args.adb)

  with Asan(args):
    if args.command:
      return subprocess.call(args.command)

  return 0


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
| |
<|file_name|>formatter-mode.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*/
/**
 * Possible modes of formatters.
 *
 * @author Florent Benoit
 */
export type FormatterMode = 'MODERN' | 'CSV';
|
<|file_name|>ProfilesPanel.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008 Apple Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
 * Base class for all profile types (CPU, heap snapshot, canvas, ...).
 * Concrete subclasses produce WebInspector.ProfileHeader instances and
 * notify listeners through WebInspector.ProfileType.Events.
 *
 * @constructor
 * @extends {WebInspector.Object}
 * @param {string} id
 * @param {string} name
 */
WebInspector.ProfileType = function(id, name)
{
    WebInspector.Object.call(this);
    this._id = id;
    this._name = name;
    /** @type {!Array.<!WebInspector.ProfileHeader>} */
    this._profiles = [];
    /** @type {?WebInspector.ProfileHeader} */
    this._profileBeingRecorded = null;
    // Profile uids are assigned per type, starting at 1 (see ProfileHeader).
    this._nextProfileUid = 1;
    // Temp files backing profiles must not outlive the frontend session.
    window.addEventListener("unload", this._clearTempStorage.bind(this), false);
}

/**
 * Events fired as profiles are added, completed or removed.
 * @enum {string}
 */
WebInspector.ProfileType.Events = {
    AddProfileHeader: "add-profile-header",
    ProfileComplete: "profile-complete",
    RemoveProfileHeader: "remove-profile-header",
    ViewUpdated: "view-updated"
}
WebInspector.ProfileType.prototype = {
    /**
     * True if the type shows a live view while recording is in progress.
     * @return {boolean}
     */
    hasTemporaryView: function()
    {
        return false;
    },

    /**
     * File extension used for save/load of this profile type, or null if
     * file round-tripping is not supported.
     * @return {?string}
     */
    fileExtension: function()
    {
        return null;
    },

    get statusBarItems()
    {
        return [];
    },

    get buttonTooltip()
    {
        return "";
    },

    get id()
    {
        return this._id;
    },

    get treeItemTitle()
    {
        return this._name;
    },

    get name()
    {
        return this._name;
    },

    /**
     * @return {boolean} whether recording was started by the click
     */
    buttonClicked: function()
    {
        return false;
    },

    get description()
    {
        return "";
    },

    /**
     * @return {boolean}
     */
    isInstantProfile: function()
    {
        return false;
    },

    /**
     * @return {boolean}
     */
    isEnabled: function()
    {
        return true;
    },

    /**
     * Returns all finished profiles; the profile currently being recorded
     * (if any) is excluded.
     * @return {!Array.<!WebInspector.ProfileHeader>}
     */
    getProfiles: function()
    {
        /**
         * @param {!WebInspector.ProfileHeader} profile
         * @return {boolean}
         * @this {WebInspector.ProfileType}
         */
        function isFinished(profile)
        {
            return this._profileBeingRecorded !== profile;
        }
        return this._profiles.filter(isFinished.bind(this));
    },

    /**
     * @return {?Element}
     */
    decorationElement: function()
    {
        return null;
    },

    /**
     * @nosideeffects
     * @param {number} uid
     * @return {?WebInspector.ProfileHeader} the profile with that uid, or null
     */
    getProfile: function(uid)
    {
        for (var i = 0; i < this._profiles.length; ++i) {
            if (this._profiles[i].uid === uid)
                return this._profiles[i];
        }
        return null;
    },

    /**
     * Creates a profile header for |file| (stripping the type's extension
     * from the title) and starts loading its contents.
     * @param {!File} file
     */
    loadFromFile: function(file)
    {
        var name = file.name;
        if (name.endsWith(this.fileExtension()))
            name = name.substr(0, name.length - this.fileExtension().length);
        var profile = this.createProfileLoadedFromFile(name);
        profile.setFromFile();
        this.setProfileBeingRecorded(profile);
        this.addProfile(profile);
        profile.loadFromFile(file);
    },

    /**
     * Must be implemented by subclasses.
     * @param {string} title
     * @return {!WebInspector.ProfileHeader}
     */
    createProfileLoadedFromFile: function(title)
    {
        throw new Error("Needs implemented.");
    },

    /**
     * @param {!WebInspector.ProfileHeader} profile
     */
    addProfile: function(profile)
    {
        this._profiles.push(profile);
        this.dispatchEventToListeners(WebInspector.ProfileType.Events.AddProfileHeader, profile);
    },

    /**
     * @param {!WebInspector.ProfileHeader} profile
     */
    removeProfile: function(profile)
    {
        var index = this._profiles.indexOf(profile);
        if (index === -1)
            return;
        this._profiles.splice(index, 1);
        this._disposeProfile(profile);
    },

    // Invoked on window unload: deletes the temp files backing the profiles.
    _clearTempStorage: function()
    {
        for (var i = 0; i < this._profiles.length; ++i)
            this._profiles[i].removeTempFile();
    },

    /**
     * @nosideeffects
     * @return {?WebInspector.ProfileHeader}
     */
    profileBeingRecorded: function()
    {
        return this._profileBeingRecorded;
    },

    /**
     * Transfers the target's profiling lock from the previous recording
     * (if any) to |profile|.
     * @param {?WebInspector.ProfileHeader} profile
     */
    setProfileBeingRecorded: function(profile)
    {
        if (this._profileBeingRecorded)
            this._profileBeingRecorded.target().profilingLock.release();
        if (profile)
            profile.target().profilingLock.acquire();
        this._profileBeingRecorded = profile;
    },

    // Hook for subclasses: called when the in-progress profile is removed.
    profileBeingRecordedRemoved: function()
    {
    },

    // Disposes all profiles and resets uid numbering (panel-wide reset).
    _reset: function()
    {
        var profiles = this._profiles.slice(0);
        for (var i = 0; i < profiles.length; ++i)
            this._disposeProfile(profiles[i]);
        this._profiles = [];
        this._nextProfileUid = 1;
    },

    /**
     * Fires RemoveProfileHeader before disposing so listeners can still
     * inspect the header, then clears the recording state if needed.
     * @param {!WebInspector.ProfileHeader} profile
     */
    _disposeProfile: function(profile)
    {
        this.dispatchEventToListeners(WebInspector.ProfileType.Events.RemoveProfileHeader, profile);
        profile.dispose();
        if (this._profileBeingRecorded === profile) {
            this.profileBeingRecordedRemoved();
            this.setProfileBeingRecorded(null);
        }
    },

    __proto__: WebInspector.Object.prototype
}
/**
 * Interface implemented by UI components (e.g. ProfilesPanel) that can
 * display profile data on behalf of sidebar elements and profile types.
 *
 * @interface
 */
WebInspector.ProfileType.DataDisplayDelegate = function()
{
}

WebInspector.ProfileType.DataDisplayDelegate.prototype = {
    /**
     * @param {?WebInspector.ProfileHeader} profile
     * @return {?WebInspector.View} the view shown, or null if none
     */
    showProfile: function(profile) { },

    /**
     * @param {!HeapProfilerAgent.HeapSnapshotObjectId} snapshotObjectId
     * @param {string} perspectiveName
     */
    showObject: function(snapshotObjectId, perspectiveName) { }
}
/**
 * Metadata and lifecycle holder for a single recorded (or loaded) profile.
 *
 * @constructor
 * @extends {WebInspector.TargetAwareObject}
 * @param {!WebInspector.Target} target
 * @param {!WebInspector.ProfileType} profileType
 * @param {string} title
 */
WebInspector.ProfileHeader = function(target, profileType, title)
{
    WebInspector.TargetAwareObject.call(this, target);
    this._profileType = profileType;
    this.title = title;
    // Uids are unique per profile type, not globally.
    this.uid = profileType._nextProfileUid++;
    this._fromFile = false;
}

/**
 * Payload of the UpdateStatus event.
 *
 * @constructor
 * @param {?string} subtitle
 * @param {boolean|undefined} wait  whether to show a spinner
 */
WebInspector.ProfileHeader.StatusUpdate = function(subtitle, wait)
{
    /** @type {?string} */
    this.subtitle = subtitle;
    /** @type {boolean|undefined} */
    this.wait = wait;
}

WebInspector.ProfileHeader.Events = {
    UpdateStatus: "UpdateStatus",
    ProfileReceived: "ProfileReceived"
}
WebInspector.ProfileHeader.prototype = {
    /**
     * @return {!WebInspector.ProfileType}
     */
    profileType: function()
    {
        return this._profileType;
    },

    /**
     * Broadcasts a status change (subtitle text / spinner) to listeners.
     * @param {?string} subtitle
     * @param {boolean=} wait
     */
    updateStatus: function(subtitle, wait)
    {
        this.dispatchEventToListeners(WebInspector.ProfileHeader.Events.UpdateStatus, new WebInspector.ProfileHeader.StatusUpdate(subtitle, wait));
    },

    /**
     * Must be implemented by subclasses.
     * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate
     * @return {!WebInspector.ProfileSidebarTreeElement}
     */
    createSidebarTreeElement: function(dataDisplayDelegate)
    {
        throw new Error("Needs implemented.");
    },

    /**
     * Must be implemented by subclasses.
     * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate
     * @return {!WebInspector.View}
     */
    createView: function(dataDisplayDelegate)
    {
        throw new Error("Not implemented.");
    },

    // Deletes the temp file backing this profile, if one was created.
    removeTempFile: function()
    {
        if (this._tempFile)
            this._tempFile.remove();
    },

    dispose: function()
    {
    },

    /**
     * @param {!Function} callback
     */
    load: function(callback)
    {
    },

    /**
     * @return {boolean}
     */
    canSaveToFile: function()
    {
        return false;
    },

    saveToFile: function()
    {
        throw new Error("Needs implemented");
    },

    /**
     * @param {!File} file
     */
    loadFromFile: function(file)
    {
        throw new Error("Needs implemented");
    },

    /**
     * @return {boolean} true if this profile was loaded from disk rather
     *     than recorded in this session
     */
    fromFile: function()
    {
        return this._fromFile;
    },

    setFromFile: function()
    {
        this._fromFile = true;
    },

    __proto__: WebInspector.TargetAwareObject.prototype
}
/**
 * The Profiles panel: hosts the launcher view, the per-type sidebar
 * sections, and the currently visible profile view. Note: the statement
 * order below builds the DOM incrementally and is intentional.
 *
 * @constructor
 * @implements {WebInspector.Searchable}
 * @implements {WebInspector.ProfileType.DataDisplayDelegate}
 * @extends {WebInspector.PanelWithSidebarTree}
 */
WebInspector.ProfilesPanel = function()
{
    WebInspector.PanelWithSidebarTree.call(this, "profiles");
    this.registerRequiredCSS("panelEnablerView.css");
    this.registerRequiredCSS("heapProfiler.css");
    this.registerRequiredCSS("profilesPanel.css");

    this._target = /** @type {!WebInspector.Target} */ (WebInspector.targetManager.activeTarget());
    // Keeps the record button in sync with other profilers holding the lock.
    this._target.profilingLock.addEventListener(WebInspector.Lock.Events.StateChanged, this._onProfilingStateChanged, this);

    this._searchableView = new WebInspector.SearchableView(this);

    var mainView = new WebInspector.VBox();
    this._searchableView.show(mainView.element);
    mainView.show(this.mainElement());

    this.profilesItemTreeElement = new WebInspector.ProfilesSidebarTreeElement(this);
    this.sidebarTree.appendChild(this.profilesItemTreeElement);

    this.profileViews = document.createElement("div");
    this.profileViews.id = "profile-views";
    this.profileViews.classList.add("vbox");
    this._searchableView.element.appendChild(this.profileViews);

    var statusBarContainer = document.createElementWithClass("div", "profiles-status-bar");
    mainView.element.insertBefore(statusBarContainer, mainView.element.firstChild);
    this._statusBarElement = statusBarContainer.createChild("div", "status-bar");

    this.sidebarElement().classList.add("profiles-sidebar-tree-box");
    var statusBarContainerLeft = document.createElementWithClass("div", "profiles-status-bar");
    this.sidebarElement().insertBefore(statusBarContainerLeft, this.sidebarElement().firstChild);
    this._statusBarButtons = statusBarContainerLeft.createChild("div", "status-bar");

    this.recordButton = new WebInspector.StatusBarButton("", "record-profile-status-bar-item");
    this.recordButton.addEventListener("click", this.toggleRecordButton, this);
    this._statusBarButtons.appendChild(this.recordButton.element);

    this.clearResultsButton = new WebInspector.StatusBarButton(WebInspector.UIString("Clear all profiles."), "clear-status-bar-item");
    this.clearResultsButton.addEventListener("click", this._reset, this);
    this._statusBarButtons.appendChild(this.clearResultsButton.element);

    this._profileTypeStatusBarItemsContainer = this._statusBarElement.createChild("div");
    this._profileViewStatusBarItemsContainer = this._statusBarElement.createChild("div");

    this._profileGroups = {};
    this._launcherView = new WebInspector.MultiProfileLauncherView(this);
    this._launcherView.addEventListener(WebInspector.MultiProfileLauncherView.EventTypes.ProfileTypeSelected, this._onProfileTypeSelected, this);

    // Parallel arrays/maps caching views and sidebar sections per profile.
    this._profileToView = [];
    this._typeIdToSidebarSection = {};
    var types = WebInspector.ProfileTypeRegistry.instance.profileTypes();
    for (var i = 0; i < types.length; i++)
        this._registerProfileType(types[i]);
    this._launcherView.restoreSelectedProfileType();
    this.profilesItemTreeElement.select();
    this._showLauncherView();

    this._createFileSelectorElement();
    this.element.addEventListener("contextmenu", this._handleContextMenuEvent.bind(this), true);
    this._registerShortcuts();

    this._configureCpuProfilerSamplingInterval();
    WebInspector.settings.highResolutionCpuProfiling.addChangeListener(this._configureCpuProfilerSamplingInterval, this);
}
/**
 * Registry of the available profile types.
 * NOTE(review): code elsewhere refers to WebInspector.ProfileTypeRegistry.instance,
 * which is presumably assigned outside this chunk — confirm against the full file.
 *
 * @constructor
 */
WebInspector.ProfileTypeRegistry = function() {
    this._profileTypes = [];

    this.cpuProfileType = new WebInspector.CPUProfileType();
    this._addProfileType(this.cpuProfileType);
    this.heapSnapshotProfileType = new WebInspector.HeapSnapshotProfileType();
    this._addProfileType(this.heapSnapshotProfileType);
    this.trackingHeapSnapshotProfileType = new WebInspector.TrackingHeapSnapshotProfileType();
    this._addProfileType(this.trackingHeapSnapshotProfileType);
    HeapProfilerAgent.enable();

    // Canvas profiling is experimental and only available in the main frontend.
    if (Capabilities.isMainFrontend && WebInspector.experimentsSettings.canvasInspection.isEnabled()) {
        this.canvasProfileType = new WebInspector.CanvasProfileType();
        this._addProfileType(this.canvasProfileType);
    }
}

WebInspector.ProfileTypeRegistry.prototype = {
    /**
     * @param {!WebInspector.ProfileType} profileType
     */
    _addProfileType: function(profileType)
    {
        this._profileTypes.push(profileType);
    },

    /**
     * @return {!Array.<!WebInspector.ProfileType>}
     */
    profileTypes: function()
    {
        return this._profileTypes;
    }
}
WebInspector.ProfilesPanel.prototype = {
    /**
     * @return {!WebInspector.SearchableView}
     */
    searchableView: function()
    {
        return this._searchableView;
    },

    // (Re)creates the hidden <input type=file> used for "Load..." actions.
    // Recreated after every load because a file input cannot be reused.
    _createFileSelectorElement: function()
    {
        if (this._fileSelectorElement)
            this.element.removeChild(this._fileSelectorElement);
        this._fileSelectorElement = WebInspector.createFileSelectorElement(this._loadFromFile.bind(this));
        this.element.appendChild(this._fileSelectorElement);
    },

    /**
     * @param {string} fileName
     * @return {?WebInspector.ProfileType} the type whose file extension matches
     */
    _findProfileTypeByExtension: function(fileName)
    {
        var types = WebInspector.ProfileTypeRegistry.instance.profileTypes();
        for (var i = 0; i < types.length; i++) {
            var type = types[i];
            var extension = type.fileExtension();
            if (!extension)
                continue;
            if (fileName.endsWith(type.fileExtension()))
                return type;
        }
        return null;
    },

    _registerShortcuts: function()
    {
        this.registerShortcuts(WebInspector.ShortcutsScreen.ProfilesPanelShortcuts.StartStopRecording, this.toggleRecordButton.bind(this));
    },

    // Applies the high-resolution CPU profiling setting (100us vs 1ms).
    _configureCpuProfilerSamplingInterval: function()
    {
        var intervalUs = WebInspector.settings.highResolutionCpuProfiling.get() ? 100 : 1000;
        ProfilerAgent.setSamplingInterval(intervalUs, didChangeInterval);
        function didChangeInterval(error)
        {
            if (error)
                WebInspector.messageSink.addErrorMessage(error, true);
        }
    },

    /**
     * @param {!File} file
     */
    _loadFromFile: function(file)
    {
        this._createFileSelectorElement();

        var profileType = this._findProfileTypeByExtension(file.name);
        if (!profileType) {
            var extensions = [];
            var types = WebInspector.ProfileTypeRegistry.instance.profileTypes();
            for (var i = 0; i < types.length; i++) {
                var extension = types[i].fileExtension();
                if (!extension || extensions.indexOf(extension) !== -1)
                    continue;
                extensions.push(extension);
            }
            WebInspector.messageSink.addMessage(WebInspector.UIString("Can't load file. Only files with extensions '%s' can be loaded.", extensions.join("', '")));
            return;
        }

        if (!!profileType.profileBeingRecorded()) {
            WebInspector.messageSink.addMessage(WebInspector.UIString("Can't load profile while another profile is recording."));
            return;
        }

        profileType.loadFromFile(file);
    },

    /**
     * Starts or stops recording for the selected profile type.
     * @return {boolean} true (the shortcut/click is always consumed)
     */
    toggleRecordButton: function()
    {
        if (!this.recordButton.enabled())
            return true;
        var type = this._selectedProfileType;
        var isProfiling = type.buttonClicked();
        this._updateRecordButton(isProfiling);
        if (isProfiling) {
            this._launcherView.profileStarted();
            if (type.hasTemporaryView())
                this.showProfile(type.profileBeingRecorded());
        } else {
            this._launcherView.profileFinished();
        }
        return true;
    },

    _onProfilingStateChanged: function()
    {
        this._updateRecordButton(this.recordButton.toggled);
    },

    /**
     * Enables/disables the record button depending on whether another
     * profiler currently holds the target's profiling lock.
     * @param {boolean} toggled
     */
    _updateRecordButton: function(toggled)
    {
        var enable = toggled || !this._target.profilingLock.isAcquired();
        this.recordButton.setEnabled(enable);
        this.recordButton.toggled = toggled;
        if (enable)
            this.recordButton.title = this._selectedProfileType ? this._selectedProfileType.buttonTooltip : "";
        else
            this.recordButton.title = WebInspector.UIString("Another profiler is already active");
        if (this._selectedProfileType)
            this._launcherView.updateProfileType(this._selectedProfileType, enable);
    },

    _profileBeingRecordedRemoved: function()
    {
        this._updateRecordButton(false);
        this._launcherView.profileFinished();
    },

    /**
     * @param {!WebInspector.Event} event
     */
    _onProfileTypeSelected: function(event)
    {
        this._selectedProfileType = /** @type {!WebInspector.ProfileType} */ (event.data);
        this._updateProfileTypeSpecificUI();
    },

    _updateProfileTypeSpecificUI: function()
    {
        this._updateRecordButton(this.recordButton.toggled);
        this._profileTypeStatusBarItemsContainer.removeChildren();
        var statusBarItems = this._selectedProfileType.statusBarItems;
        if (statusBarItems) {
            for (var i = 0; i < statusBarItems.length; ++i)
                this._profileTypeStatusBarItemsContainer.appendChild(statusBarItems[i]);
        }
    },

    // Full panel reset: disposes every profile of every type and returns
    // to the launcher view.
    _reset: function()
    {
        WebInspector.Panel.prototype.reset.call(this);

        var types = WebInspector.ProfileTypeRegistry.instance.profileTypes();
        for (var i = 0; i < types.length; i++)
            types[i]._reset();

        delete this.visibleView;
        delete this.currentQuery;
        this.searchCanceled();

        this._profileGroups = {};
        this._updateRecordButton(false);
        this._launcherView.profileFinished();

        this.sidebarTree.element.classList.remove("some-expandable");

        this._launcherView.detach();
        this.profileViews.removeChildren();
        this._profileViewStatusBarItemsContainer.removeChildren();

        this.removeAllListeners();

        this.recordButton.visible = true;
        this._profileViewStatusBarItemsContainer.classList.remove("hidden");
        this.clearResultsButton.element.classList.remove("hidden");
        this.profilesItemTreeElement.select();
        this._showLauncherView();
    },

    _showLauncherView: function()
    {
        this.closeVisibleView();
        this._profileViewStatusBarItemsContainer.removeChildren();
        this._launcherView.show(this.profileViews);
        this.visibleView = this._launcherView;
    },

    _garbageCollectButtonClicked: function()
    {
        HeapProfilerAgent.collectGarbage();
    },

    /**
     * Creates a sidebar section for |profileType| and wires up its events.
     * @param {!WebInspector.ProfileType} profileType
     */
    _registerProfileType: function(profileType)
    {
        this._launcherView.addProfileType(profileType);
        var profileTypeSection = new WebInspector.ProfileTypeSidebarSection(this, profileType);
        this._typeIdToSidebarSection[profileType.id] = profileTypeSection;  // FIX: added missing semicolon
        this.sidebarTree.appendChild(profileTypeSection);
        profileTypeSection.childrenListElement.addEventListener("contextmenu", this._handleContextMenuEvent.bind(this), true);

        /**
         * @param {!WebInspector.Event} event
         * @this {WebInspector.ProfilesPanel}
         */
        function onAddProfileHeader(event)
        {
            this._addProfileHeader(/** @type {!WebInspector.ProfileHeader} */ (event.data));
        }

        /**
         * @param {!WebInspector.Event} event
         * @this {WebInspector.ProfilesPanel}
         */
        function onRemoveProfileHeader(event)
        {
            this._removeProfileHeader(/** @type {!WebInspector.ProfileHeader} */ (event.data));
        }

        /**
         * @param {!WebInspector.Event} event
         * @this {WebInspector.ProfilesPanel}
         */
        function profileComplete(event)
        {
            this.showProfile(/** @type {!WebInspector.ProfileHeader} */ (event.data));
        }

        profileType.addEventListener(WebInspector.ProfileType.Events.ViewUpdated, this._updateProfileTypeSpecificUI, this);
        profileType.addEventListener(WebInspector.ProfileType.Events.AddProfileHeader, onAddProfileHeader, this);
        profileType.addEventListener(WebInspector.ProfileType.Events.RemoveProfileHeader, onRemoveProfileHeader, this);
        profileType.addEventListener(WebInspector.ProfileType.Events.ProfileComplete, profileComplete, this);

        var profiles = profileType.getProfiles();
        for (var i = 0; i < profiles.length; i++)
            this._addProfileHeader(profiles[i]);
    },

    /**
     * @param {?Event} event
     */
    _handleContextMenuEvent: function(event)
    {
        var element = event.srcElement;
        while (element && !element.treeElement && element !== this.element)
            element = element.parentElement;
        if (!element)
            return;
        if (element.treeElement && element.treeElement.handleContextMenuEvent) {
            element.treeElement.handleContextMenuEvent(event, this);
            return;
        }

        var contextMenu = new WebInspector.ContextMenu(event);
        if (this.visibleView instanceof WebInspector.HeapSnapshotView) {
            this.visibleView.populateContextMenu(contextMenu, event);
        }
        if (element !== this.element || event.srcElement === this.sidebarElement()) {
            contextMenu.appendItem(WebInspector.UIString("Load\u2026"), this._fileSelectorElement.click.bind(this._fileSelectorElement));
        }
        contextMenu.show();
    },

    showLoadFromFileDialog: function()
    {
        this._fileSelectorElement.click();
    },

    /**
     * @param {!WebInspector.ProfileHeader} profile
     */
    _addProfileHeader: function(profile)
    {
        var profileType = profile.profileType();
        var typeId = profileType.id;
        this._typeIdToSidebarSection[typeId].addProfileHeader(profile);
        if (!this.visibleView || this.visibleView === this._launcherView)
            this.showProfile(profile);
    },

    /**
     * @param {!WebInspector.ProfileHeader} profile
     */
    _removeProfileHeader: function(profile)
    {
        if (profile.profileType()._profileBeingRecorded === profile)
            this._profileBeingRecordedRemoved();

        var i = this._indexOfViewForProfile(profile);
        if (i !== -1)
            this._profileToView.splice(i, 1);

        var profileType = profile.profileType();
        var typeId = profileType.id;
        var sectionIsEmpty = this._typeIdToSidebarSection[typeId].removeProfileHeader(profile);

        // No other item will be selected if there aren't any other profiles, so
        // make sure that view gets cleared when the last profile is removed.
        if (sectionIsEmpty) {
            this.profilesItemTreeElement.select();
            this._showLauncherView();
        }
    },

    /**
     * @param {?WebInspector.ProfileHeader} profile
     * @return {?WebInspector.View} the view now visible, or null
     */
    showProfile: function(profile)
    {
        if (!profile || (profile.profileType().profileBeingRecorded() === profile) && !profile.profileType().hasTemporaryView())
            return null;

        var view = this._viewForProfile(profile);
        if (view === this.visibleView)
            return view;

        this.closeVisibleView();

        view.show(this.profileViews);
        this.visibleView = view;

        var profileTypeSection = this._typeIdToSidebarSection[profile.profileType().id];
        var sidebarElement = profileTypeSection.sidebarElementForProfile(profile);
        sidebarElement.revealAndSelect();

        this._profileViewStatusBarItemsContainer.removeChildren();

        var statusBarItems = view.statusBarItems;
        if (statusBarItems)
            for (var i = 0; i < statusBarItems.length; ++i)
                this._profileViewStatusBarItemsContainer.appendChild(statusBarItems[i]);

        return view;
    },

    /**
     * @param {!HeapProfilerAgent.HeapSnapshotObjectId} snapshotObjectId
     * @param {string} perspectiveName
     */
    showObject: function(snapshotObjectId, perspectiveName)
    {
        var heapProfiles = WebInspector.ProfileTypeRegistry.instance.heapSnapshotProfileType.getProfiles();
        for (var i = 0; i < heapProfiles.length; i++) {
            var profile = heapProfiles[i];
            // FIXME: allow to choose snapshot if there are several options.
            if (profile.maxJSObjectId >= snapshotObjectId) {
                this.showProfile(profile);
                var view = this._viewForProfile(profile);
                view.highlightLiveObject(perspectiveName, snapshotObjectId);
                break;
            }
        }
    },

    /**
     * Returns the (cached) view for |profile|, creating it on first use.
     * @param {!WebInspector.ProfileHeader} profile
     * @return {!WebInspector.View}
     */
    _viewForProfile: function(profile)
    {
        var index = this._indexOfViewForProfile(profile);
        if (index !== -1)
            return this._profileToView[index].view;
        var view = profile.createView(this);
        view.element.classList.add("profile-view");
        this._profileToView.push({ profile: profile, view: view});
        // FIX: restored the return statement that was truncated; callers
        // (showProfile, showObject) rely on the newly created view.
        return view;
    },

    /**
     * @param {!WebInspector.ProfileHeader} profile
     * @return {number} index into _profileToView, or -1 if absent
     */
    _indexOfViewForProfile: function(profile)
    {
        for (var i = 0; i < this._profileToView.length; i++) {
            if (this._profileToView[i].profile === profile)
                return i;
        }
        return -1;
    },

    closeVisibleView: function()
    {
        if (this.visibleView)
            this.visibleView.detach();
        delete this.visibleView;
    },

    /**
     * @param {string} query
     * @param {boolean} shouldJump
     * @param {boolean=} jumpBackwards
     */
    performSearch: function(query, shouldJump, jumpBackwards)
    {
        this.searchCanceled();

        var visibleView = this.visibleView;
        if (!visibleView)
            return;

        /**
         * @this {WebInspector.ProfilesPanel}
         */
        function finishedCallback(view, searchMatches)
        {
            if (!searchMatches)
                return;
            this._searchableView.updateSearchMatchesCount(searchMatches);
            this._searchResultsView = view;
            if (shouldJump) {
                if (jumpBackwards)
                    view.jumpToLastSearchResult();
                else
                    view.jumpToFirstSearchResult();
                this._searchableView.updateCurrentMatchIndex(view.currentSearchResultIndex());
            }
        }

        visibleView.currentQuery = query;
        visibleView.performSearch(query, finishedCallback.bind(this));
    },

    jumpToNextSearchResult: function()
    {
        if (!this._searchResultsView)
            return;
        if (this._searchResultsView !== this.visibleView)
            return;
        this._searchResultsView.jumpToNextSearchResult();
        this._searchableView.updateCurrentMatchIndex(this._searchResultsView.currentSearchResultIndex());
    },

    jumpToPreviousSearchResult: function()
    {
        if (!this._searchResultsView)
            return;
        if (this._searchResultsView !== this.visibleView)
            return;
        this._searchResultsView.jumpToPreviousSearchResult();
        this._searchableView.updateCurrentMatchIndex(this._searchResultsView.currentSearchResultIndex());
    },

    searchCanceled: function()
    {
        if (this._searchResultsView) {
            if (this._searchResultsView.searchCanceled)
                this._searchResultsView.searchCanceled();
            this._searchResultsView.currentQuery = null;
            this._searchResultsView = null;
        }
        this._searchableView.updateSearchMatchesCount(0);
    },

    /**
     * Adds "Reveal in ... view" items for heap snapshot objects.
     * @param {!Event} event
     * @param {!WebInspector.ContextMenu} contextMenu
     * @param {!Object} target
     */
    appendApplicableItems: function(event, contextMenu, target)
    {
        if (!(target instanceof WebInspector.RemoteObject))
            return;

        if (WebInspector.inspectorView.currentPanel() !== this)
            return;

        var object = /** @type {!WebInspector.RemoteObject} */ (target);
        var objectId = object.objectId;
        if (!objectId)
            return;

        var heapProfiles = WebInspector.ProfileTypeRegistry.instance.heapSnapshotProfileType.getProfiles();
        if (!heapProfiles.length)
            return;

        /**
         * @this {WebInspector.ProfilesPanel}
         */
        function revealInView(viewName)
        {
            HeapProfilerAgent.getHeapObjectId(objectId, didReceiveHeapObjectId.bind(this, viewName));
        }

        /**
         * @this {WebInspector.ProfilesPanel}
         */
        function didReceiveHeapObjectId(viewName, error, result)
        {
            if (WebInspector.inspectorView.currentPanel() !== this)
                return;
            if (!error)
                this.showObject(result, viewName);
        }

        if (WebInspector.settings.showAdvancedHeapSnapshotProperties.get())
            contextMenu.appendItem(WebInspector.UIString(WebInspector.useLowerCaseMenuTitles() ? "Reveal in Dominators view" : "Reveal in Dominators View"), revealInView.bind(this, "Dominators"));
        contextMenu.appendItem(WebInspector.UIString(WebInspector.useLowerCaseMenuTitles() ? "Reveal in Summary view" : "Reveal in Summary View"), revealInView.bind(this, "Summary"));
    },

    __proto__: WebInspector.PanelWithSidebarTree.prototype
}
/**
 * Sidebar section for a single profile type. Groups same-titled profiles
 * under a collapsible "Run N" group once there are two or more.
 *
 * @constructor
 * @extends {WebInspector.SidebarSectionTreeElement}
 * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate
 * @param {!WebInspector.ProfileType} profileType
 */
WebInspector.ProfileTypeSidebarSection = function(dataDisplayDelegate, profileType)
{
    WebInspector.SidebarSectionTreeElement.call(this, profileType.treeItemTitle, null, true);
    this._dataDisplayDelegate = dataDisplayDelegate;
    this._profileTreeElements = [];
    // Maps profile title -> ProfileGroup of same-titled profiles.
    this._profileGroups = {};
    this.hidden = true;
}

/**
 * Bookkeeping for a group of same-titled profiles.
 * @constructor
 */
WebInspector.ProfileTypeSidebarSection.ProfileGroup = function()
{
    this.profileSidebarTreeElements = [];
    // Created lazily once the group reaches two profiles.
    this.sidebarTreeElement = null;
}

WebInspector.ProfileTypeSidebarSection.prototype = {
    /**
     * Adds a sidebar element for |profile|, creating/joining a title group
     * when a second same-titled profile appears.
     * @param {!WebInspector.ProfileHeader} profile
     */
    addProfileHeader: function(profile)
    {
        this.hidden = false;
        var profileType = profile.profileType();
        var sidebarParent = this;
        var profileTreeElement = profile.createSidebarTreeElement(this._dataDisplayDelegate);
        this._profileTreeElements.push(profileTreeElement);

        // File-loaded and in-progress profiles are never grouped.
        if (!profile.fromFile() && profileType.profileBeingRecorded() !== profile) {
            var profileTitle = profile.title;
            var group = this._profileGroups[profileTitle];
            if (!group) {
                group = new WebInspector.ProfileTypeSidebarSection.ProfileGroup();
                this._profileGroups[profileTitle] = group;
            }
            group.profileSidebarTreeElements.push(profileTreeElement);

            var groupSize = group.profileSidebarTreeElements.length;
            if (groupSize === 2) {
                // Make a group TreeElement now that there are 2 profiles.
                group.sidebarTreeElement = new WebInspector.ProfileGroupSidebarTreeElement(this._dataDisplayDelegate, profile.title);

                var firstProfileTreeElement = group.profileSidebarTreeElements[0];
                // Insert at the same index for the first profile of the group.
                var index = this.children.indexOf(firstProfileTreeElement);
                this.insertChild(group.sidebarTreeElement, index);

                // Move the first profile to the group.
                var selected = firstProfileTreeElement.selected;
                this.removeChild(firstProfileTreeElement);
                group.sidebarTreeElement.appendChild(firstProfileTreeElement);
                if (selected)
                    firstProfileTreeElement.revealAndSelect();

                firstProfileTreeElement.small = true;
                firstProfileTreeElement.mainTitle = WebInspector.UIString("Run %d", 1);

                this.treeOutline.element.classList.add("some-expandable");
            }

            if (groupSize >= 2) {
                sidebarParent = group.sidebarTreeElement;
                profileTreeElement.small = true;
                profileTreeElement.mainTitle = WebInspector.UIString("Run %d", groupSize);
            }
        }

        sidebarParent.appendChild(profileTreeElement);
    },

    /**
     * Removes |profile|'s sidebar element, dissolving its group if only one
     * member remains.
     * @param {!WebInspector.ProfileHeader} profile
     * @return {boolean} true if the section became empty (and was hidden)
     */
    removeProfileHeader: function(profile)
    {
        var index = this._sidebarElementIndex(profile);
        if (index === -1)
            return false;
        var profileTreeElement = this._profileTreeElements[index];
        this._profileTreeElements.splice(index, 1);

        var sidebarParent = this;
        var group = this._profileGroups[profile.title];
        if (group) {
            var groupElements = group.profileSidebarTreeElements;
            groupElements.splice(groupElements.indexOf(profileTreeElement), 1);
            if (groupElements.length === 1) {
                // Move the last profile out of its group and remove the group.
                var pos = sidebarParent.children.indexOf(group.sidebarTreeElement);
                this.insertChild(groupElements[0], pos);
                groupElements[0].small = false;
                groupElements[0].mainTitle = group.sidebarTreeElement.title;
                this.removeChild(group.sidebarTreeElement);
            }
            if (groupElements.length !== 0)
                sidebarParent = group.sidebarTreeElement;
        }
        sidebarParent.removeChild(profileTreeElement);
        profileTreeElement.dispose();

        if (this.children.length)
            return false;
        this.hidden = true;
        return true;
    },

    /**
     * @param {!WebInspector.ProfileHeader} profile
     * @return {?WebInspector.ProfileSidebarTreeElement}
     */
    sidebarElementForProfile: function(profile)
    {
        var index = this._sidebarElementIndex(profile);
        return index === -1 ? null : this._profileTreeElements[index];
    },

    /**
     * @param {!WebInspector.ProfileHeader} profile
     * @return {number} index into _profileTreeElements, or -1 if absent
     */
    _sidebarElementIndex: function(profile)
    {
        var elements = this._profileTreeElements;
        for (var i = 0; i < elements.length; i++) {
            if (elements[i].profile === profile)
                return i;
        }
        return -1;
    },

    __proto__: WebInspector.SidebarSectionTreeElement.prototype
}
/**
 * Routes context-menu requests to the profiles panel.
 * @constructor
 * @implements {WebInspector.ContextMenu.Provider}
 */
WebInspector.ProfilesPanel.ContextMenuProvider = function()
{
}
WebInspector.ProfilesPanel.ContextMenuProvider.prototype = {
    /**
     * @param {!Event} event
     * @param {!WebInspector.ContextMenu} contextMenu
     * @param {!Object} target
     */
    appendApplicableItems: function(event, contextMenu, target)
    {
        // Delegate to the profiles panel instance held by the inspector view.
        var profilesPanel = WebInspector.inspectorView.panel("profiles");
        profilesPanel.appendApplicableItems(event, contextMenu, target);
    }
}
/**
 * Sidebar tree element representing a single profile run.
 * @constructor
 * @extends {WebInspector.SidebarTreeElement}
 * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate
 * @param {!WebInspector.ProfileHeader} profile
 * @param {string} className
 */
WebInspector.ProfileSidebarTreeElement = function(dataDisplayDelegate, profile, className)
{
this._dataDisplayDelegate = dataDisplayDelegate;
this.profile = profile;
WebInspector.SidebarTreeElement.call(this, className, profile.title, "", profile, false);
this.refreshTitles();
profile.addEventListener(WebInspector.ProfileHeader.Events.UpdateStatus, this._updateStatus, this);
// Offer a "Save" link right away when the profile can already be saved;
// otherwise add it once the profile data has been received.
if (profile.canSaveToFile())
this._createSaveLink();
else
profile.addEventListener(WebInspector.ProfileHeader.Events.ProfileReceived, this._onProfileReceived, this);
}
WebInspector.ProfileSidebarTreeElement.prototype = {
// Appends a clickable "Save" link to the title container.
_createSaveLink: function()
{
this._saveLinkElement = this.titleContainer.createChild("span", "save-link");
this._saveLinkElement.textContent = WebInspector.UIString("Save");
this._saveLinkElement.addEventListener("click", this._saveProfile.bind(this), false);
},
_onProfileReceived: function(event)
{
this._createSaveLink();
},
/**
 * Applies a status update coming from the profile header.
 * @param {!WebInspector.Event} event
 */
_updateStatus: function(event)
{
var statusUpdate = event.data;
// A null subtitle means "leave the current subtitle unchanged".
if (statusUpdate.subtitle !== null)
this.subtitle = statusUpdate.subtitle;
if (typeof statusUpdate.wait === "boolean")
this.wait = statusUpdate.wait;
this.refreshTitles();
},
// Detaches the listeners registered in the constructor.
dispose: function()
{
this.profile.removeEventListener(WebInspector.ProfileHeader.Events.UpdateStatus, this._updateStatus, this);
this.profile.removeEventListener(WebInspector.ProfileHeader.Events.ProfileReceived, this._onProfileReceived, this);
},
onselect: function()
{
this._dataDisplayDelegate.showProfile(this.profile);
},
/**
 * Asks the profile type to remove this profile.
 * @return {boolean} Always true: the deletion request is always accepted.
 */
ondelete: function()
{
this.profile.profileType().removeProfile(this.profile);
return true;
},
/**
 * Shows the Load/Save/Delete context menu for this profile.
 * @param {!Event} event
 * @param {!WebInspector.ProfilesPanel} panel
 */
handleContextMenuEvent: function(event, panel)
{
var profile = this.profile;
var contextMenu = new WebInspector.ContextMenu(event);
// FIXME: use context menu provider
contextMenu.appendItem(WebInspector.UIString("Load\u2026"), panel._fileSelectorElement.click.bind(panel._fileSelectorElement));
// Saving is only offered once the profile data is available.
if (profile.canSaveToFile())
contextMenu.appendItem(WebInspector.UIString("Save\u2026"), profile.saveToFile.bind(profile));
contextMenu.appendItem(WebInspector.UIString("Delete"), this.ondelete.bind(this));
contextMenu.show();
},
_saveProfile: function(event)
{
this.profile.saveToFile();
},
__proto__: WebInspector.SidebarTreeElement.prototype
}
/**
 * Sidebar tree element that groups several runs sharing one profile title.
 * @constructor
 * @extends {WebInspector.SidebarTreeElement}
 * @param {!WebInspector.ProfileType.DataDisplayDelegate} dataDisplayDelegate
 * @param {string} title
 * @param {string=} subtitle
 */
WebInspector.ProfileGroupSidebarTreeElement = function(dataDisplayDelegate, title, subtitle)
{
    WebInspector.SidebarTreeElement.call(this, "profile-group-sidebar-tree-item", title, subtitle, null, true);
    this._dataDisplayDelegate = dataDisplayDelegate;
}
WebInspector.ProfileGroupSidebarTreeElement.prototype = {
    onselect: function()
    {
        // Selecting the group shows its most recent run, if it has any.
        var childCount = this.children.length;
        if (!childCount)
            return;
        var lastRun = this.children[childCount - 1];
        this._dataDisplayDelegate.showProfile(lastRun.profile);
    },
    __proto__: WebInspector.SidebarTreeElement.prototype
}
/**
 * Sidebar tree element that brings the profile launcher view back up.
 * @constructor
 * @extends {WebInspector.SidebarTreeElement}
 * @param {!WebInspector.ProfilesPanel} profilesPanel
 */
WebInspector.ProfilesSidebarTreeElement = function(profilesPanel)
{
    this._panel = profilesPanel;
    this.small = false;
    WebInspector.SidebarTreeElement.call(this, "profile-launcher-view-tree-item", WebInspector.UIString("Profiles"), "", null, false);
}
WebInspector.ProfilesSidebarTreeElement.prototype = {
    onselect: function()
    {
        // Re-open the launcher so the user can start a new profiling session.
        this._panel._showLauncherView();
    },
    get selectable()
    {
        return true;
    },
    __proto__: WebInspector.SidebarTreeElement.prototype
}
// Pull in the profiler sub-modules: CPU profiling, heap snapshots,
// the profile launcher view and canvas profiling support.
importScript("../sdk/CPUProfileModel.js");
importScript("CPUProfileDataGrid.js");
importScript("CPUProfileBottomUpDataGrid.js");
importScript("CPUProfileTopDownDataGrid.js");
importScript("CPUProfileFlameChart.js");
importScript("CPUProfileView.js");
importScript("HeapSnapshotCommon.js");
importScript("HeapSnapshotProxy.js");
importScript("HeapSnapshotDataGrids.js");
importScript("HeapSnapshotGridNodes.js");
importScript("HeapSnapshotView.js");
importScript("ProfileLauncherView.js");
importScript("CanvasProfileView.js");
importScript("CanvasReplayStateView.js");
// Instantiate the shared registry of available profile types.
WebInspector.ProfileTypeRegistry.instance = new WebInspector.ProfileTypeRegistry();
|
return view;
},
|
<|file_name|>test_grids.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
from __future__ import print_function, division
import numpy as np
from sht.grids import standard_grid, get_cartesian_grid
def test_grids():
L = 10
thetas, phis = standard_grid(L)
# Can't really test much here
assert thetas.size == L
assert phis.size == L**2
<|fim▁hole|> assert grid.shape == (L**2, 3)<|fim▁end|>
|
grid = get_cartesian_grid(thetas, phis)
|
<|file_name|>cleanup-shortcircuit.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// copyright 2014 the rust project developers. see the copyright
// file at the top-level directory of this distribution and at
// http://rust-lang.org/copyright.
//
// licensed under the apache license, version 2.0 <license-apache or
// http://www.apache.org/licenses/license-2.0> or the mit license
// <license-mit or http://opensource.org/licenses/mit>, at your
// option. this file may not be copied, modified, or distributed
// except according to those terms.
// Test that cleanups for the RHS of shortcircuiting operators work.
use std::env;
pub fn main() {
let args: Vec<String> = env::args().collect();
// Here, the rvalue `"signal".to_string()` requires cleanup. Older versions<|fim▁hole|> // expression was never evaluated, we wound up trying to clean
// uninitialized memory.
if args.len() >= 2 && args[1] == "signal" {
// Raise a segfault.
unsafe { *(0 as *mut int) = 0; }
}
}<|fim▁end|>
|
// of the code had a problem that the cleanup scope for this
// expression was the end of the `if`, and as the `"signal".to_string()`
|
<|file_name|>bitcoin_eu_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="eu_ES" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Crunchcoin</source>
<translation>Crunchcoin-i buruz</translation>
</message>
<message>
<location line="+39"/>
<source><b>Crunchcoin</b> version</source>
<translation><b>Crunchcoin</b> bertsioa</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The Crunchcoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Helbide-liburua</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Klik bikoitza helbidea edo etiketa editatzeko</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Sortu helbide berria</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopiatu hautatutako helbidea sistemaren arbelera</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Crunchcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Erakutsi &QR kodea</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Crunchcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Crunchcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Ezabatu</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Crunchcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Esportatu Helbide-liburuaren datuak</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaz bereizitako artxiboa (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Errorea esportatzean</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Ezin idatzi %1 artxiboan.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etiketa</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(etiketarik ez)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Sartu pasahitza</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Pasahitz berria</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Errepikatu pasahitz berria</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Sartu zorrorako pasahitz berria.<br/> Mesedez erabili <b>gutxienez ausazko 10 karaktere</b>, edo <b>gutxienez zortzi hitz</b> pasahitza osatzeko.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Enkriptatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Eragiketa honek zorroaren pasahitza behar du zorroa desblokeatzeko.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Desblokeatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Eragiketa honek zure zorroaren pasahitza behar du, zorroa desenkriptatzeko.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Desenkriptatu zorroa</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Aldatu pasahitza</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Sartu zorroaren pasahitz zaharra eta berria.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Berretsi zorroaren enkriptazioa</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR BITCOINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Zorroa enkriptatuta</translation>
</message>
<message>
<location line="-56"/>
<source>Crunchcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your crunchcoins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Zorroaren enkriptazioak huts egin du</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Zorroaren enkriptazioak huts egin du barne-errore baten ondorioz. Zure zorroa ez da enkriptatu.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Eman dituzun pasahitzak ez datoz bat.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Zorroaren desblokeoak huts egin du</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Zorroa desenkriptatzeko sartutako pasahitza okerra da.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Zorroaren desenkriptazioak huts egin du</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CrunchcoinGUI</name>
<message>
<location filename="../crunchcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sarearekin sinkronizatzen...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Gainbegiratu</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Ikusi zorroaren begirada orokorra</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transakzioak</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Ikusi transakzioen historia</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Editatu gordetako helbide eta etiketen zerrenda</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Erakutsi ordainketak jasotzeko helbideen zerrenda</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>Irten</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Irten aplikaziotik</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Crunchcoin</source>
<translation>Erakutsi Crunchcoin-i buruzko informazioa</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>&Qt-ari buruz</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Erakutsi Qt-ari buruzko informazioa</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Aukerak...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Crunchcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Crunchcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Aldatu zorroa enkriptatzeko erabilitako pasahitza</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Crunchcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>&About Crunchcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Crunchcoin addresses to prove you own them</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Crunchcoin addresses</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&Artxiboa</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Ezarpenak</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Laguntza</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Fitxen tresna-barra</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>Crunchcoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Crunchcoin network</source>
<translation><numerusform>Konexio aktibo %n Crunchcoin-en sarera</numerusform><numerusform>%n konexio aktibo Crunchcoin-en sarera</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Egunean</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Eguneratzen...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Bidalitako transakzioa</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Sarrerako transakzioa</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Crunchcoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Zorroa <b>enkriptatuta</b> eta <b>desblokeatuta</b> dago une honetan</translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Zorroa <b>enkriptatuta</b> eta <b>blokeatuta</b> dago une honetan</translation>
</message>
<message>
<location filename="../crunchcoin.cpp" line="+111"/>
<source>A fatal error occurred. Crunchcoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editatu helbidea</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiketa</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Helbide-liburuko sarrera honekin lotutako etiketa</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Helbidea</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Helbide-liburuko sarrera honekin lotutako helbidea. Bidaltzeko helbideeta soilik alda daiteke.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Jasotzeko helbide berria</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Bidaltzeko helbide berria</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editatu jasotzeko helbidea</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editatu bidaltzeko helbidea</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Sartu berri den helbidea, "%1", helbide-liburuan dago jadanik.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Crunchcoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Ezin desblokeatu zorroa.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Gako berriaren sorrerak huts egin du.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Crunchcoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Aukerak</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start Crunchcoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start Crunchcoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Crunchcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the Crunchcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Crunchcoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show Crunchcoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Crunchcoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Inprimakia</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Crunchcoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldoa:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Konfirmatu gabe:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Azken transakzioak</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Zure uneko saldoa</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Oraindik konfirmatu gabe daudenez, uneko saldoan kontatu gabe dagoen transakzio kopurua</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start crunchcoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Kopurua:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etiketa:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Mezua:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>Gorde honela...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the Crunchcoin-Qt help message to get a list with possible Crunchcoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>Crunchcoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Crunchcoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the Crunchcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Crunchcoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Bidali txanponak</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Bidali hainbat jasotzaileri batera</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldoa:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 CRC</source>
<translation>123.456 CRC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Berretsi bidaltzeko ekintza</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> honi: %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Berretsi txanponak bidaltzea</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Ziur zaude %1 bidali nahi duzula?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> eta </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Ordaintzeko kopurua 0 baino handiagoa izan behar du.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Inprimakia</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>K&opurua:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Ordaindu &honi:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Sartu etiketa bat helbide honetarako, eta gehitu zure helbide-liburuan</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etiketa:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Itsatsi helbidea arbeletik</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Ezabatu jasotzaile hau</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Crunchcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Sartu Crunchcoin helbide bat (adb.: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Itsatsi helbidea arbeletik</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Crunchcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Crunchcoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Crunchcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Sartu Crunchcoin helbide bat (adb.: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter Crunchcoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Crunchcoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Zabalik %1 arte</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/konfirmatu gabe</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 konfirmazio</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, ez da arrakastaz emititu oraindik</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>ezezaguna</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Transakzioaren xehetasunak</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Panel honek transakzioaren deskribapen xehea erakusten du</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Mota</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Zabalik %1 arte</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 konfirmazio)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Konfirmatuta (%1 konfirmazio)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Bloke hau ez du beste inongo nodorik jaso, eta seguruenik ez da onartuko!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Sortua, baina ez onartua</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Jasoa honekin: </translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Honi bidalia: </translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Ordainketa zeure buruari</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Bildua</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transakzioaren egoera. Pasatu sagua gainetik konfirmazio kopurua ikusteko.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Transakzioa jasotako data eta ordua.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Transakzio mota.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Transakzioaren xede-helbidea.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Saldoan kendu edo gehitutako kopurua.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Denak</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Gaur</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Aste honetan</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Hil honetan</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Azken hilean</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Aurten</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Muga...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Jasota honekin: </translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Hona bidalia: </translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Zeure buruari</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Bildua</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Beste</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Sartu bilatzeko helbide edo etiketa</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Kopuru minimoa</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopiatu helbidea</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopiatu etiketa</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Esportatu transakzioen datuak</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Komaz bereizitako artxiboa (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Mota</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiketa</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Helbidea</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Kopurua</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Errorea esportatzean</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Ezin idatzi %1 artxiboan.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>crunchcoin-core</name>
<message>
<location filename="../crunchcoinstrings.cpp" line="+94"/>
<source>Crunchcoin version</source>
<translation>Crunchcoin bertsioa</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or crunchcoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Komandoen lista</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Laguntza komando batean</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Aukerak:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: crunchcoin.conf)</source>
<translation>Ezarpen fitxategia aukeratu (berezkoa: crunchcoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: crunchcoind.pid)</source>
<translation>pid fitxategia aukeratu (berezkoa: crunchcoind.pid)</translation><|fim▁hole|> </message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9336 or testnet: 19336)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9335 or testnet: 19335)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=crunchcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Crunchcoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Crunchcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Crunchcoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Crunchcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Laguntza mezu hau</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Crunchcoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Crunchcoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Crunchcoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Birbilatzen...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Zamaketa amaitua</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|>
| |
<|file_name|>settings.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { SettingsComponent } from './settings/settings.component';<|fim▁hole|>import { SharedModule} from '../../shared/shared.module';
import { ModalModule } from 'angular2-modal';
import { BootstrapModalModule } from 'angular2-modal/plugins/bootstrap';
import { ContextMenuModule } from 'ngx-contextmenu';
import { MyDatePickerModule } from 'mydatepicker';
import { SettingsGlobalFormComponent } from './settings-global-form/settings-global-form.component';
import { SettingsGlobalEditFormComponent } from './settings-global-form/settings-global-edit-form.component';
import {SettingsDispatchingFormComponent} from "./settings-dispatching-form/settings-dispatching-form.component";
import {SettingsDispatchingEditFormComponent} from "./settings-dispatching-form/settings-dispatching-edit-form.component";
import {SettingsRfcFormComponent} from "./settings-rfc-form/settings-rfc-form.component";
import {SettingsRfcEditFormComponent} from "./settings-rfc-form/settings-rfc-edit-form.component";
// Routes local to the lazily loaded settings feature: the feature's root
// path renders the main settings screen.
const routes: Routes = [
  { path: '', component: SettingsComponent }
];

@NgModule({
  imports: [
    SharedModule,
    RouterModule.forChild(routes),
    ContextMenuModule,
    ModalModule.forRoot(),
    BootstrapModalModule,
    MyDatePickerModule
  ],
  // Re-export RouterModule so the host module gets the router directives.
  exports: [RouterModule],
  declarations: [
    SettingsComponent,
    SettingsGlobalFormComponent,
    SettingsGlobalEditFormComponent,
    SettingsDispatchingFormComponent,
    SettingsDispatchingEditFormComponent,
    SettingsRfcFormComponent,
    SettingsRfcEditFormComponent],
  // NOTE(review): these form components appear to be opened dynamically
  // (modal dialogs), which is why they are also listed as entryComponents —
  // confirm against the callers before removing any of them.
  entryComponents: [
    SettingsGlobalFormComponent,
    SettingsGlobalEditFormComponent,
    SettingsDispatchingFormComponent,
    SettingsDispatchingEditFormComponent,
    SettingsRfcFormComponent,
    SettingsRfcEditFormComponent],
  providers: [],
})
export class SettingsModule {}
| |
<|file_name|>cursor.rs<|end_file_name|><|fim▁begin|>//! Cursor movement.
use std::fmt;
use std::ops;
use std::io::{self, Write, Error, ErrorKind, Read};
use async::async_stdin_until;
use std::time::{SystemTime, Duration};
use raw::CONTROL_SEQUENCE_TIMEOUT;
use numtoa::NumToA;
// One-shot escape sequences. Each invocation of `derive_csi_sequence!`
// (defined elsewhere in the crate) generates a zero-sized struct whose
// `Display` impl emits "\x1B[" followed by the given suffix.
derive_csi_sequence!("Hide the cursor.", Hide, "?25l");
derive_csi_sequence!("Show the cursor.", Show, "?25h");
derive_csi_sequence!("Restore the cursor.", Restore, "u");
derive_csi_sequence!("Save the cursor.", Save, "s");

// Cursor-style selectors (DECSCUSR): "<n> q" with n = 1..=6.
derive_csi_sequence!("Change the cursor style to blinking block", BlinkingBlock, "\x31 q");
derive_csi_sequence!("Change the cursor style to steady block", SteadyBlock, "\x32 q");
derive_csi_sequence!("Change the cursor style to blinking underline", BlinkingUnderline, "\x33 q");
derive_csi_sequence!("Change the cursor style to steady underline", SteadyUnderline, "\x34 q");
derive_csi_sequence!("Change the cursor style to blinking bar", BlinkingBar, "\x35 q");
derive_csi_sequence!("Change the cursor style to steady bar", SteadyBar, "\x36 q");
/// Goto some position ((1,1)-based).
///
/// # Why one-based?
///
/// ANSI escapes are very poorly designed, and one of the many odd aspects is being one-based. This
/// can be quite strange at first, but it is not that big of an obstruction once you get used to
/// it.
///
/// # Example
///
/// ```rust
/// extern crate termion;
///
/// fn main() {
/// print!("{}{}Stuff", termion::clear::All, termion::cursor::Goto(5, 3));
/// }
/// ```
#[derive(Copy, Clone, PartialEq, Eq)]
/// Fields are `(column, row)`, both one-based.
pub struct Goto(pub u16, pub u16);
impl From<Goto> for String {
    /// Render the escape sequence without going through `fmt`, formatting
    /// the two coordinates into stack buffers.
    fn from(this: Goto) -> String {
        // Scratch space for the decimal digits; a `u16` needs at most five
        // bytes, so 20 can never overflow.
        let mut row_buf = [0u8; 20];
        let mut col_buf = [0u8; 20];
        // The escape takes row (`.1`) before column (`.0`).
        let row = this.1.numtoa_str(10, &mut row_buf);
        let col = this.0.numtoa_str(10, &mut col_buf);
        ["\x1B[", row, ";", col, "H"].concat()
    }
}
impl Default for Goto {
    /// The top-left cell, `(1, 1)` — ANSI coordinates are one-based.
    fn default() -> Goto {
        Goto(1, 1)
    }
}

impl fmt::Display for Goto {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Catch the all-zero mistake in debug builds; coordinates are
        // one-based, so (0, 0) is never valid.
        debug_assert!(self != &Goto(0, 0), "Goto is one-based.");
        // Row (`.1`) comes before column (`.0`) in the escape sequence.
        write!(f, "\x1B[{};{}H", self.1, self.0)
    }
}
/// Move cursor left.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Left(pub u16);
impl From<Left> for String {
fn from(this: Left) -> String {
let mut buf = [0u8; 20];
["\x1B[", this.0.numtoa_str(10, &mut buf), "D"].concat()
}
}<|fim▁hole|>
impl fmt::Display for Left {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // CSI `D`: cursor back by `self.0` columns.
        write!(f, "\x1B[{}D", self.0)
    }
}

/// Move cursor right.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Right(pub u16);

impl From<Right> for String {
    fn from(this: Right) -> String {
        // Allocation-free digit formatting; 20 bytes is plenty for a `u16`.
        let mut buf = [0u8; 20];
        ["\x1B[", this.0.numtoa_str(10, &mut buf), "C"].concat()
    }
}

impl fmt::Display for Right {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // CSI `C`: cursor forward by `self.0` columns.
        write!(f, "\x1B[{}C", self.0)
    }
}

/// Move cursor up.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Up(pub u16);

impl From<Up> for String {
    fn from(this: Up) -> String {
        let mut buf = [0u8; 20];
        ["\x1B[", this.0.numtoa_str(10, &mut buf), "A"].concat()
    }
}

impl fmt::Display for Up {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // CSI `A`: cursor up by `self.0` rows.
        write!(f, "\x1B[{}A", self.0)
    }
}

/// Move cursor down.
#[derive(Copy, Clone, PartialEq, Eq)]
pub struct Down(pub u16);

impl From<Down> for String {
    fn from(this: Down) -> String {
        let mut buf = [0u8; 20];
        ["\x1B[", this.0.numtoa_str(10, &mut buf), "B"].concat()
    }
}

impl fmt::Display for Down {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // CSI `B`: cursor down by `self.0` rows.
        write!(f, "\x1B[{}B", self.0)
    }
}
/// Types that allow detection of the cursor position.
pub trait DetectCursorPos {
/// Get the (1,1)-based cursor position from the terminal.
fn cursor_pos(&mut self) -> io::Result<(u16, u16)>;
}
impl<W: Write> DetectCursorPos for W {
    /// Ask the terminal for the cursor position with the `ESC [ 6 n`
    /// (Device Status Report) sequence and parse its `ESC [ Cy ; Cx R`
    /// answer. Returns the (1,1)-based `(column, row)` pair, or an error
    /// if the terminal does not answer within the control-sequence timeout
    /// or answers with something unparsable.
    fn cursor_pos(&mut self) -> io::Result<(u16, u16)> {
        let delimiter = b'R';
        let mut stdin = async_stdin_until(delimiter);

        // Where is the cursor? Use `ESC [ 6 n`.
        write!(self, "\x1B[6n")?;
        self.flush()?;

        let mut buf: [u8; 1] = [0];
        let mut read_chars = Vec::new();

        let timeout = Duration::from_millis(CONTROL_SEQUENCE_TIMEOUT);
        let now = SystemTime::now();

        // Either consume all data up to the delimiter or wait for a timeout.
        // `elapsed()` only errs when the clock stepped backwards; treat that
        // as "no time passed" instead of panicking.
        while buf[0] != delimiter
            && now.elapsed().unwrap_or_else(|_| Duration::from_secs(0)) < timeout
        {
            if stdin.read(&mut buf)? > 0 {
                read_chars.push(buf[0]);
            }
        }

        // On success the last byte pushed is the trailing `R`. Anything else
        // (nothing at all, or a partial answer) means we timed out; bailing
        // out here avoids panicking on the parse below.
        if read_chars.pop() != Some(delimiter) {
            return Err(Error::new(ErrorKind::Other, "Cursor position detection timed out."));
        }

        let read_str = String::from_utf8(read_chars)
            .map_err(|e| Error::new(ErrorKind::InvalidData, e))?;
        // The answer looks like `ESC [ Cy ; Cx` now that `R` is stripped;
        // use the *last* `[` in case stray input preceded the report.
        let beg = read_str
            .rfind('[')
            .ok_or_else(|| Error::new(ErrorKind::InvalidData, "Malformed cursor position report."))?;
        let coords = &read_str[beg + 1..];
        let mut nums = coords.split(';');

        let parse_coord = |part: Option<&str>| -> io::Result<u16> {
            part.and_then(|s| s.parse::<u16>().ok())
                .ok_or_else(|| Error::new(ErrorKind::InvalidData, "Malformed cursor position report."))
        };
        let cy = parse_coord(nums.next())?;
        let cx = parse_coord(nums.next())?;

        Ok((cx, cy))
    }
}
/// Hide the cursor for the lifetime of this struct.
/// It will hide the cursor on creation with from() and show it back on drop().
pub struct HideCursor<W: Write> {
    /// The output target.
    output: W,
}

impl<W: Write> HideCursor<W> {
    /// Create a hide cursor wrapper struct for the provided output and hides the cursor.
    ///
    /// Panics if writing the `Hide` escape to `output` fails.
    pub fn from(mut output: W) -> Self {
        write!(output, "{}", Hide).expect("hide the cursor");
        HideCursor { output: output }
    }
}

// Restore the cursor when the wrapper goes out of scope.
impl<W: Write> Drop for HideCursor<W> {
    fn drop(&mut self) {
        write!(self, "{}", Show).expect("show the cursor");
    }
}

// Deref to the wrapped writer so the wrapper is a drop-in replacement
// for the underlying output.
impl<W: Write> ops::Deref for HideCursor<W> {
    type Target = W;

    fn deref(&self) -> &W {
        &self.output
    }
}

impl<W: Write> ops::DerefMut for HideCursor<W> {
    fn deref_mut(&mut self) -> &mut W {
        &mut self.output
    }
}

// Forward `Write` so callers can keep writing through the wrapper directly.
impl<W: Write> Write for HideCursor<W> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        self.output.write(buf)
    }

    fn flush(&mut self) -> io::Result<()> {
        self.output.flush()
    }
}
| |
<|file_name|>fileserve.go<|end_file_name|><|fim▁begin|>package main
import (
"net/http"
"os"
"path"
"strings"
"github.com/zenazn/goji/web"
)
// fileServeHandler serves an uploaded file by name: 404 for missing or
// expired files, 403 for hotlinked requests when hotlinking is disabled.
func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
	fileName := c.URLParams["name"]
	filePath := path.Join(Config.filesDir, fileName)

	if !fileExistsAndNotExpired(fileName) {
		notFoundHandler(c, w, r)
		return
	}

	if !Config.allowHotlink {
		// Reject requests referred from outside this site.
		referer := r.Header.Get("Referer")
		if referer != "" && !strings.HasPrefix(referer, Config.siteURL) {
			// Named constant instead of the bare 403 literal.
			w.WriteHeader(http.StatusForbidden)
			return
		}
	}

	w.Header().Set("Content-Security-Policy", Config.fileContentSecurityPolicy)
	http.ServeFile(w, r, filePath)
}
func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
path := r.URL.Path
if path[len(path)-1:] == "/" {
notFoundHandler(c, w, r)
return
} else {
if path == "/favicon.ico" {
path = "/static/images/favicon.gif"
}
filePath := strings.TrimPrefix(path, "/static/")
file, err := staticBox.Open(filePath)
if err != nil {
notFoundHandler(c, w, r)
return
}
w.Header().Set("Etag", timeStartedStr)
w.Header().Set("Cache-Control", "max-age=86400")
http.ServeContent(w, r, filePath, timeStarted, file)
return
}<|fim▁hole|> filePath := path.Join(Config.filesDir, filename)
_, err := os.Stat(filePath)
if err != nil {
return false
}
if isFileExpired(filename) {
os.Remove(path.Join(Config.filesDir, filename))
os.Remove(path.Join(Config.metaDir, filename))
return false
}
return true
}<|fim▁end|>
|
}
func fileExistsAndNotExpired(filename string) bool {
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>$('.counter').countTo({
formatter: function (value, options) {
return value.toFixed(2).replace(/(\d)(?=(\d{3})+\.)/g, '$1,').slice(0,-3);
}
});
// Animate the on-page counter from its currently displayed value up to `to`.
function updateCount(to)
{
    var current = parseInt($('.counter').html().replace(/,/g, ''));
    $('.counter').countTo({
        from: current,
        to: to,
        formatter: function (value, options) {
            // Insert thousands separators, then strip the ".00" tail.
            return value.toFixed(2).replace(/(\d)(?=(\d{3})+\.)/g, '$1,').slice(0, -3);
        }
    });
}
// Poll the petition API and animate the counter to the latest total,
// logging the delta against the value currently shown on the page.
function getUpdatedCount()
{
    $.getJSON("https://petition.parliament.uk/petitions/131215.json", function (data) {
        var latest = data.data.attributes.signature_count;
        updateCount(latest);
        console.log("+" + (latest - parseInt($('.counter').html().replace(/,/g, ''))));
    });
}
// Fetch the petition JSON and render the top-10 signing countries
// (flag, name, formatted count) into #counties.
function getCountryData()
{
    $.getJSON("https://petition.parliament.uk/petitions/131215.json", function (data) {
        // Sort countries by signature count, descending.
        data.data.attributes.signatures_by_country.sort(function (a, b) {
            return b.signature_count - a.signature_count;
        });
        $("#counties").html("");
        data.data.attributes.signatures_by_country.slice(0, 10).forEach(function (item, index) {
            // BUG FIX: `html` was previously assigned without var/let,
            // leaking an implicit global.
            var html = '<img class="flag flag-' + item.code.toLowerCase() + '"> <b>' + item.name + ':</b> ' + item.signature_count.toFixed(2).replace(/(\d)(?=(\d{3})+\.)/g, '$1,').slice(0, -3) + '</p>';
            $("#counties").append(html);
        });
    });
}
| |
<|file_name|>ignore.hpp<|end_file_name|><|fim▁begin|>/*=============================================================================
Copyright (c) 2001 Doug Gregor
Copyright (c) 1999-2003 Jaakko Jarvi
Copyright (c) 2001-2011 Joel de Guzman
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_IGNORE_07192005_0329)
#define FUSION_IGNORE_07192005_0329
#include <boost/fusion/support/config.hpp>
namespace boost { namespace fusion
{<|fim▁hole|> namespace detail
{
        // Assignment sink behind fusion's `ignore`: assignment from any
        // value is accepted and discarded, which lets tuple positions be
        // skipped when using tie().
        struct swallow_assign
        {
            // Accept any right-hand side; the value is deliberately ignored.
            template<typename T>
            BOOST_FUSION_CONSTEXPR_THIS BOOST_FUSION_GPU_ENABLED
            swallow_assign const&
            operator=(const T&) const
            {
                return *this;
            }
        };
}
// "ignore" allows tuple positions to be ignored when using "tie".
BOOST_CONSTEXPR_OR_CONST detail::swallow_assign ignore = detail::swallow_assign();
}}
#endif<|fim▁end|>
|
// Swallows any assignment (by Doug Gregor)
|
/// Sum of the lengths of all `words` that can be formed from the letters
/// of `chars`, each letter usable at most as many times as it occurs in
/// `chars` (LeetCode 1160 "Find Words That Can Be Formed by Characters").
///
/// Fixes: removed leftover `dbg!` calls (they printed debug output to
/// stderr on every invocation), dropped the unused `HashSet` import, and
/// stopped precomputing frequency maps for words that are skipped anyway
/// because they are longer than `chars`.
pub fn count_characters(words: Vec<String>, chars: String) -> i32 {
    use std::collections::HashMap;

    // Frequency of every character available in `chars`.
    let mut available: HashMap<char, usize> = HashMap::new();
    for c in chars.chars() {
        *available.entry(c).or_insert(0) += 1;
    }

    let mut total = 0usize;
    for word in &words {
        // A word longer than `chars` can never be formed; skip the
        // per-character counting entirely.
        if word.len() > chars.len() {
            continue;
        }
        let mut needed: HashMap<char, usize> = HashMap::new();
        for c in word.chars() {
            *needed.entry(c).or_insert(0) += 1;
        }
        let formable = needed
            .iter()
            .all(|(c, n)| available.get(c).map_or(false, |have| have >= n));
        if formable {
            total += word.len();
        }
    }
    total as i32
}
fn main() {
    // Demo: expected 6 ("cat" + "hat", 3 + 3 letters formable from "atach").
    dbg!(count_characters(
        vec![
            "cat".to_string(),
            "bt".to_string(),
            "hat".to_string(),
            "tree".to_string()
        ],
        "atach".to_string()
    ));
    // Demo: expected 10 ("hello" + "world" formable from "welldonehoneyr").
    dbg!(count_characters(
        vec![
            "hello".to_string(),
            "world".to_string(),
            "leetcode".to_string(),
        ],
        "welldonehoneyr".to_string()
    ));
}<|fim▁end|>
| |
<|file_name|>ibg.py<|end_file_name|><|fim▁begin|># Copyright 2017 QuantRocket - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import getpass
from quantrocket.houston import houston
from quantrocket.cli.utils.output import json_to_cli
def get_credentials(gateway):
    """
    Return the username and trading mode (paper/live) configured for an
    IB Gateway service.

    Parameters
    ----------
    gateway : str, required
        name of IB Gateway service to get credentials for (for example, 'ibg1')

    Returns
    -------
    dict
        credentials (empty dict if none have been configured yet)
    """
    # Validate the gateway name before issuing the credentials request.
    if not list_gateway_statuses(gateways=[gateway]):
        raise ValueError("no such IB Gateway: {0}".format(gateway))
    response = houston.get("/{0}/credentials".format(gateway))
    houston.raise_for_status_with_json(response)
    # A 204 No Content response carries an empty body; report it as {}.
    return response.json() if response.content else {}
def set_credentials(gateway, username=None, password=None, trading_mode=None):
    """
    Set the username/password and trading mode (paper/live) for IB Gateway.

    Can be used to set new credentials or to switch between paper and live
    trading (live credentials must have been entered previously). Setting
    new credentials restarts IB Gateway and takes a moment to complete.

    Credentials are encrypted at rest and never leave your deployment.

    Parameters
    ----------
    gateway : str, required
        name of IB Gateway service to set credentials for (for example, 'ibg1')
    username : str, optional
        IBKR username (optional if only modifying the trading mode)
    password : str, optional
        IBKR password (if omitted and username is provided, will be prompted
        for password)
    trading_mode : str, optional
        the trading mode to use ('paper' or 'live')

    Returns
    -------
    dict
        status message
    """
    if not list_gateway_statuses(gateways=[gateway]):
        raise ValueError("no such IB Gateway: {0}".format(gateway))
    # Prompt interactively so the password need not appear on the command line.
    if username and not password:
        password = getpass.getpass(prompt="Enter IBKR Password: ")
    # Send only the fields that were actually provided.
    fields = (("username", username), ("password", password),
              ("trading_mode", trading_mode))
    data = {key: value for key, value in fields if value}
    # Credential changes restart the gateway, so allow a generous timeout.
    response = houston.put("/{0}/credentials".format(gateway), data=data,
                           timeout=180)
    houston.raise_for_status_with_json(response)
    return response.json()
def _cli_get_or_set_credentials(*args, **kwargs):
    # Any credential-related argument means "set"; otherwise fall back to "get".
    if any(kwargs.get(field) for field in ("username", "password", "trading_mode")):
        return json_to_cli(set_credentials, *args, **kwargs)
    return json_to_cli(get_credentials, gateway=kwargs.get("gateway"))
def list_gateway_statuses(status=None, gateways=None):
    """
    Query statuses of IB Gateways.

    Parameters
    ----------
    status : str, optional
        limit to IB Gateways in this status. Possible choices: running,
        stopped, error
    gateways : list of str, optional
        limit to these IB Gateways

    Returns
    -------
    dict of gateway:status (if status arg not provided), or list of
    gateways (if status arg provided)
    """
    # Include only the filters the caller actually supplied.
    filters = (("gateways", gateways), ("status", status))
    params = {name: value for name, value in filters if value}
    response = houston.get("/ibgrouter/gateways", params=params)
    houston.raise_for_status_with_json(response)
    return response.json()
def _cli_list_gateway_statuses(*args, **kwargs):
    # CLI shim: render list_gateway_statuses() output as console JSON.
    return json_to_cli(list_gateway_statuses, *args, **kwargs)
def start_gateways(gateways=None, wait=False):
    """
    Start one or more IB Gateways.

    Parameters
    ----------
    gateways : list of str, optional
        limit to these IB Gateways
    wait : bool
        wait for the IB Gateway to start before returning (by default the
        gateways are started asynchronously)

    Returns
    -------
    dict
        status message
    """
    payload = {"wait": wait}
    if gateways:
        payload["gateways"] = gateways
    # Gateway startup can be slow, so allow up to two minutes.
    response = houston.post("/ibgrouter/gateways", params=payload, timeout=120)
    houston.raise_for_status_with_json(response)
    return response.json()
def _cli_start_gateways(*args, **kwargs):
    # CLI shim: render start_gateways() output as console JSON.
    return json_to_cli(start_gateways, *args, **kwargs)
def stop_gateways(gateways=None, wait=False):
    """
    Stop one or more IB Gateways.

    Parameters
    ----------
    gateways : list of str, optional
        limit to these IB Gateways
    wait : bool
        wait for the IB Gateway to stop before returning (by default the
        gateways are stopped asynchronously)

    Returns
    -------
    dict
        status message
    """
    payload = {"wait": wait}
    if gateways:
        payload["gateways"] = gateways
    # Stopping is quicker than starting; one minute is enough.
    response = houston.delete("/ibgrouter/gateways", params=payload, timeout=60)
    houston.raise_for_status_with_json(response)
    return response.json()
def _cli_stop_gateways(*args, **kwargs):
    # CLI shim: render stop_gateways() output as console JSON.
    return json_to_cli(stop_gateways, *args, **kwargs)
def load_ibg_config(filename):
    """
    Upload a new IB Gateway permissions config.

    Permission configs are only necessary when running multiple IB Gateways
    with differing market data permissions.

    Parameters
    ----------
    filename : str, required
        the config file to upload

    Returns
    -------
    dict
        status message
    """
    # Read the whole config up front so the file handle is released
    # before the HTTP request is made.
    with open(filename) as config_file:
        config_text = config_file.read()
    response = houston.put("/ibgrouter/config", data=config_text)
    houston.raise_for_status_with_json(response)
    return response.json()
def get_ibg_config():
    """
    Return the current IB Gateway permissions config.

    Returns
    -------
    dict
        the config as a dict (empty if no config has been loaded)
    """
    response = houston.get("/ibgrouter/config")
    houston.raise_for_status_with_json(response)
    # A 204 empty response means no config has been loaded.
    return response.json() if response.content else {}
def _cli_load_or_show_config(filename=None):
    # With a filename, upload a new config; without one, show the current config.
    if filename:
        return json_to_cli(load_ibg_config, filename)
    return json_to_cli(get_ibg_config)
| |
<|file_name|>guestagent_utils.py<|end_file_name|><|fim▁begin|># Copyright 2015 Tesora Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from collections import abc
import os
import re
from trove.common import cfg
from trove.common import pagination
from trove.common import utils
from trove.guestagent.common import operating_system
CONF = cfg.CONF
def update_dict(updates, target):
    """Recursively update a target dictionary with given updates.

    Updates are provided as a dictionary of key-value pairs where a value
    can itself be a nested dictionary, in which case its key is treated as
    a sub-section of the outer key. If a list value is encountered the
    update is applied iteratively on all its items.

    :returns: Will always return a dictionary of results (may be empty).
    """
    if target is None:
        target = {}
    # A list target is updated element-wise, in place.
    if isinstance(target, list):
        for position, element in enumerate(target):
            target[position] = update_dict(updates, element)
        return target
    for key, value in (updates or {}).items():
        if isinstance(value, abc.Mapping):
            # Nested mapping: merge into the corresponding sub-section.
            target[key] = update_dict(value, target.get(key, {}))
        else:
            target[key] = value
    return target
def expand_dict(target, namespace_sep='.'):
    """Expand a flat dict to a nested one.

    This is an inverse of 'flatten_dict'.

    :seealso: flatten_dict
    """
    expanded = {}
    for flat_key, value in target.items():
        # All parts except the last name nested sub-dicts; the last one
        # names the leaf slot the value goes into.
        *ancestors, leaf = flat_key.split(namespace_sep)
        node = expanded
        for part in ancestors:
            node = node.setdefault(part, {})
        node[leaf] = value
    return expanded
def flatten_dict(target, namespace_sep='.'):
    """Flatten a nested dict.

    Return a one-level dict with all sub-level keys joined by a namespace
    separator.

    The following nested dict:
        {'ns1': {'ns2a': {'ns3a': True, 'ns3b': False}, 'ns2b': 10}}
    would be flattened to:
        {'ns1.ns2a.ns3a': True, 'ns1.ns2a.ns3b': False, 'ns1.ns2b': 10}
    """
    flattened = {}
    # Depth-first walk with an explicit stack instead of recursion.
    pending = [([], target)]
    while pending:
        key_path, node = pending.pop()
        if isinstance(node, abc.Mapping):
            for key, value in node.items():
                pending.append((key_path + [key], value))
        else:
            flattened[namespace_sep.join(key_path)] = node
    return flattened
def build_file_path(base_dir, base_name, *extensions):
    """Build a path to a file in a given directory.

    The file may have an extension(s).

    :returns: Path such as: 'base_dir/base_name.ext1.ext2.ext3'
    """
    pieces = (base_name,) + extensions
    return os.path.expanduser(os.path.join(base_dir, os.extsep.join(pieces)))
def to_bytes(value):
    """Convert a string with a K/M/G byte suffix to an int number of bytes.

    Strings that do not match '<digits><K|M|G>' (and non-string values)
    are returned unchanged.
    """
    if isinstance(value, str):
        # BUG FIX: the previous pattern used the class [K,M,G], which also
        # accepted ',' as a "suffix" and would then raise KeyError on the
        # factor lookup below; the comma was almost certainly unintended.
        match = re.match(r'^(\d+)([KMG])$', value)
        if match:
            amount, suffix = match.groups()
            factor = {
                'K': 1024,
                'M': 1024 ** 2,
                'G': 1024 ** 3,
            }[suffix]
            return int(round(factor * float(amount)))
    return value
"""Paginate a list of objects based on the name attribute.
:returns: Page sublist and a marker (name of the last item).
"""
return pagination.paginate_object_list(
li, 'name', limit=limit, marker=marker, include_marker=include_marker)
def serialize_list(li, limit=None, marker=None, include_marker=False):
"""
Paginate (by name) and serialize a given object list.
:returns: A serialized and paginated version of a given list.
"""
page, next_name = paginate_list(li, limit=limit, marker=marker,
include_marker=include_marker)
return [item.serialize() for item in page], next_name
def get_filesystem_volume_stats(fs_path):
try:
stats = os.statvfs(fs_path)
except OSError:
raise RuntimeError("Filesystem not found (%s)" % fs_path)
total = stats.f_blocks * stats.f_bsize
free = stats.f_bfree * stats.f_bsize
# return the size in GB
used_gb = utils.to_gb(total - free)
total_gb = utils.to_gb(total)
output = {
'block_size': stats.f_bsize,
'total_blocks': stats.f_blocks,
'free_blocks': stats.f_bfree,
'total': total_gb,
'free': free,
'used': used_gb
}
return output
def get_conf_dir():
"""Get the config directory for the database related settings.
For now, the files inside the config dir are mainly for instance rebuild.
"""
mount_point = CONF.get(CONF.datastore_manager).mount_point
conf_dir = os.path.join(mount_point, 'conf.d')
if not operating_system.exists(conf_dir, is_directory=True, as_root=True):
operating_system.ensure_directory(conf_dir, as_root=True)
return conf_dir<|fim▁end|>
|
pattern = re.compile(r'^(\d+)([K,M,G]{1})$')
|
<|file_name|>nir_opt_algebraic.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
#
# Copyright (C) 2014 Intel Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# Authors:
# Jason Ekstrand ([email protected])
import nir_algebraic
# Convenience variables
a = 'a'
b = 'b'
c = 'c'
d = 'd'
# Written in the form (<search>, <replace>) where <search> is an expression
# and <replace> is either an expression or a value. An expression is
# defined as a tuple of the form (<op>, <src0>, <src1>, <src2>, <src3>)
# where each source is either an expression or a value. A value can be
# either a numeric constant or a string representing a variable name.
#
# Variable names are specified as "[#]name[@type]" where "#" indicates that
# the given variable will only match constants and the type indicates that
# the given variable will only match values from ALU instructions with the
# given output type.
#
# For constants, you have to be careful to make sure that it is the right
# type because python is unaware of the source and destination types of the
# opcodes.
optimizations = [
(('fneg', ('fneg', a)), a),
(('ineg', ('ineg', a)), a),
(('fabs', ('fabs', a)), ('fabs', a)),
(('fabs', ('fneg', a)), ('fabs', a)),
(('iabs', ('iabs', a)), ('iabs', a)),
(('iabs', ('ineg', a)), ('iabs', a)),
(('fadd', a, 0.0), a),
(('iadd', a, 0), a),
(('fadd', ('fmul', a, b), ('fmul', a, c)), ('fmul', a, ('fadd', b, c))),
(('iadd', ('imul', a, b), ('imul', a, c)), ('imul', a, ('iadd', b, c))),
(('fadd', ('fneg', a), a), 0.0),
(('iadd', ('ineg', a), a), 0),
(('fmul', a, 0.0), 0.0),
(('imul', a, 0), 0),
(('fmul', a, 1.0), a),
(('imul', a, 1), a),
(('fmul', a, -1.0), ('fneg', a)),
(('imul', a, -1), ('ineg', a)),
(('ffma', 0.0, a, b), b),
(('ffma', a, 0.0, b), b),
(('ffma', a, b, 0.0), ('fmul', a, b)),
(('ffma', a, 1.0, b), ('fadd', a, b)),
(('ffma', 1.0, a, b), ('fadd', a, b)),
(('flrp', a, b, 0.0), a),
(('flrp', a, b, 1.0), b),
(('flrp', a, a, b), a),
(('flrp', 0.0, a, b), ('fmul', a, b)),
(('flrp', a, b, c), ('fadd', ('fmul', c, ('fsub', b, a)), a), 'options->lower_flrp'),
(('fadd', ('fmul', a, ('fadd', 1.0, ('fneg', c))), ('fmul', b, c)), ('flrp', a, b, c), '!options->lower_flrp'),
(('fadd', a, ('fmul', c, ('fadd', b, ('fneg', a)))), ('flrp', a, b, c), '!options->lower_flrp'),
(('ffma', a, b, c), ('fadd', ('fmul', a, b), c), 'options->lower_ffma'),
(('fadd', ('fmul', a, b), c), ('ffma', a, b, c), '!options->lower_ffma'),
# Comparison simplifications
(('inot', ('flt', a, b)), ('fge', a, b)),
(('inot', ('fge', a, b)), ('flt', a, b)),
(('inot', ('ilt', a, b)), ('ige', a, b)),
(('inot', ('ige', a, b)), ('ilt', a, b)),
(('fge', ('fneg', ('fabs', a)), 0.0), ('feq', a, 0.0)),
(('bcsel', ('flt', a, b), a, b), ('fmin', a, b)),
(('bcsel', ('flt', a, b), b, a), ('fmax', a, b)),
(('bcsel', ('inot', 'a@bool'), b, c), ('bcsel', a, c, b)),
(('bcsel', a, ('bcsel', a, b, c), d), ('bcsel', a, b, d)),
(('fmin', ('fmax', a, 0.0), 1.0), ('fsat', a), '!options->lower_fsat'),
(('fsat', a), ('fmin', ('fmax', a, 0.0), 1.0), 'options->lower_fsat'),
(('fsat', ('fsat', a)), ('fsat', a)),
(('fmin', ('fmax', ('fmin', ('fmax', a, 0.0), 1.0), 0.0), 1.0), ('fmin', ('fmax', a, 0.0), 1.0)),
(('ior', ('flt', a, b), ('flt', a, c)), ('flt', a, ('fmax', b, c))),
(('ior', ('fge', a, b), ('fge', a, c)), ('fge', a, ('fmin', b, c))),
(('slt', a, b), ('b2f', ('flt', a, b)), 'options->lower_scmp'),
(('sge', a, b), ('b2f', ('fge', a, b)), 'options->lower_scmp'),
(('seq', a, b), ('b2f', ('feq', a, b)), 'options->lower_scmp'),
(('sne', a, b), ('b2f', ('fne', a, b)), 'options->lower_scmp'),
# Emulating booleans
(('fmul', ('b2f', a), ('b2f', b)), ('b2f', ('iand', a, b))),
(('fsat', ('fadd', ('b2f', a), ('b2f', b))), ('b2f', ('ior', a, b))),
(('iand', 'a@bool', 1.0), ('b2f', a)),
(('flt', ('fneg', ('b2f', a)), 0), a), # Generated by TGSI KILL_IF.
(('flt', ('fsub', 0.0, ('b2f', a)), 0), a), # Generated by TGSI KILL_IF.
# Comparison with the same args. Note that these are not done for
# the float versions because NaN always returns false on float
# inequalities.
(('ilt', a, a), False),
(('ige', a, a), True),
(('ieq', a, a), True),
(('ine', a, a), False),
(('ult', a, a), False),
(('uge', a, a), True),
# Logical and bit operations
(('fand', a, 0.0), 0.0),
(('iand', a, a), a),
(('iand', a, 0), 0),
(('ior', a, a), a),
(('ior', a, 0), a),
(('fxor', a, a), 0.0),
(('ixor', a, a), 0),
(('inot', ('inot', a)), a),
# DeMorgan's Laws
(('iand', ('inot', a), ('inot', b)), ('inot', ('ior', a, b))),
(('ior', ('inot', a), ('inot', b)), ('inot', ('iand', a, b))),
# Shift optimizations
(('ishl', 0, a), 0),
(('ishl', a, 0), a),
(('ishr', 0, a), 0),
(('ishr', a, 0), a),
(('ushr', 0, a), 0),
(('ushr', a, 0), a),
# Exponential/logarithmic identities
(('fexp2', ('flog2', a)), a), # 2^lg2(a) = a
(('fexp', ('flog', a)), a), # e^ln(a) = a
(('flog2', ('fexp2', a)), a), # lg2(2^a) = a
(('flog', ('fexp', a)), a), # ln(e^a) = a
(('fpow', a, b), ('fexp2', ('fmul', ('flog2', a), b)), 'options->lower_fpow'), # a^b = 2^(lg2(a)*b)
(('fexp2', ('fmul', ('flog2', a), b)), ('fpow', a, b), '!options->lower_fpow'), # 2^(lg2(a)*b) = a^b
(('fexp', ('fmul', ('flog', a), b)), ('fpow', a, b), '!options->lower_fpow'), # e^(ln(a)*b) = a^b
(('fpow', a, 1.0), a),
(('fpow', a, 2.0), ('fmul', a, a)),
(('fpow', a, 4.0), ('fmul', ('fmul', a, a), ('fmul', a, a))),
(('fpow', 2.0, a), ('fexp2', a)),
(('fsqrt', ('fexp2', a)), ('fexp2', ('fmul', 0.5, a))),
(('fsqrt', ('fexp', a)), ('fexp', ('fmul', 0.5, a))),
(('frcp', ('fexp2', a)), ('fexp2', ('fneg', a))),
(('frcp', ('fexp', a)), ('fexp', ('fneg', a))),
(('frsq', ('fexp2', a)), ('fexp2', ('fmul', -0.5, a))),
(('frsq', ('fexp', a)), ('fexp', ('fmul', -0.5, a))),
(('flog2', ('fsqrt', a)), ('fmul', 0.5, ('flog2', a))),
(('flog', ('fsqrt', a)), ('fmul', 0.5, ('flog', a))),
(('flog2', ('frcp', a)), ('fneg', ('flog2', a))),
(('flog', ('frcp', a)), ('fneg', ('flog', a))),
(('flog2', ('frsq', a)), ('fmul', -0.5, ('flog2', a))),
(('flog', ('frsq', a)), ('fmul', -0.5, ('flog', a))),
(('flog2', ('fpow', a, b)), ('fmul', b, ('flog2', a))),
(('flog', ('fpow', a, b)), ('fmul', b, ('flog', a))),
(('fadd', ('flog2', a), ('flog2', b)), ('flog2', ('fmul', a, b))),
(('fadd', ('flog', a), ('flog', b)), ('flog', ('fmul', a, b))),
(('fadd', ('flog2', a), ('fneg', ('flog2', b))), ('flog2', ('fdiv', a, b))),
(('fadd', ('flog', a), ('fneg', ('flog', b))), ('flog', ('fdiv', a, b))),
(('fmul', ('fexp2', a), ('fexp2', b)), ('fexp2', ('fadd', a, b))),
(('fmul', ('fexp', a), ('fexp', b)), ('fexp', ('fadd', a, b))),
# Division and reciprocal
(('fdiv', 1.0, a), ('frcp', a)),
(('frcp', ('frcp', a)), a),
(('frcp', ('fsqrt', a)), ('frsq', a)),
(('fsqrt', a), ('frcp', ('frsq', a)), 'options->lower_fsqrt'),
(('frcp', ('frsq', a)), ('fsqrt', a), '!options->lower_fsqrt'),
# Boolean simplifications
(('ine', 'a@bool', 0), 'a'),<|fim▁hole|> (('bcsel', a, True, False), ('ine', a, 0)),
(('bcsel', a, False, True), ('ieq', a, 0)),
(('bcsel', True, b, c), b),
(('bcsel', False, b, c), c),
# The result of this should be hit by constant propagation and, in the
# next round of opt_algebraic, get picked up by one of the above two.
(('bcsel', '#a', b, c), ('bcsel', ('ine', 'a', 0), b, c)),
(('bcsel', a, b, b), b),
(('fcsel', a, b, b), b),
# Conversions
(('f2i', ('ftrunc', a)), ('f2i', a)),
(('f2u', ('ftrunc', a)), ('f2u', a)),
# Subtracts
(('fsub', a, ('fsub', 0.0, b)), ('fadd', a, b)),
(('isub', a, ('isub', 0, b)), ('iadd', a, b)),
(('fsub', a, b), ('fadd', a, ('fneg', b)), 'options->lower_sub'),
(('isub', a, b), ('iadd', a, ('ineg', b)), 'options->lower_sub'),
(('fneg', a), ('fsub', 0.0, a), 'options->lower_negate'),
(('ineg', a), ('isub', 0, a), 'options->lower_negate'),
(('fadd', a, ('fsub', 0.0, b)), ('fsub', a, b)),
(('iadd', a, ('isub', 0, b)), ('isub', a, b)),
(('fabs', ('fsub', 0.0, a)), ('fabs', a)),
(('iabs', ('isub', 0, a)), ('iabs', a)),
]
# Add optimizations to handle the case where the result of a ternary is
# compared to a constant. This way we can take things like
#
# (a ? 0 : 1) > 0
#
# and turn it into
#
# a ? (0 > 0) : (1 > 0)
#
# which constant folding will eat for lunch. The resulting ternary will
# further get cleaned up by the boolean reductions above and we will be
# left with just the original variable "a".
for op in ['flt', 'fge', 'feq', 'fne',
'ilt', 'ige', 'ieq', 'ine', 'ult', 'uge']:
optimizations += [
((op, ('bcsel', 'a', '#b', '#c'), '#d'),
('bcsel', 'a', (op, 'b', 'd'), (op, 'c', 'd'))),
((op, '#d', ('bcsel', a, '#b', '#c')),
('bcsel', 'a', (op, 'd', 'b'), (op, 'd', 'c'))),
]
# This section contains "late" optimizations that should be run after the
# regular optimizations have finished. Optimizations should go here if
# they help code generation but do not necessarily produce code that is
# more easily optimizable.
late_optimizations = [
(('flt', ('fadd', a, b), 0.0), ('flt', a, ('fneg', b))),
(('fge', ('fadd', a, b), 0.0), ('fge', a, ('fneg', b))),
(('feq', ('fadd', a, b), 0.0), ('feq', a, ('fneg', b))),
(('fne', ('fadd', a, b), 0.0), ('fne', a, ('fneg', b))),
]
print nir_algebraic.AlgebraicPass("nir_opt_algebraic", optimizations).render()
print nir_algebraic.AlgebraicPass("nir_opt_algebraic_late",
late_optimizations).render()<|fim▁end|>
|
(('ieq', 'a@bool', 0), ('inot', 'a')),
|
<|file_name|>test.profile.js<|end_file_name|><|fim▁begin|>var fs = require("fs");
var path = require("path");
var _ = require("underscore");
var chai = require("chai");
var expect = chai.expect;
var profile = require("../../lib/profile");
var PREFS = require("../../data/preferences");
var simpleXpiPath = path.join(__dirname, '..', 'xpis', 'simple-addon.xpi');
describe("lib/profile", function () {
it("creates a profile and returns the path", function (done) {
profile().then(function (profilePath) {
var contents = fs.readFileSync(path.join(profilePath, "user.js"), "utf8");
expect(contents).to.be.ok;
})
.then(done, done);
});
it("creates a profile with proper default preferences (Firefox)", function (done) {
profile().then(function (profilePath) {
var contents = fs.readFileSync(path.join(profilePath, "user.js"), "utf8");
var defaults = _.extend({}, PREFS.DEFAULT_COMMON_PREFS, PREFS.DEFAULT_FIREFOX_PREFS);
comparePrefs(defaults, contents);
})
.then(done, done);
});<|fim▁hole|>
it("creates a profile with an addon installed when given a XPI", function (done) {
profile({ xpi: simpleXpiPath }).then(function (profilePath) {
var addonPath = path.join(profilePath, "extensions", "simple-addon");
var files = fs.readdirSync(addonPath, "utf8");
var index = fs.readFileSync(path.join(addonPath, "index.js"));
var manifest = fs.readFileSync(path.join(addonPath, "package.json"));
expect(index).to.be.ok;
expect(manifest).to.be.ok;
})
.then(done, done);
});
});
// Assert that every preference in `defaults` appears in the rendered
// user.js `prefs` text with an equal value: each matched key decrements
// `count`, which must reach zero by the end.
function comparePrefs (defaults, prefs) {
    var count = Object.keys(defaults).length;
    prefs.split("\n").forEach(function (pref) {
        // user_pref("key", value); -- capture the key and the (possibly
        // quoted) value.
        var parsed = pref.match(/user_pref\("(.*)", "?([^"]*)"?\)\;$/);
        if (!parsed || parsed.length < 2) return;
        var key = parsed[1];
        var value = parsed[2];
        // Cast booleans and numbers in string format to primitives
        if (value === 'true')
            value = true;
        else if (value === 'false')
            value = false;
        else if (!isNaN(parseFloat(value)) && isFinite(value))
            value = +value;
        // TODO need to override firefox-profile setting default prefs
        // but we still override them if we explicitly set them
        if (key in defaults) {
            expect(defaults[key]).to.be.equal(value);
            --count;
        }
    });
    // Every default must have been matched exactly once.
    expect(count).to.be.equal(0);
}<|fim▁end|>
| |
<|file_name|>lookups.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Doqu is a lightweight schema/query framework for document databases.
# Copyright © 2009—2010 Andrey Mikhaylenko
#
# This file is part of Docu.
#
# Doqu is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#<|fim▁hole|># Doqu is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Docu. If not, see <http://gnu.org/licenses/>.
import datetime
from functools import wraps
import re
from doqu.backend_base import LookupManager
__all__ = ['lookup_manager']
lookup_manager = LookupManager()
DEFAULT_OPERATION = 'equals'
# we define operations as functions that expect
mapping = {
'between': lambda a,b: b is not None and a[0] <= b <= a[1],
'contains': lambda a,b: a in b,
'contains_any': lambda a,b: b is not None and any(x in b for x in a),
'endswith': lambda a,b: b is not None and b.endswith(a),
'equals': lambda a,b: a.pk == b if hasattr(a, 'pk') else a == b,
'exists': lambda a,b: True,
'gt': lambda a,b: b is not None and a < b,
'gte': lambda a,b: b is not None and a <= b,
'in': lambda a,b: b in a,
# 'like': lambda a,b: NotImplemented,
# 'like_any': lambda a,b: NotImplemented,
'lt': lambda a,b: b is not None and b < a,
'lte': lambda a,b: b is not None and b <= a,
'matches': lambda a,b: re.search(a, b), # XXX pre-compile?
# 'search': lambda a,b: NotImplemented,
'startswith': lambda a,b: b and b.startswith(a),
'year': lambda a,b: b and b.year == a,
'month': lambda a,b: b and b.month == a,
'day': lambda a,b: b and b.day == a,
}
def autonegated_processor(processor):
"decorator for processors; handles negation"
@wraps(processor)
def inner(name, value, data_processor, negated):
def condition(data):
if name in data:
value_in_data = data_processor(data.get(name, None))
matches = processor(value, value_in_data)
else:
matches = False
return not matches if negated else matches
return condition
return inner
for operation, processor in mapping.items():
is_default = operation == DEFAULT_OPERATION
processor = autonegated_processor(processor)
lookup_manager.register(operation, default=is_default)(processor)<|fim▁end|>
| |
<|file_name|>queue_test.go<|end_file_name|><|fim▁begin|>package gq
import (
"testing"
"time"
)
var count int
type WorkerTest struct {
Delay time.Duration
}
func (w WorkerTest) Work() {
count++<|fim▁hole|>
func (w WorkerTest) Data() string {
return ""
}
func (w WorkerTest) DelayTime() time.Duration {
return w.Delay
}
func (w WorkerTest) Preprocess() string {
return ""
}
func (w WorkerTest) Postprocess() string {
return ""
}
func nullLogger(...interface{}) {}
func init() {
Logger(nullLogger)
StartDispatcher(10)
for i := 0; i < 10; i++ {
work := WorkerTest{Delay: 0 * time.Second}
WorkQueue <- work
}
time.Sleep(1 * time.Second)
}
func TestIncrement(t *testing.T) {
if count != 10 {
t.Errorf("expected count to be 10, got %d\n", count)
}
}<|fim▁end|>
|
}
|
<|file_name|>authtest.go<|end_file_name|><|fim▁begin|>package resource
import (
"github.com/gbl08ma/sqalx"
"github.com/yarf-framework/yarf"
)
// AuthTest composites resource
type AuthTest struct {
resource
}
// WithNode associates a sqalx Node with this resource
func (r *AuthTest) WithNode(node sqalx.Node) *AuthTest {
r.node = node
return r
}
// WithHashKey associates a HMAC key with this resource so it can participate in authentication processes
func (r *AuthTest) WithHashKey(key []byte) *AuthTest {
r.hashKey = key
return r
}
// Get serves HTTP GET requests on this resource
func (r *AuthTest) Get(c *yarf.Context) error {<|fim▁hole|> return nil
}
RenderData(c, struct {
Result string `msgpack:"result" json:"result"`
Key string `msgpack:"key" json:"key"`
}{
Result: "ok",
Key: pair.Key,
}, "no-cache, no-store, must-revalidate")
return nil
}<|fim▁end|>
|
pair, err := r.AuthenticateClient(c)
if err != nil {
RenderUnauthorized(c)
|
<|file_name|>res_partner.py<|end_file_name|><|fim▁begin|># Copyright 2018 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, fields, models
class ResPartner(models.Model):
_inherit = "res.partner"
_allowed_inactive_link_models = ["res.partner"]
_inactive_cascade = True
sta_mandate_ids = fields.One2many(
comodel_name="sta.mandate",
inverse_name="partner_id",
string="State Mandates",
domain=[("active", "=", True)],
context={"force_recompute": True},
)
sta_mandate_inactive_ids = fields.One2many(
comodel_name="sta.mandate",
inverse_name="partner_id",
string="State Mandates (Inactive)",
domain=[("active", "=", False)],
)
int_mandate_ids = fields.One2many(
comodel_name="int.mandate",
inverse_name="partner_id",
string="Internal Mandates",
domain=[("active", "=", True)],
context={"force_recompute": True},
)
int_mandate_inactive_ids = fields.One2many(
comodel_name="int.mandate",
inverse_name="partner_id",
string="Internal Mandates (Inactive)",
domain=[("active", "=", False)],
)
ext_mandate_ids = fields.One2many(
comodel_name="ext.mandate",
inverse_name="partner_id",
string="External Mandates",
domain=[("active", "=", True)],
context={"force_recompute": True},
)
ext_mandate_inactive_ids = fields.One2many(
comodel_name="ext.mandate",
inverse_name="partner_id",
string="External Mandates (Inactive)",
domain=[("active", "=", False)],
)
ext_mandate_count = fields.Integer(
string="External Mandates Nbr", compute="_compute_mandate_assembly_count"
)
ext_assembly_count = fields.Integer(
string="External Assemblies", compute="_compute_mandate_assembly_count"
)
def get_mandate_action(self):
"""
return an action for an ext.mandate contains into the domain a
specific tuples to get concerned mandates
"""
self.ensure_one()
res_ids = self._get_assemblies()._get_mandates().ids
domain = [("id", "in", res_ids)]
# get model's action to update its domain
action = self.env["ir.actions.act_window"]._for_xml_id(
"mozaik_mandate.ext_mandate_action"
)
action["domain"] = domain
return action
def _get_assemblies(self):
"""
return the assemblies of the current partner
"""
self.ensure_one()
assembly_model = "ext.assembly"
if self.is_assembly:
field = "partner_id"
else:
field = "ref_partner_id"<|fim▁hole|>
assembly_obj = self.env[assembly_model]
assemblies = assembly_obj.search(domain)
return assemblies
def _compute_mandate_assembly_count(self):
"""
count the number of assemblies linked to the current partner
count the number of mandates linked to the assemblies of the
current partner
"""
for partner in self:
assemblies = partner._get_assemblies()
partner.ext_assembly_count = len(assemblies)
partner.ext_mandate_count = len(assemblies._get_mandates())
def add_mandate_action(self):
self.ensure_one()
return {
"type": "ir.actions.act_window",
"name": _("Add a new mandate"),
"res_model": self._context.get("mandate_model"),
"context": {"default_partner_id": self.id},
"view_mode": "form",
"target": "new",
}<|fim▁end|>
|
domain = [(field, "=", self.id)]
|
<|file_name|>insert_sbench.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2013 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package main
import (
"flag"
"fmt"
"github.com/prataprc/gobtree"
"os"
"time"
)<|fim▁hole|>
var _ = fmt.Sprintln("keep 'fmt' import during debugging", time.Now(), os.O_WRONLY)
func main() {
flag.Parse()
args := flag.Args()
idxfile, kvfile := args[0], args[1]
os.Remove(idxfile)
os.Remove(kvfile)
var conf = btree.Config{
Idxfile: idxfile,
Kvfile: kvfile,
IndexConfig: btree.IndexConfig{
Sectorsize: 512,
Flistsize: 2000 * btree.OFFSET_SIZE,
Blocksize: 512,
},
Maxlevel: 6,
RebalanceThrs: 5,
AppendRatio: 0.7,
DrainRate: 600,
MaxLeafCache: 1000,
Sync: false,
Nocache: false,
}
store := btree.NewStore(conf)
bt := btree.NewBTree(store)
factor := 1
count := 10000
seed := time.Now().UnixNano()
fmt.Println("Seed:", seed)
keys, values := btree.TestData(count, seed)
fmt.Println(time.Now())
for i := 0; i < factor; i++ {
for j := 0; j < count; j++ {
k, v := keys[j], values[j]
k.Id = int64((i * count) + j)
bt.Insert(k, v)
}
fmt.Println("Done ", time.Now().UnixNano()/1000000, (i+1)*count)
}
bt.Drain()
fmt.Println(time.Now())
// Sanity check
if bt.Count() != int64(count*factor) {
fmt.Println(bt.Count(), int64(count*factor))
panic("Count mismatch")
}
// Remove
checkcount := int64(count * factor)
for i := 0; i < factor; i++ {
for j := 0; j < count; j += 3 {
k := keys[j]
k.Id = int64((i * count) + j)
bt.Remove(k)
bt.Drain()
bt.Check()
checkcount -= 1
if bt.Count() != checkcount {
fmt.Println("remove mismatch count", bt.Count(), checkcount)
panic("")
}
}
for j := 1; j < count; j += 3 {
k := keys[j]
k.Id = int64((i * count) + j)
bt.Remove(k)
bt.Drain()
bt.Check()
checkcount -= 1
if bt.Count() != checkcount {
fmt.Println("remove mismatch count", bt.Count(), checkcount)
panic("")
}
}
for j := 2; j < count; j += 3 {
k := keys[j]
k.Id = int64((i * count) + j)
bt.Remove(k)
bt.Drain()
bt.Check()
checkcount -= 1
if bt.Count() != checkcount {
fmt.Println("remove mismatch count", bt.Count(), checkcount)
panic("")
}
}
fmt.Println("Done ", time.Now().UnixNano()/1000000, (i+1)*count)
}
bt.Drain()
bt.Stats(false)
fmt.Println("Count", bt.Count())
bt.Close()
}<|fim▁end|>
| |
<|file_name|>check_syscall.py<|end_file_name|><|fim▁begin|># Rekall Memory Forensics
# Copyright (C) 2007-2013 Volatility Foundation
# Copyright 2013 Google Inc. All Rights Reserved.
#
# This file is part of Rekall Memory Forensics.
#
# Rekall Memory Forensics is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License Version 2 as
# published by the Free Software Foundation. You may not use, modify or
# distribute this program under any other version of the GNU General Public
# License.
#
# Rekall Memory Forensics is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# Rekall Memory Forensics. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: [email protected]
@organization:
"""
from rekall.plugins.linux import common
from rekall.plugins.tools import dynamic_profiles
class CheckSyscall(common.LinuxPlugin):
"""Checks if the system call table has been altered."""
__name = "check_syscall"
table_header = [
dict(name="divider", type="Divider"),
dict(name="table", hidden=True),
dict(name="index", style="address"),
dict(name="address", style="address"),
dict(name="symbol", width=80)
]
def Find_sys_call_tables(self):
"""Calculates the size of the syscall table.
Here we need the symbol __NR_syscall_max. We derive it from
disassembling the following system calls:
- system_call_fastpath function:
http://lxr.linux.no/linux+v3.12/arch/x86/kernel/entry_64.S#L620
system_call_fastpath:
#if __SYSCALL_MASK == ~0
cmpq $__NR_syscall_max,%rax
#else
andl $__SYSCALL_MASK,%eax
cmpl $__NR_syscall_max,%eax
#endif
- ret_from_sys_call function (with a small rewind):
http://lxr.linux.no/linux+v2.6.26/arch/x86/kernel/entry_64.S#L249
249 cmpq $__NR_syscall_max,%rax
250 ja badsys
251 movq %r10,%rcx
252 call *sys_call_table(,%rax,8) # XXX: rip relative
253 movq %rax,RAX-ARGOFFSET(%rsp)
254 /*
255 * Syscall return path ending with SYSRET (fast path)
256 * Has incomplete stack frame and undefined top of stack.
257 */
258 ret_from_sys_call:
259 movl $_TIF_ALLWORK_MASK,%edi
260 /* edi: flagmask */
- sysenter_do_call
Linux> dis "linux!sysenter_do_call"
Address Rel Op Codes Instruction Comment
------- ---------- -------------------- ------------------ -------
------ linux!sysenter_do_call ------: 0xc12c834d
0xc12c834d 0x0 3d5d010000 CMP EAX, 0x15d
0xc12c8352 0x5 0f8397baffff JAE 0xc12c3def linux!syscall_badsys
"""
rules = [
# Look for a comparison of the register (EAX) with a fixed value.
{'mnemonic': 'CMP', 'operands': [
{'type': 'REG'}, {'type': 'IMM', 'target': "$value"}]},
# Immediately followed by a branch to linux!badsys,
# linux!ia32_badsys etc.
{'comment': '~.+badsys'}<|fim▁hole|> for func_name, table_name in [
# http://lxr.free-electrons.com/source/arch/x86_64/kernel/entry.S?v=2.4.37
("system_call", "sys_call_table"),
# http://lxr.free-electrons.com/source/arch/x86/kernel/entry_64.S?v=3.16
("system_call_fastpath", "sys_call_table"),
# http://lxr.free-electrons.com/source/arch/x86/ia32/ia32entry.S?v=3.14
("ia32_sysenter_target", "ia32_sys_call_table"),
("sysenter_auditsys", "ia32_sys_call_table"),
# http://lxr.free-electrons.com/source/arch/x86/kernel/entry_32.S?v=3.3
("sysenter_do_call", "sys_call_table")]:
if table_name in tables:
continue
# This table does not exist in this profile dont bother looking for
# its size.
if self.profile.get_constant(table_name) == None:
continue
func = self.profile.get_constant_object(
func_name, target="Function")
if func == None:
continue
matcher = dynamic_profiles.DisassembleMatcher(
name="sys_call_table_size",
mode=func.mode, rules=rules, session=self.session)
result = matcher.MatchFunction(func)
if result:
tables.add(table_name)
yield table_name, result["$value"] + 1
# Fallback. Note this underestimates the size quite a bit.
if func == None:
table_size = len([x for x in self.profile.constants
if x.startswith("__syscall_meta__")]) or 0x300
yield "ia32_sys_call_table", table_size
yield "sys_call_table", table_size
def collect(self):
"""
This works by walking the system call table
and verifies that each is a symbol in the kernel
"""
for table_name, table_size in self.Find_sys_call_tables():
# The syscall table is simply an array of pointers to functions.
table = self.profile.get_constant_object(
table_name,
target="Array",
target_args=dict(
count=table_size,
target="Pointer",
target_args=dict(
target="Function"
)
)
)
yield dict(divider="Table %s" % table_name)
resolver = self.session.address_resolver
for i, entry in enumerate(table):
sym_name = resolver.format_address(entry.deref())[:2]
yield dict(
table=table_name, index=i,
address=entry,
symbol=sym_name or "Unknown",
highlight=None if sym_name else "important")<|fim▁end|>
|
]
func = None
tables = set()
|
<|file_name|>RootlessHgCommand.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2005-2010 VecTrace (Zingo Andersen) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* John Peberdy implementation
*******************************************************************************/
package com.vectrace.MercurialEclipse.commands;
import java.io.File;
import java.util.List;
import org.eclipse.core.runtime.Assert;
/**
* A command to invoke hg definitely outside of an hg root.
*/
public class RootlessHgCommand extends AbstractShellCommand {
public RootlessHgCommand(String command, String uiName) {
this(command, uiName, null);
}
public RootlessHgCommand(String command, String uiName, File workingDir) {
super(uiName, null, workingDir, false);
Assert.isNotNull(command);
this.command = command;
}
// operations
<|fim▁hole|> @Override
protected void customizeCommands(List<String> cmd) {
cmd.add(1, "-y");
}
/**
* @see com.vectrace.MercurialEclipse.commands.AbstractShellCommand#getExecutable()
*/
@Override
protected String getExecutable() {
return HgClients.getExecutable();
}
}<|fim▁end|>
|
/**
* @see com.vectrace.MercurialEclipse.commands.AbstractShellCommand#customizeCommands(java.util.List)
*/
|
<|file_name|>confirm-toolbar.js<|end_file_name|><|fim▁begin|>import log from 'log';
import Ember from 'ember';
import {registerComponent} from 'ember-utils';<|fim▁hole|>export var RheaConfirmToolbar = Ember.Component.extend({
layout,
tagName: '',
actions: {
confirm: function(opID) {
this.sendAction('confirm');
},
cancel: function(opID) {
this.sendAction('cancel');
}
}
});
registerComponent('rhea-confirm-toolbar', RheaConfirmToolbar);<|fim▁end|>
|
import layout from './confirm-toolbar.hbs!';
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod options;
pub mod compiler;
pub mod errors;
mod parser;<|fim▁hole|><|fim▁end|>
|
mod lexer;
mod modulebuilder;
|
<|file_name|>darwinvpnlauncher.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# darwinvpnlauncher.py
# Copyright (C) 2013 LEAP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Darwin VPN launcher implementation.
"""
import commands
import getpass
import logging
import os
import sys
from leap.bitmask.services.eip.vpnlauncher import VPNLauncher
from leap.bitmask.services.eip.vpnlauncher import VPNLauncherException
from leap.bitmask.util import get_path_prefix
logger = logging.getLogger(__name__)
class EIPNoTunKextLoaded(VPNLauncherException):
pass
class DarwinVPNLauncher(VPNLauncher):
"""
VPN launcher for the Darwin Platform
"""
COCOASUDO = "cocoasudo"
# XXX need the good old magic translate for these strings
# (look for magic in 0.2.0 release)
SUDO_MSG = ("Bitmask needs administrative privileges to run "
"Encrypted Internet.")
INSTALL_MSG = ("\"Bitmask needs administrative privileges to install "
"missing scripts and fix permissions.\"")
# Hardcode the installation path for OSX for security, openvpn is
# run as root
INSTALL_PATH = "/Applications/Bitmask.app/"
INSTALL_PATH_ESCAPED = os.path.realpath(os.getcwd() + "/../../")
OPENVPN_BIN = 'openvpn.leap'
OPENVPN_PATH = "%s/Contents/Resources/openvpn" % (INSTALL_PATH,)
OPENVPN_PATH_ESCAPED = "%s/Contents/Resources/openvpn" % (
INSTALL_PATH_ESCAPED,)
OPENVPN_BIN_PATH = "%s/Contents/Resources/%s" % (INSTALL_PATH,
OPENVPN_BIN)
UP_SCRIPT = "%s/client.up.sh" % (OPENVPN_PATH,)
DOWN_SCRIPT = "%s/client.down.sh" % (OPENVPN_PATH,)
OPENVPN_DOWN_PLUGIN = '%s/openvpn-down-root.so' % (OPENVPN_PATH,)
UPDOWN_FILES = (UP_SCRIPT, DOWN_SCRIPT, OPENVPN_DOWN_PLUGIN)
OTHER_FILES = []
@classmethod
def cmd_for_missing_scripts(kls, frompath):
"""
Returns a command that can copy the missing scripts.
:rtype: str
"""
to = kls.OPENVPN_PATH_ESCAPED
cmd = "#!/bin/sh\n"
cmd += "mkdir -p {0}\n".format(to)
cmd += "cp '{0}'/* {1}\n".format(frompath, to)
cmd += "chmod 744 {0}/*".format(to)
return cmd
@classmethod
def is_kext_loaded(kls):
"""
Checks if the needed kext is loaded before launching openvpn.
:returns: True if kext is loaded, False otherwise.
:rtype: bool
"""
return bool(commands.getoutput('kextstat | grep "leap.tun"'))
@classmethod
def _get_icon_path(kls):
"""
Returns the absolute path to the app icon.
:rtype: str
"""
resources_path = os.path.abspath(
os.path.join(os.getcwd(), "../../Contents/Resources"))
return os.path.join(resources_path, "bitmask.tiff")
@classmethod
def get_cocoasudo_ovpn_cmd(kls):
"""
Returns a string with the cocoasudo command needed to run openvpn
as admin with a nice password prompt. The actual command needs to be
appended.
:rtype: (str, list)
"""
# TODO add translation support for this
sudo_msg = ("Bitmask needs administrative privileges to run "
"Encrypted Internet.")
iconpath = kls._get_icon_path()
has_icon = os.path.isfile(iconpath)
args = ["--icon=%s" % iconpath] if has_icon else []
args.append("--prompt=%s" % (sudo_msg,))
return kls.COCOASUDO, args
@classmethod
def get_cocoasudo_installmissing_cmd(kls):
"""
Returns a string with the cocoasudo command needed to install missing
files as admin with a nice password prompt. The actual command needs to
be appended.
:rtype: (str, list)
"""
# TODO add translation support for this
install_msg = ('"Bitmask needs administrative privileges to install '
'missing scripts and fix permissions."')
iconpath = kls._get_icon_path()
has_icon = os.path.isfile(iconpath)
args = ["--icon=%s" % iconpath] if has_icon else []
args.append("--prompt=%s" % (install_msg,))
return kls.COCOASUDO, args
@classmethod
def get_vpn_command(kls, eipconfig, providerconfig, socket_host,
socket_port="unix", openvpn_verb=1):
"""
Returns the OSX implementation for the vpn launching command.
Might raise:
EIPNoTunKextLoaded,
OpenVPNNotFoundException,
VPNLauncherException.<|fim▁hole|> :type eipconfig: EIPConfig
:param providerconfig: provider specific configuration
:type providerconfig: ProviderConfig
:param socket_host: either socket path (unix) or socket IP
:type socket_host: str
:param socket_port: either string "unix" if it's a unix socket,
or port otherwise
:type socket_port: str
:param openvpn_verb: the openvpn verbosity wanted
:type openvpn_verb: int
:return: A VPN command ready to be launched.
:rtype: list
"""
if not kls.is_kext_loaded():
raise EIPNoTunKextLoaded
# we use `super` in order to send the class to use
command = super(DarwinVPNLauncher, kls).get_vpn_command(
eipconfig, providerconfig, socket_host, socket_port, openvpn_verb)
cocoa, cargs = kls.get_cocoasudo_ovpn_cmd()
cargs.extend(command)
command = cargs
command.insert(0, cocoa)
command.extend(['--setenv', "LEAPUSER", getpass.getuser()])
return command
@classmethod
def get_vpn_env(kls):
"""
Returns a dictionary with the custom env for the platform.
This is mainly used for setting LD_LIBRARY_PATH to the correct
path when distributing a standalone client
:rtype: dict
"""
ld_library_path = os.path.join(get_path_prefix(), "..", "lib")
ld_library_path.encode(sys.getfilesystemencoding())
return {
"DYLD_LIBRARY_PATH": ld_library_path
}<|fim▁end|>
|
:param eipconfig: eip configuration object
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* NetSuite Records<|fim▁hole|>
Records.RecordRef = require('./recordRef');<|fim▁end|>
|
* @return {Records}
*/
var Records = module.exports = {};
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.