prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>panaz_test.go<|end_file_name|><|fim▁begin|>package sc
import (
"testing"
)
func TestPanAz(t *testing.T) {
defName := "PanAzTest"<|fim▁hole|> Bus: C(0),
Channels: A(PanAz{
NumChans: 2,
In: DC{In: C(1)}.Rate(AR),
Pos: A(Line{
Start: C(0),
End: C(0.5),
Dur: C(0.1),
}),
}),
}.Rate(AR)
}))
}<|fim▁end|>
|
// Out.ar(0, PanAz.ar(2, DC.ar(1), Line.ar(0, 1/2, 0.1)));
compareAndWriteStructure(t, defName, NewSynthdef(defName, func(p Params) Ugen {
return Out{
|
<|file_name|>submit_v2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import lib_v2 as lib
import sys
import os
def main(argv=None):<|fim▁hole|> Run from the working dir of the job which must contain (in addition
to the job files) a file named scheduler.conf with scheduler properties for the job.
<chargecode>, if present, gives the project to charge the job to.
Url is the url of the submitting website including the taskid parameter.
Returns 0 with "jobid=<jobid>" on stdout if job submitted ok
Returns 1 with multiline error message on stdout if error.
Returns 2 for the specific error of queue limit exceeded.
"""
#COMMAND LINE PARSING
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--account', metavar="ACCOUNT", type=str, default=lib.account,
help="The account string to use when submitting jobs. Default is read from config files.")
parser.add_argument('--url', metavar="URL", dest="URL", type=str,
help="Notification URL")
try:
cmdline_options, cmdline = parser.parse_known_args(argv)
cmdline = cmdline[1:] if not ('--' in cmdline) else cmdline[cmdline.index('--')+1:]
except Exception as e:
print "There was a problem submitting your job"
print e
sys.exit(1)
account = cmdline_options.account
url = cmdline_options.URL
#cmdline as an array (and already set)
tooltype = lib.getToolType(cmdline)
scheduler_properties = lib.getProperties("scheduler.conf")
# print scheduler_properties
scheduler_info = lib.schedulerInfo(scheduler_properties, tooltype)
# print scheduler_info
# If this is a "direct" run type job we don't need to create a qsub script, we'll just run batch_ommand.cmdline.
if scheduler_info["is_direct"]:
return lib.submitDirectJob(account, url, lib.email, lib.jobname, cmdline)
runtime = int(scheduler_info["runtime"])
useLocalDisk = False
"""
Workaround for problems with file io on oasis and longer mrbayes runs. Instead of running on
oasis, we'll copy the working dir to the compute nodes local storage and copy the results back
when the job completes. Since many mrbayes jobs timeout we need a special trick to copy results
of jobs that timeout: Right before we launch mrbayes we launch a shell script in the background
that sleeps a few min less than the job's runtime and then copies the results. If mrbayes terminates
normally the background sleep is killed automatically.
"""
if (tooltype == "mrbayes" and runtime > 60):
useLocalDisk = True
# I'm backing out the workaround by setting useLocalDisk to false.
useLocalDisk = False
# Write the command line to a file, batch_command.cmdline.
rfile = open(lib.cmdfile, "w")
rfile.write("#!/bin/sh\n")
rfile.writelines((" ".join(cmdline), "\n"))
rfile.close()
os.chmod(lib.cmdfile, 0744);
# Create the qsub script
rfile = open(lib.runfile, "w")
text = """#!/bin/sh
#PBS -q %s
#PBS -N %s
#PBS -l walltime=00:%d:00
#PBS -o scheduler_stdout.txt
#PBS -e scheduler_stderr.txt
#PBS -W umask=0007
##PBS -V
#PBS -v QOS=2
#PBS -M %s
#PBS -m ae
#PBS -A %s
""" % (scheduler_info["queue"], lib.jobname, scheduler_info["runtime"], lib.email, account)
rfile.write(text)
text = "#PBS -l nodes=%d:ppn=%d\n" % (scheduler_info["nodes"], scheduler_info["ppn"])
rfile.write(text)
rfile.write("cd %s\n" % (lib.jobdir, lib.local_jobdir)[useLocalDisk])
if useLocalDisk == True:
# Note that it's critical that newlines in the text string are all within the double
# quotes; otherwise the echo command line would be split across lines and make no sense.
text = """"Due to filesystem problems intermediate results for longer mrbayes runs
will not be available while the job is running. The result files will be
available when mrbayes finishes.
We're working to find a solution." """
rfile.write("echo %s > %s/INTERMEDIATE_RESULTS_README.TXT\n" % (text, lib.jobdir))
rfile.write("cp -r %s/* .\n" % lib.jobdir);
sleepTime = int(scheduler_info["runtime"]) - 10
rfile.write("sleep_cp.sh %s %s &\n" % (sleepTime, lib.jobdir))
text = """
source /etc/profile.d/modules.sh
echo Job starting at `date` > start.txt
curl %s\&status=START
export CIPRES_THREADSPP=%d
export CIPRES_NP=%d
%s 1>stdout.txt 2>stderr.txt
echo Job finished at `date` > done.txt
""" % (url,
int(scheduler_info["threads_per_process"]),
int(scheduler_info["mpi_processes"]),
lib.cmdfile)
rfile.write(text)
if (useLocalDisk):
text = """
echo "Job completed, starting to copy working directory."
echo "mkdir %s.complete"
mkdir %s.complete
echo "cp -r * %s.complete"
cp -r * %s.complete
echo "mv %s %s.sleep"
mv %s %s.sleep
echo "mv %s.complete %s"
mv %s.complete %s
echo "rm -rf %s.sleep"
rm -rf %s.sleep
echo "Finished copying working directory."
""" % (lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir, lib.jobdir)
rfile.write(text)
rfile.write("curl %s\&status=DONE\n" % url)
rfile.close()
return lib.submitJob()
return 0
if __name__ == "__main__":
sys.exit(main())<|fim▁end|>
|
"""
Usage is:
submit.py [--account <chargecode>] [--url <url>] -- <commandline>
|
<|file_name|>config.js<|end_file_name|><|fim▁begin|>/* eslint-env node*/
var gutil = require('gulp-util')
var paths = {
layouts: {
componentsDir: './app/components/**/*.jade',
src: './app/views/**/*.jade',
dest: './app/public/assets/html/'
},
styles: {
componentsDir: './app/lib/stylesheets/**/*.styl',
src: './app/lib/stylesheets/styles.styl',
dest: './app/public/assets/css/'
},
scripts: {
entry: './app/lib/scripts/entry.jsx',
src: ['./app/lib/scripts/**/*.jsx', './app/lib/scripts/**/*.js'],
<|fim▁hole|>var onError = function (error) {
gutil.log(gutil.colors.red(error))
this.emit('end')
}
module.exports = {
paths: paths,
onError: onError
}<|fim▁end|>
|
dest: './app/public/assets/js/'
}
}
|
<|file_name|>ClassLoaderHelper.java<|end_file_name|><|fim▁begin|>package de.newsarea.homecockpit.connector.facade.registration.util;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.net.URL;<|fim▁hole|>
public final class ClassLoaderHelper {
private static Logger log = LoggerFactory.getLogger(ClassLoaderHelper.class);
private ClassLoaderHelper() { }
public static Constructor<?> determineFirstConstructor(Class<?> clazz) {
try {
for(Constructor<?> constructor : clazz.getConstructors()) {
return constructor;
}
} catch (SecurityException e) {
log.error(e.getMessage(), e);
}
return null;
}
public static Constructor<?> determineConstructorByArgumentTypes(Class<?> clazz, Class<?>[] argumentTypes) {
try {
for(Constructor<?> constructor : clazz.getConstructors()) {
if(isAssignableFrom(constructor, argumentTypes)) {
return constructor;
}
}
} catch (SecurityException e) {
log.error(e.getMessage(), e);
}
return null;
}
private static boolean isAssignableFrom(Constructor<?> constructor, Class<?>[] argumentTypes) {
Class<?>[] constructorArgTypes = constructor.getParameterTypes();
if(constructorArgTypes.length != argumentTypes.length) {
return false;
}
// ~
for(int i=0; i < argumentTypes.length; i++) {
if(!argumentTypes[i].isAssignableFrom(constructorArgTypes[i])) {
return false;
}
}
return true;
}
public static List<Class<?>> determineClasses(String packageName) throws ClassNotFoundException, IOException {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
assert classLoader != null;
String path = packageName.replace('.', '/');
Enumeration<URL> resources = classLoader.getResources(path);
List<File> dirs = new ArrayList<>();
while (resources.hasMoreElements()) {
URL resource = resources.nextElement();
dirs.add(new File(resource.getFile().replaceAll("%20", " ")));
}
ArrayList<Class<?>> classes = new ArrayList<>();
for (File directory : dirs) {
classes.addAll(findClasses(directory, packageName));
}
return classes;
}
public static List<Class<?>> findClasses(File directory, String packageName) throws ClassNotFoundException {
List<Class<?>> classes = new ArrayList<>();
if (!directory.exists()) {
return classes;
}
File[] files = directory.listFiles();
for (File file : files) {
if (file.isDirectory()) {
assert !file.getName().contains(".");
classes.addAll(findClasses(file, packageName + "." + file.getName()));
} else if (file.getName().endsWith(".class")) {
classes.add(Class.forName(packageName + '.' + file.getName().substring(0, file.getName().length() - 6)));
}
}
return classes;
}
public static Method determineSetterMethod(Class<?> clazz, String name) {
for(Method method : clazz.getMethods()) {
if(method.getName().equalsIgnoreCase("set" + name)) {
return method;
}
}
return null;
}
}<|fim▁end|>
|
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
|
<|file_name|>MenuWindowManager.java<|end_file_name|><|fim▁begin|>package org.jfl2.fx.controller.menu;
import javafx.application.Platform;
import javafx.event.Event;
import javafx.scene.Node;
import javafx.scene.control.RadioButton;
import javafx.scene.input.KeyEvent;
import javafx.scene.input.MouseEvent;
import lombok.extern.slf4j.Slf4j;
import org.jfl2.core.util.Jfl2NumberUtils;
import org.jfl2.fx.control.MenuPane;
import java.util.*;
import java.util.stream.Collectors;
@Slf4j
public class MenuWindowManager {
private Map<String, MenuWindow> id2MenuWindow = new HashMap<>();
/**
* 表示中のメニュー
*/
private MenuWindow nowMenu;<|fim▁hole|> /**
* Pane
*/
private MenuPane menuPane;
/**
* Constructor
*
* @param pane
*/
public MenuWindowManager(MenuPane pane) {
menuPane = pane;
}
/**
* 管理対象にMenuWindowを追加
*
* @param menu
* @return
*/
public MenuWindowManager add(MenuWindow menu) {
id2MenuWindow.put(menu.id, menu);
return this;
}
/**
* id からMenuWindowを取得
*
* @param id Specify string
* @return
*/
public MenuWindow get(String id) {
return id2MenuWindow.get(id);
}
/**
* メニューを開く
*
* @param id
* @return
*/
public MenuWindowManager show(String id) {
nowMenu = get(id);
if (nowMenu != null) {
menuPane.setTitleText(nowMenu.id);
menuPane.setDescriptionText(nowMenu.description);
menuPane.getListView().setVisible(false);
menuPane.getListView().setManaged(false);
/* listview は廃止
menuPane.setItems(FXCollections.observableList(nowMenu.items));
VirtualFlow flow = (VirtualFlow) menuPane.getListView().getChildrenUnmodifiable().get(0);
double height = 0;
for (int n = 0; n < nowMenu.items.size(); n++) {
IndexedCell cell = flow.getCell(n);
if (cell != null) {
height += cell.getHeight();
}
}
height = Jfl2Const.getMaxValue(height, Jfl2Const.MENU_MAX_HEIGHT);
// menuPane.getListView().setStyle("-fx-pref-height: " + height + ";");
/**/
List<RadioButton> rList = nowMenu.items.stream().map(menuItem -> {
RadioButton btn = new RadioButton(menuItem.toString());
btn.setFocusTraversable(false);
btn.setToggleGroup(menuPane.getToggleGroup());
btn.onMouseEnteredProperty().set((ev) -> select(menuItem));
menuPane.getRadioBox().getChildren().add(btn);
menuPane.getButtons().add(btn);
return btn;
}).collect(Collectors.toList());
selectFirst();
menuPane.setVisible(true);
getFocus();
}
return this;
}
/**
* 1つ上を選択
*/
public MenuWindowManager up() {
select(selected - 1, true);
return this;
}
/**
* 1つ下を選択
*/
public MenuWindowManager down() {
select(selected + 1, true);
return this;
}
/**
* 現在選択されているものを実行する
*
* @return
*/
public MenuWindowManager enter() {
return enter(selected);
}
/**
* 指定したIndexのMenuを実行する
*
* @return
*/
public MenuWindowManager enter(int index) {
return enter(nowMenu.items.get(index));
}
/**
* 指定したMenuItemを実行する
*
* @param item MenuItem is executed.
* @return
*/
public MenuWindowManager enter(MenuItem item) {
hide();
item.getConsumer().accept(null);
return this;
}
/**
* 指定したボタンを選択する
*
* @param index 0開始
* @param loop 上下間ループするならtrue
* @return
*/
public MenuWindowManager select(int index, boolean loop) {
if (menuPane.getButtons() != null) {
selected = Jfl2NumberUtils.loopValue(index, menuPane.getButtons().size(), loop);
menuPane.getButtons().get(selected).setSelected(true);
}
return this;
}
/**
* 指定したボタンを選択する
*
* @param menuItem MenuItem
* @return
*/
public MenuWindowManager select(MenuItem menuItem){
int index=0;
for( MenuItem item : nowMenu.items ){
if(Objects.equals(item, menuItem)){
select(index, false);
}
index++;
}
return this;
}
/**
* 最初のボタンを選択する
*/
public MenuWindowManager selectFirst() {
select(0, false);
return this;
}
/**
* ListViewにフォーカスを移す
*/
public void getFocus() {
Platform.runLater(() -> {
Optional<RadioButton> selected = menuPane.getButtons().stream().filter(RadioButton::isSelected).findFirst();
selected.ifPresent(RadioButton::requestFocus);
});
}
/**
* メニューを閉じる
*
* @return
*/
public MenuWindowManager hide() {
menuPane.setVisible(false);
menuPane.setDescriptionText("");
// menuPane.clearItems();
menuPane.getMenuBox().getChildren().removeAll();
menuPane.getButtons().stream().forEach(node->node.onMouseEnteredProperty().unbind());
menuPane.getButtons().clear();
menuPane.getRadioBox().getChildren().clear();
nowMenu = null;
return this;
}
/**
* その他キーの処理
*
* @param event
* @return
*/
public MenuWindowManager quickSelect(Event event) {
if (KeyEvent.class.isInstance(event)) {
KeyEvent keyEvent = (KeyEvent) event;
nowMenu.items.stream().filter(
(item) -> item.getKey().isHandle(keyEvent)
).findFirst().ifPresent(item -> enter(item));
}
return this;
}
/**
* ホバー時のイベント
* @param mouseEvent
* @return
*/
public MenuWindowManager hover(MouseEvent mouseEvent) {
int index = 0;
for( Node node : menuPane.getRadioBox().getChildren() ){
if( node.contains(mouseEvent.getSceneX(), mouseEvent.getSceneY()) ){
select(index, false);
}
if( node.contains(mouseEvent.getScreenX(), mouseEvent.getScreenY()) ){
select(index, false);
}
index++;
}
return this;
}
}<|fim▁end|>
|
/**
* 選択中のアイテムIndex
*/
private int selected = -1;
|
<|file_name|>scheduled.py<|end_file_name|><|fim▁begin|>import json<|fim▁hole|>
import boto3
import hashlib
import jsonpatch
from dart.context.locator import injectable
from dart.model.trigger import TriggerType, TriggerState
from dart.message.call import TriggerCall
from dart.trigger.base import TriggerProcessor, execute_trigger
from dart.model.exception import DartValidationException
_logger = logging.getLogger(__name__)
scheduled_trigger = TriggerType(
name='scheduled',
description='Triggering from a scheduler',
params_json_schema={
'type': 'object',
'properties': {
'cron_pattern': {
'type': 'string',
'description': 'The CRON pattern for the schedule. See <a target="_blank" href=' + \
'"http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/ScheduledEvents.html"' + \
'>here</a> for correct syntax.'
},
},
'additionalProperties': False,
'required': ['cron_pattern'],
}
)
@injectable
class ScheduledTriggerProcessor(TriggerProcessor):
def __init__(self, workflow_service, dart_config):
self._workflow_service = workflow_service
self._trigger_type = scheduled_trigger
self._dart_config = dart_config
def trigger_type(self):
return self._trigger_type
def initialize_trigger(self, trigger, trigger_service):
""" :type trigger: dart.model.trigger.Trigger
:type trigger_service: dart.service.trigger.TriggerService """
self._validate_aws_cron_expression(trigger.data.args['cron_pattern'])
# http://boto3.readthedocs.org/en/latest/reference/services/events.html#CloudWatchEvents.Client.put_rule
client = boto3.client('events')
rule_name = self._create_rule_if_needed(client, trigger)
user_id = 'anonymous'
if trigger.data.user_id:
user_id = trigger.data.user_id
if len(trigger.data.tags) > 0:
workflow_id = trigger.data.tags[0]
# When a trigger is created in Dart, we should only create a corresponding rule + target if the state is set to
# ACTIVE.
if trigger.data.state == TriggerState.ACTIVE:
target = {
'Id': trigger.id,
'Arn': self._dart_config['triggers']['scheduled']['cloudwatch_scheduled_events_sns_arn'],
'Input': json.dumps({
'call': TriggerCall.PROCESS_TRIGGER,
'trigger_type_name': self._trigger_type.name,
'message': {
'trigger_id': trigger.id,
'user_id': user_id, # This info is for tracking WF when viewed in cloudwatch rules
# logging workflow_id will be auto generated in '/workflow/<workflow>/do-manual-trigger', this one is for future needs.
'workflow_id': workflow_id
},
}),
}
self._add_target_to_rule(client, rule_name, target)
def update_trigger(self, unmodified_trigger, modified_trigger):
""" :type unmodified_trigger: dart.model.trigger.Trigger
:type modified_trigger: dart.model.trigger.Trigger """
client = boto3.client('events')
patch_list = jsonpatch.make_patch(unmodified_trigger.to_dict(), modified_trigger.to_dict())
target = {
'Id': modified_trigger.id,
'Arn': self._dart_config['triggers']['scheduled']['cloudwatch_scheduled_events_sns_arn'],
'Input': json.dumps({
'call': TriggerCall.PROCESS_TRIGGER,
'trigger_type_name': self._trigger_type.name,
'message': {
'trigger_id': modified_trigger.id,
'user_id': modified_trigger.data.user_id,
'workflow_id': modified_trigger.data.workflow_ids[0]
},
}),
}
for patch in patch_list:
if patch['path'] == '/data/state':
if modified_trigger.data.state == TriggerState.ACTIVE:
rule_name = self._create_rule_if_needed(client, modified_trigger)
self._add_target_to_rule(client, rule_name, target)
elif modified_trigger.data.state == TriggerState.INACTIVE:
self._remove_target_from_prefix(client, unmodified_trigger)
else:
raise Exception('unrecognized trigger state "%s"' % modified_trigger.data.state)
elif patch['path'] == '/data/args/cron_pattern' and patch['op'] == 'replace':
self._remove_target_from_prefix(client, unmodified_trigger)
rule_name = self._create_rule_if_needed(client, modified_trigger)
self._add_target_to_rule(client, rule_name, target)
return modified_trigger
def evaluate_message(self, message, trigger_service):
""" :type message: dict
:type trigger_service: dart.service.trigger.TriggerService """
trigger_id = message['trigger_id']
trigger = trigger_service.get_trigger(trigger_id, raise_when_missing=False)
if not trigger:
_logger.info('trigger (id=%s) not found' % trigger_id)
return []
if trigger.data.state != TriggerState.ACTIVE:
_logger.info('expected trigger (id=%s) to be in ACTIVE state' % trigger.id)
return []
execute_trigger(trigger, self._trigger_type, self._workflow_service, _logger)
return [trigger_id]
def teardown_trigger(self, trigger, trigger_service):
""" :type trigger: dart.model.trigger.Trigger
:type trigger_service: dart.service.trigger.TriggerService """
client = boto3.client('events')
self._remove_target_from_prefix(client, trigger)
def _create_rule_if_needed(self, client, trigger):
"""
:param client: boto3.session.Session.client
:param trigger: dart.model.trigger.Trigger
:return: str
"""
rule_name = self._next_rule_name(client, trigger)
try:
client.describe_rule(Name=rule_name)
except Exception as e:
if 'ResourceNotFoundException' in e.message:
response = client.put_rule(
Name=rule_name,
ScheduleExpression='cron(%s)' % trigger.data.args['cron_pattern'],
State='ENABLED',
Description='scheduled trigger for dart'
)
_logger.info('Created cloudwatch rule (arn=%s) for trigger (id=%s, cron=%s)' % (response['RuleArn'], trigger.id, trigger.data.args['cron_pattern']))
else:
_logger.info('Failed to create cloudwatch rule for trigger (id=%s, cron=%s)' % (trigger.id, trigger.data.args['cron_pattern']))
raise e
return rule_name
def _add_target_to_rule(self, client, rule_name, target):
"""
:param client: boto3.session.Session.client
:param rule_name: str
:param target: str
"""
response = client.put_targets(
Rule=rule_name,
Targets=[target]
)
self._check_response(response)
_logger.info('Created target for trigger (id=%s) on cloudwatch rule (name=%s)' % (target['Id'], rule_name))
def _next_rule_name(self, client, trigger):
"""
This method determines what the next rule name should be for new triggers e.g. iff there is a certain cron
expression that resolves to 'dart-ABCDEF' after hashing and it already has 5 targets, then we create a new
cloudwatch rule with the name 'dart-ABCDEF-1'.
:param client: boto3.session.Session.client
:param trigger: dart.model.trigger.Trigger
:return: str
"""
rule_prefix = self._get_cloudwatch_events_rule_prefix(trigger.data.args['cron_pattern'])
rules = client.list_rules(NamePrefix=rule_prefix)['Rules']
if not rules:
return rule_prefix
for _rule in rules:
response = client.list_targets_by_rule(Rule=_rule['Name'], Limit=5)
if len(response['Targets']) < 5:
return _rule['Name']
return '%s-%d'% (rule_prefix, len(rules) + 1)
def _remove_target_from_prefix(self, client, trigger):
"""
This method goes through all rules with the determined rule prefix to remove the target from the appropriate
rule. The reason we have to iterate through all rules that match the prefix and can't do a direct removal by
rule name is because we don't store that anywhere on Dart side on creation.
:param client: boto3.session.Session.client
:param trigger: dart.model.trigger.Trigger
"""
rule_prefix = self._get_cloudwatch_events_rule_prefix(trigger.data.args['cron_pattern'])
rules = client.list_rules(NamePrefix=rule_prefix)['Rules']
for _rule in rules:
response = client.list_targets_by_rule(Rule=_rule['Name'], Limit=5)
for _target in response['Targets']:
if _target['Id'] == trigger.id:
r = client.remove_targets(Rule=_rule['Name'], Ids=[_target['Id']])
self._check_response(r)
_logger.info('Deleted target for trigger (id=%s) from cloudwatch rule (name=%s)' % (_target['Id'], _rule['Name']))
if len(response['Targets']) == 1:
client.delete_rule(Name=_rule['Name'])
_logger.info('Deleted cloudwatch rule (name=%s)' % _rule['Name'])
return
@staticmethod
def _get_cloudwatch_events_rule_name(trigger):
return 'dart-trigger-%s' % trigger.id
@staticmethod
def _get_cloudwatch_events_rule_prefix(cron_expression, hash_size=20):
"""
This method returns the new naming system for dart triggers. It hashes the cron pattern with sha1 to create new
cloudwatch rule name. We take only the first 20 chars because the max length allowed for cloudwatch rule name is
64.
:param cron_expression: dart.model.trigger.Trigger
:return: str
"""
return 'dart-%s' % hashlib.sha1(cron_expression).hexdigest()[:hash_size]
@staticmethod
def _check_response(response):
if response['FailedEntryCount'] > 0:
error_msg = ''
for failure in response['FailedEntries']:
msg = 'Failed on -- Target Id %s, ErrorCode %s, ErrorMessage: %s\n'
error_msg += msg % (failure['TargetId'], failure['ErrorCode'], failure['ErrorMessage'])
raise Exception(error_msg)
@staticmethod
def _validate_aws_cron_expression(cron_expression):
# See the Note on: http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/ScheduledEvents.html
cron_pattern_split = cron_expression.split()
if '?' not in [cron_pattern_split[2], cron_pattern_split[4]]:
raise DartValidationException('CRON Validation Error: Support for specifying both a day-of-week and a '
'day-of-month value is not complete (you must currently use the "?"'
'character in one of these fields).')<|fim▁end|>
|
import logging
|
<|file_name|>zone_thousand_needles.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/>
* Copyright (C) 2006-2009 ScriptDev2 <https://scriptdev2.svn.sourceforge.net/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* ScriptData
SDName: Thousand Needles
SD%Complete: 100
SDComment: Support for Quest: 4770, 4904, 4966, 5151.
SDCategory: Thousand Needles
EndScriptData */
/* ContentData
npc_kanati
npc_lakota_windsong
npc_swiftmountain
npc_enraged_panther
go_panther_cage
EndContentData */
#include "ScriptMgr.h"
#include "GameObject.h"
#include "GameObjectAI.h"
#include "Player.h"
#include "ScriptedEscortAI.h"
#include "ScriptedGossip.h"
/*#####
# npc_kanati
######*/
enum Kanati
{
SAY_KAN_START = 0,
QUEST_PROTECT_KANATI = 4966,
NPC_GALAK_ASS = 10720
};
Position const GalakLoc = {-4867.387695f, -1357.353760f, -48.226f, 0.0f};
class npc_kanati : public CreatureScript
{
public:
npc_kanati() : CreatureScript("npc_kanati") { }
struct npc_kanatiAI : public EscortAI
{
npc_kanatiAI(Creature* creature) : EscortAI(creature) { }
void Reset() override { }
void WaypointReached(uint32 waypointId, uint32 /*pathId*/) override
{
switch (waypointId)
{
case 0:
Talk(SAY_KAN_START);
DoSpawnGalak();
break;
case 1:
if (Player* player = GetPlayerForEscort())
player->GroupEventHappens(QUEST_PROTECT_KANATI, me);
break;
}
}
void DoSpawnGalak()
{
for (int i = 0; i < 3; ++i)
me->SummonCreature(NPC_GALAK_ASS, GalakLoc, TEMPSUMMON_TIMED_DESPAWN_OUT_OF_COMBAT, 25000);<|fim▁hole|> summoned->AI()->AttackStart(me);
}
void QuestAccept(Player* player, Quest const* quest) override
{
if (quest->GetQuestId() == QUEST_PROTECT_KANATI)
Start(false, false, player->GetGUID(), quest, true);
}
};
CreatureAI* GetAI(Creature* creature) const override
{
return new npc_kanatiAI(creature);
}
};
/*######
# npc_lakota_windsong
######*/
enum Lakota
{
SAY_LAKO_START = 0,
SAY_LAKO_LOOK_OUT = 1,
SAY_LAKO_HERE_COME = 2,
SAY_LAKO_MORE = 3,
SAY_LAKO_END = 4,
QUEST_FREE_AT_LAST = 4904,
NPC_GRIM_BANDIT = 10758,
ID_AMBUSH_1 = 0,
ID_AMBUSH_2 = 2,
ID_AMBUSH_3 = 4
};
Position const BanditLoc[6] =
{
{-4905.479492f, -2062.732666f, 84.352f, 0.0f},
{-4915.201172f, -2073.528320f, 84.733f, 0.0f},
{-4878.883301f, -1986.947876f, 91.966f, 0.0f},
{-4877.503906f, -1966.113403f, 91.859f, 0.0f},
{-4767.985352f, -1873.169189f, 90.192f, 0.0f},
{-4788.861328f, -1888.007813f, 89.888f, 0.0f}
};
class npc_lakota_windsong : public CreatureScript
{
public:
npc_lakota_windsong() : CreatureScript("npc_lakota_windsong") { }
struct npc_lakota_windsongAI : public EscortAI
{
npc_lakota_windsongAI(Creature* creature) : EscortAI(creature) { }
void Reset() override { }
void WaypointReached(uint32 waypointId, uint32 /*pathId*/) override
{
switch (waypointId)
{
case 8:
Talk(SAY_LAKO_LOOK_OUT);
DoSpawnBandits(ID_AMBUSH_1);
break;
case 14:
Talk(SAY_LAKO_HERE_COME);
DoSpawnBandits(ID_AMBUSH_2);
break;
case 21:
Talk(SAY_LAKO_MORE);
DoSpawnBandits(ID_AMBUSH_3);
break;
case 45:
Talk(SAY_LAKO_END);
if (Player* player = GetPlayerForEscort())
player->GroupEventHappens(QUEST_FREE_AT_LAST, me);
break;
}
}
void DoSpawnBandits(int AmbushId)
{
for (int i = 0; i < 2; ++i)
me->SummonCreature(NPC_GRIM_BANDIT, BanditLoc[i+AmbushId], TEMPSUMMON_TIMED_OR_DEAD_DESPAWN, 60000);
}
void QuestAccept(Player* player, Quest const* quest) override
{
if (quest->GetQuestId() == QUEST_FREE_AT_LAST)
{
Talk(SAY_LAKO_START, player);
me->SetFaction(FACTION_ESCORTEE_H_NEUTRAL_ACTIVE);
Start(false, false, player->GetGUID(), quest);
}
}
};
CreatureAI* GetAI(Creature* creature) const override
{
return new npc_lakota_windsongAI(creature);
}
};
/*######
# npc_paoka_swiftmountain
######*/
enum Packa
{
SAY_START = 0,
SAY_WYVERN = 1,
SAY_COMPLETE = 2,
QUEST_HOMEWARD = 4770,
NPC_WYVERN = 4107
};
Position const WyvernLoc[3] =
{
{-4990.606f, -906.057f, -5.343f, 0.0f},
{-4970.241f, -927.378f, -4.951f, 0.0f},
{-4985.364f, -952.528f, -5.199f, 0.0f}
};
class npc_paoka_swiftmountain : public CreatureScript
{
public:
npc_paoka_swiftmountain() : CreatureScript("npc_paoka_swiftmountain") { }
struct npc_paoka_swiftmountainAI : public EscortAI
{
npc_paoka_swiftmountainAI(Creature* creature) : EscortAI(creature) { }
void Reset() override { }
void WaypointReached(uint32 waypointId, uint32 /*pathId*/) override
{
switch (waypointId)
{
case 15:
Talk(SAY_WYVERN);
DoSpawnWyvern();
break;
case 26:
Talk(SAY_COMPLETE);
break;
case 27:
if (Player* player = GetPlayerForEscort())
player->GroupEventHappens(QUEST_HOMEWARD, me);
break;
}
}
void DoSpawnWyvern()
{
for (int i = 0; i < 3; ++i)
me->SummonCreature(NPC_WYVERN, WyvernLoc[i], TEMPSUMMON_TIMED_OR_DEAD_DESPAWN, 60000);
}
void QuestAccept(Player* player, Quest const* quest) override
{
if (quest->GetQuestId() == QUEST_HOMEWARD)
{
Talk(SAY_START, player);
me->SetFaction(FACTION_ESCORTEE_H_NEUTRAL_ACTIVE);
Start(false, false, player->GetGUID(), quest);
}
}
};
CreatureAI* GetAI(Creature* creature) const override
{
return new npc_paoka_swiftmountainAI(creature);
}
};
enum PantherCage
{
QUEST_HYPERCAPACITOR_GIZMO = 5151,
ENRAGED_PANTHER = 10992
};
class go_panther_cage : public GameObjectScript
{
public:
go_panther_cage() : GameObjectScript("go_panther_cage") { }
struct go_panther_cageAI : public GameObjectAI
{
go_panther_cageAI(GameObject* go) : GameObjectAI(go) { }
bool GossipHello(Player* player) override
{
me->UseDoorOrButton();
if (player->GetQuestStatus(QUEST_HYPERCAPACITOR_GIZMO) == QUEST_STATUS_INCOMPLETE)
{
if (Creature* panther = me->FindNearestCreature(ENRAGED_PANTHER, 5, true))
{
panther->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NON_ATTACKABLE);
panther->SetReactState(REACT_AGGRESSIVE);
panther->AI()->AttackStart(player);
}
}
return true;
}
};
GameObjectAI* GetAI(GameObject* go) const override
{
return new go_panther_cageAI(go);
}
};
class npc_enraged_panther : public CreatureScript
{
public:
npc_enraged_panther() : CreatureScript("npc_enraged_panther") { }
CreatureAI* GetAI(Creature* creature) const override
{
return new npc_enraged_pantherAI(creature);
}
struct npc_enraged_pantherAI : public ScriptedAI
{
npc_enraged_pantherAI(Creature* creature) : ScriptedAI(creature) { }
void Reset() override
{
me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NON_ATTACKABLE);
me->SetReactState(REACT_PASSIVE);
}
void UpdateAI(uint32 /*diff*/) override
{
if (!UpdateVictim())
return;
DoMeleeAttackIfReady();
}
};
};
void AddSC_thousand_needles()
{
new npc_kanati();
new npc_lakota_windsong();
new npc_paoka_swiftmountain();
new npc_enraged_panther();
new go_panther_cage();
}<|fim▁end|>
|
}
void JustSummoned(Creature* summoned) override
{
|
<|file_name|>test_affine_channel_op.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit testing for affine_channel_op
"""
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
import paddle.fluid.core as core
import paddle.fluid as fluid
def affine_channel(x, scale, bias, layout):
C = x.shape[1] if layout == 'NCHW' else x.shape[-1]
if len(x.shape) == 4:
new_shape = (1, C, 1, 1) if layout == 'NCHW' else (1, 1, 1, C)
else:
new_shape = (1, C)
scale = scale.reshape(new_shape)
bias = bias.reshape(new_shape)
return x * scale + bias
class TestAffineChannelOp(OpTest):
def setUp(self):
self.op_type = "affine_channel"
self.init_test_case()
x = np.random.random(self.shape).astype("float64")
scale = np.random.random(self.C).astype("float64")
bias = np.random.random(self.C).astype("float64")
y = affine_channel(x, scale, bias, self.layout)
self.inputs = {'X': x, 'Scale': scale, 'Bias': bias}
self.attrs = {'data_layout': self.layout}
self.outputs = {'Out': y}
def test_check_output(self):
self.check_output()
def test_check_grad(self):
self.check_grad(['X', 'Scale', 'Bias'], 'Out')
def test_check_grad_stopgrad_dx(self):
self.check_grad(['Scale', 'Bias'], 'Out', no_grad_set=set('X'))
def test_check_grad_stopgrad_dscale_dbias(self):
self.check_grad(['X'], 'Out', no_grad_set=set(['Scale', 'Bias']))
def init_test_case(self):
self.shape = [2, 100, 3, 3]
self.C = 100
self.layout = 'NCHW'
class TestAffineChannelOpError(unittest.TestCase):
def test_errors(self):
with fluid.program_guard(fluid.Program()):
def test_x_type():
input_data = np.random.random(2, 1, 2, 2).astype("float32")
fluid.layers.affine_channel(input_data)
self.assertRaises(TypeError, test_x_type)
def test_x_dtype():
x2 = fluid.layers.data(
name='x2', shape=[None, 1, 2, 2], dtype='int32')
fluid.layers.affine_channel(x2)
self.assertRaises(TypeError, test_x_dtype)
def test_scale_type():
x3 = fluid.layers.data(
name='x3', shape=[None, 1, 2, 2], dtype='float32')
fluid.layers.affine_channel(x3, scale=1)
self.assertRaises(TypeError, test_scale_type)
def test_bias_type():
x4 = fluid.layers.data(
name='x4', shape=[None, 1, 2, 2], dtype='float32')
fluid.layers.affine_channel(x4, bias=1)
self.assertRaises(TypeError, test_bias_type)
class TestAffineChannelNHWC(TestAffineChannelOp):
def init_test_case(self):
self.shape = [2, 3, 3, 100]
self.C = 100
self.layout = 'NHWC'
def test_check_grad_stopgrad_dx(self):
return
def test_check_grad_stopgrad_dscale_dbias(self):
return
class TestAffineChannel2D(TestAffineChannelOp):
def init_test_case(self):
self.shape = [2, 100]
self.C = 100
self.layout = 'NCHW'
def test_check_grad_stopgrad_dx(self):
return
def test_check_grad_stopgrad_dscale_dbias(self):
return
# TODO(qingqing): disable unit testing for large shape
#class TestAffineChannelNCHWLargeShape(TestAffineChannelOp):
# def init_test_case(self):
# self.shape = [4, 128, 112, 112]
# self.C = 128
# self.layout = 'NCHW'
#
# # since the gradient check is very slow in large shape, so skip check_grad
# def test_check_grad(self):
# pass
#
# def test_check_grad_stopgrad_dx(self):
# pass
#
# def test_check_grad_stopgrad_dscale_dbias(self):
# pass
#class TestAffineChannelNHWCLargeShape(TestAffineChannelNCHWLargeShape):
# def init_test_case(self):
# self.shape = [64, 32, 32, 128]
# self.C = 128
# self.layout = 'NHWC'
if __name__ == '__main__':
unittest.main()<|fim▁end|>
| |
<|file_name|>dfvWindow.tsx<|end_file_name|><|fim▁begin|>import * as React from './dfvReact'
import { dfvFront } from "./dfvFront";
export interface PopWindowPara {
/**
* 是否不覆盖全屏
*/
notCover?: boolean;
/**
* 是否显示红色背景的错误提示框
*/
isErr?: boolean;
/**
* 自动关闭时间,为0则不自动关闭
*/
closeTime?: number;
}
export class dfvWindow {
//主窗口
private dialog: HTMLDivElement | undefined;
//cover层
private divCover: HTMLDivElement | undefined;
//内容层
private divContent: HTMLDivElement | undefined;
static coverZ = 999;
constructor() {
dfvWindow.coverZ++;
}
/**
* 添加一个黑色半透明的遮盖层
* @returns {dfvWindow}
*/
addCover() {
if (!this.divCover) {
this.divCover = document.createElement("div");
this.divCover.className = "cover_div cover_black"
this.divCover.style.zIndex = dfvWindow.coverZ + "";
document.body.appendChild(this.divCover);
}
return this;
}
/**
* 处理PopWindowPara参数
* @param para
* @returns {dfvWindow}
*/
procParas(para: PopWindowPara) {
if (para && para.closeTime! > 0) {
this.autoClose(para.closeTime)
}
this.isError = para.isErr!!;
if (!para.notCover)
this.addCover();
return this;
}
/**
* 是否显示红色错误提示样式
* @type {boolean}
*/
isError = false;
/**
* 显示窗口
* @param title 标题
* @param content 内容
* @returns {dfvWindow}
*/
public show(title: string | HTMLElement, content?: string | HTMLElement | null) {
if (this.dialog)
return this;
let c1 = this.isError ? "ba_tra_red" : "ba_tra_blue"
let c2 = this.isError ? "icon_err" : "icon_close"
this.dialog =
<div className={"pop_border anim_in " + c1}>
{
this.divContent =
<div className="pop_cont">
<div className="vmid pad5">
{title}
</div>
{content ? <div style="margin-top: 10px">{content}</div> : null}
</div>
}
<div className="absol_close">
<tt onclick={() => this.onButtonCancelClick()}
className={"rotate_hover " + c2} />
</div>
</div>
this.dialog!.style.zIndex = (dfvWindow.coverZ + 1) + "";
document.body.appendChild(this.dialog!);
this.reSize();
this.resizeTime = setInterval(this.reSize, 200);
return this;
}
/**
* 显示带一个【确定】按钮的窗口
* @param title
* @param content
* @param onOk 按钮回调
* @returns {dfvWindow}
*/
public showWithOk(title: string | HTMLElement,
content: string | HTMLElement | null,
onOk: (e: HTMLElement) => void) {
this.show(title, this.okWindow(content, onOk))
return this;
}
/**
* 关闭按钮点击事件回调
*/
onButtonCancelClick = () => {
this.close();
}
/**
* 将窗口设为自动关闭
* @param time 自动关闭时间:毫秒
* @returns {dfvWindow}
*/
autoClose(time: number = 3000) {
setTimeout(() => {
this.close()
}, time);
return this;
}
/**
* 关闭窗口
*/
close = () => {
try {
clearInterval(this.resizeTime);
//窗口已关闭
if (!this.divContent || !this.dialog) {
return;
}
if (this.divCover) {
try {
document.body.removeChild(this.divCover);
} catch (e) {
}
}
this.divCover = null as any;
this.divContent = null as any;
let dia = this.dialog;
this.dialog = undefined;
if (window.history.pushState != null) {
dia.className += " anim_out";
setTimeout(() => {
//窗口已关闭
try {
document.body.removeChild(dia);
} catch (e) {
}
}, 300)
} else {
try {
document.body.removeChild(dia);
} catch (e) {<|fim▁hole|> }
} catch (e) {
dfvFront.onCatchError(e)
}
}
/**
* 修正窗口大小与位置
*/
private reSize = () => {
if (!this.dialog || !this.divContent)
return;
if (this.dialog.offsetWidth < document.documentElement!.clientWidth) {
let w = document.documentElement!.clientWidth - this.dialog.offsetWidth;
this.dialog.style.marginLeft = ((w >> 1) & (~3)) + "px";
}
this.divContent.style.maxWidth = document.documentElement!.clientWidth - 40 + "px";
if (this.dialog.offsetHeight < document.documentElement!.clientHeight) {
let h = (Math.floor((document.documentElement!.clientHeight - this.dialog.offsetHeight) / 3));
h = h & (~3);
this.dialog.style.marginTop = h + "px";
}
this.divContent.style.maxHeight = document.documentElement!.clientHeight - 45 + "px";
}
private resizeTime: any;
/**
* 确定按钮的文字
* @type {string}
*/
buttonOkText = "确定";
private okWindow(content: string | HTMLElement | null, onOk: (e: HTMLElement) => void) {
return (
<div>
<div>
{content}
</div>
<div class="h_m">
<button class="button_blue pad6-12 mar5t font_0 bold" onclick={e => onOk(e.currentTarget)}>
{this.buttonOkText}
</button>
</div>
</div>
)
}
}<|fim▁end|>
|
}
|
<|file_name|>dapp_id.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
<|fim▁hole|>// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Dapp Id type
/// Dapplication Internal Id
#[derive(Debug, Default, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct DappId(pub String);
impl Into<String> for DappId {
fn into(self) -> String {
self.0
}
}
impl From<String> for DappId {
fn from(s: String) -> Self {
DappId(s)
}
}
#[cfg(test)]
mod tests {
use serde_json;
use super::DappId;
#[test]
fn should_serialize_dapp_id() {
// given
let id = DappId("testapp".into());
// when
let res = serde_json::to_string(&id).unwrap();
// then
assert_eq!(res, r#""testapp""#);
}
#[test]
fn should_deserialize_dapp_id() {
// given
let id = r#""testapp""#;
// when
let res: DappId = serde_json::from_str(id).unwrap();
// then
assert_eq!(res, DappId("testapp".into()));
}
}<|fim▁end|>
|
// You should have received a copy of the GNU General Public License
|
<|file_name|>1-2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import random
import sys
class DayLife:
"""Life in a day."""
def __init__(self, date, life):
"""Set birth datetime and life."""
self.birthdate = date
self.life = life
finalyear = self.birthdate.year + self.life
finaldate = datetime.datetime(finalyear, self.birthdate.month,
self.birthdate.day)
self.finaldate = finaldate - datetime.timedelta(days=1)
def now(self):
"""Calculate current time."""
curdate = datetime.datetime.now()
maxdays = (self.finaldate - self.birthdate).days
curdays = (curdate - self.birthdate).days
curtime = datetime.timedelta(days=1) / maxdays
curtime = curtime * curdays
return datetime.time(
(curtime.seconds / 60) / 60,
(curtime.seconds / 60) % 60,
curtime.seconds % 60)
if __name__ == '__main__':
# options
startyear = 1900
endyear = 2000
life = 200
print startyear, "<= a <=", endyear
print "n =", life
daycount = (datetime.datetime(endyear, 12, 31) -
datetime.datetime(startyear, 1, 1)).days
birthdate = datetime.datetime(startyear, 1, 1) + \
datetime.timedelta(days=random.randint(0, daycount))
args = sys.argv
if len(args) == 4:
year = int(args[1])
month = int(args[2])
date = int(args[3])
birthdate = datetime.datetime(year, month, date)
print "birthdate:", birthdate.date()<|fim▁hole|> mylife = DayLife(birthdate, life)
print "finaldate:", mylife.finaldate.date()
print "today:", mylife.now()<|fim▁end|>
| |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
Application level configuration and logging
"""
import os
import global_settings
import sys
from logging.config import dictConfig
from importlib import import_module
import logging
log = logging.getLogger(__name__)
class Settings(object):
"""
Configuration class for percept
"""
settings_list = None
def _initialize(self, settings_module):
"""
Initialize the settings from a given settings_module
settings_module - path to settings module
"""
#Get the global settings values and assign them as self attributes
self.settings_list = []
for setting in dir(global_settings):
#Only get upper case settings
if setting == setting.upper():
setattr(self, setting, getattr(global_settings, setting))
self.settings_list.append(setting)
#If a settings module was passed in, import it, and grab settings from it
#Overwrite global settings with theses
if settings_module is not None:
self.SETTINGS_MODULE = settings_module
#Try to import the settings module
try:
mod = import_module(self.SETTINGS_MODULE)
except ImportError:
error_message = "Could not import settings at {0}".format(self.SETTINGS_MODULE)
log.exception(error_message)
raise ImportError(error_message)
#Grab uppercased settings as set them as self attrs
for setting in dir(mod):
if setting == setting.upper():
if setting == "INSTALLED_APPS":
self.INSTALLED_APPS += getattr(mod, setting)
else:
setattr(self, setting, getattr(mod, setting))
self.settings_list.append(setting)
#If PATH_SETTINGS is in the settings file, extend the system path to include it
if hasattr(self, "PATH_SETTINGS"):
for path in self.PATH_SETTINGS:
sys.path.extend(getattr(self,path))
self.settings_list = list(set(self.settings_list))
def _setup(self):
"""
Perform initial setup of the settings class, such as getting the settings module and setting the settings<|fim▁hole|> #Get the settings module from the environment variables
try:
settings_module = os.environ[global_settings.MODULE_VARIABLE]
except KeyError:
error_message = "Settings not properly configured. Cannot find the environment variable {0}".format(global_settings.MODULE_VARIABLE)
log.exception(error_message)
self._initialize(settings_module)
self._configure_logging()
def __getattr__(self, name):
"""
If a class is trying to get settings (attributes on this class)
"""
#If settings have not been setup, do so
if not self.configured:
self._setup()
#Return setting if it exists as a self attribute, None if it doesn't
if name in self.settings_list:
return getattr(self, name)
else:
return None
def _configure_logging(self):
"""
Setting up logging from logging config in settings
"""
if not self.LOGGING_CONFIG:
#Fallback to default logging in global settings if needed
dictConfig(self.DEFAULT_LOGGING)
else:
dictConfig(self.LOGGING_CONFIG)
@property
def configured(self):
return self.settings_list is not None
#Import this if trying to get settings elsewhere
settings = Settings()<|fim▁end|>
|
"""
settings_module = None
|
<|file_name|>formula_debugger.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2009 - 2015 by Yurii Chernyi <[email protected]>
Part of the Battle for Wesnoth Project http://www.wesnoth.org/
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY.
See the COPYING file for more details.
*/
/**
* @file
* Formula debugger - implementation
* */
#include "formula_debugger.hpp"
#include "formula.hpp"
#include "formula_function.hpp"
#include "game_display.hpp"
#include "log.hpp"
#include "resources.hpp"
#include "gui/dialogs/formula_debugger.hpp"
#include "gui/widgets/settings.hpp"
#include <boost/lexical_cast.hpp>
static lg::log_domain log_formula_debugger("ai/debug/formula");
#define DBG_FDB LOG_STREAM(debug, log_formula_debugger)
#define LOG_FDB LOG_STREAM(info, log_formula_debugger)
#define WRN_FDB LOG_STREAM(warn, log_formula_debugger)
#define ERR_FDB LOG_STREAM(err, log_formula_debugger)
namespace game_logic {
debug_info::debug_info(int arg_number, int counter, int level, const std::string &name, const std::string &str, const variant &value, bool evaluated)
: arg_number_(arg_number), counter_(counter), level_(level), name_(name), str_(str), value_(value), evaluated_(evaluated)
{
}
debug_info::~debug_info()
{
}
int debug_info::level() const
{
return level_;
}
const std::string& debug_info::name() const
{
return name_;
}
int debug_info::counter() const
{
return counter_;
}
const variant& debug_info::value() const
{
return value_;
}
void debug_info::set_value(const variant &value)
{
value_ = value;
}
bool debug_info::evaluated() const
{
return evaluated_;
}
void debug_info::set_evaluated(bool evaluated)
{
evaluated_ = evaluated;
}
const std::string& debug_info::str() const
{
return str_;
}
formula_debugger::formula_debugger()
: call_stack_(), counter_(0), current_breakpoint_(), breakpoints_(), execution_trace_(),arg_number_extra_debug_info(-1), f_name_extra_debug_info("")
{
add_breakpoint_step_into();
add_breakpoint_continue_to_end();
}
formula_debugger::~formula_debugger()
{
}
static void msg(const char *act, debug_info &i, const char *to="", const char *result = "")
{
DBG_FDB << "#" << i.counter() << act << std::endl <<" \""<< i.name().c_str() << "\"='" << i.str().c_str() << "' " << to << result << std::endl;
}
void formula_debugger::add_debug_info(int arg_number, const char *f_name)
{
arg_number_extra_debug_info = arg_number;
f_name_extra_debug_info = f_name;
}
const std::deque<debug_info>& formula_debugger::get_call_stack() const
{
return call_stack_;
}
const breakpoint_ptr formula_debugger::get_current_breakpoint() const
{
return current_breakpoint_;
}
const std::deque<debug_info>& formula_debugger::get_execution_trace() const
{
return execution_trace_;
}
void formula_debugger::check_breakpoints()
{
for( std::deque< breakpoint_ptr >::iterator b = breakpoints_.begin(); b!= breakpoints_.end(); ++b) {
if ((*b)->is_break_now()){
current_breakpoint_ = (*b);
show_gui();
current_breakpoint_ = breakpoint_ptr();
if ((*b)->is_one_time_only()) {
breakpoints_.erase(b);
}
break;
}
}
}
void formula_debugger::show_gui()
{
if (resources::screen == NULL) {
WRN_FDB << "do not showing debug window due to NULL gui" << std::endl;
return;
}
if (gui2::new_widgets) {
gui2::tformula_debugger debug_dialog(*this);
debug_dialog.show(resources::screen->video());
} else {
WRN_FDB << "do not showing debug window due to disabled --new-widgets"<< std::endl;
}
}
void formula_debugger::call_stack_push(const std::string &str)
{
call_stack_.push_back(debug_info(arg_number_extra_debug_info,counter_++,call_stack_.size(),f_name_extra_debug_info,str,variant(),false));
arg_number_extra_debug_info = -1;
f_name_extra_debug_info = "";
execution_trace_.push_back(call_stack_.back());
}
void formula_debugger::call_stack_pop()
{
execution_trace_.push_back(call_stack_.back());
call_stack_.pop_back();
}
void formula_debugger::call_stack_set_evaluated(bool evaluated)
{
call_stack_.back().set_evaluated(evaluated);
}
void formula_debugger::call_stack_set_value(const variant &v)
{
call_stack_.back().set_value(v);
}
variant formula_debugger::evaluate_arg_callback(const formula_expression &expression, const formula_callable &variables)
{
call_stack_push(expression.str());
check_breakpoints();
msg(" evaluating expression: ",call_stack_.back());
variant v = expression.execute(variables,this);
call_stack_set_value(v);
call_stack_set_evaluated(true);
msg(" evaluated expression: ",call_stack_.back()," to ",v.to_debug_string(NULL,true).c_str());
check_breakpoints();
call_stack_pop();
return v;
}
variant formula_debugger::evaluate_formula_callback(const formula &f, const formula_callable &variables)
{
call_stack_push(f.str());
check_breakpoints();
msg(" evaluating formula: ",call_stack_.back());
variant v = f.execute(variables,this);
call_stack_set_value(v);
call_stack_set_evaluated(true);
msg(" evaluated formula: ",call_stack_.back()," to ",v.to_debug_string(NULL,true).c_str());
check_breakpoints();
call_stack_pop();
return v;
}
variant formula_debugger::evaluate_formula_callback(const formula &f)
{
call_stack_push(f.str());
check_breakpoints();
msg(" evaluating formula without variables: ",call_stack_.back());
variant v = f.execute(this);
call_stack_set_value(v);
call_stack_set_evaluated(true);
msg(" evaluated formula without variables: ",call_stack_.back()," to ",v.to_debug_string(NULL,true).c_str());
check_breakpoints();
call_stack_pop();
return v;
}
base_breakpoint::base_breakpoint(formula_debugger &fdb, const std::string &name, bool one_time_only)
: fdb_(fdb), name_(name), one_time_only_(one_time_only)
{
}
base_breakpoint::~base_breakpoint()
{
}
bool base_breakpoint::is_one_time_only() const
{
return one_time_only_;
}
const std::string& base_breakpoint::name() const
{
return name_;
}
class end_breakpoint : public base_breakpoint {
public:
end_breakpoint(formula_debugger &fdb)
: base_breakpoint(fdb,"End", true)
{
}
virtual ~end_breakpoint()
{
}
virtual bool is_break_now() const
{
const std::deque<debug_info> &call_stack = fdb_.get_call_stack();
if ((call_stack.size() == 1) && (call_stack[0].evaluated()) ) {
return true;
}
return false;
}
};
class step_in_breakpoint : public base_breakpoint {
public:
step_in_breakpoint(formula_debugger &fdb)
: base_breakpoint(fdb,"Step",true)
{
}
virtual ~step_in_breakpoint()
{
}
virtual bool is_break_now() const
{
const std::deque<debug_info> &call_stack = fdb_.get_call_stack();
if (call_stack.empty() || call_stack.back().evaluated()) {
return false;
}
return true;
}
};
class step_out_breakpoint : public base_breakpoint {
public:
step_out_breakpoint(formula_debugger &fdb)
: base_breakpoint(fdb,"Step out",true), level_(fdb.get_call_stack().size()-1)
{
}
virtual ~step_out_breakpoint()
{
}
virtual bool is_break_now() const
{
const std::deque<debug_info> &call_stack = fdb_.get_call_stack();
if (call_stack.empty() || call_stack.back().evaluated()) {
return false;
}
if (call_stack.size() == level_) {
return true;
}
return false;
}
private:<|fim▁hole|> size_t level_;
};
class next_breakpoint : public base_breakpoint {
public:
next_breakpoint(formula_debugger &fdb)
: base_breakpoint(fdb,"Next",true), level_(fdb.get_call_stack().size())
{
}
virtual ~next_breakpoint()
{
}
virtual bool is_break_now() const
{
const std::deque<debug_info> &call_stack = fdb_.get_call_stack();
if (call_stack.empty() || call_stack.back().evaluated()) {
return false;
}
if (call_stack.size() == level_) {
return true;
}
return false;
}
private:
size_t level_;
};
void formula_debugger::add_breakpoint_continue_to_end()
{
breakpoints_.push_back(breakpoint_ptr(new end_breakpoint(*this)));
LOG_FDB << "added 'end' breakpoint"<< std::endl;
}
void formula_debugger::add_breakpoint_step_into()
{
breakpoints_.push_back(breakpoint_ptr(new step_in_breakpoint(*this)));
LOG_FDB << "added 'step into' breakpoint"<< std::endl;
}
void formula_debugger::add_breakpoint_step_out()
{
breakpoints_.push_back(breakpoint_ptr(new step_out_breakpoint(*this)));
LOG_FDB << "added 'step out' breakpoint"<< std::endl;
}
void formula_debugger::add_breakpoint_next()
{
breakpoints_.push_back(breakpoint_ptr(new next_breakpoint(*this)));
LOG_FDB << "added 'next' breakpoint"<< std::endl;
}
} // end of namespace game_logic<|fim▁end|>
| |
<|file_name|>sqlpoolrecommendedsensitivitylabels.go<|end_file_name|><|fim▁begin|>package synapse
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// SQLPoolRecommendedSensitivityLabelsClient is the azure Synapse Analytics Management Client
type SQLPoolRecommendedSensitivityLabelsClient struct {
BaseClient
}
// NewSQLPoolRecommendedSensitivityLabelsClient creates an instance of the SQLPoolRecommendedSensitivityLabelsClient
// client.
func NewSQLPoolRecommendedSensitivityLabelsClient(subscriptionID string) SQLPoolRecommendedSensitivityLabelsClient {
return NewSQLPoolRecommendedSensitivityLabelsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewSQLPoolRecommendedSensitivityLabelsClientWithBaseURI creates an instance of the
// SQLPoolRecommendedSensitivityLabelsClient client using a custom endpoint. Use this when interacting with an Azure
// cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewSQLPoolRecommendedSensitivityLabelsClientWithBaseURI(baseURI string, subscriptionID string) SQLPoolRecommendedSensitivityLabelsClient {
return SQLPoolRecommendedSensitivityLabelsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// Update update recommended sensitivity labels states of a given SQL Pool using an operations batch.
// Parameters:
// resourceGroupName - the name of the resource group. The name is case insensitive.
// workspaceName - the name of the workspace
// SQLPoolName - SQL pool name
func (client SQLPoolRecommendedSensitivityLabelsClient) Update(ctx context.Context, resourceGroupName string, workspaceName string, SQLPoolName string, parameters RecommendedSensitivityLabelUpdateList) (result autorest.Response, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/SQLPoolRecommendedSensitivityLabelsClient.Update")
defer func() {
sc := -1
if result.Response != nil {
sc = result.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
if err := validation.Validate([]validation.Validation{
{TargetValue: client.SubscriptionID,
Constraints: []validation.Constraint{{Target: "client.SubscriptionID", Name: validation.MinLength, Rule: 1, Chain: nil}}},
{TargetValue: resourceGroupName,
Constraints: []validation.Constraint{{Target: "resourceGroupName", Name: validation.MaxLength, Rule: 90, Chain: nil},
{Target: "resourceGroupName", Name: validation.MinLength, Rule: 1, Chain: nil},
{Target: "resourceGroupName", Name: validation.Pattern, Rule: `^[-\w\._\(\)]+$`, Chain: nil}}}}); err != nil {
return result, validation.NewError("synapse.SQLPoolRecommendedSensitivityLabelsClient", "Update", err.Error())
}
<|fim▁hole|> req, err := client.UpdatePreparer(ctx, resourceGroupName, workspaceName, SQLPoolName, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "synapse.SQLPoolRecommendedSensitivityLabelsClient", "Update", nil, "Failure preparing request")
return
}
resp, err := client.UpdateSender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "synapse.SQLPoolRecommendedSensitivityLabelsClient", "Update", resp, "Failure sending request")
return
}
result, err = client.UpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "synapse.SQLPoolRecommendedSensitivityLabelsClient", "Update", resp, "Failure responding to request")
return
}
return
}
// UpdatePreparer prepares the Update request.
func (client SQLPoolRecommendedSensitivityLabelsClient) UpdatePreparer(ctx context.Context, resourceGroupName string, workspaceName string, SQLPoolName string, parameters RecommendedSensitivityLabelUpdateList) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"sqlPoolName": autorest.Encode("path", SQLPoolName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"workspaceName": autorest.Encode("path", workspaceName),
}
const APIVersion = "2020-12-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPatch(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/recommendedSensitivityLabels", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client SQLPoolRecommendedSensitivityLabelsClient) UpdateSender(req *http.Request) (*http.Response, error) {
return client.Send(req, azure.DoRetryWithRegistration(client.Client))
}
// UpdateResponder handles the response to the Update request. The method always
// closes the http.Response Body.
func (client SQLPoolRecommendedSensitivityLabelsClient) UpdateResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}<|fim▁end|>
| |
<|file_name|>stock_picking.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Odoo, Open Source Business Applications
# Copyright (c) 2015 Odoo S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
class StockPicking(models.Model):
_inherit = 'stock.picking'
carrier_price = fields.Float(string="Shipping Cost", readonly=True)
delivery_type = fields.Selection(related='carrier_id.delivery_type', readonly=True)
@api.multi
def do_transfer(self):
res = super(StockPicking, self).do_transfer()
if self.carrier_id and self.carrier_id.delivery_type != 'grid':
self.send_to_shipper()
return res
# Signature due to strange old api methods
@api.model
def _prepare_shipping_invoice_line(self, picking, invoice):
picking.ensure_one()
invoice.ensure_one()
carrier = picking.carrier_id
# No carrier
if not carrier:
return None
# Carrier already invoiced on the sale order
if any(inv_line.product_id.id == carrier.product_id.id for inv_line in invoice.invoice_line_ids):
return None
# Classic carrier
if carrier.delivery_type == 'grid':
return super(StockPicking, self)._prepare_shipping_invoice_line(picking, invoice)
# Shipping provider
price = picking.carrier_price
account_id = carrier.product_id.property_account_income.id
if not account_id:
account_id = carrier.product_id.categ_id.property_account_income_categ.id
taxes = carrier.product_id.taxes_id
taxes_ids = taxes.ids
# Apply original SO fiscal position
if picking.sale_id.fiscal_position_id:
fpos = picking.sale_id.fiscal_position_id
account_id = fpos.map_account(account_id)
taxes_ids = fpos.map_tax(taxes).ids
res = {
'name': carrier.name,
'invoice_id': invoice.id,
'uos_id': carrier.product_id.uos_id.id,
'product_id': carrier.product_id.id,
'account_id': account_id,
'price_unit': price,
'quantity': 1,
'invoice_line_tax_ids': [(6, 0, taxes_ids)],
}
return res
<|fim▁hole|> self.carrier_price = res['exact_price']
self.carrier_tracking_ref = res['tracking_number']
msg = "Shipment sent to carrier %s for expedition with tracking number %s" % (self.carrier_id.name, self.carrier_tracking_ref)
self.message_post(body=msg)
@api.multi
def open_website_url(self):
self.ensure_one()
client_action = {'type': 'ir.actions.act_url',
'name': "Shipment Tracking Page",
'target': 'new',
'url': self.carrier_id.get_tracking_link(self)[0]
}
return client_action
@api.one
def cancel_shipment(self):
self.carrier_id.cancel_shipment(self)
msg = "Shipment %s cancelled" % self.carrier_tracking_ref
self.message_post(body=msg)
self.carrier_tracking_ref = False<|fim▁end|>
|
@api.one
def send_to_shipper(self):
res = self.carrier_id.send_shipping(self)[0]
|
<|file_name|>handler_proxy.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package apiserver
import (
"context"
"fmt"
"net/http"
"net/url"
"sync/atomic"
"github.com/golang/glog"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/httpstream"
"k8s.io/apimachinery/pkg/util/httpstream/spdy"
utilnet "k8s.io/apimachinery/pkg/util/net"
"k8s.io/apiserver/pkg/endpoints/handlers/responsewriters"
genericapirequest "k8s.io/apiserver/pkg/endpoints/request"
genericfeatures "k8s.io/apiserver/pkg/features"
genericrest "k8s.io/apiserver/pkg/registry/generic/rest"
utilfeature "k8s.io/apiserver/pkg/util/feature"
restclient "k8s.io/client-go/rest"
"k8s.io/client-go/transport"
apiregistrationapi "k8s.io/kube-aggregator/pkg/apis/apiregistration"
)
// proxyHandler provides a http.Handler which will proxy traffic to locations
// specified by items implementing Redirector.
type proxyHandler struct {
contextMapper genericapirequest.RequestContextMapper
// localDelegate is used to satisfy local APIServices
localDelegate http.Handler
// proxyClientCert/Key are the client cert used to identify this proxy. Backing APIServices use
// this to confirm the proxy's identity
proxyClientCert []byte
proxyClientKey []byte
proxyTransport *http.Transport
// Endpoints based routing to map from cluster IP to routable IP
routing ServiceResolver
handlingInfo atomic.Value
}
type proxyHandlingInfo struct {
// local indicates that this APIService is locally satisfied
local bool
// restConfig holds the information for building a roundtripper
restConfig *restclient.Config
// transportBuildingError is an error produced while building the transport. If this
// is non-nil, it will be reported to clients.
transportBuildingError error
// proxyRoundTripper is the re-useable portion of the transport. It does not vary with any request.
proxyRoundTripper http.RoundTripper
// serviceName is the name of the service this handler proxies to
serviceName string
// namespace is the namespace the service lives in
serviceNamespace string
}
func (r *proxyHandler) ServeHTTP(w http.ResponseWriter, req *http.Request) {
value := r.handlingInfo.Load()
if value == nil {
r.localDelegate.ServeHTTP(w, req)
return
}
handlingInfo := value.(proxyHandlingInfo)
if handlingInfo.local {
if r.localDelegate == nil {
http.Error(w, "", http.StatusNotFound)
return
}
r.localDelegate.ServeHTTP(w, req)
return
}
if handlingInfo.transportBuildingError != nil {
http.Error(w, handlingInfo.transportBuildingError.Error(), http.StatusInternalServerError)
return
}
ctx, ok := r.contextMapper.Get(req)
if !ok {
http.Error(w, "missing context", http.StatusInternalServerError)
return
}
user, ok := genericapirequest.UserFrom(ctx)
if !ok {
http.Error(w, "missing user", http.StatusInternalServerError)
return
}
// write a new location based on the existing request pointed at the target service
location := &url.URL{}
location.Scheme = "https"
rloc, err := r.routing.ResolveEndpoint(handlingInfo.serviceNamespace, handlingInfo.serviceName)
if err != nil {
http.Error(w, fmt.Sprintf("missing route (%s)", err.Error()), http.StatusInternalServerError)
return
}
location.Host = rloc.Host
location.Path = req.URL.Path
location.RawQuery = req.URL.Query().Encode()<|fim▁hole|> newReq := req.WithContext(context.Background())
newReq.Header = utilnet.CloneHeader(req.Header)
newReq.URL = location
if handlingInfo.proxyRoundTripper == nil {
http.Error(w, "", http.StatusNotFound)
return
}
// we need to wrap the roundtripper in another roundtripper which will apply the front proxy headers
proxyRoundTripper, upgrade, err := maybeWrapForConnectionUpgrades(handlingInfo.restConfig, handlingInfo.proxyRoundTripper, req)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
proxyRoundTripper = transport.NewAuthProxyRoundTripper(user.GetName(), user.GetGroups(), user.GetExtra(), proxyRoundTripper)
// if we are upgrading, then the upgrade path tries to use this request with the TLS config we provide, but it does
// NOT use the roundtripper. Its a direct call that bypasses the round tripper. This means that we have to
// attach the "correct" user headers to the request ahead of time. After the initial upgrade, we'll be back
// at the roundtripper flow, so we only have to muck with this request, but we do have to do it.
if upgrade {
transport.SetAuthProxyHeaders(newReq, user.GetName(), user.GetGroups(), user.GetExtra())
}
handler := genericrest.NewUpgradeAwareProxyHandler(location, proxyRoundTripper, true, upgrade, &responder{w: w})
handler.ServeHTTP(w, newReq)
}
// maybeWrapForConnectionUpgrades wraps the roundtripper for upgrades. The bool indicates if it was wrapped
func maybeWrapForConnectionUpgrades(restConfig *restclient.Config, rt http.RoundTripper, req *http.Request) (http.RoundTripper, bool, error) {
if !httpstream.IsUpgradeRequest(req) {
return rt, false, nil
}
tlsConfig, err := restclient.TLSConfigFor(restConfig)
if err != nil {
return nil, true, err
}
followRedirects := utilfeature.DefaultFeatureGate.Enabled(genericfeatures.StreamingProxyRedirects)
upgradeRoundTripper := spdy.NewRoundTripper(tlsConfig, followRedirects)
wrappedRT, err := restclient.HTTPWrappersForConfig(restConfig, upgradeRoundTripper)
if err != nil {
return nil, true, err
}
return wrappedRT, true, nil
}
// responder implements rest.Responder for assisting a connector in writing objects or errors.
type responder struct {
w http.ResponseWriter
}
// TODO this should properly handle content type negotiation
// if the caller asked for protobuf and you write JSON bad things happen.
func (r *responder) Object(statusCode int, obj runtime.Object) {
responsewriters.WriteRawJSON(statusCode, obj, r.w)
}
func (r *responder) Error(err error) {
http.Error(r.w, err.Error(), http.StatusInternalServerError)
}
// these methods provide locked access to fields
func (r *proxyHandler) updateAPIService(apiService *apiregistrationapi.APIService) {
if apiService.Spec.Service == nil {
r.handlingInfo.Store(proxyHandlingInfo{local: true})
return
}
newInfo := proxyHandlingInfo{
restConfig: &restclient.Config{
TLSClientConfig: restclient.TLSClientConfig{
Insecure: apiService.Spec.InsecureSkipTLSVerify,
ServerName: apiService.Spec.Service.Name + "." + apiService.Spec.Service.Namespace + ".svc",
CertData: r.proxyClientCert,
KeyData: r.proxyClientKey,
CAData: apiService.Spec.CABundle,
},
},
serviceName: apiService.Spec.Service.Name,
serviceNamespace: apiService.Spec.Service.Namespace,
}
newInfo.proxyRoundTripper, newInfo.transportBuildingError = restclient.TransportFor(newInfo.restConfig)
if newInfo.transportBuildingError == nil && r.proxyTransport.Dial != nil {
switch transport := newInfo.proxyRoundTripper.(type) {
case *http.Transport:
transport.Dial = r.proxyTransport.Dial
default:
newInfo.transportBuildingError = fmt.Errorf("unable to set dialer for %s/%s as rest transport is of type %T", apiService.Spec.Service.Namespace, apiService.Spec.Service.Name, newInfo.proxyRoundTripper)
glog.Warning(newInfo.transportBuildingError.Error())
}
}
r.handlingInfo.Store(newInfo)
}<|fim▁end|>
|
// WithContext creates a shallow clone of the request with the new context.
|
<|file_name|>v32.rs<|end_file_name|><|fim▁begin|>//! Internal 32-bit wide vector types
use crate::masks::*;
impl_simd_array!([i8; 4]: i8x4 | i8, i8, i8, i8);<|fim▁hole|>impl_simd_array!([i16; 2]: i16x2 | i16, i16);
impl_simd_array!([u16; 2]: u16x2 | u16, u16);
impl_simd_array!([m16; 2]: m16x2 | i16, i16);<|fim▁end|>
|
impl_simd_array!([u8; 4]: u8x4 | u8, u8, u8, u8);
impl_simd_array!([m8; 4]: m8x4 | i8, i8, i8, i8);
|
<|file_name|>ddea_cli.py<|end_file_name|><|fim▁begin|>#!/adsc/DDEA_PROTO/bin/python<|fim▁hole|>import traceback
import sys
if __name__ == '__main__':
try:
if 3 <= len(sys.argv):
###urls = open(sys.argv[1]).readlines()
start_time = sys.argv[1]
end_time = sys.argv[2]
stime = datetime.strptime(start_time, "%y-%m-%d")
etime = datetime.strptime(end_time, "%y-%m-%d")
ddea_analysis('', stime, etime)
else:
raise "Invalid Arguments"
except:
print traceback.print_exc()
print("Example: %s 14-01-01 14-02-02" % sys.argv[0])
raise SystemExit<|fim▁end|>
|
from df_data_analysis_ddea import ddea_analysis
from datetime import datetime
|
<|file_name|>alias_map.go<|end_file_name|><|fim▁begin|>package changelog
import (
"fmt"
"sort"
"strings"
)
// SectionAliasMap is for associating commit prefixes to a section of the
// changelog
type SectionAliasMap map[string][]string
// NewSectionAliasMap returns the default map
func NewSectionAliasMap() SectionAliasMap {
sectionAliasMap := make(SectionAliasMap)
sectionAliasMap["Features"] = []string{"ft", "feat"}
sectionAliasMap["Bug Fixes"] = []string{"fx", "fix"}
sectionAliasMap["Performance"] = []string{"perf"}
sectionAliasMap["Breaking Changes"] = []string{"breaks"}
sectionAliasMap["Unknown"] = []string{"unk"}
return sectionAliasMap
}
// SectionFor returns the section title for a given alias
func (s SectionAliasMap) SectionFor(alias string) string {
for title, aliases := range s {
for i := range aliases {
if aliases[i] == alias {
return strings.Title(title)
}
}
}
return "Unknown"
}
// MergeSectionAliasMaps merges multiple maps into the first and returns the first
func MergeSectionAliasMaps(first SectionAliasMap, additional ...SectionAliasMap) SectionAliasMap {
for _, successive := range additional {
for title, aliases := range successive {
title = strings.Title(title)
// key doesn't exist in the first map, just add it
if _, ok := first[title]; !ok {
first[title] = aliases
}
// key already exists, union the values
first[title] = mergeStringSlices(first[title], aliases)
}
}
return first
}
func mergeStringSlices(first []string, successive ...[]string) []string {
values := map[string]bool{}
for _, word := range first {
values[word] = true
}
for _, next := range successive {
for _, word := range next {
values[word] = true
}
}
keys := make([]string, 0, len(values))
for k := range values {
keys = append(keys, k)
}
sort.Strings(keys)
return keys
}
// Grep produces a regex to search for lines starting with each section key
func (s SectionAliasMap) Grep() string {
prefixes := []string{"BREAKING"}
for _, items := range s {
for _, item := range items {
if item == "" {
continue
}
prefixes = append(prefixes, fmt.Sprintf("^%s", item))
}
}<|fim▁hole|> sort.Strings(prefixes)
return strings.Join(prefixes, "|")
}<|fim▁end|>
| |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
import os
<|fim▁hole|> os.environ.setdefault("DJANGO_SETTINGS_MODULE", "api.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|>
|
if __name__ == "__main__":
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub fn it_works() {<|fim▁hole|>}<|fim▁end|>
|
println!("This is from lib.rs.");
|
<|file_name|>aluminum_testcell.py<|end_file_name|><|fim▁begin|>import numpy as np
import sys
import os
import time
from ase import Atom, Atoms
from ase.visualize import view
from ase.units import Bohr
from ase.structure import bulk
from gpaw import GPAW
from gpaw.atom.basis import BasisMaker
from gpaw.response.df import DF
from gpaw.mpi import serial_comm, rank, size
from gpaw.utilities import devnull
# Ground state calculation
a = 4.043
atoms = bulk('Al', 'fcc', a=a)
atoms.center()
calc = GPAW(gpts=(12,12,12),
kpts=(4,4,4),
xc='LDA')
atoms.set_calculator(calc)
atoms.get_potential_energy()
calc.write('Al1.gpw','all')
# Excited state calculation
q = np.array([1./4.,0.,0.])
w = np.linspace(0, 24, 241)
df = DF(calc='Al1.gpw', q=q, w=w, eta=0.2, ecut=50)
#df.write('Al.pckl')
df.get_EELS_spectrum(filename='EELS_Al_1')<|fim▁hole|>atoms = Atoms('Al8',scaled_positions=[(0,0,0),
(0.5,0,0),
(0,0.5,0),
(0,0,0.5),
(0.5,0.5,0),
(0.5,0,0.5),
(0.,0.5,0.5),
(0.5,0.5,0.5)],
cell=[(0,a,a),(a,0,a),(a,a,0)],
pbc=True)
calc = GPAW(gpts=(24,24,24),
kpts=(2,2,2),
xc='LDA')
atoms.set_calculator(calc)
atoms.get_potential_energy()
calc.write('Al2.gpw','all')
# Excited state calculation
q = np.array([1./2.,0.,0.])
w = np.linspace(0, 24, 241)
df = DF(calc='Al2.gpw', q=q, w=w, eta=0.2, ecut=50)
#df.write('Al.pckl')
df.get_EELS_spectrum(filename='EELS_Al_2')
d1 = np.loadtxt('EELS_Al_1')
d2 = np.loadtxt('EELS_Al_2')
error1 = (d1[1:,1] - d2[1:,1]) / d1[1:,1] * 100
error2 = (d1[1:,2] - d2[1:,2]) / d1[1:,2] * 100
if error1.max() > 0.2 or error2.max() > 0.2: # percent
print error1.max(), error2.max()
raise ValueError('Pls check spectrum !')
#if rank == 0:
# os.remove('Al1.gpw')
# os.remove('Al2.gpw')<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod sign;
use world::block::Block;
use shared::Position;
use ecs;
pub fn add_systems(m: &mut ecs::Manager) {
sign::add_systems(m);
}
pub enum BlockEntityType {
Sign
}
impl BlockEntityType {
pub fn get_block_entity(bl: Block) -> Option<BlockEntityType> {
match bl {
Block::StandingSign{..} | Block::WallSign{..} => Some(BlockEntityType::Sign),
_ => None,
}
}
pub fn create_entity(&self, m: &mut ecs::Manager, pos: Position) -> ecs::Entity {
let e = m.create_entity();
m.add_component_direct(e, pos);
match *self {
BlockEntityType::Sign => sign::init_entity(m, e),
}
e
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>test_conversions.py<|end_file_name|><|fim▁begin|>import json
import pymarc
from siskin.conversions import (de_listify, imslp_xml_to_marc, osf_to_intermediate)
def test_imslp_xml_to_marc():
example = """<?xml version="1.0"?>
<document docID="imslpvalsskramstadhans">
<localClass localClassName="col">imslp</localClass>
<localClass localClassName="vifa">vifamusik</localClass>
<identifier identifierEncodingSchema="originalID">valsskramstadhans</identifier>
<creator>
<mainForm>Skramstad, Hans</mainForm>
</creator>
<title>Vals for pianoforte</title>
<subject>
<mainForm>Romantic</mainForm>
</subject>
<music_arrangement_of>Piano</music_arrangement_of>
<url urlEncodingSchema="originalDetailView">http://imslp.org/wiki/Vals_(Skramstad,_Hans)</url>
<vifatype>Internetressource</vifatype>
<fetchDate>2018-04-25T00:00:00.01Z</fetchDate>
<vifaxml><![CDATA[<document docID="imslpvalsskramstadhans"><localClass
localClassName="col">imslp</localClass><localClass
localClassName="vifa">vifamusik</localClass><identifier
identifierEncodingSchema="originalID">valsskramstadhans</identifier><creator><mainForm>Skramstad,
Hans</mainForm></creator><title>Vals for
pianoforte</title><subject><mainForm>Romantic</mainForm></subject><music_arrangement_of>Piano</music_arrangement_of><url
urlEncodingSchema="originalDetailView">http://imslp.org/wiki/Vals_(Skramstad,_Hans)</url><vifatype>Internetressource</vifatype></document>]]></vifaxml>
</document>
"""
result = imslp_xml_to_marc(example)
assert result is not None
assert isinstance(result, pymarc.Record)
assert result["001"].value() == "finc-15-dmFsc3NrcmFtc3RhZGhhbnM"
assert result["100"]["a"] == "Skramstad, Hans"
assert result["245"]["a"] == "Vals for pianoforte"
assert result["856"]["u"] == "http://imslp.org/wiki/Vals_(Skramstad,_Hans)"
def test_de_listify():
cases = (
(None, None),
("", ""),
([], None),
({1, 2, 3}, 1),
([1, 2, 3], 1),
)
for v, expected in cases:
assert de_listify(v) == expected
def test_osf_to_intermediate():
cases = (
(None, None),
({}, None),
(json.loads("""
{
"id": "egcsk",
"type": "preprints",
"attributes": {
"date_created": "2021-07-19T07:32:33.252615",
"date_modified": "2021-07-19T07:42:12.725457",
"date_published": "2021-07-19T07:41:43.501204",
"original_publication_date": "2021-02-28T17:00:00",
"doi": null,
"title": "Konsep Allah Dalam Teologi Proses",
"description": "Penulisan karya ilmiah ini dikhususkan untuk membahas mengenai Allah yang dirumuskan dalam teologi proses, yang dicetuskan oleh Alfred Whitehead. Dalam bagian bagian ini penulis menyajikan konsep Allah dalam teologi proses dan bagaimana tanggapan terhadap konsep tersebut secara Alkitabiah Metode penelitian, penulis menggunakan pendekatan metode penelitian kualitatif analisis deskriptif, dengan pendekatan literatur dan tergolong dalam penelitian perpustakaan. Konsep Allah menurut teologi proses adalah Allah yang berproses, tidak berpribadi dan tidak memiliki kedaulatan absolut. Namun pandangan tentang Allah dalam teologi proses adalah suatu kumpulan pengalaman pribadi dan dijadikan sebagai suatu konsep dalam pemikiran manusia. Tanggapan tersebut menunjukan perbandingan dari pola pikir teologi proses mengenai Allah yang menyimpang dan mengarahkan seseorang dalam memahami konsep Allah yang benar sesuai dengan pernyataan Allah m",
"is_published": true,
"is_preprint_orphan": false,
"license_record": {
"copyright_holders": [
""
],
"year": "2021"
},
"tags": [
"Gambar",
"Respon",
"Teologi Proses",
"Tuhan"
],
"preprint_doi_created": "2021-07-19T07:42:12.695116",
"date_withdrawn": null,
"current_user_permissions": [],
"public": true,
"reviews_state": "accepted",
"date_last_transitioned": "2021-07-19T07:41:43.501204",
"has_coi": false,
"conflict_of_interest_statement": null,
"has_data_links": "no",
"why_no_data": null,
"data_links": [],
"has_prereg_links": "no",
"why_no_prereg": null,
"prereg_links": [],
"prereg_link_info": "",
"subjects": [
[
{
"id": "584240da54be81056cecaab4",
"text": "Arts and Humanities"
},
{
"id": "584240da54be81056cecaa9c",
"text": "Religion"
},
{
"id": "584240da54be81056cecaaf5",
"text": "Christianity"
}
]
]
},
"relationships": {
"contributors": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/contributors/",
"meta": {}
}
}
},
"bibliographic_contributors": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/bibliographic_contributors/",
"meta": {}
}
}
},
"citation": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/citation/",
"meta": {}
}
},
"data": {
"id": "egcsk",
"type": "preprints"
}
},
"identifiers": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/identifiers/",
"meta": {}
}
}
},
"node": {
"links": {
"related": {
"href": "https://api.osf.io/v2/nodes/uka4p/",
"meta": {}
},
"self": {
"href": "https://api.osf.io/v2/preprints/egcsk/relationships/node/",
"meta": {}
}
},
"data": {
"id": "uka4p",
"type": "nodes"
}
},
"license": {
"links": {
"related": {
"href": "https://api.osf.io/v2/licenses/563c1cf88c5e4a3877f9e96a/",
"meta": {}
}
},
"data": {
"id": "563c1cf88c5e4a3877f9e96a",
"type": "licenses"
}
},
"provider": {
"links": {
"related": {
"href": "https://api.osf.io/v2/providers/preprints/osf/",
"meta": {}
}
},
"data": {
"id": "osf",
"type": "preprint-providers"
}
},
"files": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/files/",
"meta": {}
}
}
},
"primary_file": {
"links": {
"related": {
"href": "https://api.osf.io/v2/files/60f52a94f1369301d5793a17/",
"meta": {}
}
},
"data": {
"id": "60f52a94f1369301d5793a17",
"type": "files"
}
},
"review_actions": {
"links": {
"related": {
"href": "https://api.osf.io/v2/preprints/egcsk/review_actions/",
"meta": {}
}
}
},
"requests": {
"links": {
"related": {<|fim▁hole|> }
}
}
},
"links": {
"self": "https://api.osf.io/v2/preprints/egcsk/",
"html": "https://osf.io/egcsk/",
"preprint_doi": "https://doi.org/10.31219/osf.io/egcsk"
}
}"""), {
'abstract':
'Penulisan karya ilmiah ini dikhususkan untuk membahas mengenai '
'Allah yang dirumuskan dalam teologi proses, yang dicetuskan oleh '
'Alfred Whitehead. Dalam bagian bagian ini penulis menyajikan '
'konsep Allah dalam teologi proses dan bagaimana tanggapan '
'terhadap konsep tersebut secara Alkitabiah Metode penelitian, '
'penulis menggunakan pendekatan metode penelitian kualitatif '
'analisis deskriptif, dengan pendekatan literatur dan tergolong '
'dalam penelitian perpustakaan. Konsep Allah menurut teologi '
'proses adalah Allah yang berproses, tidak berpribadi dan tidak '
'memiliki kedaulatan absolut. Namun pandangan tentang Allah dalam '
'teologi proses adalah suatu kumpulan pengalaman pribadi dan '
'dijadikan sebagai suatu konsep dalam pemikiran manusia. '
'Tanggapan tersebut menunjukan perbandingan dari pola pikir '
'teologi proses mengenai Allah yang menyimpang dan mengarahkan '
'seseorang dalam memahami konsep Allah yang benar sesuai dengan '
'pernyataan Allah m',
'authors': [{
'rft.aufirst': 'Ceria',
'rft.aulast': 'Ceria'
}],
'doi':
'10.31219/osf.io/egcsk',
'finc.format':
'Article',
'finc.id':
'ai-191-egcsk',
'finc.mega_collection': ['sid-191-col-osf', 'Osf'],
'finc.source_id':
'191',
'languages': ['eng'],
'rft.atitle':
'Konsep Allah Dalam Teologi Proses',
'rft.date':
'2021-07-19',
'rft.genre':
'article',
'rft.jtitle':
'osf',
'rft.pub': ['OSF Preprints'],
'subjects': ['Gambar', 'Respon', 'Teologi Proses', 'Tuhan'],
'url': ['https://doi.org/10.31219/osf.io/egcsk'],
'x.date':
'2021-07-19T07:42:12.695116Z',
}),
)
for v, expected in cases:
assert osf_to_intermediate(v) == expected<|fim▁end|>
|
"href": "https://api.osf.io/v2/preprints/egcsk/requests/",
"meta": {}
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Client module for connecting to and interacting with SmartyStreets API
"""
import json
import numbers
import requests
from .data import Address, AddressCollection
from .exceptions import SmartyStreetsError, ERROR_CODES
def validate_args(f):
"""
Ensures that *args consist of a consistent type
:param f: any client method with *args parameter
:return: function f
"""
def wrapper(self, args):
arg_types = set([type(arg) for arg in args])
if len(arg_types) > 1:
raise TypeError("Mixed input types are not allowed")
elif list(arg_types)[0] not in (dict, str):
raise TypeError("Only dict and str types accepted")
return f(self, args)
return wrapper
def truncate_args(f):
"""
Ensures that *args do not exceed a set limit or are truncated to meet that limit
:param f: any Client method with *args parameter<|fim▁hole|> """
def wrapper(self, args):
if len(args) > 100:
if self.truncate_addresses:
args = args[:100]
else:
raise ValueError("This exceeds 100 address at a time SmartyStreets limit")
return f(self, args)
return wrapper
def stringify(data):
"""
Ensure all values in the dictionary are strings, except for the value for `candidate` which
should just be an integer.
:param data: a list of addresses in dictionary format
:return: the same list with all values except for `candidate` count as a string
"""
def serialize(k, v):
if k == "candidates":
return int(v)
if isinstance(v, numbers.Number):
if k == "zipcode":
# If values are presented as integers then leading digits may be cut off,
# and these are significant for the zipcode. Add them back.
return str(v).zfill(5)
return str(v)
return v
return [
{
k: serialize(k, v) for k, v in json_dict.items()
}
for json_dict in data
]
class Client(object):
"""
Client class for interacting with the SmartyStreets API
"""
BASE_URL = "https://api.smartystreets.com/"
def __init__(self, auth_id, auth_token, standardize=False, invalid=False, logging=True,
accept_keypair=False, truncate_addresses=False, timeout=None):
"""
Constructs the client
:param auth_id: authentication ID from SmartyStreets
:param auth_token: authentication token
:param standardize: boolean include addresses that match zip+4 in addition to DPV confirmed
addresses
:param invalid: boolean to include address candidates that may not be deliverable
:param logging: boolean to allow SmartyStreets to log requests
:param accept_keypair: boolean to toggle default keypair behavior
:param truncate_addresses: boolean to silently truncate address lists in excess of the
SmartyStreets maximum rather than raise an error.
:param timeout: optional timeout value in seconds for requests.
:return: the configured client object
"""
self.auth_id = auth_id
self.auth_token = auth_token
self.standardize = standardize
self.invalid = invalid
self.logging = logging
self.accept_keypair = accept_keypair
self.truncate_addresses = truncate_addresses
self.timeout = timeout
self.session = requests.Session()
self.session.mount(self.BASE_URL, requests.adapters.HTTPAdapter(max_retries=5))
def post(self, endpoint, data):
"""
Executes the HTTP POST request
:param endpoint: string indicating the URL component to call
:param data: the data to submit
:return: the dumped JSON response content
"""
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
'x-standardize-only': 'true' if self.standardize else 'false',
'x-include-invalid': 'true' if self.invalid else 'false',
'x-accept-keypair': 'true' if self.accept_keypair else 'false',
}
if not self.logging:
headers['x-suppress-logging'] = 'true'
params = {'auth-id': self.auth_id, 'auth-token': self.auth_token}
url = self.BASE_URL + endpoint
response = self.session.post(url, json.dumps(stringify(data)),
params=params, headers=headers, timeout=self.timeout)
if response.status_code == 200:
return response.json()
raise ERROR_CODES.get(response.status_code, SmartyStreetsError)
@truncate_args
@validate_args
def street_addresses(self, addresses):
"""
API method for verifying street address and geolocating
Returns an AddressCollection always for consistency. In common usage it'd be simple and
sane to return an Address when only one address was searched, however this makes
populating search addresses from lists of unknown length problematic. If that list
returns only one address now the code has to check the type of return value to ensure
that it isn't applying behavior for an expected list type rather than a single dictionary.
>>> client.street_addresses(["100 Main St, Anywhere, USA"], ["6 S Blvd, Richmond, VA"])
>>> client.street_addresses([{"street": "100 Main St, anywhere USA"}, ... ])
:param addresses: 1 or more addresses in string or dict format
:return: an AddressCollection
"""
# While it's okay in theory to accept freeform addresses they do need to be submitted in
# a dictionary format.
if type(addresses[0]) != dict:
addresses = [{'street': arg} for arg in addresses]
return AddressCollection(self.post('street-address', data=addresses))
def street_address(self, address):
"""
Geocode one and only address, get a single Address object back
>>> client.street_address("100 Main St, Anywhere, USA")
>>> client.street_address({"street": "100 Main St, anywhere USA"})
:param address: string or dictionary with street address information
:return: an Address object or None for no match
"""
address = self.street_addresses([address])
if not len(address):
return None
return Address(address[0])
def zipcode(self, *args):
raise NotImplementedError("You cannot lookup zipcodes yet")<|fim▁end|>
|
:return: function f
|
<|file_name|>utils.ts<|end_file_name|><|fim▁begin|>import { parse, Url, URL } from 'url';
import { RequestOptions } from 'http';
import { HttpsAgent } from './https-agent';
import { WithUrl } from './request';
import * as zlib from 'zlib';
const socks = require('socks-wrapper');
export function rewrite(url: string): string {
if (!/^\w+:/.test(url)) {
if (url.substr(0, 2) !== '//') {
url = 'http://' + url;
} else {
url = 'http:' + url;
}
}
return url;
}
export function parseUrl(url: string | WithUrl) {
const href = typeof url === 'string' ? url : url.url;
return <URL>(<any>parse(rewrite(href)));
}
interface WithKey {
[key: string]: any;
}
export function pickAssign(target: WithKey, source: WithKey, keys: string[]) {
if (source) {
for (const key of keys) {
if (source[key] !== undefined) {
target[key] = source[key];
}
}
}
return target;
}
export type Header = [string, string | number];
export function setHeader(
headers: Header[],
key: string,<|fim▁hole|>
for (let i = 0; i < headers.length; i++) {
const nameLower = headers[i][0].toLowerCase();
if (keyLower === nameLower) {
headers[i] = header;
return;
}
if (keyLower < nameLower) {
headers.splice(i, 0, header);
return;
}
}
headers.push(header);
}
export function inflate(content: Buffer, encoding: string): Promise<Buffer> {
return new Promise(resolve => {
if (encoding === 'gzip') {
zlib.gunzip(content, function(err, buf) {
if (err) throw err;
resolve(buf);
});
} else {
zlib.inflate(content, function(err, buf) {
if (err) {
zlib.inflateRaw(content, function(err, buf) {
if (err) throw err;
resolve(buf);
});
} else {
resolve(buf);
}
});
}
});
}
export function setProxy(request: RequestOptions, proxyUrl: string, url: Url) {
const proxy = parse(rewrite(proxyUrl));
if (/^socks/.test(<string>proxy.protocol)) {
const T = request.protocol === 'http:' ? socks.HttpAgent : socks.HttpsAgent;
request.agent = new T(proxy.port, proxy.hostname);
} else {
if (request.protocol === 'http:') {
request.protocol = proxy.protocol;
request.hostname = proxy.hostname;
request.port = proxy.port;
request.path = url.href;
} else {
const agent = new HttpsAgent({ proxy, servername: <string>url.host });
request.port = request.port || 443;
request.agent = agent;
}
}
}<|fim▁end|>
|
value: string | number
) {
const keyLower = key.toLowerCase();
const header: Header = [key, value];
|
<|file_name|>_font.py<|end_file_name|><|fim▁begin|>from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Font(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "scatterternary.hoverlabel"
_path_str = "scatterternary.hoverlabel.font"
_valid_props = {"color", "colorsrc", "family", "familysrc", "size", "sizesrc"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
- A list or array of any of the above
Returns
-------
str|numpy.ndarray
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# colorsrc
# --------
@property
def colorsrc(self):
"""
Sets the source reference on Chart Studio Cloud for color .
The 'colorsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["colorsrc"]
@colorsrc.setter
def colorsrc(self, val):
self["colorsrc"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".<|fim▁hole|> The 'family' property is a string and must be specified as:
- A non-empty string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# familysrc
# ---------
@property
def familysrc(self):
"""
Sets the source reference on Chart Studio Cloud for family .
The 'familysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["familysrc"]
@familysrc.setter
def familysrc(self, val):
self["familysrc"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
int|float|numpy.ndarray
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# sizesrc
# -------
@property
def sizesrc(self):
"""
Sets the source reference on Chart Studio Cloud for size .
The 'sizesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["sizesrc"]
@sizesrc.setter
def sizesrc(self, val):
self["sizesrc"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
family .
size
sizesrc
Sets the source reference on Chart Studio Cloud for
size .
"""
def __init__(
self,
arg=None,
color=None,
colorsrc=None,
family=None,
familysrc=None,
size=None,
sizesrc=None,
**kwargs
):
"""
Construct a new Font object
Sets the font used in hover labels.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.scatterternary
.hoverlabel.Font`
color
colorsrc
Sets the source reference on Chart Studio Cloud for
color .
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud for
family .
size
sizesrc
Sets the source reference on Chart Studio Cloud for
size .
Returns
-------
Font
"""
super(Font, self).__init__("font")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.scatterternary.hoverlabel.Font
constructor must be a dict or
an instance of :class:`plotly.graph_objs.scatterternary.hoverlabel.Font`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("colorsrc", None)
_v = colorsrc if colorsrc is not None else _v
if _v is not None:
self["colorsrc"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("familysrc", None)
_v = familysrc if familysrc is not None else _v
if _v is not None:
self["familysrc"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
_v = arg.pop("sizesrc", None)
_v = sizesrc if sizesrc is not None else _v
if _v is not None:
self["sizesrc"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False<|fim▁end|>
| |
<|file_name|>chksumtree.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
import pickle
import os
import hashlib
import pprint
import time
from optparse import OptionParser
VERSION=1.0
def parseOptions():
usage = """
%prog [options]\n
Scrub a given directory by calculating the md5 hash of every file and compare
it with the one stored in the datfile. If a file's mtime has changed, the md5
in the datfile will be updated. If the md5s are different and the mtime hasn't
changed, an Exception will be raised. """
parser = OptionParser(usage=usage)
parser.add_option("-v", "--verbose",
action="store_true",
dest="verbose",
default=False,
help="Verbose output")
parser.add_option("-n", "--noaction",
action="store_true",
dest="dryrun",
default=False,
help="Dry run. No action will be taken.")
parser.add_option("-p", "--path",
action="store",
dest="path",
help="Path to walk")
parser.add_option("-d", "--data",
action="store",
dest="data_file",
default=".chksumdat",
help="Data file to store path checksums")
parser.add_option("-b", "--buffer",
action="store",
dest="read_buffer",
type="int",
default="8192",
help="Read buffer used when calculating the md5sum in bytes")
(options, args) = parser.parse_args()
return options
class Filechksum():
def __init__(self, options, path):
'''
Filechksum.path = full path to file
Filechksum.md5sum = checksum for file
Filechksum.stat = stat for file
'''
self.path = path
self.md5sum = md5sum(path, options.read_buffer)
self.stat = os.stat(file)
def md5sum(file, read_buffer):
''' Get the md5 of a file '''
md5 = hashlib.md5()
f = open(file,'rb')
for chunk in iter(lambda: f.read(read_buffer), ''):
md5.update(chunk)
f.close()
return md5.hexdigest()
class Treechksum():
def __init__(self, options, datfile, path):
'''
Treechksum.datfile = filename in path to load/write checksum data to.
Treechksum.chksums = dict of checksum data.
Treechksum.path = full path of tree to checksum
'''
self.datfile = os.path.join(path, datfile)
self.path = path
self.cksums = {}
self._read(options)
def _read(self, options):
'''
Read the datfile
'''
if os.path.exists(self.datfile):
print "Dat file found successfully"
f = open(self.datfile)
(v, self.cksums) = pickle.load(f)
f.close()
if options.verbose: pprint.pprint(self.cksums)
else:
#raise Exception("%s does not exist" % self._file)
print "%s does not exist. Creating new one." % self.datfile
if v != VERSION:
raise Exception("Wrong version. Please delete %s" % self.datfile)
def save(self):
'''
Save the datfile.
'''
f = open(self.datfile, "wa")
pickle.dump((VERSION, self.cksums), f)
f.close()
def compute(self, options):
'''
Actually do the work. Walk the given directory, compute md5s,
diff it with the known md5, if the mtime is the same and the md5s
are the same, you're good. If mtime is different, update the file's
md5 in the datfile. GC removed files from the datfile to save space.
'''
seen = []
total_keys = len(self.cksums.keys())
count = 0
for (root, dirs, files) in os.walk(self.path):<|fim▁hole|> continue
in_file = os.path.join(root, file)
if not os.path.isfile(in_file):
continue
# add it to the files we've seen
# so we can subtract it from the dict
# to gc the deleted ones
seen.append(self._get_rel_path(in_file))
self._checkfile(in_file, options)
count = count + 1
if not options.verbose: self._printprogress(count, total_keys)
self._gc(seen)
print "\n"
def _get_rel_path(self, in_file):
if in_file.startswith(self.path):
rel_path = in_file[len(self.path):].lstrip("/")
else:
rel_path = in_file.lstrip("/")
return rel_path
def _checkfile(self, in_file, options):
'''
Add new files, check existing files, and update modified files.
'''
in_file_cksum = {'stat': os.stat(in_file),
'md5': md5sum(in_file, options.read_buffer)}
if options.verbose: print in_file
rel_path = self._get_rel_path(in_file)
if options.verbose:
print rel_path
f = self.cksums.get(rel_path)
if f == None:
# New file.
print "%s was added." % rel_path
self.cksums[rel_path] = in_file_cksum
else:
# check fi the file was updated
if (f['stat'].st_mtime == in_file_cksum['stat'].st_mtime):
# stat is the same. check md5
if f['md5'] != in_file_cksum['md5']:
# Fuck
raise Exception("%s changed from %s to %s" % (rel_path,
f['md5'],
in_file_cksum['md5']))
else:
# All good in the hood
if options.verbose: print "%s passes md5 %s" % (rel_path,
in_file_cksum['md5'])
else:
# file was modified
print "%s was updated to %s on %s" % (rel_path,
in_file_cksum['md5'],
time.ctime(in_file_cksum['stat'].st_mtime))
self.cksums[rel_path] = in_file_cksum
def _gc(self, seen):
'''
Remove unseen files from datfile
'''
for file in (set(self.cksums.keys()) - set(seen)):
print "%s was deleted" % file
del self.cksums[file]
def _printprogress(self, sofar, total):
if total > 0:
s = "\t%s/%s Files" % (sofar, total)
else:
s = "\t%s Files" % sofar
sys.stdout.write(s + " " * (78 - len(s)) + "\r")
sys.stdout.flush()
def main():
options = parseOptions()
pprint.pprint(options)
chksums = Treechksum(options,
options.data_file,
options.path)
chksums.compute(options)
if not options.dryrun: chksums.save()
if __name__ == '__main__':
main()<|fim▁end|>
|
for file in files:
# chomp the full path
if file in [".DS_Store", self.datfile[len(self.path):]]:
|
<|file_name|>member-templates.cpp<|end_file_name|><|fim▁begin|>// RUN: %clang_cc1 %s -triple=x86_64-apple-darwin10 -emit-llvm -o - | FileCheck %s
// CHECK: ; ModuleID
struct A {
template<typename T>
A(T);
};
template<typename T> A::A(T) {}
struct B {
template<typename T>
B(T);
};
template<typename T> B::B(T) {}
<|fim▁hole|>// CHECK: define void @_ZN1BC2IiEET_(%struct.B* %this, i32)
template B::B(int);
template<typename T>
struct C {
void f() {
int a[] = { 1, 2, 3 };
}
};
void f(C<int>& c) {
c.f();
}<|fim▁end|>
|
// CHECK: define void @_ZN1BC1IiEET_(%struct.B* %this, i32)
|
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>import io
import sys
import mock
import argparse
from monolith.compat import unittest
from monolith.cli.base import arg
from monolith.cli.base import ExecutionManager
from monolith.cli.base import SimpleExecutionManager
from monolith.cli.base import BaseCommand
from monolith.cli.base import CommandError
from monolith.cli.base import LabelCommand
from monolith.cli.base import SingleLabelCommand
from monolith.cli.base import Parser
from monolith.cli.exceptions import AlreadyRegistered
from io import StringIO
class DummyCommand(BaseCommand):
pass
class AnotherDummyCommand(BaseCommand):
pass
class TestExecutionManager(unittest.TestCase):
def assertRegistryClassesEqual(self, actual, expected):
self.assertEqual(list(sorted(actual)), list(sorted(expected)))
for key in actual:
self.assertEqual(actual[key].__class__, expected[key],
"Command class don't match for %r (it's %r but "
"expected %r)" % (key, actual[key].__class__,
expected[key]))
def setUp(self):
self.manager = ExecutionManager(['foobar'], stderr=StringIO())
def test_init_prog_name(self):
self.assertEqual(self.manager.prog_name, 'foobar')
def test_init_stderr(self):
manager = ExecutionManager()
self.assertEqual(manager.stderr, sys.stderr)
def test_default_argv(self):
with mock.patch.object(sys, 'argv', ['vcs', 'foo', 'bar']):
manager = ExecutionManager()
self.assertEqual(manager.argv, ['foo', 'bar'])
def test_get_usage(self):
self.manager.usage = 'foobar baz'
self.assertEqual(self.manager.get_usage(), 'foobar baz')
def test_get_parser(self):
self.manager.usage = 'foo bar'
parser = self.manager.get_parser()
self.assertIsInstance(parser, argparse.ArgumentParser)
self.assertEqual(parser.prog, 'foobar') # argv[0]
self.assertEqual(parser.usage, 'foo bar')
self.assertEqual(parser.stream, self.manager.stderr)
def test_get_parser_calls_setup_parser(self):
class DummyCommand(BaseCommand):
pass
self.manager.register('foo', DummyCommand)
with mock.patch.object(DummyCommand, 'setup_parser') as setup_parser:
self.manager.get_parser()
self.assertTrue(setup_parser.called)
def test_register(self):
Command = type('Command', (BaseCommand,), {})
self.manager.register('foo', Command)
self.assertRegistryClassesEqual(self.manager.registry, {'foo': Command})
command = self.manager.registry['foo']
self.assertEqual(command.manager, self.manager)
def test_register_raise_if_command_with_same_name_registered(self):
Command = type('Command', (BaseCommand,), {})
self.manager.register('foobar', Command)
with self.assertRaises(AlreadyRegistered):
self.manager.register('foobar', Command)
def test_register_respects_force_argument(self):
Command1 = type('Command', (BaseCommand,), {})
Command2 = type('Command', (BaseCommand,), {})<|fim▁hole|> self.manager.register('foobar', Command1)
self.manager.register('foobar', Command2, force=True)
self.assertRegistryClassesEqual(self.manager.registry, {
'foobar': Command2})
def test_get_commands(self):
FooCommand = type('FooCommand', (BaseCommand,), {})
BarCommand = type('BarCommand', (BaseCommand,), {})
self.manager.register('foo', FooCommand)
self.manager.register('bar', BarCommand)
self.assertEqual(list(self.manager.get_commands().keys()), ['bar', 'foo'])
self.assertRegistryClassesEqual(self.manager.get_commands(), {
'foo': FooCommand,
'bar': BarCommand,
})
def test_get_commands_to_register(self):
FooCommand = type('FooCommand', (BaseCommand,), {})
BarCommand = type('BarCommand', (BaseCommand,), {})
class Manager(ExecutionManager):
def get_commands_to_register(self):
return {
'foo': FooCommand,
'bar': BarCommand,
}
manager = Manager(['foobar'])
self.assertRegistryClassesEqual(manager.registry, {
'foo': FooCommand,
'bar': BarCommand,
})
def test_call_command(self):
class Command(BaseCommand):
name = 'init'
handle = mock.Mock()
self.manager.register('init', Command)
self.manager.call_command('init')
self.assertTrue(Command.handle.called)
def test_called_command_has_prog_name_properly_set(self):
prog_names = []
class Command(BaseCommand):
name = 'init'
def handle(self, namespace):
prog_names.append(self.prog_name)
self.manager.register('init', Command)
self.manager.call_command('init')
self.assertEqual(prog_names, ['foobar'])
def test_call_command_with_args(self):
class Command(BaseCommand):
args = [
arg('-f', '--force', action='store_true', default=False),
]
name = 'add'
handle = mock.Mock()
self.manager.register('add', Command)
self.manager.call_command('add', '-f')
self.assertTrue(Command.handle.called)
namespace = Command.handle.call_args[0][0]
self.assertTrue(namespace.force)
@mock.patch('monolith.cli.base.sys.stderr')
def test_call_command_fails(self, stderr):
class Command(BaseCommand):
args = [
arg('-f', '--force', action='store_true', default=False),
]
name = 'add'
def handle(self, namespace):
raise CommandError('foo bar baz', 92)
self.manager.register('add', Command)
with self.assertRaises(SystemExit):
self.manager.call_command('add', '-f')
stderr.write.assert_called_once_with('ERROR: foo bar baz\n')
def test_execute_calls_handle_command(self):
class Command(BaseCommand):
args = [
arg('-f', '--force', action='store_true', default=False),
]
name = 'add'
handle = mock.Mock()
self.manager.register('add', Command)
with mock.patch.object(sys, 'argv', ['prog', 'add', '-f']):
self.manager.execute()
namespace = Command.handle.call_args[0][0]
Command.handle.assert_called_once_with(namespace)
class TestSimpleExecutionManager(unittest.TestCase):
def test_get_commands_to_register(self):
# importing dummy commands to local namespace so they have full class
# paths properly set
from monolith.tests.test_cli import DummyCommand
from monolith.tests.test_cli import AnotherDummyCommand
manager = SimpleExecutionManager('git', {
'push': DummyCommand,
'pull': 'monolith.tests.test_cli.AnotherDummyCommand',
})
self.assertDictEqual(manager.get_commands_to_register(), {
'push': DummyCommand,
'pull': AnotherDummyCommand,
})
class TestBaseCommand(unittest.TestCase):
def test_get_args(self):
Command = type('Command', (BaseCommand,), {'args': ['foo', 'bar']})
command = Command()
self.assertEqual(command.get_args(), ['foo', 'bar'])
def test_handle_raises_error(self):
with self.assertRaises(NotImplementedError):
BaseCommand().handle(argparse.Namespace())
def test_post_register_hooks(self):
Command = type('Command', (BaseCommand,), {'args': ['foo', 'bar']})
class Command(BaseCommand):
def post_register(self, manager):
manager.completion = True
manager = ExecutionManager()
self.assertFalse(manager.completion)
manager.register('completion', Command)
self.assertTrue(manager.completion)
class TestLabelCommand(unittest.TestCase):
def test_handle_raise_if_handle_label_not_implemented(self):
command = LabelCommand()
with self.assertRaises(NotImplementedError):
command.handle(argparse.Namespace(labels=['foo']))
def test_handle_calls_handle_label(self):
namespace = argparse.Namespace(labels=['foo', 'bar'])
command = LabelCommand()
command.handle_label = mock.Mock()
command.handle(namespace)
self.assertEqual(command.handle_label.call_args_list, [
arg('foo', namespace),
arg('bar', namespace),
])
def test_labels_required_true(self):
Command = type('Command', (LabelCommand,), {'labels_required': True})
command = Command()
self.assertEqual(command.get_args()[0].kwargs.get('nargs'), '+')
def test_labels_required_false(self):
Command = type('Command', (LabelCommand,), {'labels_required': False})
command = Command()
self.assertEqual(command.get_args()[0].kwargs.get('nargs'), '*')
def test_handle_no_labels_called_if_no_labels_given(self):
Command = type('Command', (LabelCommand,), {'labels_required': False})
command = Command()
command.handle_no_labels = mock.Mock()
namespace = argparse.Namespace(labels=[])
command.handle(namespace)
command.handle_no_labels.assert_called_once_with(namespace)
class TestSingleLabelCommand(unittest.TestCase):
def test_get_label_arg(self):
Command = type('Command', (SingleLabelCommand,), {})
label_arg = Command().get_label_arg()
self.assertEqual(label_arg, arg('label',
default=Command.label_default_value, nargs='?'))
def test_get_args(self):
Command = type('Command', (SingleLabelCommand,), {})
command = Command()
self.assertEqual(command.get_args(), [command.get_label_arg()])
def test_handle_raise_if_handle_label_not_implemented(self):
command = SingleLabelCommand()
with self.assertRaises(NotImplementedError):
command.handle(argparse.Namespace(label='foo'))
def test_handle_calls_handle_label(self):
namespace = argparse.Namespace(label='foobar')
command = SingleLabelCommand()
command.handle_label = mock.Mock()
command.handle(namespace)
self.assertEqual(command.handle_label.call_args_list, [
arg('foobar', namespace),
])
class TestArg(unittest.TestCase):
def test_args(self):
self.assertEqual(arg(1, 2, 'foo', bar='baz').args, (1, 2, 'foo'))
def test_kargs(self):
self.assertEqual(arg(1, 2, 'foo', bar='baz').kwargs, {'bar': 'baz'})
class TestParser(unittest.TestCase):
def setUp(self):
self.stream = io.StringIO()
self.parser = Parser(stream=self.stream)
def test_print_message_default_file(self):
self.parser._print_message('foobar')
self.assertEqual(self.stream.getvalue(), 'foobar')<|fim▁end|>
| |
<|file_name|>stltopology.cpp<|end_file_name|><|fim▁begin|>#include <mystdlib.h>
#include <myadt.hpp>
#include <linalg.hpp>
#include <gprim.hpp>
#include <meshing.hpp>
#include "stlgeom.hpp"
namespace netgen
{
STLTopology :: STLTopology()
: trias(), topedges(), points(), ht_topedges(NULL),
trigsperpoint(), neighbourtrigs()
{
;
}
STLTopology :: ~STLTopology()
{
;
}
STLGeometry * STLTopology :: LoadBinary (istream & ist)
{
STLGeometry * geom = new STLGeometry();
Array<STLReadTriangle> readtrigs;
PrintMessage(1,"Read STL binary file");
if (sizeof(int) != 4 || sizeof(float) != 4)
{
PrintWarning("for stl-binary compatibility only use 32 bit compilation!!!");
}
//specific settings for stl-binary format
const int namelen = 80; //length of name of header in file
const int nospaces = 2; //number of spaces after a triangle
//read header: name
char buf[namelen+1];
FIOReadStringE(ist,buf,namelen);
PrintMessage(5,"header = ",buf);
//Read Number of facets
int nofacets;
FIOReadInt(ist,nofacets);
PrintMessage(5,"NO facets = ",nofacets);
Point<3> pts[3];
Vec<3> normal;
char spaces[nospaces+1];
for (int cntface = 0; cntface < nofacets; cntface++)
{
if (cntface % 10000 == 0)
// { PrintDot(); }
PrintMessageCR (3, cntface, " triangles loaded\r");
float f;
FIOReadFloat(ist,f); normal(0) = f;
FIOReadFloat(ist,f); normal(1) = f;
FIOReadFloat(ist,f); normal(2) = f;
for (int j = 0; j < 3; j++)
{
FIOReadFloat(ist,f); pts[j](0) = f;
FIOReadFloat(ist,f); pts[j](1) = f;
FIOReadFloat(ist,f); pts[j](2) = f;
}
readtrigs.Append (STLReadTriangle (pts, normal));
FIOReadString(ist,spaces,nospaces);
}
PrintMessage (3, nofacets, " triangles loaded\r");
geom->InitSTLGeometry(readtrigs);
return geom;
}
void STLTopology :: SaveBinary (const char* filename, const char* aname) const
{
ofstream ost(filename);
PrintFnStart("Write STL binary file '",filename,"'");
if (sizeof(int) != 4 || sizeof(float) != 4)
{PrintWarning("for stl-binary compatibility only use 32 bit compilation!!!");}
//specific settings for stl-binary format
const int namelen = 80; //length of name of header in file
const int nospaces = 2; //number of spaces after a triangle
//write header: aname
int i, j;
char buf[namelen+1];
int strend = 0;
for(i = 0; i <= namelen; i++)
{
if (aname[i] == 0) {strend = 1;}
if (!strend) {buf[i] = aname[i];}
else {buf[i] = 0;}
}
FIOWriteString(ost,buf,namelen);
PrintMessage(5,"header = ",buf);
//RWrite Number of facets
int nofacets = GetNT();
FIOWriteInt(ost,nofacets);
PrintMessage(5,"NO facets = ", nofacets);
float f;
char spaces[nospaces+1];
for (i = 0; i < nospaces; i++) {spaces[i] = ' ';}
spaces[nospaces] = 0;
for (i = 1; i <= GetNT(); i++)
{
const STLTriangle & t = GetTriangle(i);
const Vec<3> & n = t.Normal();
f = n(0); FIOWriteFloat(ost,f);
f = n(1); FIOWriteFloat(ost,f);
f = n(2); FIOWriteFloat(ost,f);
for (j = 1; j <= 3; j++)
{
const Point3d p = GetPoint(t.PNum(j));
f = p.X(); FIOWriteFloat(ost,f);
f = p.Y(); FIOWriteFloat(ost,f);
f = p.Z(); FIOWriteFloat(ost,f);
}
FIOWriteString(ost,spaces,nospaces);
}
PrintMessage(5,"done");
}
void STLTopology :: SaveSTLE (const char* filename) const
{
ofstream outf (filename);
int i, j;
outf << GetNT() << endl;
for (i = 1; i <= GetNT(); i++)
{
const STLTriangle & t = GetTriangle(i);
for (j = 1; j <= 3; j++)
{
const Point3d p = GetPoint(t.PNum(j));
outf << p.X() << " " << p.Y() << " " << p.Z() << endl;
}
}
int ned = 0;
for (i = 1; i <= GetNTE(); i++)
{
if (GetTopEdge (i).GetStatus() == ED_CONFIRMED)
ned++;
}
outf << ned << endl;
for (i = 1; i <= GetNTE(); i++)
{
const STLTopEdge & edge = GetTopEdge (i);
if (edge.GetStatus() == ED_CONFIRMED)
for (j = 1; j <= 2; j++)
{
const Point3d p = GetPoint(edge.PNum(j));
outf << p.X() << " " << p.Y() << " " << p.Z() << endl;
}
}
}
STLGeometry * STLTopology :: LoadNaomi (istream & ist)
{
int i;
STLGeometry * geom = new STLGeometry();
Array<STLReadTriangle> readtrigs;
PrintFnStart("read NAOMI file format");
char buf[100];
Vec<3> normal;
//int cntface = 0;
//int cntvertex = 0;
double px, py, pz;
int noface, novertex;
Array<Point<3> > readpoints;
ist >> buf;
if (strcmp (buf, "NODES") == 0)
{
ist >> novertex;
PrintMessage(5,"nuber of vertices = ", novertex);
for (i = 0; i < novertex; i++)
{
ist >> px;
ist >> py;
ist >> pz;
readpoints.Append(Point<3> (px,py,pz));
}
}
else
{
PrintFileError("no node information");
}
ist >> buf;
if (strcmp (buf, "2D_EDGES") == 0)
{
ist >> noface;
PrintMessage(5,"number of faces=",noface);
int dummy, p1, p2, p3;
Point<3> pts[3];
for (i = 0; i < noface; i++)
{
ist >> dummy; //2
ist >> dummy; //1
ist >> p1;
ist >> p2;
ist >> p3;
ist >> dummy; //0
pts[0] = readpoints.Get(p1);
pts[1] = readpoints.Get(p2);
pts[2] = readpoints.Get(p3);
normal = Cross (pts[1]-pts[0], pts[2]-pts[0]) . Normalize();
readtrigs.Append (STLReadTriangle (pts, normal));
}
PrintMessage(5,"read ", readtrigs.Size(), " triangles");
}
else
{
PrintMessage(5,"read='",buf,"'\n");
PrintFileError("ERROR: no Triangle information");
}
geom->InitSTLGeometry(readtrigs);
return geom;
}
void STLTopology :: Save (const char* filename) const
{
PrintFnStart("Write stl-file '",filename, "'");
ofstream fout(filename);
fout << "solid\n";
char buf1[50];
char buf2[50];
char buf3[50];
int i, j;
for (i = 1; i <= GetNT(); i++)
{
const STLTriangle & t = GetTriangle(i);
fout << "facet normal ";
const Vec3d& n = GetTriangle(i).Normal();
sprintf(buf1,"%1.9g",n.X());
sprintf(buf2,"%1.9g",n.Y());
sprintf(buf3,"%1.9g",n.Z());
fout << buf1 << " " << buf2 << " " << buf3 << "\n";
fout << "outer loop\n";
for (j = 1; j <= 3; j++)
{
const Point3d p = GetPoint(t.PNum(j));
sprintf(buf1,"%1.9g",p.X());
sprintf(buf2,"%1.9g",p.Y());
sprintf(buf3,"%1.9g",p.Z());
fout << "vertex " << buf1 << " " << buf2 << " " << buf3 << "\n";
}
fout << "endloop\n";
fout << "endfacet\n";
}
fout << "endsolid\n";
// write also NETGEN surface mesh:
ofstream fout2("geom.surf");
fout2 << "surfacemesh" << endl;
fout2 << GetNP() << endl;
for (i = 1; i <= GetNP(); i++)
{
for (j = 0; j < 3; j++)
{
fout2.width(8);
fout2 << GetPoint(i)(j);
}
fout2 << endl;<|fim▁hole|> for (i = 1; i <= GetNT(); i++)
{
const STLTriangle & t = GetTriangle(i);
for (j = 1; j <= 3; j++)
{
fout2.width(8);
fout2 << t.PNum(j);
}
fout2 << endl;
}
}
STLGeometry * STLTopology ::Load (istream & ist)
{
STLGeometry * geom = new STLGeometry();
Array<STLReadTriangle> readtrigs;
char buf[100];
Point<3> pts[3];
Vec<3> normal;
int cntface = 0;
int vertex = 0;
bool badnormals = false;
while (ist.good())
{
ist >> buf;
int n = strlen (buf);
for (int i = 0; i < n; i++)
buf[i] = tolower (buf[i]);
if (strcmp (buf, "facet") == 0)
{
cntface++;
}
if (strcmp (buf, "normal") == 0)
{
ist >> normal(0)
>> normal(1)
>> normal(2);
normal.Normalize();
}
if (strcmp (buf, "vertex") == 0)
{
ist >> pts[vertex](0)
>> pts[vertex](1)
>> pts[vertex](2);
vertex++;
if (vertex == 3)
{
if (normal.Length() <= 1e-5)
{
normal = Cross (pts[1]-pts[0], pts[2]-pts[0]);
normal.Normalize();
}
else
{
Vec<3> hnormal = Cross (pts[1]-pts[0], pts[2]-pts[0]);
hnormal.Normalize();
if (normal * hnormal < 0.5)
badnormals = true;
}
vertex = 0;
if ( (Dist2 (pts[0], pts[1]) > 1e-16) &&
(Dist2 (pts[0], pts[2]) > 1e-16) &&
(Dist2 (pts[1], pts[2]) > 1e-16) )
{
readtrigs.Append (STLReadTriangle (pts, normal));
if (readtrigs.Size() % 100000 == 0)
PrintMessageCR (3, readtrigs.Size(), " triangles loaded\r");
}
else
{
cout << "Skipping flat triangle "
<< "l1 = " << Dist(pts[0], pts[1])
<< ", l2 = " << Dist(pts[0], pts[2])
<< ", l3 = " << Dist(pts[2], pts[1]) << endl;
}
}
}
}
PrintMessage (3, readtrigs.Size(), " triangles loaded");
if (badnormals)
{
PrintWarning("File has normal vectors which differ extremly from geometry->correct with stldoctor!!!");
}
geom->InitSTLGeometry(readtrigs);
return geom;
}
void STLTopology :: InitSTLGeometry(const Array<STLReadTriangle> & readtrigs)
{
// const double geometry_tol_fact = 1E6;
// distances lower than max_box_size/tol are ignored
trias.SetSize(0);
points.SetSize(0);
PrintMessage(3,"number of triangles = ", readtrigs.Size());
if (!readtrigs.Size()) return;
boundingbox.Set (readtrigs[0][0]);
for (int i = 0; i < readtrigs.Size(); i++)
for (int k = 0; k < 3; k++)
boundingbox.Add (readtrigs[i][k]);
PrintMessage(5,"boundingbox: ", Point3d(boundingbox.PMin()), " - ",
Point3d(boundingbox.PMax()));
Box<3> bb = boundingbox;
bb.Increase (1);
pointtree = new Point3dTree (bb.PMin(), bb.PMax());
Array<int> pintersect;
pointtol = boundingbox.Diam() * stldoctor.geom_tol_fact;
PrintMessage(5,"point tolerance = ", pointtol);
PrintMessageCR(5,"identify points ...");
for(int i = 0; i < readtrigs.Size(); i++)
{
const STLReadTriangle & t = readtrigs[i];
STLTriangle st;
st.SetNormal (t.Normal());
for (int k = 0; k < 3; k++)
{
Point<3> p = t[k];
Point<3> pmin = p - Vec<3> (pointtol, pointtol, pointtol);
Point<3> pmax = p + Vec<3> (pointtol, pointtol, pointtol);
pointtree->GetIntersecting (pmin, pmax, pintersect);
if (pintersect.Size() > 1)
PrintError("too many close points");
int foundpos = -1;
if (pintersect.Size())
foundpos = pintersect[0];
if (foundpos == -1)
{
foundpos = AddPoint(p);
pointtree->Insert (p, foundpos);
}
if (Dist(p, points.Get(foundpos)) > 1e-10)
cout << "identify close points: " << p << " " << points.Get(foundpos)
<< ", dist = " << Dist(p, points.Get(foundpos))
<< endl;
st[k] = foundpos;
}
if ( (st[0] == st[1]) ||
(st[0] == st[2]) ||
(st[1] == st[2]) )
{
PrintError("STL Triangle degenerated");
}
else
{
AddTriangle(st);
}
}
PrintMessage(5,"identify points ... done");
FindNeighbourTrigs();
}
int STLTopology :: GetPointNum (const Point<3> & p)
{
Point<3> pmin = p - Vec<3> (pointtol, pointtol, pointtol);
Point<3> pmax = p + Vec<3> (pointtol, pointtol, pointtol);
Array<int> pintersect;
pointtree->GetIntersecting (pmin, pmax, pintersect);
if (pintersect.Size() == 1)
return pintersect[0];
else
return 0;
}
void STLTopology :: FindNeighbourTrigs()
{
// if (topedges.Size()) return;
PushStatusF("Find Neighbour Triangles");
PrintMessage(5,"build topology ...");
// build up topology tables
int nt = GetNT();
INDEX_2_HASHTABLE<int> * oldedges = ht_topedges;
ht_topedges = new INDEX_2_HASHTABLE<int> (GetNP()+1);
topedges.SetSize(0);
for (int i = 1; i <= nt; i++)
{
STLTriangle & trig = GetTriangle(i);
for (int j = 1; j <= 3; j++)
{
int pi1 = trig.PNumMod (j+1);
int pi2 = trig.PNumMod (j+2);
INDEX_2 i2(pi1, pi2);
i2.Sort();
int enr;
int othertn;
if (ht_topedges->Used(i2))
{
enr = ht_topedges->Get(i2);
topedges.Elem(enr).TrigNum(2) = i;
othertn = topedges.Get(enr).TrigNum(1);
STLTriangle & othertrig = GetTriangle(othertn);
trig.NBTrigNum(j) = othertn;
trig.EdgeNum(j) = enr;
for (int k = 1; k <= 3; k++)
if (othertrig.EdgeNum(k) == enr)
othertrig.NBTrigNum(k) = i;
}
else
{
enr = topedges.Append (STLTopEdge (pi1, pi2, i, 0));
ht_topedges->Set (i2, enr);
trig.EdgeNum(j) = enr;
}
}
}
PrintMessage(5,"topology built, checking");
topology_ok = 1;
int ne = GetNTE();
for (int i = 1; i <= nt; i++)
GetTriangle(i).flags.toperror = 0;
for (int i = 1; i <= nt; i++)
for (int j = 1; j <= 3; j++)
{
const STLTopEdge & edge = GetTopEdge (GetTriangle(i).EdgeNum(j));
if (edge.TrigNum(1) != i && edge.TrigNum(2) != i)
{
topology_ok = 0;
GetTriangle(i).flags.toperror = 1;
}
}
for (int i = 1; i <= ne; i++)
{
const STLTopEdge & edge = GetTopEdge (i);
if (!edge.TrigNum(2))
{
topology_ok = 0;
GetTriangle(edge.TrigNum(1)).flags.toperror = 1;
}
}
if (topology_ok)
{
orientation_ok = 1;
for (int i = 1; i <= nt; i++)
{
const STLTriangle & t = GetTriangle (i);
for (int j = 1; j <= 3; j++)
{
const STLTriangle & nbt = GetTriangle (t.NBTrigNum(j));
if (!t.IsNeighbourFrom (nbt))
orientation_ok = 0;
}
}
}
else
orientation_ok = 0;
status = STL_GOOD;
statustext = "";
if (!topology_ok || !orientation_ok)
{
status = STL_ERROR;
if (!topology_ok)
statustext = "Topology not ok";
else
statustext = "Orientation not ok";
}
PrintMessage(3,"topology_ok = ",topology_ok);
PrintMessage(3,"orientation_ok = ",orientation_ok);
PrintMessage(3,"topology found");
// generate point -> trig table
trigsperpoint.SetSize(GetNP());
for (int i = 1; i <= GetNT(); i++)
for (int j = 1; j <= 3; j++)
trigsperpoint.Add1(GetTriangle(i).PNum(j),i);
//check trigs per point:
/*
for (i = 1; i <= GetNP(); i++)
{
if (trigsperpoint.EntrySize(i) < 3)
{
(*testout) << "ERROR: Point " << i << " has " << trigsperpoint.EntrySize(i) << " triangles!!!" << endl;
}
}
*/
topedgesperpoint.SetSize (GetNP());
for (int i = 1; i <= ne; i++)
for (int j = 1; j <= 2; j++)
topedgesperpoint.Add1 (GetTopEdge (i).PNum(j), i);
PrintMessage(5,"point -> trig table generated");
// transfer edge data:
// .. to be done
delete oldedges;
for (STLTrigIndex ti = 0; ti < GetNT(); ti++)
{
STLTriangle & trig = trias[ti];
for (int k = 0; k < 3; k++)
{
STLPointIndex pi = trig[k] - STLBASE;
STLPointIndex pi2 = trig[(k+1)%3] - STLBASE;
STLPointIndex pi3 = trig[(k+2)%3] - STLBASE;
// vector along edge
Vec<3> ve = points[pi2] - points[pi];
ve.Normalize();
// vector along third point
Vec<3> vt = points[pi3] - points[pi];
vt -= (vt * ve) * ve;
vt.Normalize();
Vec<3> vn = trig.GeomNormal (points);
vn.Normalize();
double phimin = 10, phimax = -1; // out of (0, 2 pi)
for (int j = 0; j < trigsperpoint[pi].Size(); j++)
{
STLTrigIndex ti2 = trigsperpoint[pi][j] - STLBASE;
const STLTriangle & trig2 = trias[ti2];
if (ti == ti2) continue;
bool hasboth = 0;
for (int l = 0; l < 3; l++)
if (trig2[l] - STLBASE == pi2)
{
hasboth = 1;
break;
}
if (!hasboth) continue;
STLPointIndex pi4(0);
for (int l = 0; l < 3; l++)
if (trig2[l] - STLBASE != pi && trig2[l] - STLBASE != pi2)
pi4 = trig2[l] - STLBASE;
Vec<3> vt2 = points[pi4] - points[pi];
double phi = atan2 (vt2 * vn, vt2 * vt);
if (phi < 0) phi += 2 * M_PI;
if (phi < phimin)
{
phimin = phi;
trig.NBTrig (0, (k+2)%3) = ti2 + STLBASE;
}
if (phi > phimax)
{
phimax = phi;
trig.NBTrig (1, (k+2)%3) = ti2 + STLBASE;
}
}
}
}
if (status == STL_GOOD)
{
// for compatibility:
neighbourtrigs.SetSize(GetNT());
for (int i = 1; i <= GetNT(); i++)
for (int k = 1; k <= 3; k++)
AddNeighbourTrig (i, GetTriangle(i).NBTrigNum(k));
}
else
{
// assemble neighbourtrigs (should be done only for illegal topology):
neighbourtrigs.SetSize(GetNT());
int tr, found;
int wrongneighbourfound = 0;
for (int i = 1; i <= GetNT(); i++)
{
SetThreadPercent((double)i/(double)GetNT()*100.);
if (multithread.terminate)
{
PopStatus();
return;
}
for (int k = 1; k <= 3; k++)
{
for (int j = 1; j <= trigsperpoint.EntrySize(GetTriangle(i).PNum(k)); j++)
{
tr = trigsperpoint.Get(GetTriangle(i).PNum(k),j);
if (i != tr && (GetTriangle(i).IsNeighbourFrom(GetTriangle(tr))
|| GetTriangle(i).IsWrongNeighbourFrom(GetTriangle(tr))))
{
if (GetTriangle(i).IsWrongNeighbourFrom(GetTriangle(tr)))
{
/*(*testout) << "ERROR: triangle " << i << " has a wrong neighbour triangle!!!" << endl;*/
wrongneighbourfound ++;
}
found = 0;
for (int ii = 1; ii <= NONeighbourTrigs(i); ii++)
{if (NeighbourTrig(i,ii) == tr) {found = 1;break;};}
if (! found) {AddNeighbourTrig(i,tr);}
}
}
}
if (NONeighbourTrigs(i) != 3)
{
PrintError("TRIG ",i," has ",NONeighbourTrigs(i)," neighbours!!!!");
for (int kk=1; kk <= NONeighbourTrigs(i); kk++)
{
PrintMessage(5,"neighbour-trig",kk," = ",NeighbourTrig(i,kk));
}
};
}
if (wrongneighbourfound)
{
PrintError("++++++++++++++++++++\n");
PrintError(wrongneighbourfound, " wrong oriented neighbourtriangles found!");
PrintError("try to correct it (with stldoctor)!");
PrintError("++++++++++++++++++++\n");
status = STL_ERROR;
statustext = "STL Mesh not consistent";
multithread.terminate = 1;
#ifdef STAT_STREAM
(*statout) << "non-conform stl geometry \\hline" << endl;
#endif
}
}
TopologyChanged();
PopStatus();
}
void STLTopology :: GetTrianglesInBox (/*
const Point<3> & pmin,
const Point<3> & pmax,
*/
const Box<3> & box,
Array<int> & btrias) const
{
if (searchtree)
searchtree -> GetIntersecting (box.PMin(), box.PMax(), btrias);
else
{
int i;
Box<3> box1 = box;
box1.Increase (1e-4);
btrias.SetSize(0);
int nt = GetNT();
for (i = 1; i <= nt; i++)
{
if (box1.Intersect (GetTriangle(i).box))
{
btrias.Append (i);
}
}
}
}
void STLTopology :: AddTriangle(const STLTriangle& t)
{
trias.Append(t);
const Point<3> & p1 = GetPoint (t.PNum(1));
const Point<3> & p2 = GetPoint (t.PNum(2));
const Point<3> & p3 = GetPoint (t.PNum(3));
Box<3> box;
box.Set (p1);
box.Add (p2);
box.Add (p3);
/*
// Point<3> pmin(p1), pmax(p1);
pmin.SetToMin (p2);
pmin.SetToMin (p3);
pmax.SetToMax (p2);
pmax.SetToMax (p3);
*/
trias.Last().box = box;
trias.Last().center = Center (p1, p2, p3);
double r1 = Dist (p1, trias.Last().center);
double r2 = Dist (p2, trias.Last().center);
double r3 = Dist (p3, trias.Last().center);
trias.Last().rad = max2 (max2 (r1, r2), r3);
if (geomsearchtreeon)
{searchtree->Insert (box.PMin(), box.PMax(), trias.Size());}
}
int STLTopology :: GetLeftTrig(int p1, int p2) const
{
int i;
for (i = 1; i <= trigsperpoint.EntrySize(p1); i++)
{
if (GetTriangle(trigsperpoint.Get(p1,i)).HasEdge(p1,p2)) {return trigsperpoint.Get(p1,i);}
}
PrintSysError("ERROR in GetLeftTrig !!!");
return 0;
}
int STLTopology :: GetRightTrig(int p1, int p2) const
{
return GetLeftTrig(p2,p1);
}
int STLTopology :: NeighbourTrigSorted(int trig, int edgenum) const
{
int i, p1, p2;
int psearch = GetTriangle(trig).PNum(edgenum);
for (i = 1; i <= 3; i++)
{
GetTriangle(trig).GetNeighbourPoints(GetTriangle(NeighbourTrig(trig,i)),p1,p2);
if (p1 == psearch) {return NeighbourTrig(trig,i);}
}
PrintSysError("ERROR in NeighbourTrigSorted");
return 0;
}
int STLTopology :: GetTopEdgeNum (int pi1, int pi2) const
{
if (!ht_topedges) return 0;
INDEX_2 i2(pi1, pi2);
i2.Sort();
if (!ht_topedges->Used(i2)) return 0;
return ht_topedges->Get(i2);
}
void STLTopology :: InvertTrig (int trig)
{
if (trig >= 1 && trig <= GetNT())
{
GetTriangle(trig).ChangeOrientation();
FindNeighbourTrigs();
}
else
{
PrintUserError("no triangle selected!");
}
}
void STLTopology :: DeleteTrig (int trig)
{
if (trig >= 1 && trig <= GetNT())
{
trias.DeleteElement(trig);
FindNeighbourTrigs();
}
else
{
PrintUserError("no triangle selected!");
}
}
void STLTopology :: OrientAfterTrig (int trig)
{
int starttrig = trig;
if (starttrig >= 1 && starttrig <= GetNT())
{
Array <int> oriented;
oriented.SetSize(GetNT());
int i;
for (i = 1; i <= oriented.Size(); i++)
{
oriented.Elem(i) = 0;
}
oriented.Elem(starttrig) = 1;
int k;
Array <int> list1;
list1.SetSize(0);
Array <int> list2;
list2.SetSize(0);
list1.Append(starttrig);
int cnt = 1;
int end = 0;
int nt;
while (!end)
{
end = 1;
for (i = 1; i <= list1.Size(); i++)
{
const STLTriangle& tt = GetTriangle(list1.Get(i));
for (k = 1; k <= 3; k++)
{
nt = tt.NBTrigNum (k); // NeighbourTrig(list1.Get(i),k);
if (oriented.Get(nt) == 0)
{
if (tt.IsWrongNeighbourFrom(GetTriangle(nt)))
{
GetTriangle(nt).ChangeOrientation();
}
oriented.Elem(nt) = 1;
list2.Append(nt);
cnt++;
end = 0;
}
}
}
list1.SetSize(0);
for (i = 1; i <= list2.Size(); i++)
{
list1.Append(list2.Get(i));
}
list2.SetSize(0);
}
PrintMessage(5,"NO corrected triangles = ",cnt);
if (cnt == GetNT())
{
PrintMessage(5,"ALL triangles oriented in same way!");
}
else
{
PrintWarning("NOT ALL triangles oriented in same way!");
}
// topedges.SetSize(0);
FindNeighbourTrigs();
}
else
{
PrintUserError("no triangle selected!");
}
}
}<|fim▁end|>
|
}
fout2 << GetNT() << endl;
|
<|file_name|>admin_template_unittest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for the admin template gatherer.'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
import StringIO
import tempfile
import unittest
from grit.gather import admin_template
from grit import util
from grit import grd_reader
from grit import grit_runner
from grit.tool import build
class AdmGathererUnittest(unittest.TestCase):
def testParsingAndTranslating(self):
pseudofile = StringIO.StringIO(
'bingo bongo\n'
'ding dong\n'
'[strings] \n'
'whatcha="bingo bongo"\n'
'gotcha = "bingolabongola "the wise" fingulafongula" \n')
gatherer = admin_template.AdmGatherer.FromFile(pseudofile)
gatherer.Parse()
self.failUnless(len(gatherer.GetCliques()) == 2)
self.failUnless(gatherer.GetCliques()[1].GetMessage().GetRealContent() ==
'bingolabongola "the wise" fingulafongula')
translation = gatherer.Translate('en')
self.failUnless(translation == gatherer.GetText().strip())
def testErrorHandling(self):
pseudofile = StringIO.StringIO(
'bingo bongo\n'
'ding dong\n'
'whatcha="bingo bongo"\n'
'gotcha = "bingolabongola "the wise" fingulafongula" \n')
gatherer = admin_template.AdmGatherer.FromFile(pseudofile)
self.assertRaises(admin_template.MalformedAdminTemplateException,
gatherer.Parse)
_TRANSLATABLES_FROM_FILE = (
'Google', 'Google Desktop', 'Preferences',
'Controls Google Desktop preferences',
'Indexing and Capture Control',
'Controls what files, web pages, and other content will be indexed by Google Desktop.',
'Prevent indexing of email',
# there are lots more but we don't check any further
)
def VerifyCliquesFromAdmFile(self, cliques):
self.failUnless(len(cliques) > 20)
for ix in range(len(self._TRANSLATABLES_FROM_FILE)):
text = cliques[ix].GetMessage().GetRealContent()
self.failUnless(text == self._TRANSLATABLES_FROM_FILE[ix])
def testFromFile(self):
fname = util.PathFromRoot('grit/testdata/GoogleDesktop.adm')
gatherer = admin_template.AdmGatherer.FromFile(fname)
gatherer.Parse()<|fim▁hole|> def MakeGrd(self):
grd = grd_reader.Parse(StringIO.StringIO('''<?xml version="1.0" encoding="UTF-8"?>
<grit latest_public_release="2" source_lang_id="en-US" current_release="3">
<release seq="3">
<structures>
<structure type="admin_template" name="IDAT_GOOGLE_DESKTOP_SEARCH"
file="GoogleDesktop.adm" exclude_from_rc="true" />
<structure type="txt" name="BINGOBONGO"
file="README.txt" exclude_from_rc="true" />
</structures>
</release>
<outputs>
<output filename="de_res.rc" type="rc_all" lang="de" />
</outputs>
</grit>'''), util.PathFromRoot('grit/testdata'))
grd.SetOutputContext('en', {})
grd.RunGatherers(recursive=True)
return grd
def testInGrd(self):
grd = self.MakeGrd()
cliques = grd.children[0].children[0].children[0].GetCliques()
self.VerifyCliquesFromAdmFile(cliques)
def testFileIsOutput(self):
grd = self.MakeGrd()
dirname = tempfile.mkdtemp()
try:
tool = build.RcBuilder()
tool.o = grit_runner.Options()
tool.output_directory = dirname
tool.res = grd
tool.Process()
self.failUnless(os.path.isfile(
os.path.join(dirname, 'de_GoogleDesktop.adm')))
self.failUnless(os.path.isfile(
os.path.join(dirname, 'de_README.txt')))
finally:
for f in os.listdir(dirname):
os.unlink(os.path.join(dirname, f))
os.rmdir(dirname)
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
cliques = gatherer.GetCliques()
self.VerifyCliquesFromAdmFile(cliques)
|
<|file_name|>regen.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python -u
# coding=utf-8
"""
Generate certificates via Let's Encrypt
"""
import re
from subprocess import check_output, check_call
from os import path
import click
from colorama import Fore
import pexpect
# Extract the file/challenge from the LetsEncrypt output e.g.
CREX = re.compile(
".well-known\/acme-challenge\/(\S+) before continuing:\s+(\S+)",
re.MULTILINE
)
MODULE_CONFIG = 'module.yaml' # The file in our project root
APPENGINE_URL = ("https://console.cloud.google.com/" +
"appengine/settings/certificates")
def get_default_email():
"""Get a default user email from the git config."""
return check_output(['git', 'config', 'user.email']).strip()<|fim▁hole|>
@click.command()
@click.option('--appid', '-A', prompt=True)
@click.option('--test/--no-test', default=True)
@click.option('--domains', '-d', multiple=True)
@click.option('--app-path', default=path.abspath(path.dirname(__file__)))
@click.option('--acme-path', required=True)
@click.option('--email', default=get_default_email)
def gen(test, appid, domains, acme_path, app_path, email):
"""Regenerate the keys.
Run all the steps, being:
1. Call Let's Encrypt
2. Capture the challenges from the LE output
3. Deploy the AppEngine module
4. Print Cert. to terminal
"""
common_name = domains[0] # noqa
sans = " ".join(domains) # noqa
click.echo("""
APPID: {appid}
Test: {test}
Common Name: {common_name}
Domain(s): {sans}
App Path: {app_path}
ACME path: {acme_path}
User Email: {email}
""".format(**{
k: Fore.YELLOW + str(v) + Fore.RESET
for k, v in locals().items()
}))
CERT_PATH = acme_path
KEY_PATH = acme_path
CHAIN_PATH = acme_path
FULLCHAIN_PATH = acme_path
CONFIG_DIR = acme_path
WORK_DIR = path.join(acme_path, 'tmp')
LOG_DIR = path.join(acme_path, 'logs')
cmd = [
'letsencrypt',
'certonly',
'--rsa-key-size',
'2048',
'--manual',
'--agree-tos',
'--manual-public-ip-logging-ok',
'--text',
'--cert-path', CERT_PATH,
'--key-path', KEY_PATH,
'--chain-path', CHAIN_PATH,
'--fullchain-path', FULLCHAIN_PATH,
'--config-dir', CONFIG_DIR,
'--work-dir', WORK_DIR,
'--logs-dir', LOG_DIR,
'--email', email,
'--domain', ",".join(domains),
]
if test:
cmd.append('--staging')
print("$ " + Fore.MAGENTA + " ".join(cmd) + Fore.RESET)
le = pexpect.spawn(" ".join(cmd))
out = ''
idx = le.expect(["Press ENTER", "Select the appropriate number"])
if idx == 1:
# 1: Keep the existing certificate for now
# 2: Renew & replace the cert (limit ~5 per 7 days)
print le.before + le.after
le.interact("\r")
print "..."
le.sendline("")
if le.expect(["Press ENTER", pexpect.EOF]) == 1:
# EOF - User chose to not update certs.
return
out += le.before
# Hit "Enter" for each domain; we extract all challenges at the end;
# We stop just at the last "Enter to continue" so we can publish
# our challenges on AppEngine.
for i in xrange(len(domains) - 1):
le.sendline("")
le.expect("Press ENTER")
out += le.before
# The challenges will be in `out` in the form of CREX
challenges = CREX.findall(out)
if not challenges:
raise Exception("Expected challenges from the output")
for filename, challenge in challenges:
filepath = path.join(app_path, "challenges", filename)
print "[%s]\n\t%s\n\t=> %s" % (
Fore.BLUE + filepath + Fore.RESET,
Fore.GREEN + filename + Fore.RESET,
Fore.YELLOW + challenge + Fore.RESET
)
with open(filepath, 'w') as f:
f.write(challenge)
# Deploy to AppEngine
cmd = [
'appcfg.py',
'update',
'-A', appid,
path.join(app_path, MODULE_CONFIG)
]
print("$ " + Fore.MAGENTA + " ".join(cmd) + Fore.RESET)
check_call(cmd)
# After deployment, continue the Let's Encrypt (which has been waiting
# on the last domain)
le.sendline("")
le.expect(pexpect.EOF)
le.close()
if le.exitstatus:
print Fore.RED + "\nletsencrypt failure: " + Fore.RESET + le.before
return
print "\nletsencrypt complete.", le.before
# Convert the key to a format AppEngine can use
# LE seems to choose the domain at random, so we have to pluck it.
CPATH_REX = (
"Your certificate and chain have been saved at (.+)fullchain\.pem\."
)
outstr = le.before.replace("\n", "").replace('\r', '')
results = re.search(CPATH_REX, outstr, re.MULTILINE)
LIVE_PATH = "".join(results.group(1).split())
CHAIN_PATH = path.join(LIVE_PATH, "fullchain.pem")
PRIVKEY_PATH = path.join(LIVE_PATH, "privkey.pem")
cmd = [
'openssl', 'rsa',
'-in', PRIVKEY_PATH,
'-outform', 'pem',
'-inform', 'pem'
]
print "$ " + Fore.MAGENTA + " ".join(cmd) + Fore.RESET
priv_text = check_output(cmd)
with open(CHAIN_PATH, 'r') as cp:
pub_text = cp.read()
print """
--- Private Key ---
at {PRIVKEY_PATH}
(the above file must be converted with {cmd} to a format usable by
AppEngine, the result of which will be as follows)
{priv_text}
--- Public Key Chain ---
at {CHAIN_PATH}
{pub_text}
✄ Copy the above into the respective fields of AppEngine at
https://console.cloud.google.com/appengine/settings/certificates
""".format(
PRIVKEY_PATH=PRIVKEY_PATH,
priv_text=Fore.RED + priv_text + Fore.RESET,
CHAIN_PATH=CHAIN_PATH,
pub_text=Fore.BLUE + pub_text + Fore.RESET,
cmd=Fore.MAGENTA + " ".join(cmd) + Fore.RESET,
)
if __name__ == '__main__':
gen()<|fim▁end|>
| |
<|file_name|>hl_prometheus.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 SONATA-NFV, Thales Communications & Security
# ALL RIGHTS RESERVED.
#<|fim▁hole|>#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, Thales Communications & Security
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
"""High level pandas structure for the Sonata prometheus data"""
import datetime
import typing # noqa pylint: disable=unused-import
from typing import Dict
import pandas # type: ignore
from son_analyze.core.prometheus import PrometheusData
def convert_timestamp_to_posix(timestamp: str) -> datetime.datetime:
"""Convert the timestamp into a datetime"""
return datetime.datetime.fromtimestamp(float(timestamp), # type: ignore
tz=datetime.timezone.utc)
# pylint: disable=unsubscriptable-object
def build_sonata_df_by_id(prom_data: PrometheusData) -> Dict[str,
pandas.DataFrame]:
"""Build a dict of dataframe. Each dataframe contains the values matching
the corresponding id"""
# noqa TODO: find the longest metrics and use it as the index. Interpolate the
# other metric against it before the merge
result = {}
items_itr = prom_data._by_id.items() # pylint: disable=protected-access
for id_index, all_metrics in items_itr:
acc_ts = []
for elt in all_metrics:
metric_name = elt['metric']['__name__']
index, data = zip(*elt['values'])
index = [convert_timestamp_to_posix(z) for z in index]
this_serie = pandas.Series(data, index=index)
this_serie.name = metric_name
acc_ts.append(this_serie)
dataframe = pandas.concat(acc_ts, join='outer', axis=1)
dataframe.index = pandas.date_range(
start=dataframe.index[0],
periods=len(dataframe.index),
freq='S')
dataframe = dataframe.interpolate(method='index')
# import pdb; pdb.set_trace()
result[id_index] = dataframe
return result<|fim▁end|>
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
|
<|file_name|>ListFonts.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import java.awt.*;
public class ListFonts
{
public static void main(String[] args)
{
String[] fontNames = GraphicsEnvironment
.getLocalGraphicsEnvironment()
.getAvailableFontFamilyNames();
for (int i = 0; i < fontNames.length; i++)
System.out.println(fontNames[i]);
}
}<|fim▁end|>
| |
<|file_name|>get_room_visibility.rs<|end_file_name|><|fim▁begin|>//! [GET /_matrix/client/r0/directory/list/room/{roomId}](https://matrix.org/docs/spec/client_server/r0.6.0#get-matrix-client-r0-directory-list-room-roomid)
use ruma_api::ruma_api;
use ruma_identifiers::RoomId;
use crate::r0::room::Visibility;
<|fim▁hole|> description: "Get the visibility of a public room on a directory.",
name: "get_room_visibility",
method: GET,
path: "/_matrix/client/r0/directory/list/room/:room_id",
rate_limited: false,
requires_authentication: false,
}
request {
/// The ID of the room of which to request the visibility.
#[ruma_api(path)]
pub room_id: RoomId,
}
response {
/// Visibility of the room.
pub visibility: Visibility,
}
error: crate::Error
}<|fim▁end|>
|
ruma_api! {
metadata {
|
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>var Encore = require('@symfony/webpack-encore');
// Manually configure the runtime environment if not already configured yet by the "encore" command.
// It's useful when you use tools that rely on webpack.config.js file.
if (!Encore.isRuntimeEnvironmentConfigured()) {
Encore.configureRuntimeEnvironment(process.env.NODE_ENV || 'dev');
}
Encore
// directory where compiled assets will be stored
.setOutputPath('public/build/')
// public path used by the web server to access the output path
.setPublicPath('/build')
// only needed for CDN's or sub-directory deploy
//.setManifestKeyPrefix('build/')
/*
* ENTRY CONFIG
*
* Add 1 entry for each "page" of your app
* (including one that's included on every page - e.g. "app")
*
* Each entry will result in one JavaScript file (e.g. app.js)
* and one CSS file (e.g. app.css) if your JavaScript imports CSS.
*/
.addEntry('app', './assets/js/app.js')
//.addEntry('page1', './assets/js/page1.js')
//.addEntry('page2', './assets/js/page2.js')
// When enabled, Webpack "splits" your files into smaller pieces for greater optimization.
.splitEntryChunks()
// will require an extra script tag for runtime.js
// but, you probably want this, unless you're building a single-page app
.enableSingleRuntimeChunk()
/*
* FEATURE CONFIG
*
* Enable & configure other features below. For a full
* list of features, see:
* https://symfony.com/doc/current/frontend.html#adding-more-features
*/
.cleanupOutputBeforeBuild()
.enableBuildNotifications()
.enableSourceMaps(!Encore.isProduction())
// enables hashed filenames (e.g. app.abc123.css)
.enableVersioning(Encore.isProduction())
// enables @babel/preset-env polyfills<|fim▁hole|> config.useBuiltIns = 'usage';
config.corejs = 3;
})
// enables Sass/SCSS support
//.enableSassLoader()
// uncomment if you use TypeScript
//.enableTypeScriptLoader()
// uncomment to get integrity="..." attributes on your script & link tags
// requires WebpackEncoreBundle 1.4 or higher
//.enableIntegrityHashes(Encore.isProduction())
// uncomment if you're having problems with a jQuery plugin
//.autoProvidejQuery()
// uncomment if you use API Platform Admin (composer req api-admin)
//.enableReactPreset()
//.addEntry('admin', './assets/js/admin.js')
;
module.exports = Encore.getWebpackConfig();<|fim▁end|>
|
.configureBabelPresetEnv((config) => {
|
<|file_name|>1_SetIRTemplatesToManual.py<|end_file_name|><|fim▁begin|>from elan import *
#Set_Location_And_Weather_By_Country_City.py
<|fim▁hole|>Configurator.Start()
Configurator.Wait()
sleep(3)
Configurator.media.Click()
Configurator.interfacetemplates.Click()
for i in range(100):
try:
Configurator.ComboBox.Select(0,1)
break
except:
sleep(2)
print("Try again")
Configurator.apply.Click()
Configurator.CloseAndClean()<|fim▁end|>
| |
<|file_name|>test_base.py<|end_file_name|><|fim▁begin|>"""
Unit tests for the base mechanism class.
"""
import pytest
from azmq.mechanisms.base import Mechanism
from azmq.errors import ProtocolError
@pytest.mark.asyncio
async def test_expect_command(reader):
reader.write(b'\x04\x09\x03FOOhello')
reader.seek(0)
result = await Mechanism._expect_command(reader=reader, name=b'FOO')
assert result == b'hello'
@pytest.mark.asyncio
async def test_expect_command_large(reader):
reader.write(b'\x06\x00\x00\x00\x00\x00\x00\x00\x09\x03FOOhello')
reader.seek(0)
result = await Mechanism._expect_command(reader=reader, name=b'FOO')
assert result == b'hello'
@pytest.mark.asyncio
async def test_expect_command_invalid_size_type(reader):
reader.write(b'\x03')
reader.seek(0)
with pytest.raises(ProtocolError):
await Mechanism._expect_command(reader=reader, name=b'FOO')
@pytest.mark.asyncio
async def test_expect_command_invalid_name_size(reader):
reader.write(b'\x04\x09\x04HELOhello')
reader.seek(0)
with pytest.raises(ProtocolError):
await Mechanism._expect_command(reader=reader, name=b'FOO')
@pytest.mark.asyncio
async def test_expect_command_invalid_name(reader):
reader.write(b'\x04\x08\x03BARhello')
reader.seek(0)
with pytest.raises(ProtocolError):
await Mechanism._expect_command(reader=reader, name=b'FOO')
@pytest.mark.asyncio
async def test_read_frame(reader):
reader.write(b'\x00\x03foo')
reader.seek(0)
async def on_command(name, data):
assert False
result = await Mechanism.read(reader=reader, on_command=on_command)
assert result == (b'foo', True)
@pytest.mark.asyncio
async def test_read_frame_large(reader):
reader.write(b'\x02\x00\x00\x00\x00\x00\x00\x00\x03foo')
reader.seek(0)
async def on_command(name, data):
assert False
result = await Mechanism.read(reader=reader, on_command=on_command)
assert result == (b'foo', True)
@pytest.mark.asyncio
async def test_read_command(reader):
reader.write(b'\x04\x09\x03BARhello\x00\x03foo')<|fim▁hole|> async def on_command(name, data):
assert name == b'BAR'
assert data == b'hello'
result = await Mechanism.read(reader=reader, on_command=on_command)
assert result == (b'foo', True)
@pytest.mark.asyncio
async def test_read_invalid_size_type(reader):
reader.write(b'\x09')
reader.seek(0)
async def on_command(name, data):
assert False
with pytest.raises(ProtocolError):
await Mechanism.read(reader=reader, on_command=on_command)<|fim▁end|>
|
reader.seek(0)
|
<|file_name|>fetchProductAndCheckIfFound.js<|end_file_name|><|fim▁begin|>import applicationActions from '../../constants/application';
import productActions from '../../constants/products';<|fim▁hole|>
export default function fetchProductAndCheckIfFound(context, payload, done) {
context.dispatch(productActions.PRODUCTS_ITEM);
context.api.products.get(payload).then(function successFn(result) {
context.dispatch(productActions.PRODUCTS_ITEM_SUCCESS, result);
done && done();
}, function errorFn(err) {
context.dispatch(productActions.PRODUCTS_ITEM_ERROR, err.result);
context.dispatch(applicationActions.APPLICATION_ROUTE_ERROR, err.status);
done && done();
});
}<|fim▁end|>
| |
<|file_name|>content.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export { default } from 'ember-radical/components/rad-dropdown/content'
|
<|file_name|>logs.py<|end_file_name|><|fim▁begin|>import sys
from traceback import format_exception
import colorama
def color(color_, settings):
"""Utility for ability to disabling colored output."""
if settings.no_colors:
return ''
else:
return color_
def exception(title, exc_info, settings):
sys.stderr.write(
u'{warn}[WARN] {title}:{reset}\n{trace}'
u'{warn}----------------------------{reset}\n\n'.format(
warn=color(colorama.Back.RED + colorama.Fore.WHITE
+ colorama.Style.BRIGHT, settings),
reset=color(colorama.Style.RESET_ALL, settings),
title=title,
trace=''.join(format_exception(*exc_info))))
def rule_failed(rule, exc_info, settings):
exception('Rule {}'.format(rule.name), exc_info, settings)
def show_command(new_command, settings):
sys.stderr.write('{bold}{command}{reset}\n'.format(
command=new_command,
bold=color(colorama.Style.BRIGHT, settings),
reset=color(colorama.Style.RESET_ALL, settings)))
def confirm_command(new_command, settings):
sys.stderr.write(
'{bold}{command}{reset} [{green}enter{reset}/{red}ctrl+c{reset}]'.format(
command=new_command,
bold=color(colorama.Style.BRIGHT, settings),
green=color(colorama.Fore.GREEN, settings),<|fim▁hole|> sys.stderr.flush()
def failed(msg, settings):
sys.stderr.write('{red}{msg}{reset}\n'.format(
msg=msg,
red=color(colorama.Fore.RED, settings),
reset=color(colorama.Style.RESET_ALL, settings)))<|fim▁end|>
|
red=color(colorama.Fore.RED, settings),
reset=color(colorama.Style.RESET_ALL, settings)))
|
<|file_name|>_oauth.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
<|fim▁hole|>This module comtains the path hack neccesary for oauthlib to be vendored into requests
while allowing upstream changes.
"""
import os
import sys
try:
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)
except ImportError:
path = os.path.abspath('/'.join(__file__.split('/')[:-1]+['packages']))
sys.path.insert(0, path)
from oauthlib.oauth1 import rfc5849
from oauthlib.common import extract_params
from oauthlib.oauth1.rfc5849 import (Client, SIGNATURE_HMAC, SIGNATURE_TYPE_AUTH_HEADER)<|fim▁end|>
|
"""
requests._oauth
~~~~~~~~~~~~~~~
|
<|file_name|>gregorian.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2011, The Dojo Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
<|fim▁hole|>*/
//>>built
define("dojo/cldr/nls/en-au/gregorian",{"dateFormatItem-yMEd":"E, d/M/y","timeFormat-full":"h:mm:ss a zzzz","timeFormat-medium":"h:mm:ss a","dateFormatItem-MEd":"E, d/M","dateFormat-medium":"dd/MM/y","dateFormatItem-yMd":"d/M/y","dateFormat-full":"EEEE, d MMMM y","timeFormat-long":"h:mm:ss a z","timeFormat-short":"h:mm a","dateFormat-short":"d/MM/yy","dateFormat-long":"d MMMM y","dateFormatItem-MMMEd":"E, d MMM"});<|fim▁end|>
| |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
<|fim▁hole|>from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
packages=['lg_builder'],
package_dir={'': 'src'},
scripts=['scripts/lg-ros-build'],
install_requires=['catkin_pkg', 'python-debian', 'rospkg']
)
setup(**d)<|fim▁end|>
| |
<|file_name|>play_context.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pipes
import random
import re
from six import iteritems
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.template import Templar
from ansible.utils.boolean import boolean
from ansible.utils.unicode import to_unicode
__all__ = ['PlayContext']
# the magic variable mapping dictionary below is used to translate
# host/inventory variables to fields in the PlayContext
# object. The dictionary values are tuples, to account for aliases
# in variable names.
MAGIC_VARIABLE_MAPPING = dict(
connection = ('ansible_connection',),
remote_addr = ('ansible_ssh_host', 'ansible_host'),
remote_user = ('ansible_ssh_user', 'ansible_user'),
port = ('ansible_ssh_port', 'ansible_port'),
password = ('ansible_ssh_pass', 'ansible_password'),
private_key_file = ('ansible_ssh_private_key_file', 'ansible_private_key_file'),
shell = ('ansible_shell_type',),
become = ('ansible_become',),
become_method = ('ansible_become_method',),
become_user = ('ansible_become_user',),
become_pass = ('ansible_become_password','ansible_become_pass'),
become_exe = ('ansible_become_exe',),
become_flags = ('ansible_become_flags',),
sudo = ('ansible_sudo',),
sudo_user = ('ansible_sudo_user',),
sudo_pass = ('ansible_sudo_password', 'ansible_sudo_pass'),
sudo_exe = ('ansible_sudo_exe',),
sudo_flags = ('ansible_sudo_flags',),
su = ('ansible_su',),
su_user = ('ansible_su_user',),
su_pass = ('ansible_su_password', 'ansible_su_pass'),
su_exe = ('ansible_su_exe',),
su_flags = ('ansible_su_flags',),
)
SU_PROMPT_LOCALIZATIONS = [
'Password',
'암호',
'パスワード',
'Adgangskode',
'Contraseña',
'Contrasenya',
'Hasło',
'Heslo',
'Jelszó',
'Lösenord',
'Mật khẩu',
'Mot de passe',
'Parola',
'Parool',
'Pasahitza',
'Passord',
'Passwort',
'Salasana',
'Sandi',
'Senha',
'Wachtwoord',
'ססמה',
'Лозинка',
'Парола',
'Пароль',
'गुप्तशब्द',
'शब्दकूट',
'సంకేతపదము',
'හස්පදය',
'密码',
'密碼',
]
TASK_ATTRIBUTE_OVERRIDES = (
'become',
'become_user',
'become_pass',
'become_method',
'connection',
'delegate_to',
'no_log',
'remote_user',
)
class PlayContext(Base):
'''
This class is used to consolidate the connection information for
hosts in a play and child tasks, where the task may override some
connection/authentication information.
'''
# connection fields, some are inherited from Base:
# (connection, port, remote_user, environment, no_log)
_remote_addr = FieldAttribute(isa='string')
_password = FieldAttribute(isa='string')
_private_key_file = FieldAttribute(isa='string', default=C.DEFAULT_PRIVATE_KEY_FILE)
_timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
_shell = FieldAttribute(isa='string')
_ssh_extra_args = FieldAttribute(isa='string')
_connection_lockfd= FieldAttribute(isa='int')
# privilege escalation fields
_become = FieldAttribute(isa='bool')
_become_method = FieldAttribute(isa='string')
_become_user = FieldAttribute(isa='string')
_become_pass = FieldAttribute(isa='string')
_become_exe = FieldAttribute(isa='string')
_become_flags = FieldAttribute(isa='string')
_prompt = FieldAttribute(isa='string')<|fim▁hole|> _sudo_exe = FieldAttribute(isa='string')
_sudo_flags = FieldAttribute(isa='string')
_sudo_pass = FieldAttribute(isa='string')
_su_exe = FieldAttribute(isa='string')
_su_flags = FieldAttribute(isa='string')
_su_pass = FieldAttribute(isa='string')
# general flags
_verbosity = FieldAttribute(isa='int', default=0)
_only_tags = FieldAttribute(isa='set', default=set())
_skip_tags = FieldAttribute(isa='set', default=set())
_check_mode = FieldAttribute(isa='bool', default=False)
_force_handlers = FieldAttribute(isa='bool', default=False)
_start_at_task = FieldAttribute(isa='string')
_step = FieldAttribute(isa='bool', default=False)
_diff = FieldAttribute(isa='bool', default=False)
def __init__(self, play=None, options=None, passwords=None, connection_lockfd=None):
super(PlayContext, self).__init__()
if passwords is None:
passwords = {}
self.password = passwords.get('conn_pass','')
self.become_pass = passwords.get('become_pass','')
# a file descriptor to be used during locking operations
self.connection_lockfd = connection_lockfd
# set options before play to allow play to override them
if options:
self.set_options(options)
if play:
self.set_play(play)
def set_play(self, play):
'''
Configures this connection information instance with data from
the play class.
'''
if play.connection:
self.connection = play.connection
if play.remote_user:
self.remote_user = play.remote_user
if play.port:
self.port = int(play.port)
if play.become is not None:
self.become = play.become
if play.become_method:
self.become_method = play.become_method
if play.become_user:
self.become_user = play.become_user
# non connection related
self.no_log = play.no_log
if play.force_handlers is not None:
self.force_handlers = play.force_handlers
def set_options(self, options):
'''
Configures this connection information instance with data from
options specified by the user on the command line. These have a
lower precedence than those set on the play or host.
'''
if options.connection:
self.connection = options.connection
self.remote_user = options.remote_user
self.private_key_file = options.private_key_file
self.ssh_extra_args = options.ssh_extra_args
# privilege escalation
self.become = options.become
self.become_method = options.become_method
self.become_user = options.become_user
# general flags (should we move out?)
if options.verbosity:
self.verbosity = options.verbosity
#if options.no_log:
# self.no_log = boolean(options.no_log)
if options.check:
self.check_mode = boolean(options.check)
if hasattr(options, 'force_handlers') and options.force_handlers:
self.force_handlers = boolean(options.force_handlers)
if hasattr(options, 'step') and options.step:
self.step = boolean(options.step)
if hasattr(options, 'start_at_task') and options.start_at_task:
self.start_at_task = to_unicode(options.start_at_task)
if hasattr(options, 'diff') and options.diff:
self.diff = boolean(options.diff)
if hasattr(options, 'timeout') and options.timeout:
self.timeout = int(options.timeout)
# get the tag info from options, converting a comma-separated list
# of values into a proper list if need be. We check to see if the
# options have the attribute, as it is not always added via the CLI
if hasattr(options, 'tags'):
if isinstance(options.tags, list):
self.only_tags.update(options.tags)
elif isinstance(options.tags, basestring):
self.only_tags.update(options.tags.split(','))
if len(self.only_tags) == 0:
self.only_tags = set(['all'])
if hasattr(options, 'skip_tags'):
if isinstance(options.skip_tags, list):
self.skip_tags.update(options.skip_tags)
elif isinstance(options.skip_tags, basestring):
self.skip_tags.update(options.skip_tags.split(','))
def set_task_and_variable_override(self, task, variables):
'''
Sets attributes from the task if they are set, which will override
those from the play.
'''
new_info = self.copy()
# loop through a subset of attributes on the task object and set
# connection fields based on their values
for attr in TASK_ATTRIBUTE_OVERRIDES:
if hasattr(task, attr):
attr_val = getattr(task, attr)
if attr_val is not None:
setattr(new_info, attr, attr_val)
# next, use the MAGIC_VARIABLE_MAPPING dictionary to update this
# connection info object with 'magic' variables from the variable list
for (attr, variable_names) in iteritems(MAGIC_VARIABLE_MAPPING):
for variable_name in variable_names:
if variable_name in variables:
setattr(new_info, attr, variables[variable_name])
# make sure we get port defaults if needed
if new_info.port is None and C.DEFAULT_REMOTE_PORT is not None:
new_info.port = int(C.DEFAULT_REMOTE_PORT)
# become legacy updates
if not new_info.become_pass:
if new_info.become_method == 'sudo' and new_info.sudo_pass:
setattr(new_info, 'become_pass', new_info.sudo_pass)
elif new_info.become_method == 'su' and new_info.su_pass:
setattr(new_info, 'become_pass', new_info.su_pass)
# finally, in the special instance that the task was specified
# as a local action, override the connection in case it was changed
# during some other step in the process
if task._local_action:
setattr(new_info, 'connection', 'local')
return new_info
def make_become_cmd(self, cmd, executable=None):
""" helper function to create privilege escalation commands """
prompt = None
success_key = None
self.prompt = None
if executable is None:
executable = C.DEFAULT_EXECUTABLE
if self.become:
becomecmd = None
randbits = ''.join(chr(random.randint(ord('a'), ord('z'))) for x in xrange(32))
success_key = 'BECOME-SUCCESS-%s' % randbits
success_cmd = pipes.quote('echo %s; %s' % (success_key, cmd))
# set executable to use for the privilege escalation method, with various overrides
exe = self.become_exe or \
getattr(self, '%s_exe' % self.become_method, None) or \
C.DEFAULT_BECOME_EXE or \
getattr(C, 'DEFAULT_%s_EXE' % self.become_method.upper(), None) or \
self.become_method
# set flags to use for the privilege escalation method, with various overrides
flags = self.become_flags or \
getattr(self, '%s_flags' % self.become_method, None) or \
C.DEFAULT_BECOME_FLAGS or \
getattr(C, 'DEFAULT_%s_FLAGS' % self.become_method.upper(), None) or \
''
if self.become_method == 'sudo':
# Rather than detect if sudo wants a password this time, -k makes sudo always ask for
# a password if one is required. Passing a quoted compound command to sudo (or sudo -s)
# directly doesn't work, so we shellquote it with pipes.quote() and pass the quoted
# string to the user's shell. We loop reading output until we see the randomly-generated
# sudo prompt set with the -p option.
# force quick error if password is required but not supplied, should prevent sudo hangs.
if self.become_pass:
prompt = '[sudo via ansible, key=%s] password: ' % randbits
becomecmd = '%s %s -p "%s" -S -u %s %s -c %s' % (exe, flags, prompt, self.become_user, executable, success_cmd)
else:
becomecmd = '%s %s -n -S -u %s %s -c %s' % (exe, flags, self.become_user, executable, success_cmd)
elif self.become_method == 'su':
def detect_su_prompt(data):
SU_PROMPT_LOCALIZATIONS_RE = re.compile("|".join(['(\w+\'s )?' + x + ' ?: ?' for x in SU_PROMPT_LOCALIZATIONS]), flags=re.IGNORECASE)
return bool(SU_PROMPT_LOCALIZATIONS_RE.match(data))
prompt = detect_su_prompt
becomecmd = '%s %s %s -c "%s -c %s"' % (exe, flags, self.become_user, executable, success_cmd)
elif self.become_method == 'pbrun':
prompt='assword:'
becomecmd = '%s -b %s -u %s %s' % (exe, flags, self.become_user, success_cmd)
elif self.become_method == 'pfexec':
# No user as it uses it's own exec_attr to figure it out
becomecmd = '%s %s "%s"' % (exe, flags, success_cmd)
elif self.become_method == 'runas':
raise AnsibleError("'runas' is not yet implemented")
#TODO: figure out prompt
# this is not for use with winrm plugin but if they ever get ssh native on windoez
becomecmd = '%s %s /user:%s "%s"' % (exe, flags, self.become_user, success_cmd)
elif self.become_method == 'doas':
prompt = 'Password:'
exe = self.become_exe or 'doas'
if not self.become_pass:
flags += ' -n '
if self.become_user:
flags += ' -u %s ' % self.become_user
becomecmd = '%s %s echo %s && %s %s env ANSIBLE=true %s' % (exe, flags, success_key, exe, flags, cmd)
else:
raise AnsibleError("Privilege escalation method not found: %s" % self.become_method)
if self.become_pass:
self.prompt = prompt
self.success_key = success_key
return ('%s -c %s' % (executable, pipes.quote(becomecmd)))
return cmd
def update_vars(self, variables):
'''
Adds 'magic' variables relating to connections to the variable dictionary provided.
In case users need to access from the play, this is a legacy from runner.
'''
#FIXME: remove password? possibly add become/sudo settings
for special_var in ['ansible_connection', 'ansible_ssh_host', 'ansible_ssh_pass', 'ansible_ssh_port', 'ansible_ssh_user', 'ansible_ssh_private_key_file']:
if special_var not in variables:
for prop, varnames in MAGIC_VARIABLE_MAPPING.items():
if special_var in varnames:
variables[special_var] = getattr(self, prop)<|fim▁end|>
|
# backwards compatibility fields for sudo/su
|
<|file_name|>read_job.py<|end_file_name|><|fim▁begin|># JoeTraffic - Web-Log Analysis Application utilizing the JoeAgent Framework.
# Copyright (C) 2004 Rhett Garber
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from JoeAgent import job, event
import db_interface
import os, os.path
import logging
import log_parser
LINEINCR = 30
log = logging.getLogger("agent.LogReader")
class ReadLogCompleteEvent(event.Event):
"""Event to indicate the file is completely read. This event will
be caught by the FindLogJob that is watching it. The file will
continue to be checked for modifications"""
pass
class ReadLogContinueEvent(event.Event):
"""Event to indicate we should continue reading the file. Log file
processing will be done in chunks so as not to block the agent for
too long."""
pass
class ReadLogJob(job.Job):
def __init__(self, agent_obj, logfile):
job.Job.__init__(self, agent_obj)
assert os.path.isfile(logfile), "Not a file: %s" % str(logfile)
self._log_size = os.stat(logfile).st_size
log.debug("Log size is %d" % self._log_size)
self._logfile_path = logfile
self._logfile_hndl = open(logfile, 'r')
self._progress = 0 # Data read from file
self._db = db_interface.getDB()
def getFilePath(self):
return self._logfile_path
def getBytesRead(self):
return self._progress
def getBytesTotal(self):
return self._log_size
<|fim▁hole|> evt = ReadLogContinueEvent(self)
self.getAgent().addEvent(evt)
def notify(self, evt):
job.Job.notify(self, evt)
if isinstance(evt, ReadLogContinueEvent) and evt.getSource() == self:
log.debug("Continuing read of file")
# Continue to read the log
try:
self._progress += log_parser.read_log(
self._logfile_hndl, self._db, LINEINCR)
log.debug("Read %d %% of file (%d / %d)" % (self.getProgress(),
self._progress,
self._log_size))
except log_parser.EndOfLogException, e:
self._progress = self._log_size
# Log file is complete, updated the db entry
self._mark_complete()
# Add an event to notify that the file is complete
self._logfile_hndl.close()
new_evt = ReadLogCompleteEvent(self)
self.getAgent().addEvent(new_evt)
except log_parser.InvalidLogException, e:
log.warning("Invalid log file: %s" % str(e))
self._logfile_hndl.close()
new_evt = ReadLogCompleteEvent(self)
self.getAgent().addEvent(new_evt)
else:
# Add an event to continue reading
new_evt = ReadLogContinueEvent(self)
self.getAgent().addEvent(new_evt)
def _update_db(self):
"""Update the entry in the database for this logfile"""
log.debug("Updating file %s" % self._logfile_path)
pass
def _mark_invalid(self):
"""Update the database to indicate that this is not a valid log file"""
log.debug("Marking file %s invalid" % self._logfile_path)
pass
def _mark_complete(self):
log.debug("Marking file %s complete" % self._logfile_path)
pass
def getProgress(self):
"""Return a percentage complete value"""
if self._log_size == 0:
return 0
return int((float(self._progress) / self._log_size) * 100)<|fim▁end|>
|
def run(self):
|
<|file_name|>union_pooler_test.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2015, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import unittest
import numpy
from union_pooling.union_pooler import UnionPooler
REAL_DTYPE = numpy.float32
class UnionPoolerTest(unittest.TestCase):
def setUp(self):
self.unionPooler = UnionPooler(inputDimensions=5,
columnDimensions=5,
potentialRadius=16,
potentialPct=0.9,
globalInhibition=True,
localAreaDensity=-1.0,
numActiveColumnsPerInhArea=2.0,
stimulusThreshold=2,
synPermInactiveDec=0.01,
synPermActiveInc=0.03,
synPermConnected=0.3,
minPctOverlapDutyCycle=0.001,
minPctActiveDutyCycle=0.001,
dutyCyclePeriod=1000,
maxBoost=1.0,
seed=42,
spVerbosity=0,
wrapAround=True,
# union_pooler.py parameters
activeOverlapWeight=1.0,
predictedActiveOverlapWeight=10.0,
maxUnionActivity=0.20,
exciteFunctionType='Fixed',
decayFunctionType='NoDecay')
def testDecayPoolingActivationDefaultDecayRate(self):
self.unionPooler._poolingActivation = numpy.array([0, 1, 2, 3, 4],
dtype=REAL_DTYPE)
expected = numpy.array([0, 1, 2, 3, 4], dtype=REAL_DTYPE)
result = self.unionPooler._decayPoolingActivation()
print result
self.assertTrue(numpy.array_equal(expected, result))
def testAddToPoolingActivation(self):
activeCells = numpy.array([1, 3, 4])
overlaps = numpy.array([0.123, 0.0, 0.0, 0.456, 0.789])
expected = [0.0, 10.0, 0.0, 10.0, 10.0]
result = self.unionPooler._addToPoolingActivation(activeCells, overlaps)
self.assertTrue(numpy.allclose(expected, result))
def testAddToPoolingActivationExistingActivation(self):
self.unionPooler._poolingActivation = numpy.array([0, 1, 2, 3, 4],
dtype=REAL_DTYPE)
activeCells = numpy.array([1, 3, 4])
# [ 0, 1, 0, 1, 1]
overlaps = numpy.array([0.123, 0.0, 0.0, 0.456, 0.789])
expected = [0.0, 11.0, 2.0, 13, 14]
result = self.unionPooler._addToPoolingActivation(activeCells, overlaps)
self.assertTrue(numpy.allclose(expected, result))
def testGetMostActiveCellsUnionSizeZero(self):
self.unionPooler._poolingActivation = numpy.array([0, 1, 2, 3, 4],
dtype=REAL_DTYPE)
self.unionPooler._maxUnionCells = 0
result = self.unionPooler._getMostActiveCells()
self.assertEquals(len(result), 0)
def testGetMostActiveCellsRegular(self):
self.unionPooler._poolingActivation = numpy.array([0, 1, 2, 3, 4],
dtype=REAL_DTYPE)
result = self.unionPooler._getMostActiveCells()
<|fim▁hole|>
def testGetMostActiveCellsIgnoreZeros(self):
self.unionPooler._poolingActivation = numpy.array([0, 0, 0, 3, 4],
dtype=REAL_DTYPE)
self.unionPooler._maxUnionCells = 3
result = self.unionPooler._getMostActiveCells()
self.assertEquals(len(result), 2)
self.assertEquals(result[0], 3)
self.assertEquals(result[1], 4)
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
self.assertEquals(len(result), 1)
self.assertEquals(result[0], 4)
|
<|file_name|>api.go<|end_file_name|><|fim▁begin|>// Copyright 2015 CoreOS, Inc.
// Copyright 2015 The Go Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package gcloud
import (
"fmt"
"io/ioutil"
"net/http"
"strings"
"time"
"github.com/coreos/pkg/capnslog"
"google.golang.org/api/compute/v1"
"github.com/coreos/mantle/auth"
"github.com/coreos/mantle/platform"
)
var (
plog = capnslog.NewPackageLogger("github.com/coreos/mantle", "platform/api/gcloud")
)
type Options struct {
Image string
Project string
Zone string
MachineType string
DiskType string
Network string
JSONKeyFile string
ServiceAuth bool
*platform.Options
}
type API struct {
client *http.Client
compute *compute.Service
options *Options
}<|fim▁hole|>
// If the image name isn't a full api endpoint accept a name beginning
// with "projects/" to specify a different project from the instance.
// Also accept a short name and use instance project.
if strings.HasPrefix(opts.Image, "projects/") {
opts.Image = endpointPrefix + opts.Image
} else if !strings.Contains(opts.Image, "/") {
opts.Image = fmt.Sprintf("%sprojects/%s/global/images/%s", endpointPrefix, opts.Project, opts.Image)
} else if !strings.HasPrefix(opts.Image, endpointPrefix) {
return nil, fmt.Errorf("GCE Image argument must be the full api endpoint, begin with 'projects/', or use the short name")
}
var (
client *http.Client
err error
)
if opts.ServiceAuth {
client = auth.GoogleServiceClient()
} else if opts.JSONKeyFile != "" {
b, err := ioutil.ReadFile(opts.JSONKeyFile)
if err != nil {
plog.Fatal(err)
}
client, err = auth.GoogleClientFromJSONKey(b)
} else {
client, err = auth.GoogleClient()
}
if err != nil {
return nil, err
}
capi, err := compute.New(client)
if err != nil {
return nil, err
}
api := &API{
client: client,
compute: capi,
options: opts,
}
return api, nil
}
func (a *API) Client() *http.Client {
return a.client
}
func (a *API) GC(gracePeriod time.Duration) error {
return a.gcInstances(gracePeriod)
}<|fim▁end|>
|
func New(opts *Options) (*API, error) {
const endpointPrefix = "https://www.googleapis.com/compute/v1/"
|
<|file_name|>iosxr_facts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The module file for iosxr_facts
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': [u'preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: iosxr_facts
version_added: 2.2
short_description: Get facts about iosxr devices.
extends_documentation_fragment: iosxr
description:
- Collects facts from network devices running the iosxr operating
system. This module places the facts gathered in the fact tree keyed by the
respective resource name. The facts module will always collect a
base set of facts from the device and can enable or disable
collection of additional facts.
notes:
- Tested against IOS-XR 6.1.3.
- This module works with connection C(network_cli). See L(the IOS-XR Platform Options,../network/user_guide/platform_iosxr.html).
author:
- Ricardo Carrillo Cruz (@rcarrillocruz)
- Nilashish Chakraborty (@Nilashishc)
options:
gather_subset:
description:
- When supplied, this argument will restrict the facts collected
to a given subset. Possible values for this argument include
all, hardware, config, and interfaces. Can specify a list of
values to include a larger subset. Values can also be used
with an initial C(M(!)) to specify that a specific subset should
not be collected.
required: false
default: '!config'
gather_network_resources:
description:
- When supplied, this argument will restrict the facts collected
to a given subset. Possible values for this argument include
all and the resources like interfaces, lacp etc.
Can specify a list of values to include a larger subset. Values
can also be used with an initial C(M(!)) to specify that a
specific subset should not be collected.
Valid subsets are 'all', 'lacp', 'lacp_interfaces', 'lldp_global',
'lldp_interfaces', 'interfaces', 'l2_interfaces', 'l3_interfaces',
'lag_interfaces', 'acls', 'acl_interfaces', 'static_routes.
required: false<|fim▁hole|>"""
EXAMPLES = """
# Gather all facts
- iosxr_facts:
gather_subset: all
gather_network_resources: all
# Collect only the config and default facts
- iosxr_facts:
gather_subset:
- config
# Do not collect hardware facts
- iosxr_facts:
gather_subset:
- "!hardware"
# Collect only the lacp facts
- iosxr_facts:
gather_subset:
- "!all"
- "!min"
gather_network_resources:
- lacp
# Do not collect lacp_interfaces facts
- iosxr_facts:
gather_network_resources:
- "!lacp_interfaces"
# Collect lacp and minimal default facts
- iosxr_facts:
gather_subset: min
gather_network_resources: lacp
# Collect only the interfaces facts
- iosxr_facts:
gather_subset:
- "!all"
- "!min"
gather_network_resources:
- interfaces
- l2_interfaces
"""
RETURN = """
ansible_net_gather_subset:
description: The list of fact subsets collected from the device
returned: always
type: list
# default
ansible_net_version:
description: The operating system version running on the remote device
returned: always
type: str
ansible_net_hostname:
description: The configured hostname of the device
returned: always
type: str
ansible_net_image:
description: The image file the device is running
returned: always
type: str
ansible_net_api:
description: The name of the transport
returned: always
type: str
ansible_net_python_version:
description: The Python version Ansible controller is using
returned: always
type: str
ansible_net_model:
description: The model name returned from the device
returned: always
type: str
# hardware
ansible_net_filesystems:
description: All file system names available on the device
returned: when hardware is configured
type: list
ansible_net_memfree_mb:
description: The available free memory on the remote device in Mb
returned: when hardware is configured
type: int
ansible_net_memtotal_mb:
description: The total memory on the remote device in Mb
returned: when hardware is configured
type: int
# config
ansible_net_config:
description: The current active config from the device
returned: when config is configured
type: str
# interfaces
ansible_net_all_ipv4_addresses:
description: All IPv4 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_all_ipv6_addresses:
description: All IPv6 addresses configured on the device
returned: when interfaces is configured
type: list
ansible_net_interfaces:
description: A hash of all interfaces running on the system
returned: when interfaces is configured
type: dict
ansible_net_neighbors:
description: The list of LLDP neighbors from the remote device
returned: when interfaces is configured
type: dict
# network resources
ansible_net_gather_network_resources:
description: The list of fact resource subsets collected from the device
returned: always
type: list
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.iosxr.iosxr import iosxr_argument_spec
from ansible.module_utils.network.iosxr.argspec.facts.facts import FactsArgs
from ansible.module_utils.network.iosxr.facts.facts import Facts
def main():
"""
Main entry point for module execution
:returns: ansible_facts
"""
argument_spec = FactsArgs.argument_spec
argument_spec.update(iosxr_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = []
if module.params["gather_subset"] == "!config":
warnings.append('default value for `gather_subset` will be changed to `min` from `!config` v2.11 onwards')
result = Facts(module).get_facts()
ansible_facts, additional_warnings = result
warnings.extend(additional_warnings)
module.exit_json(ansible_facts=ansible_facts, warnings=warnings)
if __name__ == '__main__':
main()<|fim▁end|>
|
version_added: "2.9"
|
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8; -*-
"""
Copyright (C) 2007-2013 Guake authors
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation; either version 2 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public
License along with this program; if not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301 USA
"""
import inspect
import time
# You can put calls to p() everywhere in this page to inspect timing
# g_start = time.time()
# def p():
# print(time.time() - g_start, __file__, inspect.currentframe().f_back.f_lineno)
import logging
import os
import signal
import subprocess
import sys
import uuid
from locale import gettext as _
from optparse import OptionParser
log = logging.getLogger(__name__)
from guake.globals import NAME
from guake.globals import bindtextdomain
from guake.support import print_support
from guake.utils import restore_preferences
from guake.utils import save_preferences
# When we are in the document generation on readthedocs, we do not have paths.py generated
try:
from guake.paths import LOCALE_DIR
bindtextdomain(NAME, LOCALE_DIR)
except: # pylint: disable=bare-except
pass
def main():
"""Parses the command line parameters and decide if dbus methods
should be called or not. If there is already a guake instance
running it will be used and a True value will be returned,
otherwise, false will be returned.
"""
# Force to xterm-256 colors for compatibility with some old command line programs
os.environ["TERM"] = "xterm-256color"
# Force use X11 backend underwayland
os.environ["GDK_BACKEND"] = "x11"
# do not use version keywords here, pbr might be slow to find the version of Guake module
parser = OptionParser()
parser.add_option(
'-V',
'--version',
dest='version',
action='store_true',
default=False,
help=_('Show Guake version number and exit')
)
parser.add_option(
'-v',
'--verbose',
dest='verbose',
action='store_true',
default=False,
help=_('Enable verbose logging')
)
parser.add_option(
'-f',
'--fullscreen',
dest='fullscreen',
action='store_true',
default=False,
help=_('Put Guake in fullscreen mode')
)
parser.add_option(
'-t',
'--toggle-visibility',
dest='show_hide',
action='store_true',
default=False,
help=_('Toggles the visibility of the terminal window')
)
parser.add_option(
'--show',
dest="show",
action='store_true',
default=False,
help=_('Shows Guake main window')
)
parser.add_option(
'--hide',
dest='hide',
action='store_true',
default=False,
help=_('Hides Guake main window')
)
parser.add_option(
'-p',
'--preferences',
dest='show_preferences',
action='store_true',
default=False,
help=_('Shows Guake preference window')
)
parser.add_option(
'-a',
'--about',
dest='show_about',
action='store_true',
default=False,
help=_('Shows Guake\'s about info')
)
parser.add_option(
'-n',
'--new-tab',
dest='new_tab',
action='store',
default='',
help=_('Add a new tab (with current directory set to NEW_TAB)')
)
parser.add_option(
'-s',
'--select-tab',
dest='select_tab',
action='store',
default='',
help=_('Select a tab (SELECT_TAB is the index of the tab)')
)
parser.add_option(
'-g',
'--selected-tab',
dest='selected_tab',
action='store_true',
default=False,
help=_('Return the selected tab index.')
)
parser.add_option(
'-l',
'--selected-tablabel',
dest='selected_tablabel',
action='store_true',
default=False,
help=_('Return the selected tab label.')
)
parser.add_option(
'--split-vertical',
dest='split_vertical',
action='store_true',
default=False,
help=_('Split the selected tab vertically.')
)
parser.add_option(
'--split-horizontal',
dest='split_horizontal',
action='store_true',
default=False,
help=_('Split the selected tab horizontally.')
)
parser.add_option(
'-e',
'--execute-command',
dest='command',
action='store',
default='',
help=_('Execute an arbitrary command in the selected tab.')
)
parser.add_option(
'-i',
'--tab-index',
dest='tab_index',
action='store',
default='0',
help=_('Specify the tab to rename. Default is 0. Can be used to select tab by UUID.')
)
parser.add_option(
'--bgcolor',
dest='bgcolor',
action='store',
default='',
help=_('Set the hexadecimal (#rrggbb) background color of '
'the selected tab.')
)
parser.add_option(
'--fgcolor',
dest='fgcolor',
action='store',
default='',
help=_('Set the hexadecimal (#rrggbb) foreground color of the '
'selected tab.')
)
parser.add_option(
'--change-palette',
dest='palette_name',
action='store',
default='',
help=_('Change Guake palette scheme')
)
parser.add_option(
'--rename-tab',
dest='rename_tab',
metavar='TITLE',
action='store',
default='',
help=_(
'Rename the specified tab by --tab-index. Reset to default if TITLE is '
'a single dash "-".'
)
)
parser.add_option(
'-r',
'--rename-current-tab',
dest='rename_current_tab',
metavar='TITLE',
action='store',
default='',
help=_('Rename the current tab. Reset to default if TITLE is a '
'single dash "-".')
)
parser.add_option(
'-q',
'--quit',
dest='quit',
action='store_true',
default=False,
help=_('Says to Guake go away =(')
)
parser.add_option(
'-u',
'--no-startup-script',
dest='execute_startup_script',
action='store_false',
default=True,
help=_('Do not execute the start up script')
)
parser.add_option(
'--save-preferences',
dest='save_preferences',
action='store',
default=None,
help=_('Save Guake preferences to this filename')
)
parser.add_option(
'--restore-preferences',
dest='restore_preferences',
action='store',
default=None,
help=_('Restore Guake preferences from this file')
)
parser.add_option(
'--support',
dest='support',
action='store_true',
default=False,
help=_('Show support infomations')
)
# checking mandatory dependencies
missing_deps = False
try:
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('Gdk', '3.0')
except ValueError:
print("[ERROR] missing mandatory dependency: GtK 3.0")
missing_deps = True
try:
gi.require_version('Vte', '2.91') # vte-0.42
except ValueError:
print("[ERROR] missing mandatory dependency: Vte >= 0.42")
missing_deps = True
try:
gi.require_version('Keybinder', '3.0')<|fim▁hole|>
try:
import cairo
except ImportError:
print("[ERROR] missing mandatory dependency: cairo")
missing_deps = True
if missing_deps:
print(
"[ERROR] missing at least one system dependencies. "
"You need to install additional packages for Guake to run"
)
print(
"[ERROR] On Debian/Ubuntu you need to install the following libraries:\n"
" sudo apt-get install -y --no-install-recommends \\\n"
" gir1.2-keybinder-3.0 \\\n"
" gir1.2-notify-0.7 \\\n"
" gir1.2-vte-2.91 \\\n"
" gir1.2-wnck-3.0 \\\n"
" libkeybinder-3.0-0 \\\n"
" libutempter0 \\\n"
" python3 \\\n"
" python3-cairo \\\n"
" python3-dbus \\\n"
" python3-gi \\\n"
" python3-pbr \\\n"
" python3-pip"
)
sys.exit(1)
options = parser.parse_args()[0]
if options.version:
from guake import gtk_version
from guake import guake_version
from guake import vte_version
from guake import vte_runtime_version
print('Guake Terminal: {}'.format(guake_version()))
print('VTE: {}'.format(vte_version()))
print('VTE runtime: {}'.format(vte_runtime_version()))
print('Gtk: {}'.format(gtk_version()))
sys.exit(0)
if options.save_preferences and options.restore_preferences:
parser.error('options --save-preferences and --restore-preferences are mutually exclusive')
if options.save_preferences:
save_preferences(options.save_preferences)
sys.exit(0)
elif options.restore_preferences:
restore_preferences(options.restore_preferences)
sys.exit(0)
if options.support:
print_support()
sys.exit(0)
import dbus
from guake.dbusiface import DBUS_NAME
from guake.dbusiface import DBUS_PATH
from guake.dbusiface import DbusManager
from guake.guake_logging import setupLogging
instance = None
# Trying to get an already running instance of guake. If it is not
# possible, lets create a new instance. This function will return
# a boolean value depending on this decision.
try:
bus = dbus.SessionBus()
remote_object = bus.get_object(DBUS_NAME, DBUS_PATH)
already_running = True
except dbus.DBusException:
# can now configure the logging
setupLogging(options.verbose)
# COLORTERM is an environment variable set by some terminal emulators such as
# gnome-terminal.
# To avoid confusing applications running inside Guake, clean up COLORTERM at startup.
if "COLORTERM" in os.environ:
del os.environ['COLORTERM']
log.info("Guake not running, starting it")
# late loading of the Guake object, to speed up dbus comm
from guake.guake_app import Guake
instance = Guake()
remote_object = DbusManager(instance)
already_running = False
only_show_hide = True
if options.fullscreen:
remote_object.fullscreen()
if options.show:
remote_object.show_from_remote()
if options.hide:
remote_object.hide_from_remote()
if options.show_preferences:
remote_object.show_prefs()
only_show_hide = options.show
if options.new_tab:
remote_object.add_tab(options.new_tab)
only_show_hide = options.show
if options.select_tab:
selected = int(options.select_tab)
tab_count = int(remote_object.get_tab_count())
if 0 <= selected < tab_count:
remote_object.select_tab(selected)
else:
sys.stderr.write('invalid index: %d\n' % selected)
only_show_hide = options.show
if options.selected_tab:
selected = remote_object.get_selected_tab()
sys.stdout.write('%d\n' % selected)
only_show_hide = options.show
if options.selected_tablabel:
selectedlabel = remote_object.get_selected_tablabel()
sys.stdout.write('%s\n' % selectedlabel)
only_show_hide = options.show
if options.split_vertical:
remote_object.v_split_current_terminal()
only_show_hide = options.show
if options.split_horizontal:
remote_object.h_split_current_terminal()
only_show_hide = options.show
if options.command:
remote_object.execute_command(options.command)
only_show_hide = options.show
if options.tab_index and options.rename_tab:
try:
remote_object.rename_tab_uuid(str(uuid.UUID(options.tab_index)), options.rename_tab)
except ValueError:
remote_object.rename_tab(int(options.tab_index), options.rename_tab)
only_show_hide = options.show
if options.bgcolor:
remote_object.set_bgcolor(options.bgcolor)
only_show_hide = options.show
if options.fgcolor:
remote_object.set_fgcolor(options.fgcolor)
only_show_hide = options.show
if options.palette_name:
remote_object.change_palette_name(options.palette_name)
only_show_hide = options.show
if options.rename_current_tab:
remote_object.rename_current_tab(options.rename_current_tab)
only_show_hide = options.show
if options.show_about:
remote_object.show_about()
only_show_hide = options.show
if options.quit:
try:
remote_object.quit()
return True
except dbus.DBusException:
return True
if already_running and only_show_hide:
# here we know that guake was called without any parameter and
# it is already running, so, lets toggle its visibility.
remote_object.show_hide()
if options.execute_startup_script:
if not already_running:
startup_script = instance.settings.general.get_string("startup-script")
if startup_script:
log.info("Calling startup script: %s", startup_script)
pid = subprocess.Popen([startup_script],
shell=True,
stdin=None,
stdout=None,
stderr=None,
close_fds=True)
log.info("Startup script started with pid: %s", pid)
# Please ensure this is the last line !!!!
else:
log.info("--no-startup-script argument defined, so don't execute the startup script")
if already_running:
log.info("Guake is already running")
return already_running
def exec_main():
if not main():
log.debug("Running main gtk loop")
signal.signal(signal.SIGINT, signal.SIG_DFL)
# Load gi pretty late, to speed up as much as possible the parsing of the option for DBus
# comm through command line
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
Gtk.main()
if __name__ == '__main__':
exec_main()<|fim▁end|>
|
except ValueError:
print("[ERROR] missing mandatory dependency: Keybinder 3")
missing_deps = True
|
<|file_name|>tests_views.py<|end_file_name|><|fim▁begin|>from gevent import monkey
monkey.patch_all()
import pytest
import gevent
import marshmallow
from channelstream.server_state import get_state
from channelstream.channel import Channel
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestConnectViews(object):
def test_bad_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect
dummy_request.json_body = {}
try:<|fim▁hole|> except marshmallow.exceptions.ValidationError as exc:
assert exc.messages == {"username": ["Missing data for required field."]}
def test_good_json(self, dummy_request, test_uuids):
server_state = get_state()
from channelstream.wsgi_views.server import connect
dummy_request.json_body = {
"username": "username",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
assert server_state.channels == {}
result = connect(dummy_request)
assert len(server_state.channels.keys()) == 2
assert "username" in server_state.users
assert test_uuids[1] in server_state.connections
assert result["channels"] == ["a", "aB"]
assert result["state"] == {"bar": "baz", "key": "foo"}
assert result["conn_id"] == test_uuids[1]
channels_info = result["channels_info"]["channels"]
assert len(channels_info.keys()) == 2
assert channels_info["a"]["total_users"] == 1
assert channels_info["a"]["total_connections"] == 1
assert channels_info["a"]["users"] == ["username"]
assert channels_info["a"]["history"] == []
assert result["channels_info"]["users"] == [
{"state": {"bar": "baz", "key": "foo"}, "user": "username"}
]
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestUserStateViews(object):
def test_bad_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
dummy_request.json_body = {}
with pytest.raises(marshmallow.exceptions.ValidationError) as excinfo:
user_state(dummy_request)
assert excinfo.value.messages == {"user": ["Missing data for required field."]}
def _connect_user(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
def test_not_found_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
dummy_request.json_body = {"user": "blabla"}
with pytest.raises(marshmallow.exceptions.ValidationError) as excinfo:
user_state(dummy_request)
assert excinfo.value.messages == {"user": ["Unknown user"]}
def test_good_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
self._connect_user(dummy_request, test_uuids)
dummy_request.json_body = {
"user": "test",
"user_state": {"bar": 2, "private": "im_private"},
"state_public_keys": ["avatar", "bar"],
}
result = user_state(dummy_request)
sorted_keys = sorted(["bar", "key", "private"])
assert sorted_keys == sorted(result["user_state"].keys())
assert result["user_state"]["private"] == "im_private"
sorted_changed = sorted([x["key"] for x in result["changed_state"]])
assert result["public_keys"] == ["avatar", "bar"]
assert sorted_changed == sorted(["bar", "private"])
def test_good_json_no_public_keys(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import user_state
self._connect_user(dummy_request, test_uuids)
dummy_request.json_body = {
"user": "test",
"user_state": {"bar": 2, "private": "im_private"},
}
result = user_state(dummy_request)
sorted_keys = sorted(["bar", "key", "private"])
assert sorted_keys == sorted(result["user_state"].keys())
assert result["user_state"]["private"] == "im_private"
assert result["public_keys"] == ["bar"]
sorted_changed = sorted([x["key"] for x in result["changed_state"]])
assert sorted_changed == sorted(["bar", "private"])
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestSubscribeViews(object):
def test_bad_json(self, dummy_request):
from channelstream.wsgi_views.server import subscribe
dummy_request.json_body = {}
try:
subscribe(dummy_request)
except marshmallow.exceptions.ValidationError as exc:
assert list(sorted(exc.messages.keys())) == ["channels", "conn_id"]
def test_good_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, subscribe
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {
"conn_id": str(test_uuids[1]),
"channels": ["b"],
"channel_configs": {
"a": {"notify_presence": True},
"b": {"notify_presence": True},
},
}
result = subscribe(dummy_request)
assert sorted(result["channels"]) == sorted(["a", "aB", "b"])
assert result["channels_info"]["users"] == [
{"state": {"bar": "baz", "key": "foo"}, "user": "test"}
]
assert "a" in result["channels_info"]["channels"]
assert "b" in result["channels_info"]["channels"]
assert result["channels_info"]["channels"]["a"]["total_connections"] == 1
assert result["channels_info"]["channels"]["a"]["total_users"] == 1
assert result["channels_info"]["channels"]["a"]["history"] == []
assert result["channels_info"]["channels"]["a"]["users"] == ["test"]
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestUnsubscribeViews(object):
def test_bad_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import unsubscribe
dummy_request.json_body = {}
try:
unsubscribe(dummy_request)
except marshmallow.exceptions.ValidationError as exc:
assert list(sorted(exc.messages.keys())) == ["channels", "conn_id"]
def test_good_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, unsubscribe
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB", "aC"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {
"conn_id": str(test_uuids[1]),
"channels": ["aC", "a"],
}
result = unsubscribe(dummy_request)
assert sorted(result["channels"]) == sorted(["aB"])
def test_non_existing_channel(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, unsubscribe
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB", "aC"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {"conn_id": str(test_uuids[1]), "channels": ["d"]}
result = unsubscribe(dummy_request)
assert sorted(result["channels"]) == sorted(["a", "aB", "aC"])
def test_no_channels(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, unsubscribe
dummy_request.json_body = {
"username": "test",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {"conn_id": str(test_uuids[1]), "channels": ["a"]}
result = unsubscribe(dummy_request)
assert len(result["channels"]) == 0
assert result["channels_info"]["users"] == []
assert result["channels_info"]["channels"] == {}
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestInfoView(object):
def test_empty_json(self, dummy_request):
from channelstream.wsgi_views.server import info
dummy_request.json_body = {}
result = info(dummy_request)
assert result["channels"] == {}
assert result["users"] == []
def test_subscribed_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, info
dummy_request.json_body = {
"username": "test1",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz"},
"state_public_keys": ["bar"],
"channels": ["a", "aB"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {
"username": "test2",
"conn_id": test_uuids[2],
"fresh_user_state": {"key": "foo1"},
"user_state": {"bar": "baz1"},
"state_public_keys": ["key"],
"channels": ["a", "c"],
"channel_configs": {"c": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = {}
result = info(dummy_request)
assert sorted(("a", "aB", "c")) == sorted(result["channels"].keys())
assert result["users"]
comp_a = sorted(result["channels"]["a"]["users"])
comp_b = sorted(["test1", "test2"])
assert comp_a == comp_b
assert result["channels"]["a"]["total_users"] == 2
assert result["channels"]["a"]["total_connections"] == 2
assert result["channels"]["c"]["users"] == ["test2"]
assert result["channels"]["c"]["total_users"] == 1
assert result["channels"]["c"]["total_connections"] == 1
assert result["channels"]["aB"]["users"] == ["test1"]
comp_a = sorted(result["users"], key=lambda x: x["user"])
comp_b = sorted(
[
{"state": {"bar": "baz", "key": "foo"}, "user": "test1"},
{"state": {"bar": "baz1", "key": "foo1"}, "user": "test2"},
],
key=lambda x: x["user"],
)
assert comp_a == comp_b
dummy_request.body = "NOTEMPTY"
dummy_request.json_body = {"info": {"channels": ["a"]}}
result = info(dummy_request)
assert "a" in result["channels"]
assert "aB" not in result["channels"]
def test_detailed_json(self, dummy_request, test_uuids):
from channelstream.wsgi_views.server import connect, info, message
dummy_request.json_body = {
"username": "test1",
"conn_id": str(test_uuids[1]),
"fresh_user_state": {"key": "foo"},
"user_state": {"bar": "baz", "private": "p1"},
"state_public_keys": ["bar"],
"channels": ["a", "aB", "c", "D"],
"channel_configs": {"a": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
dummy_request.json_body = [
{
"type": "message",
"user": "test1",
"channel": "a",
"message": {"text": "test"},
}
]
message(dummy_request)
gevent.sleep(0)
dummy_request.body = "value"
dummy_request.json_body = {
"info": {
"exclude_channels": ["c"],
"include_history": False,
"include_users": True,
"return_public_state": True,
"include_connections": True,
}
}
result = info(dummy_request)
assert sorted(result["channels"].keys()) == sorted(["a", "aB", "D"])
assert "private" not in result["users"][0]["state"]
assert len(result["channels"]["a"]["history"]) == 0
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestMessageViews(object):
def test_empty_json(self, dummy_request):
from channelstream.wsgi_views.server import message
server_state = get_state()
dummy_request.json_body = {}
assert server_state.stats["total_unique_messages"] == 0
with pytest.raises(marshmallow.exceptions.ValidationError) as excinfo:
message(dummy_request)
assert excinfo.value.messages == {"_schema": ["Invalid input type."]}
def test_good_json_no_channel(self, dummy_request):
from channelstream.wsgi_views.server import message
server_state = get_state()
channel = Channel("test")
channel.store_history = True
server_state.channels[channel.name] = channel
msg_payload = {
"type": "message",
"user": "system",
"channel": "test",
"message": {"text": "test"},
}
dummy_request.json_body = [msg_payload]
assert server_state.stats["total_unique_messages"] == 0
assert len(channel.history) == 0
message(dummy_request)
# change context
gevent.sleep(0)
assert server_state.stats["total_unique_messages"] == 1
assert len(channel.history) == 1
msg = channel.history[0]
assert msg["uuid"] is not None
assert msg["user"] == msg_payload["user"]
assert msg["message"] == msg_payload["message"]
assert msg["type"] == msg_payload["type"]
assert msg["channel"] == msg_payload["channel"]
assert msg["timestamp"] is not None
def test_catchup_messages(self, dummy_request):
from channelstream.wsgi_views.server import message, connect
server_state = get_state()
dummy_request.json_body = {
"username": "test1",
"channels": ["test"],
"channel_configs": {"test": {"store_history": True, "history_size": 2}},
}
connect(dummy_request)
msg_payload = {
"type": "message",
"user": "system",
"channel": "test",
"message": {"text": "test3"},
}
dummy_request.json_body = [msg_payload]
message(dummy_request)
# add pm message to non-existing user
wrong_user_msg_payload = {
"type": "message",
"user": "system",
"channel": "test",
"message": {"text": "test1"},
"pm_users": ["test2"],
}
msg_payload = {
"type": "message",
"user": "system",
"channel": "test",
"message": {"text": "test2"},
"pm_users": ["test1"],
}
dummy_request.json_body = [wrong_user_msg_payload, msg_payload]
message(dummy_request)
# change context
gevent.sleep(0)
connection = server_state.users["test1"].connections[0]
messages = connection.get_catchup_messages()
assert len(messages) == 2
assert messages[0]["timestamp"] > connection.last_active
assert messages[0]["message"]["text"] == "test3"
assert messages[1]["timestamp"] > connection.last_active
assert messages[1]["message"]["text"] == "test2"
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestMessageEditViews(object):
def test_empty_json(self, dummy_request):
from channelstream.wsgi_views.server import message
dummy_request.json_body = {}
with pytest.raises(marshmallow.exceptions.ValidationError) as excinfo:
message(dummy_request)
assert excinfo.value.messages == {"_schema": ["Invalid input type."]}
def test_good_json_no_channel(self, dummy_request):
from channelstream.wsgi_views.server import message, messages_patch
server_state = get_state()
channel = Channel("test")
channel.store_history = True
server_state.channels[channel.name] = channel
msg_payload = {"user": "system", "channel": "test", "message": {"text": "test"}}
dummy_request.json_body = [msg_payload]
message(dummy_request)
# change context
gevent.sleep(0)
msg = channel.history[0]
assert msg["message"] == msg_payload["message"]
edit_payload = {
"uuid": msg["uuid"],
"user": "edited_system",
"channel": "test",
"timestamp": "2010-01-01T01:01",
"edited": "2010-01-01T01:02",
"message": {"text": "edited_message"},
}
dummy_request.json_body = [edit_payload]
response = messages_patch(dummy_request)[0]
gevent.sleep(0)
assert msg["user"] == response["user"]
assert msg["message"] == response["message"]
assert msg["edited"] == response["edited"]
assert msg["timestamp"] == response["timestamp"]
frame = channel.frames[0][1]
assert id(frame) == id(msg)
assert frame["user"] == response["user"]
assert frame["message"] == response["message"]
assert frame["edited"] == response["edited"]
assert frame["timestamp"] == response["timestamp"]
class TestMessageDeleteViews(object):
def test_empty_json(self, dummy_request):
from channelstream.wsgi_views.server import messages_delete
dummy_request.json_body = []
result = messages_delete(dummy_request)
assert result == []
def test_good_json_no_channel(self, dummy_request):
from channelstream.wsgi_views.server import message, messages_delete
server_state = get_state()
channel = Channel("test")
channel.store_history = True
server_state.channels[channel.name] = channel
msg_payload = {"user": "system", "channel": "test", "message": {"text": "test"}}
dummy_request.json_body = [msg_payload]
message(dummy_request)
# change context
gevent.sleep(0)
msg = channel.history[0]
assert msg["message"] == msg_payload["message"]
dummy_request.json_body = [{"uuid": str(msg["uuid"]), "channel": "test"}]
response = messages_delete(dummy_request)
gevent.sleep(0)
assert response[0]["uuid"] == msg["uuid"]
assert len(channel.history) == 0
assert len(channel.frames) == 1
assert channel.frames[0][1]["type"] == "message:delete"
@pytest.mark.usefixtures("cleanup_globals", "pyramid_config")
class TestChannelConfigView(object):
def test_empty_json(self, dummy_request):
from channelstream.wsgi_views.server import channel_config
dummy_request.json_body = {}
result = channel_config(dummy_request)
assert result["channels"] == {}
assert result["users"] == []
def test_valid_json(self, dummy_request):
from channelstream.wsgi_views.server import channel_config
dummy_request.json_body = {
"chanx1": {
"notify_presence": True,
"store_history": True,
"history_size": 3,
"broadcast_presence_with_user_lists": True,
"notify_state": True,
"store_frames": False,
}
}
result = channel_config(dummy_request)
channel_settings = result["channels"]["chanx1"]["settings"]
assert channel_settings["notify_presence"] is True
assert channel_settings["store_history"] is True
assert channel_settings["history_size"] == 3
assert channel_settings["broadcast_presence_with_user_lists"] is True
assert channel_settings["notify_state"] is True
assert channel_settings["store_frames"] is False<|fim▁end|>
|
connect(dummy_request)
|
<|file_name|>TestNativeLog1p.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Don't edit this file! It is auto-generated by frameworks/rs/api/generate.sh.
#pragma version(1)
#pragma rs java_package_name(android.renderscript.cts)
float __attribute__((kernel)) testNativeLog1pFloatFloat(float inV) {
return native_log1p(inV);
}
float2 __attribute__((kernel)) testNativeLog1pFloat2Float2(float2 inV) {
return native_log1p(inV);
}
float3 __attribute__((kernel)) testNativeLog1pFloat3Float3(float3 inV) {
return native_log1p(inV);<|fim▁hole|>}
float4 __attribute__((kernel)) testNativeLog1pFloat4Float4(float4 inV) {
return native_log1p(inV);
}
half __attribute__((kernel)) testNativeLog1pHalfHalf(half inV) {
return native_log1p(inV);
}
half2 __attribute__((kernel)) testNativeLog1pHalf2Half2(half2 inV) {
return native_log1p(inV);
}
half3 __attribute__((kernel)) testNativeLog1pHalf3Half3(half3 inV) {
return native_log1p(inV);
}
half4 __attribute__((kernel)) testNativeLog1pHalf4Half4(half4 inV) {
return native_log1p(inV);
}<|fim▁end|>
| |
<|file_name|>AFTResponse.java<|end_file_name|><|fim▁begin|>package com.ricex.aft.android.request;
import org.springframework.http.HttpStatus;
public class AFTResponse<T> {
/** The successful response from the server */
private final T response;
/** The error response from the server */
private final String error;
<|fim▁hole|> /** The HttpStatus code of the response */
private final HttpStatus statusCode;
/** Whether or not this response is valid */
private final boolean valid;
/** Creates a new instance of AFTResponse, representing a successful response
*
* @param response The parsed response received from the server
* @param statusCode The status code of the response
*/
public AFTResponse(T response, HttpStatus statusCode) {
this.response = response;
this.statusCode = statusCode;
this.error = null;
valid = true;
}
/** Creates a new instance of AFTResponse, representing an invalid (error) response
*
* @param response The response (if any) received from the server
* @param error The error message received from the server
* @param statusCode The status code of the response
*/
public AFTResponse(T response, String error, HttpStatus statusCode) {
this.response = response;
this.error = error;
this.statusCode = statusCode;
valid = false;
}
/** Whether or not this response is valid
*
* @return True if the server returned Http OK (200), otherwise false
*/
public boolean isValid() {
return valid;
}
/** The response from the server if valid
*
* @return The response from the server
*/
public T getResponse() {
return response;
}
/** Returns the error received by the server, if invalid response
*
* @return The error the server returned
*/
public String getError() {
return error;
}
/** Return the status code received from the server
*
* @return the status code received from the server
*/
public HttpStatus getStatusCode() {
return statusCode;
}
}<|fim▁end|>
| |
<|file_name|>_0382LinkedListRandomNode.java<|end_file_name|><|fim▁begin|>import java.util.Random;
public class _0382LinkedListRandomNode {
ListNode head;
Random r;
public _0382LinkedListRandomNode(ListNode head) {
this.head = head;
r = new Random();
}
public int getRandom() {
ListNode thisN = head;
ListNode result = null;
for (int n = 1; thisN != null; n++) {
if (r.nextInt(n) == 0) result = thisN;
thisN = thisN.next;
}
return result.val;
}
<|fim▁hole|> int val;
ListNode next;
ListNode(int x) { val = x; }
}
}<|fim▁end|>
|
public class ListNode {
|
<|file_name|>listing4.01.js<|end_file_name|><|fim▁begin|>/* Get Programming with JavaScript
* Listing 4.01
* Displaying an object's properties on the console
*/
var movie1;
movie1 = {
title: "Inside Out",
actors: "Amy Poehler, Bill Hader",
directors: "Pete Doctor, Ronaldo Del Carmen"
};
console.log("Movie information for " + movie1.title);
console.log("------------------------------");
console.log("Actors: " + movie1.actors);
console.log("Directors: " + movie1.directors);
console.log("------------------------------");
/* Further Adventures
*
* 1) Add a second movie and display the same info for it.
*<|fim▁hole|> *
*/<|fim▁end|>
|
* 2) Create an object to represent a blog post.
*
* 3) Write code to display info about the blog post.
|
<|file_name|>defaults.js<|end_file_name|><|fim▁begin|>/*
* measured-elasticsearch
*
* Copyright (c) 2015 Maximilian Antoni <[email protected]>
*
* @license MIT
*/
'use strict';
exports.index = 'metrics-1970.01';
exports.timestamp = '1970-01-01T00:00:00.000Z';
function header(type) {
return {
index : { _type : type}
};
}
exports.headerCounter = header('counter');<|fim▁hole|>exports.headerMeter = header('meter');
exports.headerHistogram = header('histogram');
exports.headerGauge = header('gauge');<|fim▁end|>
|
exports.headerTimer = header('timer');
|
<|file_name|>flags.go<|end_file_name|><|fim▁begin|>package agent
import (
"flag"
)
func (this *Agent) BindFlags() {
flag.BoolVar(&this.selfRegister, "self_register", true, "Registers self with the registry.")
flag.IntVar(&this.ListenPort, "port", 25657, "Listening port for agent")<|fim▁hole|> flag.StringVar(&this.UiDocRoot, "ui_docroot", "", "UI DocRoot")
}<|fim▁end|>
|
flag.StringVar(&this.StatusPubsubTopic, "status_topic", "", "Status pubsub topic")
flag.BoolVar(&this.EnableUI, "enable_ui", false, "Enables UI")
flag.IntVar(&this.DockerUIPort, "dockerui_port", 25658, "Listening port for dockerui")
|
<|file_name|>JTitlePanel.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>com.toedter.components.JTitlePanel
com.toedter.components.JTitlePanel$GradientPanel<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""The function module of dolfin"""<|fim▁hole|>from dolfin.functions import function
from dolfin.functions import constant
from dolfin.functions import expression
from dolfin.functions import specialfunctions
from .multimeshfunction import *
from .functionspace import *
from .function import *
from .constant import *
from .expression import *
from .specialfunctions import *
# NOTE: The automatic documentation system in DOLFIN requires to _not_ define
# NOTE: classes or functions within this file. Use separate modules for that
# NOTE: purpose.
__all__ = functionspace.__all__ + function.__all__ + constant.__all__ + \
expression.__all__ + specialfunctions.__all__ + \
multimeshfunction.__all__<|fim▁end|>
|
from dolfin.functions import multimeshfunction
from dolfin.functions import functionspace
|
<|file_name|>test_session.py<|end_file_name|><|fim▁begin|>"""test building messages with streamsession"""
#-------------------------------------------------------------------------------
# Copyright (C) 2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import os
import uuid
from datetime import datetime
import zmq
from zmq.tests import BaseZMQTestCase
from zmq.eventloop.zmqstream import ZMQStream
from IPython.kernel.zmq import session as ss
from IPython.testing.decorators import skipif, module_not_available
from IPython.utils.py3compat import string_types
from IPython.utils import jsonutil
def _bad_packer(obj):
raise TypeError("I don't work")
def _bad_unpacker(bytes):
raise TypeError("I don't work either")
class SessionTestCase(BaseZMQTestCase):
def setUp(self):
BaseZMQTestCase.setUp(self)
self.session = ss.Session()
class TestSession(SessionTestCase):
def test_msg(self):
"""message format"""
msg = self.session.msg('execute')
thekeys = set('header parent_header metadata content msg_type msg_id'.split())
s = set(msg.keys())
self.assertEqual(s, thekeys)
self.assertTrue(isinstance(msg['content'],dict))
self.assertTrue(isinstance(msg['metadata'],dict))
self.assertTrue(isinstance(msg['header'],dict))
self.assertTrue(isinstance(msg['parent_header'],dict))
self.assertTrue(isinstance(msg['msg_id'],str))
self.assertTrue(isinstance(msg['msg_type'],str))
self.assertEqual(msg['header']['msg_type'], 'execute')
self.assertEqual(msg['msg_type'], 'execute')
def test_serialize(self):
msg = self.session.msg('execute', content=dict(a=10, b=1.1))
msg_list = self.session.serialize(msg, ident=b'foo')
ident, msg_list = self.session.feed_identities(msg_list)
new_msg = self.session.unserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
# ensure floats don't come out as Decimal:
self.assertEqual(type(new_msg['content']['b']),type(new_msg['content']['b']))
def test_send(self):
ctx = zmq.Context.instance()
A = ctx.socket(zmq.PAIR)
B = ctx.socket(zmq.PAIR)
A.bind("inproc://test")
B.connect("inproc://test")
msg = self.session.msg('execute', content=dict(a=10))
self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
ident, msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.unserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['buffers'],[b'bar'])
content = msg['content']
header = msg['header']
parent = msg['parent_header']
metadata = msg['metadata']
msg_type = header['msg_type']
self.session.send(A, None, content=content, parent=parent,
header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
ident, msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.unserialize(msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['buffers'],[b'bar'])
self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
ident, new_msg = self.session.recv(B)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_id'],msg['msg_id'])
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])<|fim▁hole|>
A.close()
B.close()
ctx.term()
def test_args(self):
"""initialization arguments for Session"""
s = self.session
self.assertTrue(s.pack is ss.default_packer)
self.assertTrue(s.unpack is ss.default_unpacker)
self.assertEqual(s.username, os.environ.get('USER', u'username'))
s = ss.Session()
self.assertEqual(s.username, os.environ.get('USER', u'username'))
self.assertRaises(TypeError, ss.Session, pack='hi')
self.assertRaises(TypeError, ss.Session, unpack='hi')
u = str(uuid.uuid4())
s = ss.Session(username=u'carrot', session=u)
self.assertEqual(s.session, u)
self.assertEqual(s.username, u'carrot')
def test_tracking(self):
"""test tracking messages"""
a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
s = self.session
s.copy_threshold = 1
stream = ZMQStream(a)
msg = s.send(a, 'hello', track=False)
self.assertTrue(msg['tracker'] is ss.DONE)
msg = s.send(a, 'hello', track=True)
self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
M = zmq.Message(b'hi there', track=True)
msg = s.send(a, 'hello', buffers=[M], track=True)
t = msg['tracker']
self.assertTrue(isinstance(t, zmq.MessageTracker))
self.assertRaises(zmq.NotDone, t.wait, .1)
del M
t.wait(1) # this will raise
def test_unique_msg_ids(self):
"""test that messages receive unique ids"""
ids = set()
for i in range(2**12):
h = self.session.msg_header('test')
msg_id = h['msg_id']
self.assertTrue(msg_id not in ids)
ids.add(msg_id)
def test_feed_identities(self):
"""scrub the front for zmq IDENTITIES"""
theids = "engine client other".split()
content = dict(code='whoda',stuff=object())
themsg = self.session.msg('execute',content=content)
pmsg = theids
def test_session_id(self):
session = ss.Session()
# get bs before us
bs = session.bsession
us = session.session
self.assertEqual(us.encode('ascii'), bs)
session = ss.Session()
# get us before bs
us = session.session
bs = session.bsession
self.assertEqual(us.encode('ascii'), bs)
# change propagates:
session.session = 'something else'
bs = session.bsession
us = session.session
self.assertEqual(us.encode('ascii'), bs)
session = ss.Session(session='stuff')
# get us before bs
self.assertEqual(session.bsession, session.session.encode('ascii'))
self.assertEqual(b'stuff', session.bsession)
def test_zero_digest_history(self):
session = ss.Session(digest_history_size=0)
for i in range(11):
session._add_digest(uuid.uuid4().bytes)
self.assertEqual(len(session.digest_history), 0)
def test_cull_digest_history(self):
session = ss.Session(digest_history_size=100)
for i in range(100):
session._add_digest(uuid.uuid4().bytes)
self.assertTrue(len(session.digest_history) == 100)
session._add_digest(uuid.uuid4().bytes)
self.assertTrue(len(session.digest_history) == 91)
for i in range(9):
session._add_digest(uuid.uuid4().bytes)
self.assertTrue(len(session.digest_history) == 100)
session._add_digest(uuid.uuid4().bytes)
self.assertTrue(len(session.digest_history) == 91)
def test_bad_pack(self):
try:
session = ss.Session(pack=_bad_packer)
except ValueError as e:
self.assertIn("could not serialize", str(e))
self.assertIn("don't work", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_unpack(self):
try:
session = ss.Session(unpack=_bad_unpacker)
except ValueError as e:
self.assertIn("could not handle output", str(e))
self.assertIn("don't work either", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_packer(self):
try:
session = ss.Session(packer=__name__ + '._bad_packer')
except ValueError as e:
self.assertIn("could not serialize", str(e))
self.assertIn("don't work", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_unpacker(self):
try:
session = ss.Session(unpacker=__name__ + '._bad_unpacker')
except ValueError as e:
self.assertIn("could not handle output", str(e))
self.assertIn("don't work either", str(e))
else:
self.fail("Should have raised ValueError")
def test_bad_roundtrip(self):
with self.assertRaises(ValueError):
session = ss.Session(unpack=lambda b: 5)
def _datetime_test(self, session):
content = dict(t=datetime.now())
metadata = dict(t=datetime.now())
p = session.msg('msg')
msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
smsg = session.serialize(msg)
msg2 = session.unserialize(session.feed_identities(smsg)[1])
assert isinstance(msg2['header']['date'], datetime)
self.assertEqual(msg['header'], msg2['header'])
self.assertEqual(msg['parent_header'], msg2['parent_header'])
self.assertEqual(msg['parent_header'], msg2['parent_header'])
assert isinstance(msg['content']['t'], datetime)
assert isinstance(msg['metadata']['t'], datetime)
assert isinstance(msg2['content']['t'], string_types)
assert isinstance(msg2['metadata']['t'], string_types)
self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
def test_datetimes(self):
self._datetime_test(self.session)
def test_datetimes_pickle(self):
session = ss.Session(packer='pickle')
self._datetime_test(session)
@skipif(module_not_available('msgpack'))
def test_datetimes_msgpack(self):
session = ss.Session(packer='msgpack.packb', unpacker='msgpack.unpackb')
self._datetime_test(session)
def test_send_raw(self):
ctx = zmq.Context.instance()
A = ctx.socket(zmq.PAIR)
B = ctx.socket(zmq.PAIR)
A.bind("inproc://test")
B.connect("inproc://test")
msg = self.session.msg('execute', content=dict(a=10))
msg_list = [self.session.pack(msg[part]) for part in
['header', 'parent_header', 'metadata', 'content']]
self.session.send_raw(A, msg_list, ident=b'foo')
ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
new_msg = self.session.unserialize(new_msg_list)
self.assertEqual(ident[0], b'foo')
self.assertEqual(new_msg['msg_type'],msg['msg_type'])
self.assertEqual(new_msg['header'],msg['header'])
self.assertEqual(new_msg['parent_header'],msg['parent_header'])
self.assertEqual(new_msg['content'],msg['content'])
self.assertEqual(new_msg['metadata'],msg['metadata'])
A.close()
B.close()
ctx.term()<|fim▁end|>
|
self.assertEqual(new_msg['buffers'],[b'bar'])
|
<|file_name|>WeatherForecasts.ts<|end_file_name|><|fim▁begin|>import { fetch, addTask } from 'domain-task';
import { Action, Reducer, ActionCreator } from 'redux';
import { AppThunkAction } from './';
// -----------------
// STATE - This defines the type of data maintained in the Redux store.
export interface WeatherForecastsState {
isLoading: boolean;
startDateIndex: number;
forecasts: WeatherForecast[];
}
export interface WeatherForecast {
dateFormatted: string;
temperatureC: number;
temperatureF: number;
summary: string;
}
// -----------------
// ACTIONS - These are serializable (hence replayable) descriptions of state transitions.
// They do not themselves have any side-effects; they just describe something that is going to happen.
interface RequestWeatherForecastsAction {
type: 'REQUEST_WEATHER_FORECASTS',
startDateIndex: number;
}
interface ReceiveWeatherForecastsAction {
type: 'RECEIVE_WEATHER_FORECASTS',
startDateIndex: number;
forecasts: WeatherForecast[]
}
// Declare a 'discriminated union' type. This guarantees that all references to 'type' properties contain one of the
// declared type strings (and not any other arbitrary string).
type KnownAction = RequestWeatherForecastsAction | ReceiveWeatherForecastsAction;
// ----------------
// ACTION CREATORS - These are functions exposed to UI components that will trigger a state transition.
// They don't directly mutate state, but they can have external side-effects (such as loading data).
export const actionCreators = {
requestWeatherForecasts: (startDateIndex: number): AppThunkAction<KnownAction> => (dispatch, getState) => {
// Only load data if it's something we don't already have (and are not already loading)
if (startDateIndex !== getState().weatherForecasts.startDateIndex) {
let fetchTask = fetch(`/api/SampleData/WeatherForecasts?startDateIndex=${ startDateIndex }`)
.then(response => response.json() as Promise<WeatherForecast[]>)
.then(data => {
dispatch({ type: 'RECEIVE_WEATHER_FORECASTS', startDateIndex: startDateIndex, forecasts: data });
});
addTask(fetchTask); // Ensure server-side prerendering waits for this to complete
dispatch({ type: 'REQUEST_WEATHER_FORECASTS', startDateIndex: startDateIndex });
}
}
};
// ----------------
// REDUCER - For a given state and action, returns the new state. To support time travel, this must not mutate the old state.
const unloadedState: WeatherForecastsState = { startDateIndex: null, forecasts: [], isLoading: false };
export const reducer: Reducer<WeatherForecastsState> = (state: WeatherForecastsState, action: KnownAction) => {
switch (action.type) {
case 'REQUEST_WEATHER_FORECASTS':
return {
startDateIndex: action.startDateIndex,
forecasts: state.forecasts,
isLoading: true
};
case 'RECEIVE_WEATHER_FORECASTS':
// Only accept the incoming data if it matches the most recent request. This ensures we correctly
// handle out-of-order responses.
if (action.startDateIndex === state.startDateIndex) {
return {
startDateIndex: action.startDateIndex,<|fim▁hole|> }
break;
default:
// The following line guarantees that every action in the KnownAction union has been covered by a case above
const exhaustiveCheck: never = action;
}
return state || unloadedState;
};<|fim▁end|>
|
forecasts: action.forecasts,
isLoading: false
};
|
<|file_name|>StringChains.java<|end_file_name|><|fim▁begin|>package company;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
public class StringChains {
private int longestChain(String[] words) {
if (words == null || words.length == 0)
return 0;
int chainLength = 0;
Arrays.sort(words, new Comparator<String>() {
public int compare(String first, String second) {
return first.length() - second.length();
}
});
Map<String, Integer> wordMap = new HashMap<>();
for (int i = 0; i < words.length; i++) {
if (wordMap.containsKey(words[i]))
continue;
wordMap.put(words[i], 1);
for (int j = 0; j < words[i].length(); j++) {
String temp = words[i].substring(0, j) + words[i].substring(j + 1);
if (wordMap.containsKey(temp) && wordMap.get(temp) + 1 > wordMap.get(words[i])) {
wordMap.put(words[i], wordMap.get(temp) + 1);
}
}
if (wordMap.get(words[i]) > chainLength)
chainLength = wordMap.get(words[i]);
}
return chainLength;
}<|fim▁hole|> public static void main(String[] args) {
StringChains sc = new StringChains();
System.out.println(sc.longestChain(new String[] { "a", "b", "ba", "bca", "bda", "bdca" }));
System.out.println(sc.longestChain(new String[] {}));
System.out.println(sc.longestChain(null));
System.out.println(sc.longestChain(new String[] { "bc", "abc" }));
}
}<|fim▁end|>
| |
<|file_name|>rpcblockchain.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "main.h"
#include "bitcoinrpc.h"
using namespace json_spirit;
using namespace std;
extern void TxToJSON(const CTransaction& tx, const uint256 hashBlock, json_spirit::Object& entry);
extern enum Checkpoints::CPMode CheckpointsMode;
double GetDifficulty(const CBlockIndex* blockindex)
{
// Floating point number that is a multiple of the minimum difficulty,
// minimum difficulty = 1.0.
if (blockindex == NULL)
{
if (pindexBest == NULL)
return 1.0;
else
blockindex = GetLastBlockIndex(pindexBest, false);
}
int nShift = (blockindex->nBits >> 24) & 0xff;
double dDiff =
(double)0x0000ffff / (double)(blockindex->nBits & 0x00ffffff);
while (nShift < 29)<|fim▁hole|> nShift++;
}
while (nShift > 29)
{
dDiff /= 256.0;
nShift--;
}
return dDiff;
}
double GetPoWMHashPS()
{
int nPoWInterval = 72;
int64_t nTargetSpacingWorkMin = 30, nTargetSpacingWork = 30;
CBlockIndex* pindex = pindexGenesisBlock;
CBlockIndex* pindexPrevWork = pindexGenesisBlock;
while (pindex)
{
if (pindex->IsProofOfWork())
{
int64_t nActualSpacingWork = pindex->GetBlockTime() - pindexPrevWork->GetBlockTime();
nTargetSpacingWork = ((nPoWInterval - 1) * nTargetSpacingWork + nActualSpacingWork + nActualSpacingWork) / (nPoWInterval + 1);
nTargetSpacingWork = max(nTargetSpacingWork, nTargetSpacingWorkMin);
pindexPrevWork = pindex;
}
pindex = pindex->pnext;
}
return GetDifficulty() * 4294.967296 / nTargetSpacingWork;
}
double GetPoSKernelPS()
{
int nPoSInterval = 72;
double dStakeKernelsTriedAvg = 0;
int nStakesHandled = 0, nStakesTime = 0;
CBlockIndex* pindex = pindexBest;;
CBlockIndex* pindexPrevStake = NULL;
while (pindex && nStakesHandled < nPoSInterval)
{
if (pindex->IsProofOfStake())
{
dStakeKernelsTriedAvg += GetDifficulty(pindex) * 4294967296.0;
nStakesTime += pindexPrevStake ? (pindexPrevStake->nTime - pindex->nTime) : 0;
pindexPrevStake = pindex;
nStakesHandled++;
}
pindex = pindex->pprev;
}
return nStakesTime ? dStakeKernelsTriedAvg / nStakesTime : 0;
}
Object blockToJSON(const CBlock& block, const CBlockIndex* blockindex, bool fPrintTransactionDetail)
{
Object result;
result.push_back(Pair("hash", block.GetHash().GetHex()));
CMerkleTx txGen(block.vtx[0]);
txGen.SetMerkleBranch(&block);
result.push_back(Pair("confirmations", (int)txGen.GetDepthInMainChain()));
result.push_back(Pair("size", (int)::GetSerializeSize(block, SER_NETWORK, PROTOCOL_VERSION)));
result.push_back(Pair("height", blockindex->nHeight));
result.push_back(Pair("version", block.nVersion));
result.push_back(Pair("merkleroot", block.hashMerkleRoot.GetHex()));
result.push_back(Pair("mint", ValueFromAmount(blockindex->nMint)));
result.push_back(Pair("time", (boost::int64_t)block.GetBlockTime()));
result.push_back(Pair("nonce", (boost::uint64_t)block.nNonce));
result.push_back(Pair("bits", HexBits(block.nBits)));
result.push_back(Pair("difficulty", GetDifficulty(blockindex)));
result.push_back(Pair("blocktrust", leftTrim(blockindex->GetBlockTrust().GetHex(), '0')));
result.push_back(Pair("chaintrust", leftTrim(blockindex->nChainTrust.GetHex(), '0')));
if (blockindex->pprev)
result.push_back(Pair("previousblockhash", blockindex->pprev->GetBlockHash().GetHex()));
if (blockindex->pnext)
result.push_back(Pair("nextblockhash", blockindex->pnext->GetBlockHash().GetHex()));
result.push_back(Pair("flags", strprintf("%s%s", blockindex->IsProofOfStake()? "proof-of-stake" : "proof-of-work", blockindex->GeneratedStakeModifier()? " stake-modifier": "")));
result.push_back(Pair("proofhash", blockindex->IsProofOfStake()? blockindex->hashProofOfStake.GetHex() : blockindex->GetBlockHash().GetHex()));
result.push_back(Pair("entropybit", (int)blockindex->GetStakeEntropyBit()));
result.push_back(Pair("modifier", strprintf("%016"PRIx64, blockindex->nStakeModifier)));
result.push_back(Pair("modifierchecksum", strprintf("%08x", blockindex->nStakeModifierChecksum)));
Array txinfo;
BOOST_FOREACH (const CTransaction& tx, block.vtx)
{
if (fPrintTransactionDetail)
{
Object entry;
entry.push_back(Pair("txid", tx.GetHash().GetHex()));
TxToJSON(tx, 0, entry);
txinfo.push_back(entry);
}
else
txinfo.push_back(tx.GetHash().GetHex());
}
result.push_back(Pair("tx", txinfo));
if (block.IsProofOfStake())
result.push_back(Pair("signature", HexStr(block.vchBlockSig.begin(), block.vchBlockSig.end())));
return result;
}
Value getbestblockhash(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getbestblockhash\n"
"Returns the hash of the best block in the longest block chain.");
return hashBestChain.GetHex();
}
Value getblockcount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getblockcount\n"
"Returns the number of blocks in the longest block chain.");
return nBestHeight;
}
Value getdifficulty(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getdifficulty\n"
"Returns the difficulty as a multiple of the minimum difficulty.");
Object obj;
obj.push_back(Pair("proof-of-work", GetDifficulty()));
obj.push_back(Pair("proof-of-stake", GetDifficulty(GetLastBlockIndex(pindexBest, true))));
obj.push_back(Pair("search-interval", (int)nLastCoinStakeSearchInterval));
return obj;
}
Value settxfee(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 1 || AmountFromValue(params[0]) < MIN_TX_FEE)
throw runtime_error(
"settxfee <amount>\n"
"<amount> is a real and is rounded to the nearest 0.01");
nTransactionFee = AmountFromValue(params[0]);
nTransactionFee = (nTransactionFee / CENT) * CENT; // round to cent
return true;
}
Value getrawmempool(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getrawmempool\n"
"Returns all transaction ids in memory pool.");
vector<uint256> vtxid;
mempool.queryHashes(vtxid);
Array a;
BOOST_FOREACH(const uint256& hash, vtxid)
a.push_back(hash.ToString());
return a;
}
Value getblockhash(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"getblockhash <index>\n"
"Returns hash of block in best-block-chain at <index>.");
int nHeight = params[0].get_int();
if (nHeight < 0 || nHeight > nBestHeight)
throw runtime_error("Block number out of range.");
CBlockIndex* pblockindex = FindBlockByHeight(nHeight);
return pblockindex->phashBlock->GetHex();
}
Value getblock(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getblock <hash> [txinfo]\n"
"txinfo optional to print more detailed tx info\n"
"Returns details of a block with given block-hash.");
std::string strHash = params[0].get_str();
uint256 hash(strHash);
if (mapBlockIndex.count(hash) == 0)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Block not found");
CBlock block;
CBlockIndex* pblockindex = mapBlockIndex[hash];
block.ReadFromDisk(pblockindex, true);
return blockToJSON(block, pblockindex, params.size() > 1 ? params[1].get_bool() : false);
}
Value getblockbynumber(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getblock <number> [txinfo]\n"
"txinfo optional to print more detailed tx info\n"
"Returns details of a block with given block-number.");
int nHeight = params[0].get_int();
if (nHeight < 0 || nHeight > nBestHeight)
throw runtime_error("Block number out of range.");
CBlock block;
CBlockIndex* pblockindex = mapBlockIndex[hashBestChain];
while (pblockindex->nHeight > nHeight)
pblockindex = pblockindex->pprev;
uint256 hash = *pblockindex->phashBlock;
pblockindex = mapBlockIndex[hash];
block.ReadFromDisk(pblockindex, true);
return blockToJSON(block, pblockindex, params.size() > 1 ? params[1].get_bool() : false);
}
// nitrous: get information of sync-checkpoint
Value getcheckpoint(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 0)
throw runtime_error(
"getcheckpoint\n"
"Show info of synchronized checkpoint.\n");
Object result;
CBlockIndex* pindexCheckpoint;
result.push_back(Pair("synccheckpoint", Checkpoints::hashSyncCheckpoint.ToString().c_str()));
pindexCheckpoint = mapBlockIndex[Checkpoints::hashSyncCheckpoint];
result.push_back(Pair("height", pindexCheckpoint->nHeight));
result.push_back(Pair("timestamp", DateTimeStrFormat(pindexCheckpoint->GetBlockTime()).c_str()));
// Check that the block satisfies synchronized checkpoint
if (CheckpointsMode == Checkpoints::STRICT)
result.push_back(Pair("policy", "strict"));
if (CheckpointsMode == Checkpoints::ADVISORY)
result.push_back(Pair("policy", "advisory"));
if (CheckpointsMode == Checkpoints::PERMISSIVE)
result.push_back(Pair("policy", "permissive"));
if (mapArgs.count("-checkpointkey"))
result.push_back(Pair("checkpointmaster", true));
return result;
}<|fim▁end|>
|
{
dDiff *= 256.0;
|
<|file_name|>TestASTFilterNormalizationOptimizer.java<|end_file_name|><|fim▁begin|>/**
Copyright (C) SYSTAP, LLC 2006-2015. All rights reserved.
Contact:
SYSTAP, LLC
2501 Calvert ST NW #106
Washington, DC 20008
[email protected]
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
/*
* Created on June 10, 2015
*/
package com.bigdata.rdf.sparql.ast.optimizers;
import java.util.ArrayList;
import java.util.List;
import org.openrdf.query.algebra.StatementPattern.Scope;
import com.bigdata.bop.IBindingSet;
import com.bigdata.bop.bindingSet.ListBindingSet;
import com.bigdata.rdf.internal.IV;
import com.bigdata.rdf.model.BigdataURI;
import com.bigdata.rdf.model.BigdataValue;
import com.bigdata.rdf.model.BigdataValueFactory;
import com.bigdata.rdf.sparql.ast.ASTContainer;
import com.bigdata.rdf.sparql.ast.AbstractASTEvaluationTestCase;
import com.bigdata.rdf.sparql.ast.ConstantNode;
import com.bigdata.rdf.sparql.ast.FilterNode;
import com.bigdata.rdf.sparql.ast.FunctionNode;
import com.bigdata.rdf.sparql.ast.FunctionRegistry;
import com.bigdata.rdf.sparql.ast.IQueryNode;
import com.bigdata.rdf.sparql.ast.IValueExpressionNode;
import com.bigdata.rdf.sparql.ast.JoinGroupNode;
import com.bigdata.rdf.sparql.ast.ProjectionNode;
import com.bigdata.rdf.sparql.ast.QueryHints;
import com.bigdata.rdf.sparql.ast.QueryNodeWithBindingSet;
import com.bigdata.rdf.sparql.ast.QueryRoot;
import com.bigdata.rdf.sparql.ast.QueryType;
import com.bigdata.rdf.sparql.ast.StatementPatternNode;
import com.bigdata.rdf.sparql.ast.StaticAnalysis;
import com.bigdata.rdf.sparql.ast.ValueExpressionNode;
import com.bigdata.rdf.sparql.ast.VarNode;
import com.bigdata.rdf.sparql.ast.eval.AST2BOpContext;
/**
* Test suite for the {@link ASTFilterNormalizationOptimizer} class and associated
* utility methods in {@link StaticAnalysis}.
*
* @author <a href="mailto:[email protected]">Michael Schmidt</a>
*/
@SuppressWarnings({ "rawtypes" })
public class TestASTFilterNormalizationOptimizer extends AbstractASTEvaluationTestCase {
public TestASTFilterNormalizationOptimizer() {
}
public TestASTFilterNormalizationOptimizer(String name) {
super(name);
}
/**
* Test the {@link ASTFilterNormalizationOptimizer#extractToplevelConjuncts(
* com.bigdata.rdf.sparql.ast.IValueExpressionNode, List)} method.
*/
public void testExtractTopLevelConjunctsMethod() {
// conjunct 1
final FunctionNode bound1 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") });
// conjunct 2
final FunctionNode bound2 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode not1 = FunctionNode.NOT(bound2);
// conjunct 3
final FunctionNode bound3 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode or1 =
FunctionNode.OR(FunctionNode.AND(bound3,bound4), bound5);
// conjunct 4
final FunctionNode bound6 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s6") });
final FunctionNode toCheck =
FunctionNode.AND(bound1,
FunctionNode.AND(
not1, FunctionNode.AND(or1, bound6)));
final List<IValueExpressionNode> actual =
StaticAnalysis.extractToplevelConjuncts(
toCheck, new ArrayList<IValueExpressionNode>());
assertFalse(StaticAnalysis.isCNF(toCheck));
assertEquals(actual.size(), 4);
assertEquals(actual.get(0), bound1);
assertEquals(actual.get(1), not1);
assertEquals(actual.get(2), or1);
assertEquals(actual.get(3), bound6);
}
/**
* Test the {@link ASTFilterNormalizationOptimizer#constructFiltersForValueExpressionNode(
* IValueExpressionNode, List)} method.
*/
public void testConstructFiltersForValueExpressionNodeMethod() {
// conjunct 1
final FunctionNode bound3 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode or1 =
FunctionNode.OR(FunctionNode.AND(bound3,bound4), bound5);
// conjunct 2
final FunctionNode bound2 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode not1 = FunctionNode.NOT(bound2);
// conjunct 3
final FunctionNode bound6 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s6") });
// conjunct 4
final FunctionNode bound1 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode base =
FunctionNode.AND(
FunctionNode.AND(
or1, FunctionNode.AND(not1, bound6)),bound1);
final ASTFilterNormalizationOptimizer filterOptimizer = new ASTFilterNormalizationOptimizer();
final List<FilterNode> filters =
filterOptimizer.constructFiltersForValueExpressionNode(
base, new ArrayList<FilterNode>());
assertFalse(StaticAnalysis.isCNF(base));
assertEquals(filters.size(), 4);
assertEquals(filters.get(0), new FilterNode(or1));
assertEquals(filters.get(1), new FilterNode(not1));
assertEquals(filters.get(2), new FilterNode(bound6));
assertEquals(filters.get(3), new FilterNode(bound1));
}
/**
* Test the {@link ASTFilterNormalizationOptimizer#toConjunctiveValueExpression(List)}
* method.
*/
public void testToConjunctiveValueExpressionMethod() {
// conjunct 1
final FunctionNode bound1 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") });
// conjunct 2
final FunctionNode bound2 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode not1 = FunctionNode.NOT(bound2);
// conjunct 3
final FunctionNode bound3 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode or1 =
FunctionNode.OR(FunctionNode.AND(bound3,bound4), bound5);
// conjunct 4
final FunctionNode bound6 =
new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s6") });
final List<IValueExpressionNode> baseConjuncts =
new ArrayList<IValueExpressionNode>();
baseConjuncts.add(bound1);
baseConjuncts.add(not1);
baseConjuncts.add(or1);
baseConjuncts.add(bound6);
final IValueExpressionNode expected =
FunctionNode.AND(
FunctionNode.AND(
FunctionNode.AND(bound1, not1),
or1),
bound6);
final IValueExpressionNode actual =
StaticAnalysis.toConjunctiveValueExpression(baseConjuncts);
assertFalse(StaticAnalysis.isCNF(actual));
assertEquals(expected, actual);
}
/**
* The FILTER
*
* <pre>
* SELECT ?s where { ?s ?p ?o . FILTER(?s=?o) }
* </pre>
*
* is not being modified.
*/
public void testFilterDecompositionNoOp() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
given.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o"), null/* c */,
Scope.DEFAULT_CONTEXTS));
whereClause.addChild(
new FilterNode(
FunctionNode.EQ(new VarNode("s"), new VarNode("o"))));
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
expected.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o"), null/* c */,
Scope.DEFAULT_CONTEXTS));
final FilterNode filterNode =
new FilterNode(
FunctionNode.EQ(new VarNode("s"), new VarNode("o")));
assertTrue(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* The FILTER
*
* <pre>
* SELECT ?s where { ?s ?p ?o . FILTER(?s=?o && ?s!=<http://www.test.com>) }
* </pre>
*
* is rewritten as
*
* <pre>
* SELECT ?s where { ?s ?p ?o . FILTER(?s=?o) . FILTER(?s!=<http://www.test.com>) }
* </pre>
*
*/
public void testSimpleConjunctiveFilter() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final BigdataValueFactory f = store.getValueFactory();
final BigdataURI testUri = f.createURI("http://www.test.com");
final IV test = makeIV(testUri);
final BigdataValue[] values = new BigdataValue[] { testUri };
store.getLexiconRelation()
.addTerms(values, values.length, false/* readOnly */);
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o"), null/* c */,
Scope.DEFAULT_CONTEXTS));
final FilterNode filterNode =
new FilterNode(
FunctionNode.AND(
FunctionNode.EQ(new VarNode("s"), new VarNode("o")),
FunctionNode.NE(new VarNode("s"), new ConstantNode(test))));
assertTrue(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o"), null/* c */,
Scope.DEFAULT_CONTEXTS));
whereClause.addChild(
new FilterNode(
FunctionNode.EQ(new VarNode("s"), new VarNode("o"))));
whereClause.addChild(
new FilterNode(
FunctionNode.NE(new VarNode("s"), new ConstantNode(test))));
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* The FILTER
*
* <pre>
* SELECT ?s where { FILTER(NOT(?s<?o || BOUND(?o))) . OPTIONAL { ?s ?p ?o } }
* </pre>
*
* is rewritten as
*
* <pre>
* SELECT ?s where { OPTIONAL { ?s ?p ?o } . FILTER(?s>=?o) . FILTER(!BOUND(?s) }
* </pre>
*
*/
public void testSimpleDisjunctiveFilter() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final FilterNode filterNode =
new FilterNode(
FunctionNode.NOT(
FunctionNode.OR(
FunctionNode.LT(new VarNode("s"), new VarNode("o")),
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("o") }))));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
spn.setOptional(true);
whereClause.addChild(spn);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
spn.setOptional(true);
whereClause.addChild(spn);
whereClause.addChild(
new FilterNode(
FunctionNode.GE(new VarNode("s"), new VarNode("o"))));
whereClause.addChild(
new FilterNode(
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("o") }))));
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test rewriting of negated leaves, such as !(?x=?y) -> ?x!=?y,
* !(?a<?b) -> ?a>=?b, etc. in
*/
public void testNegationLeafRewriting01() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final FunctionNode filterEq = FunctionNode.EQ(new VarNode("s"), new VarNode("o"));
final FunctionNode filterNeq = FunctionNode.NE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLe = FunctionNode.LE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLt = FunctionNode.LT(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGe = FunctionNode.GE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGt = FunctionNode.GT(new VarNode("s"), new VarNode("o"));
final FunctionNode comb1 = FunctionNode.AND(filterEq, filterNeq);
final FunctionNode comb2 = FunctionNode.AND(filterLe, filterLt);
final FunctionNode comb3 = FunctionNode.AND(filterGt, filterGe);
final FilterNode filterNode =
new FilterNode(
FunctionNode.NOT(
FunctionNode.AND(FunctionNode.AND(comb1, comb2),comb3)));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final FunctionNode filterEqInv = FunctionNode.NE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterNeqInv = FunctionNode.EQ(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLeInv = FunctionNode.GT(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLtInv = FunctionNode.GE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGeInv = FunctionNode.LT(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGtInv = FunctionNode.LE(new VarNode("s"), new VarNode("o"));
final FunctionNode comb1 = FunctionNode.OR(filterEqInv, filterNeqInv);
final FunctionNode comb2 = FunctionNode.OR(filterLeInv, filterLtInv);
final FunctionNode comb3 = FunctionNode.OR(filterGtInv, filterGeInv);
final FilterNode filterNode =
new FilterNode(
FunctionNode.OR(FunctionNode.OR(comb1, comb2),comb3));
assertTrue(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test rewriting of negated leaves, such as !(?x=?y) -> ?x!=?y,
* !(?a<?b) -> ?a>=?b, etc. (differs from v01 in tree shape).
*/
public void testNegationLeafRewriting02() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final FunctionNode filterEq = FunctionNode.EQ(new VarNode("s"), new VarNode("o"));
final FunctionNode filterNeq = FunctionNode.NE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLe = FunctionNode.LE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLt = FunctionNode.LT(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGe = FunctionNode.GE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGt = FunctionNode.GT(new VarNode("s"), new VarNode("o"));
final FunctionNode comb1 = FunctionNode.AND(filterEq, filterNeq);
final FunctionNode comb2 = FunctionNode.AND(filterLe, filterLt);
final FunctionNode comb3 = FunctionNode.AND(filterGt, filterGe);
final FilterNode filterNode =
new FilterNode(
FunctionNode.NOT(
FunctionNode.AND(comb1, FunctionNode.AND(comb2,comb3))));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final FunctionNode filterEqInv = FunctionNode.NE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterNeqInv = FunctionNode.EQ(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLeInv = FunctionNode.GT(new VarNode("s"), new VarNode("o"));
final FunctionNode filterLtInv = FunctionNode.GE(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGeInv = FunctionNode.LT(new VarNode("s"), new VarNode("o"));
final FunctionNode filterGtInv = FunctionNode.LE(new VarNode("s"), new VarNode("o"));
final FunctionNode comb1 = FunctionNode.OR(filterEqInv, filterNeqInv);
final FunctionNode comb2 = FunctionNode.OR(filterLeInv, filterLtInv);
final FunctionNode comb3 = FunctionNode.OR(filterGtInv, filterGeInv);
final FilterNode filterNode =
new FilterNode(
FunctionNode.OR(comb1, FunctionNode.OR(comb2,comb3)));
assertTrue(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test level three pushing of negation.
*/
public void testNestedNegationRewriting() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final FunctionNode filterANot1 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("o") }));
final FunctionNode filterANot2 = FunctionNode.NOT(filterANot1);
final FunctionNode filterANot3 = FunctionNode.NOT(filterANot2);
final FunctionNode filterBNot1 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.EQ, null,
new ValueExpressionNode[] { new VarNode("s"), new VarNode("o") }));
final FunctionNode filterBNot2 = FunctionNode.NOT(filterBNot1);
final FunctionNode filterBNot3 = FunctionNode.NOT(filterBNot2);
final FunctionNode filterBNot4 = FunctionNode.NOT(filterBNot3);
final FilterNode filterNode =
new FilterNode(
FunctionNode.NOT(
FunctionNode.AND(filterANot3, filterBNot4)));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final FunctionNode bound =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("o") });
final FunctionNode neq =
new FunctionNode(FunctionRegistry.NE, null,
new ValueExpressionNode[] { new VarNode("s"), new VarNode("o") });
FilterNode filterNode = new FilterNode(FunctionNode.OR(bound, neq));
assertTrue(StaticAnalysis.isCNF(filterNode));
// all NOT nodes should be resolved
whereClause.addChild(filterNode);
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test level three pushing of negation.
*/
public void testNestedNegationRewritingAndSplit() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final FunctionNode filterANot1 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("o") }));
final FunctionNode filterANot2 = FunctionNode.NOT(filterANot1);
final FunctionNode filterANot3 = FunctionNode.NOT(filterANot2);
final FunctionNode filterBNot1 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.EQ, null,
new ValueExpressionNode[] { new VarNode("s"), new VarNode("o") }));
final FunctionNode filterBNot2 = FunctionNode.NOT(filterBNot1);
final FunctionNode filterBNot3 = FunctionNode.NOT(filterBNot2);
final FunctionNode filterBNot4 = FunctionNode.NOT(filterBNot3);
final FilterNode filterNode =
new FilterNode(
FunctionNode.NOT(
FunctionNode.OR(filterANot3, filterBNot4)));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final FunctionNode bound =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("o") });
final FunctionNode neq =
new FunctionNode(FunctionRegistry.NE, null,
new ValueExpressionNode[] { new VarNode("s"), new VarNode("o") });
whereClause.addChild(new FilterNode(bound));
whereClause.addChild(new FilterNode(neq));
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test switch of OR over AND expression expression.
*/
public void testSimpleOrAndSwitch() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode bound1 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode bound2 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode bound3 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s5") });
final FilterNode filterNode =
new FilterNode(
FunctionNode.OR(
FunctionNode.AND(bound1, bound2),
FunctionNode.AND(bound3,
FunctionNode.AND(bound4, bound5))));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode bound1 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode bound2 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode bound3 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode and1 = FunctionNode.OR(bound1, bound3);
final FunctionNode and2 = FunctionNode.OR(bound1, bound4);
final FunctionNode and3 = FunctionNode.OR(bound1, bound5);
final FunctionNode and4 = FunctionNode.OR(bound2, bound3);
final FunctionNode and5 = FunctionNode.OR(bound2, bound4);
final FunctionNode and6 = FunctionNode.OR(bound2, bound5);
// after splitting, we should get the following conjuncts
whereClause.addChild(new FilterNode(and1));
whereClause.addChild(new FilterNode(and2));
whereClause.addChild(new FilterNode(and3));
whereClause.addChild(new FilterNode(and4));
whereClause.addChild(new FilterNode(and5));
whereClause.addChild(new FilterNode(and6));
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test switch of OR over AND expression with top-level negation expression.
*/
public void testOrAndSwitchWithNegation() {
final ASTFilterNormalizationOptimizer rewriter =
new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setQueryHint(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode notBound1 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s1") }));
final FunctionNode notBound2 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s2") }));
final FunctionNode notBound3 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s3") }));
final FunctionNode notBound4 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s4") }));
final FunctionNode notBound5 =
FunctionNode.NOT(
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s5") }));
final FilterNode filterNode =
new FilterNode(
FunctionNode.NOT(
FunctionNode.AND(
FunctionNode.OR(notBound1, notBound2),
FunctionNode.OR(notBound3,
FunctionNode.OR(notBound4, notBound5)))));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setQueryHint(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode bound1 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode bound2 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode bound3 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 =
new FunctionNode(FunctionRegistry.BOUND, null,
new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode or1 = FunctionNode.OR(bound1, bound3);
final FunctionNode or2 = FunctionNode.OR(bound1, bound4);
final FunctionNode or3 = FunctionNode.OR(bound1, bound5);
final FunctionNode or4 = FunctionNode.OR(bound2, bound3);
final FunctionNode or5 = FunctionNode.OR(bound2, bound4);
final FunctionNode or6 = FunctionNode.OR(bound2, bound5);
// after splitting, we should get the following conjuncts
whereClause.addChild(new FilterNode(or1));
whereClause.addChild(new FilterNode(or2));
whereClause.addChild(new FilterNode(or3));
whereClause.addChild(new FilterNode(or4));
whereClause.addChild(new FilterNode(or5));
whereClause.addChild(new FilterNode(or6));
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test recursive optimization of OR - AND - OR - AND pattern.
*/
public void testOrAndSwitchRecursive() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode bound1 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode bound2 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode bound3 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode bound6 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s6") });
final FunctionNode bound7 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s7") });
final FunctionNode bound8 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s8") });
final FunctionNode bound9 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s9") });
final FunctionNode bound10 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s10") });
final FunctionNode bound11 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s11") });
final FunctionNode bound12 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s12") });
final FunctionNode bound13 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s13") });
final FunctionNode bound14 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s14") });
final FunctionNode bound15 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s15") });
final FunctionNode bound16 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s16") });
final FilterNode filterNode =
new FilterNode(
FunctionNode.OR(
FunctionNode.AND(
FunctionNode.OR(
FunctionNode.AND(bound1, bound2),
FunctionNode.AND(bound3, bound4)
),
FunctionNode.OR(
FunctionNode.AND(bound5, bound6),
FunctionNode.AND(bound7, bound8)
)
),
FunctionNode.AND(
FunctionNode.OR(
FunctionNode.AND(bound9, bound10),
FunctionNode.AND(bound11, bound12)
),
FunctionNode.OR(
FunctionNode.AND(bound13, bound14),
FunctionNode.AND(bound15, bound16)
))));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode bound1 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode bound2 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode bound3 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode bound6 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s6") });
final FunctionNode bound7 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s7") });
final FunctionNode bound8 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s8") });
final FunctionNode bound9 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s9") });
final FunctionNode bound10 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s10") });
final FunctionNode bound11 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s11") });
final FunctionNode bound12 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s12") });
final FunctionNode bound13 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s13") });
final FunctionNode bound14 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s14") });
final FunctionNode bound15 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s15") });
final FunctionNode bound16 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s16") });
/**
* Sketch of intended rewriting process (bottom-up)
*
* ### STEP 1: for the four leaf nodes we get the following.
*
* FunctionNode.OR(
* FunctionNode.AND(
* FunctionNode.AND(
* FunctionNode.OR(bound1,bound3)
* FunctionNode.OR(bound1,bound4)
* FunctionNode.OR(bound2,bound3)
* FunctionNode.OR(bound2,bound4)
* ),
* FunctionNode.AND(
* FunctionNode.OR(bound5,bound7)
* FunctionNode.OR(bound5,bound8)
* FunctionNode.OR(bound6,bound7)
* FunctionNode.OR(bound6,bound8)
* )
* ),
* FunctionNode.AND(
* FunctionNode.AND(
* FunctionNode.OR(bound9,bound11)
* FunctionNode.OR(bound9,bound12)
* FunctionNode.OR(bound10,bound11)
* FunctionNode.OR(bound10,bound12)
* ),
* FunctionNode.AND(
* FunctionNode.OR(bound13,bound15)
* FunctionNode.OR(bound13,bound16)
* FunctionNode.OR(bound14,bound15)
* FunctionNode.OR(bound14,bound16)
* )
* )
* )
*
* ### STEP 2: pushing down the top-level OR, we compute the cross
* product of the left and right disjuncts (we flatten
* out the and in the representation below):
*
* FunctionNode.AND(
* FunctionNode.AND(
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound3),
* FunctionNode.OR(bound9,bound11)
* ),
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound3),
* FunctionNode.OR(bound9,bound12)
* ),
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound3),
* FunctionNode.OR(bound10,bound11)
* ),
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound3),
* FunctionNode.OR(bound10,bound12)
* ),
*
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound4),
* FunctionNode.OR(bound9,bound11)
* ),
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound4),
* FunctionNode.OR(bound9,bound12)
* ),
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound4),
* FunctionNode.OR(bound10,bound11)
* ),
* FunctionNode.OR(
* FunctionNode.OR(bound1,bound4),
* FunctionNode.OR(bound10,bound12)
* ),
*
* ...
*
* Each of those topmost OR expression gives us one FILTER expression
* in the end, resulting in 8x8 = 64 FILTERs. We construct them
* schematically below.
*/
final List<FunctionNode> lefts = new ArrayList<FunctionNode>();
lefts.add(FunctionNode.OR(bound1,bound3));
lefts.add(FunctionNode.OR(bound1,bound4));
lefts.add(FunctionNode.OR(bound2,bound3));
lefts.add(FunctionNode.OR(bound2,bound4));
lefts.add(FunctionNode.OR(bound5,bound7));
lefts.add(FunctionNode.OR(bound5,bound8));
lefts.add(FunctionNode.OR(bound6,bound7));
lefts.add(FunctionNode.OR(bound6,bound8));
final List<FunctionNode> rights = new ArrayList<FunctionNode>();
rights.add(FunctionNode.OR(bound9,bound11));
rights.add(FunctionNode.OR(bound9,bound12));
rights.add(FunctionNode.OR(bound10,bound11));
rights.add(FunctionNode.OR(bound10,bound12));
rights.add(FunctionNode.OR(bound13,bound15));
rights.add(FunctionNode.OR(bound13,bound16));
rights.add(FunctionNode.OR(bound14,bound15));
rights.add(FunctionNode.OR(bound14,bound16));
for (final FunctionNode left : lefts) {
for (final FunctionNode right : rights) {
whereClause.addChild(
new FilterNode(FunctionNode.OR(left, right)));
}
}
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test recursive optimization of OR - OR - AND pattern.
*/
public void testOrOrAndSwitch() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode bound1 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode bound2 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode bound3 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode bound6 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s6") });
final FunctionNode bound7 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s7") });
final FunctionNode bound8 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s8") });
final FilterNode filterNode = new FilterNode(
FunctionNode.OR(
FunctionNode.OR(
FunctionNode.AND(bound1, bound2),
FunctionNode.AND(bound3, bound4)
),
FunctionNode.OR(
FunctionNode.AND(bound5, bound6),
FunctionNode.AND(bound7, bound8)
)));
assertFalse(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final StatementPatternNode spn =
new StatementPatternNode(
new VarNode("s"), new VarNode("p"), new VarNode("o"),
null, Scope.DEFAULT_CONTEXTS);
whereClause.addChild(spn);
final FunctionNode bound1 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") });
final FunctionNode bound2 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s2") });
final FunctionNode bound3 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s3") });
final FunctionNode bound4 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s4") });
final FunctionNode bound5 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s5") });
final FunctionNode bound6 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s6") });
final FunctionNode bound7 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s7") });
final FunctionNode bound8 = new FunctionNode(FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s8") });
/**
*
* ### STEP 1: generates to OR connected leafs in CNF
*
* FunctionNode.OR(
* FunctionNode.AND(
* FunctionNode.AND(
* FunctionNode.OR(bound1, bound3),
* FunctionNode.OR(bound1, bound4)
* ),
* FunctionNode.AND(
* FunctionNode.OR(bound2, bound3),
* FunctionNode.OR(bound2, bound4)
* )
* ),
* FunctionNode.AND(
* FunctionNode.AND(
* FunctionNode.OR(bound5, bound7),
* FunctionNode.OR(bound5, bound8)
* ),
* FunctionNode.AND(
* FunctionNode.OR(bound6, bound7),<|fim▁hole|> * FunctionNode.OR(bound6, bound8)
* )
* )
* )
*
* ### STEP 2: pushes down the uppermost OR expression
*
* Considers all OR-leafs in the left top-level AND expression
* and joins them with OR-leafs in the right top-level AND expression.
* After decomposing, this actually gives us 4x4 = 16 FILTERs.
*
*/
final List<FunctionNode> lefts = new ArrayList<FunctionNode>();
lefts.add(FunctionNode.OR(bound1,bound3));
lefts.add(FunctionNode.OR(bound1,bound4));
lefts.add(FunctionNode.OR(bound2,bound3));
lefts.add(FunctionNode.OR(bound2,bound4));
final List<FunctionNode> rights = new ArrayList<FunctionNode>();
rights.add(FunctionNode.OR(bound5,bound7));
rights.add(FunctionNode.OR(bound5,bound8));
rights.add(FunctionNode.OR(bound6,bound7));
rights.add(FunctionNode.OR(bound6,bound8));
for (final FunctionNode left : lefts) {
for (final FunctionNode right : rights) {
whereClause.addChild(
new FilterNode(FunctionNode.OR(left, right)));
}
}
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test removal of duplicate filter.
*/
public void testRemoveDuplicateFilter() {
final ASTFilterNormalizationOptimizer rewriter = new ASTFilterNormalizationOptimizer();
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s1"),
new VarNode("p1"), new VarNode("o1"), null/* c */,
Scope.DEFAULT_CONTEXTS)); // just noise
// two times exactly the same pattern
final FunctionNode simpleFunctionNode1 =
FunctionNode.NE(new VarNode("s1"), new VarNode("s2"));
final FunctionNode simpleFunctionNode2 =
FunctionNode.NE(new VarNode("s1"), new VarNode("s2"));
// three times the same pattern
final FunctionNode complexFunctionNode1 =
FunctionNode.OR(
FunctionNode.NE(new VarNode("s1"), new VarNode("s2")),
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") })));
final FunctionNode complexFunctionNode2 =
FunctionNode.OR(
FunctionNode.NE(new VarNode("s1"), new VarNode("s2")),
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") })));
final FunctionNode complexFunctionNode3 =
FunctionNode.OR(
FunctionNode.NE(new VarNode("s1"), new VarNode("s2")),
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") })));
whereClause.addChild(new FilterNode(simpleFunctionNode1));
whereClause.addChild(new FilterNode(simpleFunctionNode2));
whereClause.addChild(new FilterNode(complexFunctionNode1));
whereClause.addChild(new FilterNode(complexFunctionNode2));
whereClause.addChild(new StatementPatternNode(new VarNode("s2"),
new VarNode("p2"), new VarNode("o2"), null/* c */,
Scope.DEFAULT_CONTEXTS)); // just noise
whereClause.addChild(new FilterNode(complexFunctionNode3));
assertTrue(StaticAnalysis.isCNF(simpleFunctionNode1));
assertTrue(StaticAnalysis.isCNF(simpleFunctionNode2));
assertTrue(StaticAnalysis.isCNF(complexFunctionNode1));
assertTrue(StaticAnalysis.isCNF(complexFunctionNode2));
assertTrue(StaticAnalysis.isCNF(complexFunctionNode3));
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s1"),
new VarNode("p1"), new VarNode("o1"), null/* c */,
Scope.DEFAULT_CONTEXTS)); // just noise
// the simple function node
final FunctionNode simpleFunctionNode =
FunctionNode.NE(new VarNode("s1"), new VarNode("s2"));
// the complex function node
final FunctionNode complexFunctionNode =
FunctionNode.OR(
FunctionNode.NE(new VarNode("s1"), new VarNode("s2")),
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND,
null, new ValueExpressionNode[] { new VarNode("s1") })));
whereClause.addChild(new FilterNode(simpleFunctionNode));
whereClause.addChild(new StatementPatternNode(new VarNode("s2"),
new VarNode("p2"), new VarNode("o2"), null/* c */,
Scope.DEFAULT_CONTEXTS)); // just noise
whereClause.addChild(new FilterNode(complexFunctionNode));
assertTrue(StaticAnalysis.isCNF(simpleFunctionNode));
assertTrue(StaticAnalysis.isCNF(complexFunctionNode));
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test removal of duplicate filter, where the duplicate is introduced
* through the CNF based decomposition process. This is a variant of test
* {@link TestASTFilterNormalizationOptimizer#testSimpleConjunctiveFilter()},
* where we just add a duplicate.
*/
public void testRemoveDuplicateGeneratedFilter() {
/*
* Note: DO NOT share structures in this test!!!!
*/
final BigdataValueFactory f = store.getValueFactory();
final BigdataURI testUri = f.createURI("http://www.test.com");
final IV test = makeIV(testUri);
final BigdataValue[] values = new BigdataValue[] { testUri };
store.getLexiconRelation()
.addTerms(values, values.length, false/* readOnly */);
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o"), null/* c */,
Scope.DEFAULT_CONTEXTS));
final FilterNode filterNode =
new FilterNode(
FunctionNode.AND(
FunctionNode.EQ(new VarNode("s"), new VarNode("o")),
FunctionNode.NE(new VarNode("s"), new ConstantNode(test))));
// difference towards base test: this is the duplicate to be dropped
whereClause.addChild(
new FilterNode(
FunctionNode.EQ(new VarNode("s"), new VarNode("o"))));
assertTrue(StaticAnalysis.isCNF(filterNode));
whereClause.addChild(filterNode);
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
whereClause.addChild(new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o"), null/* c */,
Scope.DEFAULT_CONTEXTS));
whereClause.addChild(
new FilterNode(
FunctionNode.EQ(new VarNode("s"), new VarNode("o"))));
whereClause.addChild(
new FilterNode(
FunctionNode.NE(new VarNode("s"), new ConstantNode(test))));
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final ASTFilterNormalizationOptimizer rewriter =
new ASTFilterNormalizationOptimizer();
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
/**
* Test removal of unsatisfiable filters. More precisely, the query
*
* SELECT ?s WHERE {
* ?s ?p ?o1 .
* { ?s ?p ?o2 }
* OPTIONAL { ?s ?p ?o3 }
*
* FILTER(bound(?o1))
* FILTER(bound(?o2))
* FILTER(bound(?o3))
*
* FILTER(!bound(?o1))
* FILTER(!bound(?o2))
* FILTER(!bound(?o3))
* FILTER(!bound(?o4))
*
* // some duplicates (which should be dropped)
* FILTER(!bound(?o2))
* FILTER(!bound(?o3))
* }
*
* will be rewritten to
*
* SELECT ?s WHERE {
* ?s ?p ?o1 .
* { ?s ?p ?o2 }
* OPTIONAL { ?s ?p ?o3 }
*
* // ?o1 and ?o2 are definitely bound, so we can't optimize away
* FILTER(bound(?o3))
*
* // ?o4 is the only variable that is definitely not bound
* FILTER(!bound(?o1))
* FILTER(!bound(?o2))
* FILTER(!bound(?o3))
* }
*
*/
public void testRemoveUnsatisfiableFilters() {
/*
* Note: DO NOT share structures in this test!!!!
*/
final IBindingSet[] bsets = new IBindingSet[] { new ListBindingSet() };
// The source AST.
final QueryRoot given = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
given.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
given.setWhereClause(whereClause);
final StatementPatternNode spo1 =
new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o1"), null/* c */,
Scope.DEFAULT_CONTEXTS);
final StatementPatternNode spo2 =
new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o2"), null/* c */,
Scope.DEFAULT_CONTEXTS);
final JoinGroupNode jgn = new JoinGroupNode(spo2);
final StatementPatternNode spo3 =
new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o3"), null/* c */,
Scope.DEFAULT_CONTEXTS);
spo3.setOptional(true);
whereClause.addChild(spo1);
whereClause.addChild(jgn);
whereClause.addChild(spo3);
final FunctionNode filterBound1 =
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o1")});
final FunctionNode filterBound2 =
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o2")});
final FunctionNode filterBound3 =
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o3")});
final FunctionNode filterNotBound1 =
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o1")}));
final FunctionNode filterNotBound2 =
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o2")}));
final FunctionNode filterNotBound3 =
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o3")}));
final FunctionNode filterNotBound4 =
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o4")}));
whereClause.addChild(new FilterNode(filterBound1));
whereClause.addChild(new FilterNode(filterBound2));
whereClause.addChild(new FilterNode(filterBound3));
whereClause.addChild(new FilterNode(filterNotBound1));
whereClause.addChild(new FilterNode(filterNotBound2));
whereClause.addChild(new FilterNode(filterNotBound3));
whereClause.addChild(new FilterNode(filterNotBound4));
// add some duplicates (they should be removed)
whereClause.addChild(new FilterNode(filterNotBound2));
whereClause.addChild(new FilterNode(filterNotBound3));
}
// The expected AST after the rewrite.
final QueryRoot expected = new QueryRoot(QueryType.SELECT);
{
final ProjectionNode projection = new ProjectionNode();
expected.setProjection(projection);
projection.addProjectionVar(new VarNode("s"));
final JoinGroupNode whereClause = new JoinGroupNode();
whereClause.setProperty(QueryHints.NORMALIZE_FILTER_EXPRESSIONS, "true");
expected.setWhereClause(whereClause);
final StatementPatternNode spo1 =
new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o1"), null/* c */,
Scope.DEFAULT_CONTEXTS);
final StatementPatternNode spo2 =
new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o2"), null/* c */,
Scope.DEFAULT_CONTEXTS);
final JoinGroupNode jgn = new JoinGroupNode(spo2);
final StatementPatternNode spo3 =
new StatementPatternNode(new VarNode("s"),
new VarNode("p"), new VarNode("o3"), null/* c */,
Scope.DEFAULT_CONTEXTS);
spo3.setOptional(true);
whereClause.addChild(spo1);
whereClause.addChild(jgn);
whereClause.addChild(spo3);
final FunctionNode filterBound3 =
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o3")});
final FunctionNode filterNotBound1 =
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o1")}));
final FunctionNode filterNotBound2 =
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o2")}));
final FunctionNode filterNotBound3 =
FunctionNode.NOT(
new FunctionNode(
FunctionRegistry.BOUND, null/* scalarValues */,
new ValueExpressionNode[] {
new VarNode("o3")}));
whereClause.addChild(new FilterNode(filterBound3));
whereClause.addChild(new FilterNode(filterNotBound1));
whereClause.addChild(new FilterNode(filterNotBound2));
whereClause.addChild(new FilterNode(filterNotBound3));
}
final AST2BOpContext context =
new AST2BOpContext(new ASTContainer(given), store);
final ASTFilterNormalizationOptimizer rewriter =
new ASTFilterNormalizationOptimizer();
final IQueryNode actual =
rewriter.optimize(context, new QueryNodeWithBindingSet(given, bsets)).
getQueryNode();
assertSameAST(expected, actual);
}
}<|fim▁end|>
| |
<|file_name|>NotificationQueue.cpp<|end_file_name|><|fim▁begin|>//
// NotificationQueue.cpp
//
// $Id: //poco/1.4/Foundation/src/NotificationQueue.cpp#1 $
//
// Library: Foundation
// Package: Notifications
// Module: NotificationQueue
//
// Copyright (c) 2004-2006, Applied Informatics Software Engineering GmbH.
// and Contributors.
//
// Permission is hereby granted, free of charge, to any person or organization
// obtaining a copy of the software and accompanying documentation covered by
// this license (the "Software") to use, reproduce, display, distribute,
// execute, and transmit the Software, and to prepare derivative works of the
// Software, and to permit third-parties to whom the Software is furnished to
// do so, all subject to the following:
//
// The copyright notices in the Software and this entire statement, including
// the above license grant, this restriction and the following disclaimer,
// must be included in all copies of the Software, in whole or in part, and
// all derivative works of the Software, unless such copies or derivative
// works are solely in the form of machine-executable object code generated by
// a source language processor.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
// SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
// FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
// ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
#include "Poco/NotificationQueue.h"
#include "Poco/NotificationCenter.h"
#include "Poco/Notification.h"
#include "Poco/SingletonHolder.h"
namespace Poco {
NotificationQueue::NotificationQueue()
{
}
NotificationQueue::~NotificationQueue()
{
clear();
}
void NotificationQueue::enqueueNotification(Notification::Ptr pNotification)
{
poco_check_ptr (pNotification);
FastMutex::ScopedLock lock(_mutex);
if (_waitQueue.empty())
{
_nfQueue.push_back(pNotification);
}
else
{
WaitInfo* pWI = _waitQueue.front();
_waitQueue.pop_front();
pWI->pNf = pNotification;
pWI->nfAvailable.set();
}
}
void NotificationQueue::enqueueUrgentNotification(Notification::Ptr pNotification)
{
poco_check_ptr (pNotification);
FastMutex::ScopedLock lock(_mutex);
if (_waitQueue.empty())
{
_nfQueue.push_front(pNotification);
}
else
{
WaitInfo* pWI = _waitQueue.front();
_waitQueue.pop_front();
pWI->pNf = pNotification;
pWI->nfAvailable.set();
}
}
Notification* NotificationQueue::dequeueNotification()
{
FastMutex::ScopedLock lock(_mutex);
return dequeueOne().duplicate();
}
Notification* NotificationQueue::waitDequeueNotification()
{
Notification::Ptr pNf;
WaitInfo* pWI = 0;
{
FastMutex::ScopedLock lock(_mutex);
pNf = dequeueOne();
if (pNf) return pNf.duplicate();
pWI = new WaitInfo;
_waitQueue.push_back(pWI);
}
pWI->nfAvailable.wait();
pNf = pWI->pNf;
delete pWI;
return pNf.duplicate();
}
Notification* NotificationQueue::waitDequeueNotification(long milliseconds)
{
Notification::Ptr pNf;
WaitInfo* pWI = 0;
{
FastMutex::ScopedLock lock(_mutex);
pNf = dequeueOne();
if (pNf) return pNf.duplicate();
pWI = new WaitInfo;
<|fim▁hole|> if (pWI->nfAvailable.tryWait(milliseconds))
{
pNf = pWI->pNf;
}
else
{
FastMutex::ScopedLock lock(_mutex);
pNf = pWI->pNf;
for (WaitQueue::iterator it = _waitQueue.begin(); it != _waitQueue.end(); ++it)
{
if (*it == pWI)
{
_waitQueue.erase(it);
break;
}
}
}
delete pWI;
return pNf.duplicate();
}
void NotificationQueue::dispatch(NotificationCenter& notificationCenter)
{
FastMutex::ScopedLock lock(_mutex);
Notification::Ptr pNf = dequeueOne();
while (pNf)
{
notificationCenter.postNotification(pNf);
pNf = dequeueOne();
}
}
void NotificationQueue::wakeUpAll()
{
FastMutex::ScopedLock lock(_mutex);
for (WaitQueue::iterator it = _waitQueue.begin(); it != _waitQueue.end(); ++it)
{
(*it)->nfAvailable.set();
}
_waitQueue.clear();
}
bool NotificationQueue::empty() const
{
FastMutex::ScopedLock lock(_mutex);
return _nfQueue.empty();
}
int NotificationQueue::size() const
{
FastMutex::ScopedLock lock(_mutex);
return static_cast<int>(_nfQueue.size());
}
void NotificationQueue::clear()
{
FastMutex::ScopedLock lock(_mutex);
_nfQueue.clear();
}
bool NotificationQueue::hasIdleThreads() const
{
FastMutex::ScopedLock lock(_mutex);
return !_waitQueue.empty();
}
Notification::Ptr NotificationQueue::dequeueOne()
{
Notification::Ptr pNf;
if (!_nfQueue.empty())
{
pNf = _nfQueue.front();
_nfQueue.pop_front();
}
return pNf;
}
namespace
{
static SingletonHolder<NotificationQueue> sh_nfq;
}
NotificationQueue& NotificationQueue::defaultQueue()
{
return *sh_nfq.get();
}
} // namespace Poco<|fim▁end|>
|
_waitQueue.push_back(pWI);
}
|
<|file_name|>geo_referencing.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python2.7
# encoding: utf-8
from __future__ import division
import numpy as np
# from pyproj import Proj, pj_list, pj_ellps
import cv2
def geo_ref_tracks(tracks, frame, uav, debug=False):
"""
Geo-references tracks'points
:param tracks: list of drifters' trajectories
:param frame: CV2 frame
:param uav: UAV class object
:return: geo-referenced tracks in degrees and tracks relative to center point in meters
"""
# Meter per pixel ratio
# TODO: Lens correction could be needed here
diagLength = 2.0 * np.tan(np.deg2rad(uav.FOV/2.0)) * uav.altitude
nx = float(frame.shape[1])
ny = float(frame.shape[0])
phi = np.arctan(ny / nx)
horiMpP = diagLength * np.cos(phi) / nx # horizontal meters per pixel ratio
vertiMpP = diagLength * np.sin(phi) / ny # vertical meters per pixel ratio.
if uav.yaw < 0.0: # UAV convention
alibi = True
else:
alibi = False
yaw = np.abs(np.deg2rad(uav.yaw))
# Need this before of tuples
tracksInDeg = []
tracksInRelativeM = []
for tr in tracks:
tracksInDeg.append([])
tracksInRelativeM.append([])
# Relative distance
for tr, TR in zip(tracks, tracksInRelativeM):
for pt in tr:
pt = list(pt)
x = (pt[0] - (nx/2.0)) * horiMpP
y = ((ny - pt[1]) - (ny/2.0)) * vertiMpP # Origin frame is top left corner
if alibi:
# Correction with Active (aka Alibi) transformation
xr = x * np.cos(yaw) - y * np.sin(yaw)
yr = x * np.sin(yaw) + y * np.cos(yaw)
else:
# Correction with Passive (aka Alias) transformation
xr = x*np.cos(yaw) + y*np.sin(yaw)
yr = y*np.cos(yaw) - x*np.sin(yaw)
TR.append([xr, yr])
# Conversion deg. to m. / Version 2.0
y2lat = 1.0 / (110.54 * 1000.0)
x2lon = 1.0 / (111.320 * 1000.0 * np.cos(np.deg2rad(uav.centreCoordinates[1])))
lonC, latC = uav.centreCoordinates[0], uav.centreCoordinates[1]
for tr, trM in zip(tracksInDeg, tracksInRelativeM):
for ptM in trM:
lon, lat = lonC + (ptM[0] * x2lon), latC + (ptM[1] * y2lat)
tr.append([lon, lat])
# Conversion deg. to m. / version 1.0
# proj = raw_input("Use default projection UTM/WGS84 (yes/no)?: ").upper()
# if proj in "YES":
# myproj = Proj(proj='utm', ellps='WGS84') # LatLon with WGS84 datum used by GPS units
# else:<|fim▁hole|> # print "Choose a coordinate ellipse from the following list:"
# for key in pj_list:
# print key + ": " + pj_list[key]
# ellps = raw_input("Type in the coordinate ellipse: ")
# myproj = Proj(proj=proj, ellps=ellps)
# xc, yc = myproj(uav.centreCoordinates[0], uav.centreCoordinates[1])
# # Absolute distance and conversion m. to deg.
# for tr, trM in zip(tracksInDeg, tracksInRelativeM):
# for ptM in trM:
# x, y = xc + ptM[0], yc + ptM[1]
# lon, lat = myproj(x, y, inverse=True)
# tr.append([lon, lat])
# # Recompute relative distance in new referential
# tracksInRelativeM = []
# for tr in tracks:
# tracksInRelativeM.append([])
# lat2m = 110.54 * 1000.0
# lon2m = 111.320 * 1000.0 * np.cos(np.deg2rad(uav.centreCoordinates[1]))
# for tr, trM in zip(tracksInDeg, tracksInRelativeM):
# for pt in tr:
# x = lon2m * (pt[0] - uav.centreCoordinates[0])
# y = lat2m * (pt[1] - uav.centreCoordinates[1])
# trM.append([x, y])
return tracksInDeg, tracksInRelativeM
# TODO: def geo_ref_contours
def geo_ref_contours(surfTurbArea, uav, debug=False):
"""
Geo-references surface turbulence areas
:param surfTurbArea: frame of surface turbulence areas
:param uav: UAV object
:return: geo-referenced contours
"""
# Find contours from white areas
imgray = cv2.cvtColor(surfTurbArea,cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray,127,255,0)
im2, contours, hierarchy = cv2.findContours(thresh,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
if debug:
im = cv2.drawContours(surfTurbArea, contours, -1, (0,255,0), 3)
cv2.namedWindow('Areas & contours', cv2.WINDOW_NORMAL)
cv2.resizeWindow('Areas & contours', 1200, 1200)
cv2.imshow('Areas & contours', im)
# Reformating
contoursList = []
for cnt in contours:
coordsList = []
for coords in cnt:
coordsList.append(tuple(coords[0]))
contoursList.append(coordsList)
# Georeference contours
contoursInDeg, contoursInM = geo_ref_tracks(contoursList, surfTurbArea, uav, debug=debug)
return contoursInDeg<|fim▁end|>
|
# print "Choose a coordinate projection from the following list:"
# for key in pj_list:
# print key + ": " + pj_list[key]
# proj = raw_input("Type in the coordinate projection: ")
|
<|file_name|>bitcoin_da.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="da" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Saffroncoin</source>
<translation>Om Saffroncoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>Saffroncoin</b> version</source>
<translation><b>Saffroncoin</b> version</translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Dette program er ekperimentielt.
Det er gjort tilgængeligt under MIT/X11-softwarelicensen. Se den tilhørende fil "COPYING" eller http://www.opensource.org/licenses/mit-license.php.
Produktet indeholder software som er udviklet af OpenSSL Project til brug i OpenSSL Toolkit (http://www.openssl.org/), kryptografisk software skrevet af Eric Young ([email protected]) og UPnP-software skrevet af Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>Copyright</translation>
</message>
<message>
<location line="+0"/>
<source>The Saffroncoin developers</source>
<translation>Saffroncoin-udviklerne</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Adressebog</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Dobbeltklik for at redigere adresse eller mærkat</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Opret en ny adresse</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopier den valgte adresse til systemets udklipsholder</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>Ny adresse</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Saffroncoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Dette er dine Saffroncoin-adresser til at modtage betalinger med. Du kan give en forskellig adresse til hver afsender, så du kan holde styr på, hvem der betaler dig.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>Kopier adresse</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Vis QR-kode</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Saffroncoin address</source>
<translation>Underskriv en besked for at bevise, at en Saffroncoin-adresse tilhører dig</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Underskriv besked</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Slet den markerede adresse fra listen</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Eksportér den aktuelle visning til en fil</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>Eksporter</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Saffroncoin address</source>
<translation>Efterprøv en besked for at sikre, at den er underskrevet med den angivne Saffroncoin-adresse</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>Efterprøv besked</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>Slet</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Saffroncoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Disse er dine Saffroncoin-adresser for at sende betalinger. Tjek altid beløb og modtageradresse, inden du sender saffroncoins.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Kopier mærkat</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>Rediger</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Send saffroncoins</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Eksporter adressebogsdata</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Kommasepareret fil (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Fejl under eksport</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Kunne ikke skrive til filen %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Mærkat</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(ingen mærkat)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Adgangskodedialog</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Indtast adgangskode</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Ny adgangskode</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Gentag ny adgangskode</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Indtast den nye adgangskode til tegnebogen.<br/>Brug venligst en adgangskode på <b>10 eller flere tilfældige tegn</b> eller <b>otte eller flere ord</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Krypter tegnebog</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Denne funktion har brug for din tegnebogs adgangskode for at låse tegnebogen op.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Lås tegnebog op</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Denne funktion har brug for din tegnebogs adgangskode for at dekryptere tegnebogen.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dekrypter tegnebog</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Skift adgangskode</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Indtast den gamle og den nye adgangskode til tegnebogen.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Bekræft tegnebogskryptering</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR SAFFRONCOINS</b>!</source>
<translation>Advarsel: Hvis du krypterer din tegnebog og mister din adgangskode, vil du <b>MISTE ALLE DINE SAFFRONCOINS</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Er du sikker på, at du ønsker at kryptere din tegnebog?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>VIGTIGT: Enhver tidligere sikkerhedskopi, som du har lavet af tegnebogsfilen, bør blive erstattet af den nyligt genererede, krypterede tegnebogsfil. Af sikkerhedsmæssige årsager vil tidligere sikkerhedskopier af den ikke-krypterede tegnebogsfil blive ubrugelig i det øjeblik, du starter med at anvende den nye, krypterede tegnebog.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Advarsel: Caps Lock-tasten er aktiveret!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Tegnebog krypteret</translation>
</message>
<message>
<location line="-56"/>
<source>Saffroncoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your saffroncoins from being stolen by malware infecting your computer.</source>
<translation>Saffroncoin vil nu lukke for at gennemføre krypteringsprocessen. Husk på, at kryptering af din tegnebog vil ikke beskytte dine saffroncoins fuldt ud mod at blive stjålet af malware på din computer.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Tegnebogskryptering mislykkedes</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Tegnebogskryptering mislykkedes på grund af en intern fejl. Din tegnebog blev ikke krypteret.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>De angivne adgangskoder stemmer ikke overens.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Tegnebogsoplåsning mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Den angivne adgangskode for tegnebogsdekrypteringen er forkert.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Tegnebogsdekryptering mislykkedes</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Tegnebogens adgangskode blev ændret.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Underskriv besked...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Synkroniserer med netværk...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>Oversigt</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Vis generel oversigt over tegnebog</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>Transaktioner</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Gennemse transaktionshistorik</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Rediger listen over gemte adresser og mærkater</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Vis listen over adresser for at modtage betalinger</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>Luk</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Afslut program</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Saffroncoin</source>
<translation>Vis informationer om Saffroncoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Om Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Vis informationer om Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>Indstillinger...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>Krypter tegnebog...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>Sikkerhedskopier tegnebog...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>Skift adgangskode...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Importerer blokke fra disken...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Genindekserer blokke på disken...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Saffroncoin address</source>
<translation>Send saffroncoins til en Saffroncoin-adresse</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Saffroncoin</source>
<translation>Rediger konfigurationsindstillinger af Saffroncoin</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Lav sikkerhedskopi af tegnebogen til et andet sted</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Skift adgangskode anvendt til tegnebogskryptering</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>Fejlsøgningsvindue</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Åbn fejlsøgnings- og diagnosticeringskonsollen</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>Efterprøv besked...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Saffroncoin</source>
<translation>Saffroncoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Tegnebog</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>Send</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>Modtag</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>Adresser</translation>
</message>
<message>
<location line="+22"/>
<source>&About Saffroncoin</source>
<translation>Om Saffroncoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>Vis/skjul</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Vis eller skjul hovedvinduet</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Krypter de private nøgler, der hører til din tegnebog</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Saffroncoin addresses to prove you own them</source>
<translation>Underskriv beskeder med dine Saffroncoin-adresser for at bevise, at de tilhører dig</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Saffroncoin addresses</source>
<translation>Efterprøv beskeder for at sikre, at de er underskrevet med de(n) angivne Saffroncoin-adresse(r)</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>Fil</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>Indstillinger</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>Hjælp</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Faneværktøjslinje</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnetværk]</translation>
</message>
<message>
<location line="+47"/>
<source>Saffroncoin client</source>
<translation>Saffroncoin-klient</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Saffroncoin network</source>
<translation><numerusform>%n aktiv(e) forbindelse(r) til Saffroncoin-netværket</numerusform><numerusform>%n aktiv(e) forbindelse(r) til Saffroncoin-netværket</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation>Ingen blokkilde tilgængelig...</translation>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>%1 ud af %2 (estimeret) blokke af transaktionshistorikken er blevet behandlet.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>%1 blokke af transaktionshistorikken er blevet behandlet.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n time(r)</numerusform><numerusform>%n time(r)</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n dag(e)</numerusform><numerusform>%n dag(e)</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n uge(r)</numerusform><numerusform>%n uge(r)</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation>%1 bagefter</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>Senest modtagne blok blev genereret for %1 siden.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation>Transaktioner herefter vil endnu ikke være synlige.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Fejl</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Advarsel</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Information</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>Transaktionen overskrider størrelsesgrænsen. Du kan stadig sende den for et gebyr på %1, hvilket går til de knuder, der behandler din transaktion og hjælper med at understøtte netværket. Vil du betale gebyret?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Opdateret</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>Indhenter...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Bekræft transaktionsgebyr</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Afsendt transaktion</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Indgående transaktion</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Dato: %1
Beløb: %2
Type: %3
Adresse: %4
</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>URI-håndtering</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Saffroncoin address or malformed URI parameters.</source>
<translation>URI kan ikke fortolkes! Dette kan skyldes en ugyldig Saffroncoin-adresse eller misdannede URI-parametre.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Tegnebog er <b>krypteret</b> og i øjeblikket <b>ulåst</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Tegnebog er <b>krypteret</b> og i øjeblikket <b>låst</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Saffroncoin can no longer continue safely and will quit.</source>
<translation>Der opstod en fatal fejl. Saffroncoin kan ikke længere fortsætte sikkert og vil afslutte.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Netværksadvarsel</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Rediger adresse</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>Mærkat</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>Mærkaten forbundet med denne post i adressebogen</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>Adresse</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>Adressen tilknyttet til denne post i adressebogen. Dette kan kun ændres for afsendelsesadresser.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Ny modtagelsesadresse</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Ny afsendelsesadresse</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Rediger modtagelsesadresse</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Rediger afsendelsesadresse</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Den indtastede adresse "%1" er allerede i adressebogen.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Saffroncoin address.</source>
<translation>Den indtastede adresse "%1" er ikke en gyldig Saffroncoin-adresse.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Kunne ikke låse tegnebog op.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Ny nøglegenerering mislykkedes.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Saffroncoin-Qt</source>
<translation>Saffroncoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>version</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Anvendelse:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>kommandolinjetilvalg</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>Brugergrænsefladeindstillinger</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Angiv sprog, f.eks "de_DE" (standard: systemlokalitet)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Start minimeret</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Vis opstartsbillede ved start (standard: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Indstillinger</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>Generelt</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation>Valgfrit transaktionsgebyr pr. kB, der hjælper dine transaktioner med at blive behandlet hurtigt. De fleste transaktioner er på 1 kB.</translation>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Betal transaktionsgebyr</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Saffroncoin after logging in to the system.</source>
<translation>Start Saffroncoin automatisk, når der logges ind på systemet</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Saffroncoin on system login</source>
<translation>Start Saffroncoin, når systemet startes</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>Nulstil alle klientindstillinger til deres standard.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>Nulstil indstillinger</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>Netværk</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Saffroncoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Åbn Saffroncoin-klientens port på routeren automatisk. Dette virker kun, når din router understøtter UPnP og UPnP er aktiveret.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Konfigurer port vha. UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Saffroncoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Opret forbindelse til Saffroncoin-netværket via en SOCKS-proxy (f.eks. ved tilslutning gennem Tor)</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>Forbind gennem SOCKS-proxy:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>Proxy-IP:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>IP-adressen på proxyen (f.eks. 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>Port:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Porten på proxyen (f.eks. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS-version</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>SOCKS-version af proxyen (f.eks. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>Vindue</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Vis kun et statusikon efter minimering af vinduet.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>Minimer til statusfeltet i stedet for proceslinjen</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimer i stedet for at afslutte programmet, når vinduet lukkes. Når denne indstilling er valgt, vil programmet kun blive lukket, når du har valgt Afslut i menuen.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>Minimer ved lukning</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>Visning</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>Brugergrænsefladesprog:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Saffroncoin.</source>
<translation>Brugergrænsefladesproget kan angives her. Denne indstilling træder først i kraft, når Saffroncoin genstartes.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>Enhed at vise beløb i:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Vælg den standard underopdelingsenhed, som skal vises i brugergrænsefladen og ved afsendelse af saffroncoins.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Saffroncoin addresses in the transaction list or not.</source>
<translation>Afgør hvorvidt Saffroncoin-adresser skal vises i transaktionslisten eller ej.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>Vis adresser i transaktionsliste</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>Annuller</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>Anvend</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>standard</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>Bekræft nulstilling af indstillinger</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>Nogle indstillinger kan kræve, at klienten genstartes, før de træder i kraft.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>Ønsker du at fortsætte?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Advarsel</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Saffroncoin.</source>
<translation>Denne indstilling træder i kraft, efter Saffroncoin genstartes.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>Ugyldig proxy-adresse</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formular</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Saffroncoin network after a connection is established, but this process has not completed yet.</source>
<translation>Den viste information kan være forældet. Din tegnebog synkroniserer automatisk med Saffroncoin-netværket, når en forbindelse etableres, men denne proces er ikke gennemført endnu.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Ubekræftede:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Tegnebog</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Umodne:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Udvunden saldo, som endnu ikke er modnet</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Nyeste transaktioner</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Din nuværende saldo</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Summen af transaktioner, der endnu ikke er bekræftet og endnu ikke er inkluderet i den nuværende saldo</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>ikke synkroniseret</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start saffroncoin: click-to-pay handler</source>
<translation>Kan ikke starte saffroncoin: click-to-pay-håndtering</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>QR-kode-dialog</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Anmod om betaling</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Beløb:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Mærkat:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Besked:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>Gem som...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Fejl ved kodning fra URI til QR-kode</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>Det indtastede beløb er ugyldig, tjek venligst.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Resulterende URI var for lang; prøv at forkorte teksten til mærkaten/beskeden.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Gem QR-kode</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>PNG-billeder (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Klientnavn</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Klientversion</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>Information</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Anvendt OpenSSL-version</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Opstartstid</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Netværk</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Antal forbindelser</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Tilsluttet testnetværk</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Blokkæde</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Nuværende antal blokke</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Estimeret antal blokke</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Tidsstempel for seneste blok</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>Åbn</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>Kommandolinjetilvalg</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Saffroncoin-Qt help message to get a list with possible Saffroncoin command-line options.</source>
<translation>Vis Saffroncoin-Qt-hjælpebeskeden for at få en liste over de tilgængelige Saffroncoin-kommandolinjeindstillinger.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>Vis</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>Konsol</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Byggedato</translation>
</message>
<message>
<location line="-104"/>
<source>Saffroncoin - Debug window</source>
<translation>Saffroncoin - Fejlsøgningsvindue</translation>
</message>
<message>
<location line="+25"/>
<source>Saffroncoin Core</source>
<translation>Saffroncoin Core</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>Fejlsøgningslogfil</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Saffroncoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Åbn Saffroncoin-fejlsøgningslogfilen fra det nuværende datakatalog. Dette kan tage nogle få sekunder for en store logfiler.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Ryd konsol</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Saffroncoin RPC console.</source>
<translation>Velkommen til Saffroncoin RPC-konsollen</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Brug op og ned-piletasterne til at navigere historikken og <b>Ctrl-L</b> til at rydde skærmen.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Tast <b>help</b> for en oversigt over de tilgængelige kommandoer.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Send saffroncoins</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Send til flere modtagere på en gang</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>Tilføj modtager</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Fjern alle transaktionsfelter</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Ryd alle</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123,456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Bekræft afsendelsen</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>Afsend</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> til %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Bekræft afsendelse af saffroncoins</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Er du sikker på, at du vil sende %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> og </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Modtagerens adresse er ikke gyldig. Tjek venligst adressen igen.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Beløbet til betaling skal være større end 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Beløbet overstiger din saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Totalen overstiger din saldo, når %1 transaktionsgebyr er inkluderet.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Duplikeret adresse fundet. Du kan kun sende til hver adresse en gang pr. afsendelse.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>Fejl: Oprettelse af transaktionen mislykkedes!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Fejl: Transaktionen blev afvist. Dette kan ske, hvis nogle af dine saffroncoins i din tegnebog allerede er brugt, som hvis du brugte en kopi af wallet.dat og dine saffroncoins er blevet brugt i kopien, men ikke er markeret som brugt her.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Formular</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Beløb:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Betal til:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Saffroncoin-adressen som betalingen skal sendes til (f.eks. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Indtast en mærkat for denne adresse for at føje den til din adressebog</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>Mærkat:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Vælg adresse fra adressebog</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Indsæt adresse fra udklipsholderen</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Fjern denne modtager</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Saffroncoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Indtast en Saffroncoin-adresse (f.eks. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Underskrifter - Underskriv/efterprøv en besked</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>Underskriv besked</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Du kan underskrive beskeder med dine Saffroncoin-adresser for at bevise, at de tilhører dig. Pas på ikke at underskrive noget vagt, da phisingangreb kan narre dig til at overdrage din identitet. Underskriv kun fuldt detaljerede udsagn, du er enig i.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Saffroncoin-adressen som beskeden skal underskrives med (f.eks. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Vælg adresse fra adressebog</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Indsæt adresse fra udklipsholderen</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Indtast beskeden, du ønsker at underskrive</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Underskrift</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Kopier den nuværende underskrift til systemets udklipsholder</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Saffroncoin address</source>
<translation>Underskriv denne besked for at bevise, at Saffroncoin-adressen tilhører dig</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Underskriv besked</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Nulstil alle underskriv besked-indtastningsfelter</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Ryd alle</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>Efterprøv besked</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Indtast den underskrevne adresse, beskeden (inkluder linjeskift, mellemrum mv. nøjagtigt, som de fremgår) og underskriften for at efterprøve beskeden. Vær forsigtig med ikke at lægge mere i underskriften end besked selv, så du undgår at blive narret af et man-in-the-middle-angreb.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Saffroncoin-adressen som beskeden er underskrevet med (f.eks. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Saffroncoin address</source>
<translation>Efterprøv beskeden for at sikre, at den er underskrevet med den angivne Saffroncoin-adresse</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>Efterprøv besked</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>Nulstil alle efterprøv besked-indtastningsfelter</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Saffroncoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>Indtast en Saffroncoin-adresse (f.eks. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Klik "Underskriv besked" for at generere underskriften</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Saffroncoin signature</source>
<translation>Indtast Saffroncoin-underskriften</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>Den indtastede adresse er ugyldig.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Tjek venligst adressen, og forsøg igen.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>Den indtastede adresse henviser ikke til en nøgle.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Tegnebogsoplåsning annulleret.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>Den private nøgle for den indtastede adresse er ikke tilgængelig.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Underskrivning af besked mislykkedes.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Besked underskrevet.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Underskriften kunne ikke afkodes.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Tjek venligst underskriften, og forsøg igen.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>Underskriften matcher ikke beskedens indhold.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Efterprøvelse af besked mislykkedes.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Besked efterprøvet.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+25"/>
<source>The Saffroncoin developers</source>
<translation>Saffroncoin-udviklerne</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Åben indtil %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/ubekræftet</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 bekræftelser</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, transmitteret igennem %n knude(r)</numerusform><numerusform>, transmitteret igennem %n knude(r)</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Kilde</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Genereret</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Fra</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Til</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>egen adresse</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>mærkat</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Kredit</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>modner efter yderligere %n blok(ke)</numerusform><numerusform>modner efter yderligere %n blok(ke)</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>ikke accepteret</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Debet</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Transaktionsgebyr</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Nettobeløb</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Besked</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Kommentar</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>Transaktionens ID</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Genererede saffroncoins skal vente 120 blokke, før de kan blive brugt. Da du genererede denne blok, blev den transmitteret til netværket for at blive føjet til blokkæden. Hvis det mislykkes at komme ind i kæden, vil den skifte til "ikke godkendt" og ikke blive kunne bruges. Dette kan lejlighedsvis ske, hvis en anden knude genererer en blok inden for få sekunder af din.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Fejlsøgningsinformation</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transaktion</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Input</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Beløb</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>sand</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>falsk</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, er ikke blevet transmitteret endnu</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Åben %n blok yderligere</numerusform><numerusform>Åben %n blokke yderligere</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>ukendt</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Transaktionsdetaljer</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Denne rude viser en detaljeret beskrivelse af transaktionen</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Beløb</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Åben %n blok(ke) yderligere</numerusform><numerusform>Åben %n blok(ke) yderligere</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Åben indtil %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 bekræftelser)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Ubekræftet (%1 af %2 bekræftelser)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Bekræftet (%1 bekræftelser)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Udvunden saldo, som vil være tilgængelig, når den modner efter yderligere %n blok(ke)</numerusform><numerusform>Udvunden saldo, som vil være tilgængelig, når den modner efter yderligere %n blok(ke)</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Denne blok blev ikke modtaget af nogen andre knuder og vil formentlig ikke blive accepteret!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Genereret, men ikke accepteret</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Modtaget med</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Modtaget fra</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Sendt til</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Betaling til dig selv</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Udvundne</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transaktionsstatus. Hold musen over dette felt for at vise antallet af bekræftelser.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Dato og klokkeslæt for modtagelse af transaktionen.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Transaktionstype.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Destinationsadresse for transaktion.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Beløb fjernet eller tilføjet balance.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Alle</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>I dag</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Denne uge</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Denne måned</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Sidste måned</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Dette år</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Interval...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Modtaget med</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Sendt til</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Til dig selv</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Udvundne</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Andet</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Indtast adresse eller mærkat for at søge</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Minimumsbeløb</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopier adresse</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopier mærkat</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopier beløb</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>Kopier transaktionens ID</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Rediger mærkat</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Vis transaktionsdetaljer</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Eksporter transaktionsdata</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Kommasepareret fil (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Bekræftet</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Dato</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Type</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Mærkat</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Beløb</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Fejl under eksport</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Kunne ikke skrive til filen %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Interval:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>til</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Send saffroncoins</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation>Eksporter</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Eksportér den aktuelle visning til en fil</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>Sikkerhedskopier tegnebog</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>Tegnebogsdata (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Foretagelse af sikkerhedskopi fejlede</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>Der opstod en fejl i forbindelse med at gemme tegnebogsdata til det nye sted</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Sikkerhedskopieret problemfri</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>Tegnebogsdata blev problemfrit gemt til det nye sted.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Saffroncoin version</source>
<translation>Saffroncoin-version</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Anvendelse:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or saffroncoind</source>
<translation>Send kommando til -server eller saffroncoind</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Liste over kommandoer</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Få hjælp til en kommando</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Indstillinger:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: saffroncoin.conf)</source>
<translation>Angiv konfigurationsfil (standard: saffroncoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: saffroncoind.pid)</source>
<translation>Angiv pid-fil (default: saffroncoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Angiv datakatalog</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Angiv databasecachestørrelse i megabytes (standard: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 8333 or testnet: 18333)</source>
<translation>Lyt til forbindelser på <port> (standard: 8333 eller testnetværk: 18333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Oprethold højest <n> forbindelser til andre i netværket (standard: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Forbind til en knude for at modtage adresse, og afbryd</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Angiv din egen offentlige adresse</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Grænse for afbrydelse til dårlige forbindelser (standard: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Antal sekunder dårlige forbindelser skal vente før reetablering (standard: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>Der opstod en fejl ved angivelse af RPC-porten %u til at lytte på IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 8332 or testnet: 18332)</source>
<translation>Lyt til JSON-RPC-forbindelser på <port> (standard: 8332 eller testnetværk: 18332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Accepter kommandolinje- og JSON-RPC-kommandoer</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Kør i baggrunden som en service, og accepter kommandoer</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Brug testnetværket</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Accepter forbindelser udefra (standard: 1 hvis hverken -proxy eller -connect)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=saffroncoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Saffroncoin Alert" [email protected]
</source>
<translation>%s, du skal angive en RPC-adgangskode i konfigurationsfilen:
%s
Det anbefales, at du bruger nedenstående, tilfældige adgangskode:
rpcuser=saffroncoinrpc
rpcpassword=%s
(du behøver ikke huske denne adgangskode)
Brugernavnet og adgangskode MÅ IKKE være det samme.
Hvis filen ikke eksisterer, opret den og giv ingen andre end ejeren læserettighed.
Det anbefales også at angive alertnotify, så du påmindes om problemer;
f.eks.: alertnotify=echo %%s | mail -s "Saffroncoin Alert" [email protected]
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Der opstod en fejl ved angivelse af RPC-porten %u til at lytte på IPv6, falder tilbage til IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Tildel til den givne adresse og lyt altid på den. Brug [vært]:port-notation for IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Saffroncoin is probably already running.</source>
<translation>Kan ikke opnå lås på datakatalog %s. Saffroncoin er sandsynligvis allerede startet.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Fejl: Transaktionen blev afvist. Dette kan ske, hvis nogle af dine saffroncoins i din tegnebog allerede er brugt, som hvis du brugte en kopi af wallet.dat og dine saffroncoins er blevet brugt i kopien, men ikke er markeret som brugt her.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Fejl: Denne transaktion kræver et transaktionsgebyr på minimum %s pga. dens størrelse, kompleksitet eller anvendelse af nyligt modtagne saffroncoins!</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>Udfør kommando, når en relevant advarsel modtages (%s i kommandoen erstattes med beskeden)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Udfør kommando, når en transaktion i tegnebogen ændres (%s i kommandoen erstattes med TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Angiv maksimumstørrelse for høj prioritet/lavt gebyr-transaktioner i bytes (standard: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Dette er en foreløbig testudgivelse - brug på eget ansvar - brug ikke til udvinding eller handelsprogrammer</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Advarsel: -paytxfee er sat meget højt! Dette er det gebyr du vil betale, hvis du sender en transaktion.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Advarsel: Viste transaktioner kan være ukorrekte! Du eller andre knuder kan have behov for at opgradere.</translation>
</message>
<message><|fim▁hole|> <translation>Advarsel: Undersøg venligst, at din computers dato og klokkeslæt er korrekt indstillet! Hvis der er fejl i disse, vil Saffroncoin ikke fungere korrekt.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Advarsel: fejl under læsning af wallet.dat! Alle nøgler blev læst korrekt, men transaktionsdata eller adressebogsposter kan mangle eller være forkerte.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Advarsel: wallet.dat ødelagt, data reddet! Oprindelig wallet.net gemt som wallet.{timestamp}.bak i %s; hvis din saldo eller dine transaktioner er forkert, bør du genskabe fra en sikkerhedskopi.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Forsøg at genskabe private nøgler fra ødelagt wallet.dat</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Blokoprettelsestilvalg:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Tilslut kun til de(n) angivne knude(r)</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Ødelagt blokdatabase opdaget</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Find egen IP-adresse (standard: 1 når lytter og ingen -externalip)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Ønsker du at genbygge blokdatabasen nu?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>Klargøring af blokdatabase mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>Klargøring af tegnebogsdatabasemiljøet %s mislykkedes!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>Indlæsning af blokdatabase mislykkedes</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Åbning af blokdatabase mislykkedes</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>Fejl: Mangel på ledig diskplads!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Fejl: Tegnebog låst, kan ikke oprette transaktion!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>Fejl: systemfejl: </translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Lytning på enhver port mislykkedes. Brug -listen=0, hvis du ønsker dette.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>Læsning af blokinformation mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>Læsning af blok mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>Synkronisering af blokindeks mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>Skrivning af blokindeks mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>Skrivning af blokinformation mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>Skrivning af blok mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>Skriving af filinformation mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>Skrivning af saffroncoin-database mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>Skrivning af transaktionsindeks mislykkedes</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>Skrivning af genskabelsesdata mislykkedes</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Find ligeværdige ved DNS-opslag (standard: 1 hvis ikke -connect)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation>Generer saffroncoins (standard: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Antal blokke som tjekkes ved opstart (0=alle, standard: 288)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>Grundighed af efterprøvning af blokke (0-4, standard: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation>For få tilgængelige fildeskriptorer.</translation>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Genbyg blokkædeindeks fra nuværende blk000??.dat filer</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>Angiv antallet af tråde til at håndtere RPC-kald (standard: 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Efterprøver blokke...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Efterprøver tegnebog...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importerer blokke fra ekstern blk000??.dat fil</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation>Angiv nummeret af skriptefterprøvningstråde (op til 16, 0 = automatisk, <0 = efterlad det antal kerner tilgængelige, standard: 0)</translation>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>Information</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Ugyldig -tor adresse: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Ugyldigt beløb til -minrelaytxfee=<beløb>:'%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Ugyldigt beløb til -mintxfee=<beløb>:'%s'</translation>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>Vedligehold et komplet transaktionsindeks (standard: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Maksimum for modtagelsesbuffer pr. forbindelse, <n>*1000 bytes (standard: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Maksimum for afsendelsesbuffer pr. forbindelse, <n>*1000 bytes (standard: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>Accepter kun blokkæde, som matcher indbyggede kontrolposter (standard: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Tilslut kun til knuder i netværk <net> (IPv4, IPv6 eller Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Skriv ekstra fejlsøgningsinformation. Indebærer alle andre -debug* tilvalg</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Skriv ekstra netværksfejlsøgningsinformation</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Tilføj fejlsøgningsoutput med tidsstempel</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>SSL-indstillinger: (se Bitcoin Wiki for SSL-opsætningsinstruktioner)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Angiv version af SOCKS-proxyen (4-5, standard: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Send sporings-/fejlsøgningsinformation til konsollen i stedet for debug.log filen</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Send sporings-/fejlsøgningsinformation til fejlsøgningprogrammet</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Angiv maksimumblokstørrelse i bytes (standard: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Angiv minimumsblokstørrelse i bytes (standard: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Formindsk debug.log filen ved klientopstart (standard: 1 hvis ikke -debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation>Underskrift af transaktion mislykkedes</translation>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Angiv tilslutningstimeout i millisekunder (standard: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>Systemfejl: </translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation>Transaktionsbeløb er for lavt</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation>Transaktionsbeløb skal være positive</translation>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation>Transaktionen er for stor</translation>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Forsøg at bruge UPnP til at konfigurere den lyttende port (standard: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Forsøg at bruge UPnP til at konfigurere den lyttende port (standard: 1 når lytter)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Brug proxy til at tilgå Tor Hidden Services (standard: som -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Brugernavn til JSON-RPC-forbindelser</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>Advarsel</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Advarsel: Denne version er forældet, opgradering påkrævet!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>Du skal genbygge databaserne med -reindex for at ændre -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat ødelagt, redning af data mislykkedes</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Adgangskode til JSON-RPC-forbindelser</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Tillad JSON-RPC-forbindelser fra bestemt IP-adresse</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Send kommandoer til knude, der kører på <ip> (standard: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Udfør kommando, når den bedste blok ændres (%s i kommandoen erstattes med blokhash)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Opgrader tegnebog til seneste format</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Angiv nøglepoolstørrelse til <n> (standard: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Gennemsøg blokkæden for manglende tegnebogstransaktioner</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Brug OpenSSL (https) for JSON-RPC-forbindelser</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Servercertifikat-fil (standard: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Serverens private nøgle (standard: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Acceptable ciphers (standard: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Denne hjælpebesked</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Kunne ikke tildele %s på denne computer (bind returnerede fejl %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Tilslut via SOCKS-proxy</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Tillad DNS-opslag for -addnode, -seednode og -connect</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Indlæser adresser...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Fejl ved indlæsning af wallet.dat: Tegnebog ødelagt</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Saffroncoin</source>
<translation>Fejl ved indlæsning af wallet.dat: Tegnebog kræver en nyere version af Saffroncoin</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Saffroncoin to complete</source>
<translation>Det var nødvendigt at genskrive tegnebogen: genstart Saffroncoin for at gennemføre</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Fejl ved indlæsning af wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Ugyldig -proxy adresse: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Ukendt netværk anført i -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Ukendt -socks proxy-version: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Kan ikke finde -bind adressen: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Kan ikke finde -externalip adressen: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Ugyldigt beløb for -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Ugyldigt beløb</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Manglende dækning</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Indlæser blokindeks...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Tilføj en knude til at forbinde til og forsøg at holde forbindelsen åben</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Saffroncoin is probably already running.</source>
<translation>Kunne ikke tildele %s på denne computer. Saffroncoin kører sikkert allerede.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Gebyr pr. kB, som skal tilføjes til transaktioner, du sender</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Indlæser tegnebog...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Kan ikke nedgradere tegnebog</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Kan ikke skrive standardadresse</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Genindlæser...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Indlæsning gennemført</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>For at bruge %s mulighed</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Fejl</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Du skal angive rpcpassword=<password> i konfigurationsfilen:
%s
Hvis filen ikke eksisterer, opret den og giv ingen andre end ejeren læserettighed.</translation>
</message>
</context>
</TS><|fim▁end|>
|
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Saffroncoin will not work properly.</source>
|
<|file_name|>twig.functions.js<|end_file_name|><|fim▁begin|>// ## twig.functions.js
//
// This file handles parsing filters.
module.exports = function (Twig) {
/**
* @constant
* @type {string}
*/
var TEMPLATE_NOT_FOUND_MESSAGE = 'Template "{name}" is not defined.';
// Determine object type
function is(type, obj) {
var clas = Object.prototype.toString.call(obj).slice(8, -1);
return obj !== undefined && obj !== null && clas === type;
}
Twig.functions = {
// attribute, block, constant, date, dump, parent, random,.
// Range function from http://phpjs.org/functions/range:499
// Used under an MIT License
range: function (low, high, step) {
// http://kevin.vanzonneveld.net
// + original by: Waldo Malqui Silva
// * example 1: range ( 0, 12 );
// * returns 1: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
// * example 2: range( 0, 100, 10 );
// * returns 2: [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
// * example 3: range( 'a', 'i' );
// * returns 3: ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']
// * example 4: range( 'c', 'a' );
// * returns 4: ['c', 'b', 'a']
var matrix = [];
var inival, endval, plus;
var walker = step || 1;
var chars = false;
if (!isNaN(low) && !isNaN(high)) {
inival = parseInt(low, 10);
endval = parseInt(high, 10);
} else if (isNaN(low) && isNaN(high)) {
chars = true;
inival = low.charCodeAt(0);
endval = high.charCodeAt(0);
} else {
inival = (isNaN(low) ? 0 : low);
endval = (isNaN(high) ? 0 : high);
}
plus = ((inival > endval) ? false : true);
if (plus) {
while (inival <= endval) {
matrix.push(((chars) ? String.fromCharCode(inival) : inival));
inival += walker;
}
} else {
while (inival >= endval) {
matrix.push(((chars) ? String.fromCharCode(inival) : inival));
inival -= walker;
}
}
return matrix;
},
cycle: function(arr, i) {
var pos = i % arr.length;
return arr[pos];
},
dump: function() {
var EOL = '\n',
indentChar = ' ',
indentTimes = 0,
out = '',
args = Array.prototype.slice.call(arguments),
indent = function(times) {
var ind = '';
while (times > 0) {
times--;
ind += indentChar;
}
return ind;
},
displayVar = function(variable) {
out += indent(indentTimes);
if (typeof(variable) === 'object') {
dumpVar(variable);
} else if (typeof(variable) === 'function') {
out += 'function()' + EOL;
} else if (typeof(variable) === 'string') {
out += 'string(' + variable.length + ') "' + variable + '"' + EOL;
} else if (typeof(variable) === 'number') {
out += 'number(' + variable + ')' + EOL;
} else if (typeof(variable) === 'boolean') {
out += 'bool(' + variable + ')' + EOL;
}
},
dumpVar = function(variable) {
var i;
if (variable === null) {
out += 'NULL' + EOL;
} else if (variable === undefined) {
out += 'undefined' + EOL;
} else if (typeof variable === 'object') {
out += indent(indentTimes) + typeof(variable);
indentTimes++;
out += '(' + (function(obj) {
var size = 0, key;
for (key in obj) {
if (obj.hasOwnProperty(key)) {
size++;
}
}
return size;
})(variable) + ') {' + EOL;
for (i in variable) {
out += indent(indentTimes) + '[' + i + ']=> ' + EOL;
displayVar(variable[i]);
}
indentTimes--;
out += indent(indentTimes) + '}' + EOL;
} else {
displayVar(variable);
}
};
// handle no argument case by dumping the entire render context
if (args.length == 0) args.push(this.context);
Twig.forEach(args, function(variable) {
dumpVar(variable);
});
return out;
},
date: function(date, time) {
var dateObj;
if (date === undefined) {
dateObj = new Date();
} else if (Twig.lib.is("Date", date)) {
dateObj = date;
} else if (Twig.lib.is("String", date)) {
if (date.match(/^[0-9]+$/)) {
dateObj = new Date(date * 1000);
}
else {
dateObj = new Date(Twig.lib.strtotime(date) * 1000);
}
} else if (Twig.lib.is("Number", date)) {<|fim▁hole|> // timestamp
dateObj = new Date(date * 1000);
} else {
throw new Twig.Error("Unable to parse date " + date);
}
return dateObj;
},
block: function(block) {
if (this.originalBlockTokens[block]) {
return Twig.logic.parse.apply(this, [this.originalBlockTokens[block], this.context]).output;
} else {
return this.blocks[block];
}
},
parent: function() {
// Add a placeholder
return Twig.placeholders.parent;
},
attribute: function(object, method, params) {
if (Twig.lib.is('Object', object)) {
if (object.hasOwnProperty(method)) {
if (typeof object[method] === "function") {
return object[method].apply(undefined, params);
}
else {
return object[method];
}
}
}
// Array will return element 0-index
return object[method] || undefined;
},
max: function(values) {
if(Twig.lib.is("Object", values)) {
delete values["_keys"];
return Twig.lib.max(values);
}
return Twig.lib.max.apply(null, arguments);
},
min: function(values) {
if(Twig.lib.is("Object", values)) {
delete values["_keys"];
return Twig.lib.min(values);
}
return Twig.lib.min.apply(null, arguments);
},
template_from_string: function(template) {
if (template === undefined) {
template = '';
}
return Twig.Templates.parsers.twig({
options: this.options,
data: template
});
},
random: function(value) {
var LIMIT_INT31 = 0x80000000;
function getRandomNumber(n) {
var random = Math.floor(Math.random() * LIMIT_INT31);
var limits = [0, n];
var min = Math.min.apply(null, limits),
max = Math.max.apply(null, limits);
return min + Math.floor((max - min + 1) * random / LIMIT_INT31);
}
if(Twig.lib.is("Number", value)) {
return getRandomNumber(value);
}
if(Twig.lib.is("String", value)) {
return value.charAt(getRandomNumber(value.length-1));
}
if(Twig.lib.is("Array", value)) {
return value[getRandomNumber(value.length-1)];
}
if(Twig.lib.is("Object", value)) {
var keys = Object.keys(value);
return value[keys[getRandomNumber(keys.length-1)]];
}
return getRandomNumber(LIMIT_INT31-1);
},
/**
* Returns the content of a template without rendering it
* @param {string} name
* @param {boolean} [ignore_missing=false]
* @returns {string}
*/
source: function(name, ignore_missing) {
var templateSource;
var templateFound = false;
var isNodeEnvironment = typeof module !== 'undefined' && typeof module.exports !== 'undefined' && typeof window === 'undefined';
var loader;
var path;
//if we are running in a node.js environment, set the loader to 'fs' and ensure the
// path is relative to the CWD of the running script
//else, set the loader to 'ajax' and set the path to the value of name
if (isNodeEnvironment) {
loader = 'fs';
path = __dirname + '/' + name;
} else {
loader = 'ajax';
path = name;
}
//build the params object
var params = {
id: name,
path: path,
method: loader,
parser: 'source',
async: false,
fetchTemplateSource: true
};
//default ignore_missing to false
if (typeof ignore_missing === 'undefined') {
ignore_missing = false;
}
//try to load the remote template
//
//on exception, log it
try {
templateSource = Twig.Templates.loadRemote(name, params);
//if the template is undefined or null, set the template to an empty string and do NOT flip the
// boolean indicating we found the template
//
//else, all is good! flip the boolean indicating we found the template
if (typeof templateSource === 'undefined' || templateSource === null) {
templateSource = '';
} else {
templateFound = true;
}
} catch (e) {
Twig.log.debug('Twig.functions.source: ', 'Problem loading template ', e);
}
//if the template was NOT found AND we are not ignoring missing templates, return the same message
// that is returned by the PHP implementation of the twig source() function
//
//else, return the template source
if (!templateFound && !ignore_missing) {
return TEMPLATE_NOT_FOUND_MESSAGE.replace('{name}', name);
} else {
return templateSource;
}
}
};
Twig._function = function(_function, value, params) {
if (!Twig.functions[_function]) {
throw "Unable to find function " + _function;
}
return Twig.functions[_function](value, params);
};
Twig._function.extend = function(_function, definition) {
Twig.functions[_function] = definition;
};
return Twig;
};<|fim▁end|>
| |
<|file_name|>chunk.rs<|end_file_name|><|fim▁begin|>use std::io;
use std::fmt;
use std::ops::{Deref, DerefMut};
use ::aux::ReadExt;
static NAME_LENGTH: u32 = 8;
pub struct Root<R> {
pub name: String,
input: R,
buffer: Vec<u8>,
position: u32,
}
impl<R: io::Seek> Root<R> {
pub fn tell(&mut self) -> u32 {
self.input.seek(io::SeekFrom::Current(0)).unwrap() as u32
}
}
impl<R: io::Read> Root<R> {
pub fn new(name: String, input: R) -> Root<R> {
Root {
name: name,
input: input,
buffer: Vec::new(),
position: 0,
}
}
pub fn get_pos(&self) -> u32 {
self.position
}
fn skip(&mut self, num: u32) {
self.read_bytes(num);
}
pub fn read_bytes(&mut self, num: u32) -> &[u8] {
self.position += num;
self.buffer.clear();
for _ in (0.. num) {
let b = self.input.read_u8().unwrap();
self.buffer.push(b);
}<|fim▁hole|>
pub fn read_u8(&mut self) -> u8 {
self.position += 1;
self.input.read_u8().unwrap()
}
pub fn read_u32(&mut self) -> u32 {
self.position += 4;
self.input.read_u32().unwrap()
}
pub fn read_bool(&mut self) -> bool {
self.position += 1;
self.input.read_u8().unwrap() != 0
}
pub fn read_str(&mut self) -> &str {
use std::str::from_utf8;
let size = self.input.read_u8().unwrap() as u32;
self.position += 1;
let buf = self.read_bytes(size);
from_utf8(buf).unwrap()
}
pub fn enter<'b>(&'b mut self) -> Chunk<'b, R> {
let name = {
let raw = self.read_bytes(NAME_LENGTH);
let buf = match raw.iter().position(|b| *b == 0) {
Some(p) => &raw[..p],
None => raw,
};
String::from_utf8_lossy(buf)
.into_owned()
};
debug!("Entering chunk {}", name);
let size = self.read_u32();
Chunk {
name: name,
size: size,
end_pos: self.position + size,
root: self,
}
}
}
pub struct Chunk<'a, R: io::Read + 'a> {
name: String,
size: u32,
end_pos: u32,
root: &'a mut Root<R>,
}
impl<'a, R: io::Read> fmt::Display for Chunk<'a, R> {
fn fmt(&self, fm: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fm, "Chunk({}, {} left)", self.name, self.size)
}
}
impl<'a, R: io::Read> Chunk<'a, R> {
pub fn get_name(&self) -> &str {
&self.name
}
pub fn has_more(&self)-> bool {
self.root.get_pos() < self.end_pos
}
pub fn ignore(self) {
let left = self.end_pos - self.root.get_pos();
self.root.skip(left)
}
}
impl<'a, R: io::Read> Drop for Chunk<'a, R> {
fn drop(&mut self) {
debug!("Leaving chunk");
assert!(!self.has_more())
}
}
impl<'a, R: io::Read> Deref for Chunk<'a, R> {
type Target = Root<R>;
fn deref(&self) -> &Root<R> {
self.root
}
}
impl<'a, R: io::Read> DerefMut for Chunk<'a, R> {
fn deref_mut(&mut self) -> &mut Root<R> {
self.root
}
}<|fim▁end|>
|
&self.buffer
}
|
<|file_name|>largereq.js<|end_file_name|><|fim▁begin|>module.exports = {
'name': 'largereq',
'category': 'Operators',
'syntax': [
'x >= y',
'largereq(x, y)'<|fim▁hole|> 'examples': [
'2 > 1+1',
'2 >= 1+1',
'a = 3.2',
'b = 6-2.8',
'(a > b)'
],
'seealso': [
'equal', 'unequal', 'smallereq', 'smaller', 'largereq', 'compare'
]
};<|fim▁end|>
|
],
'description':
'Check if value x is larger or equal to y. Returns true if x is larger or equal to y, and false if not.',
|
<|file_name|>task_2.py<|end_file_name|><|fim▁begin|>import sys
import math
# dataset_3363_3
name = input().strip() + ".txt"
lines = ''
with open(name, 'r') as r:
for line in r:<|fim▁hole|>
from collections import Counter
result = Counter(string).most_common(1)[0]
#print(result)
output = "output_2.txt"
with open(output, 'w') as out:
out.write("{} {}\n".format(result[0], result[1]))<|fim▁end|>
|
lines += line.strip() + " "
string = [i.lower() for i in lines.split()]
|
<|file_name|>darwin.js<|end_file_name|><|fim▁begin|>'use strict';
const {app} = require('electron');
const appName = app.getName();
module.exports = {
label: appName,
submenu: [{
label: 'About ' + appName,
role: 'about',
params: {
version: '1.0.0'
}
}, {
type: 'separator'
}, {
label: 'Preferences',
event: 'prefer',
params: 'optional params'
}, {
type: 'separator'
}, {
label: 'Hide ' + appName,
accelerator: 'Command+H',
role: 'hide'
}, {<|fim▁hole|> label: 'Hide Others',
accelerator: 'Command+Shift+H',
role: 'hideothers'
}, {
label: 'Show All',
role: 'unhide'
}, {
type: 'separator'
}, {
label: 'Quit',
accelerator: 'Command+Q',
click: () => app.quit()
}]
};<|fim▁end|>
| |
<|file_name|>decompositions.rs<|end_file_name|><|fim▁begin|>use traits::operations::{Transpose, ApproxEq};
use traits::structure::{ColSlice, Eye, Indexable, Diag, SquareMat, BaseFloat};
use traits::geometry::Norm;
use std::cmp::min;
use std::ops::{Mul, Add, Sub};
/// Get the householder matrix corresponding to a reflexion to the hyperplane
/// defined by `vec`. It can be a reflexion contained in a subspace.
///
/// # Arguments
/// * `dim` - the dimension of the space the resulting matrix operates in
/// * `start` - the starting dimension of the subspace of the reflexion
/// * `vec` - the vector defining the reflection.
pub fn householder_matrix<N, V, M>(dim: usize, start: usize, vec: V) -> M
where N: BaseFloat,
M: Eye + Indexable<(usize, usize), N>,
V: Indexable<usize, N> {
let mut qk : M = Eye::new_identity(dim);
let subdim = vec.shape();
let stop = subdim + start;
assert!(dim >= stop);
for j in (start .. stop) {
for i in (start .. stop) {
unsafe {
let vv = vec.unsafe_at(i - start) * vec.unsafe_at(j - start);
let qkij = qk.unsafe_at((i, j));
qk.unsafe_set((i, j), qkij - vv - vv);
}
}
}
qk
}
/// QR decomposition using Householder reflections.
///
/// # Arguments
/// * `m` - matrix to decompose
pub fn qr<N, V, M>(m: &M) -> (M, M)
where N: BaseFloat,
V: Indexable<usize, N> + Norm<N>,
M: Copy + Eye + ColSlice<V> + Transpose + Indexable<(usize, usize), N> +
Mul<M, Output = M> {
let (rows, cols) = m.shape();
assert!(rows >= cols);
let mut q : M = Eye::new_identity(rows);
let mut r = *m;
for ite in 0..min(rows - 1, cols) {
let mut v = r.col_slice(ite, ite, rows);
let alpha =
if unsafe { v.unsafe_at(ite) } >= ::zero() {
-Norm::norm(&v)
}
else {
Norm::norm(&v)
};
unsafe {
let x = v.unsafe_at(0);
v.unsafe_set(0, x - alpha);
}
if !::is_zero(&v.normalize_mut()) {
let qk: M = householder_matrix(rows, ite, v);
r = qk * r;
q = q * Transpose::transpose(&qk);
}
}
(q, r)
}
/// Eigendecomposition of a square matrix using the qr algorithm.
pub fn eigen_qr<N, V, VS, M>(m: &M, eps: &N, niter: usize) -> (M, V)
where N: BaseFloat,
VS: Indexable<usize, N> + Norm<N>,<|fim▁hole|> let mut eigenvalues = *m;
// let mut shifter: M = Eye::new_identity(rows);
let mut iter = 0;
for _ in 0..niter {
let mut stop = true;
for j in 0..::dim::<M>() {
for i in 0..j {
if unsafe { eigenvalues.unsafe_at((i, j)) }.abs() >= *eps {
stop = false;
break;
}
}
for i in j + 1..::dim::<M>() {
if unsafe { eigenvalues.unsafe_at((i, j)) }.abs() >= *eps {
stop = false;
break;
}
}
}
if stop {
break;
}
iter = iter + 1;
let (q, r) = qr(&eigenvalues);;
eigenvalues = r * q;
eigenvectors = eigenvectors * q;
}
(eigenvectors, eigenvalues.diag())
}
/// Cholesky decomposition G of a square symmetric positive definite matrix A, such that A = G * G^T
///
/// # Arguments
/// * `m` - square symmetric positive definite matrix to decompose
pub fn cholesky<N, V, VS, M>(m: &M) -> Result<M, &'static str>
where N: BaseFloat,
VS: Indexable<usize, N> + Norm<N>,
M: Indexable<(usize, usize), N> + SquareMat<N, V> + Add<M, Output = M> +
Sub<M, Output = M> + ColSlice<VS> +
ApproxEq<N> + Copy {
let mut out = m.clone().transpose();
if !ApproxEq::approx_eq(&out, &m) {
return Err("Cholesky: Input matrix is not symmetric");
}
for i in 0 .. out.nrows() {
for j in 0 .. (i + 1) {
let mut sum: N = out[(i, j)];
for k in 0 .. j {
sum = sum - out[(i, k)] * out[(j, k)];
}
if i > j {
out[(i, j)] = sum / out[(j, j)];
}
else if sum > N::zero() {
out[(i, i)] = sum.sqrt();
}
else {
return Err("Cholesky: Input matrix is not positive definite to machine precision");
}
}
}
for i in 0 .. out.nrows() {
for j in i + 1 .. out.ncols() {
out[(i, j)] = N::zero();
}
}
return Ok(out);
}<|fim▁end|>
|
M: Indexable<(usize, usize), N> + SquareMat<N, V> + Add<M, Output = M> +
Sub<M, Output = M> + ColSlice<VS> +
ApproxEq<N> + Copy {
let mut eigenvectors: M = ::one::<M>();
|
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>extern crate compiler;
use compiler::lexer::tokenize;
use compiler::parser::parse;
#[test]
fn parser_accepts_parameterless_function_with_empty_block() {
let tokens = tokenize("fn func() { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_errors_on_parameterless_function_without_identifier() {
let tokens = tokenize("fn () { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:4"));
}
}
}
#[test]
fn parser_errors_on_parameterless_function_without_opened_block() {
let tokens = tokenize("fn func() }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:11"));
}
}
}
#[test]
fn parser_errors_on_parameterless_function_without_closed_block() {
let tokens = tokenize("fn func() { ").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("end-of-file"));
}
}
}
#[test]
fn parser_errors_on_parameterless_function_without_left_parenthesis() {
let tokens = tokenize("fn func) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:8"));
}
}
}
#[test]
fn parser_errors_on_parameterless_function_without_right_parenthesis() {
let tokens = tokenize("fn func( { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:10"));
}
}
}
#[test]
fn parser_accepts_function_with_single_parameter() {
let tokens = tokenize("fn func(a:int) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_function_with_multiple_parameters() {
let tokens = tokenize("fn func(a:int, b:double, c:float, d:bool) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_function_with_void_type() {
let tokens = tokenize("fn func(a:int, b:double, c:float, d:bool) : void { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_function_with_int_type() {
let tokens = tokenize("fn func(a:int, b:double, c:float, d:bool) : int { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_function_with_bool_type() {
let tokens = tokenize("fn func(a:int, b:double, c:float, d:bool) : bool { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_function_with_string_type() {
let tokens = tokenize("fn func(a:int, b:double, c:float, d:bool) : string { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_errors_on_function_with_missing_parameter() {
let tokens = tokenize("fn func(a:int, ) {}").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:16"));
}
}
}
#[test]
fn parser_errors_on_function_with_parameter_separator_but_no_parameters() {
let tokens = tokenize("fn func(,) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(2, errors.len());
assert!(errors[0].contains("1:9"));
assert!(errors[1].contains("1:10"));
}
}
}
#[test]
fn parser_errors_on_function_with_parameter_missing_type_and_colon() {
let tokens = tokenize("fn func(a:int, b:double, c, d:bool) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:27"));
}
}
}
#[test]
fn parser_errors_on_function_with_parameter_missing_type() {
let tokens = tokenize("fn func(a:int, b:double, c:, d:bool) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:28"));
}
}
}
#[test]
fn parser_errors_on_function_with_parameter_missing_colon() {
let tokens = tokenize("fn func(a:int, bdouble ) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:24"));
}
}
}
#[test]
fn parser_errors_on_function_with_parameters_and_missing_left_parenthesis() {
let tokens = tokenize("fn func a:int, b:double, c:float, d:bool) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:9"));
}
}
}
#[test]
fn parser_errors_on_function_with_parameters_and_missing_right_parenthesis() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("1:43"));
}
}
}
#[test]
fn parser_errors_with_correct_errors_with_multiple_errors_in_declaration() {
let tokens = tokenize("fn (aint, b:, float, d:bool { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(5, errors.len());
assert!(errors[0].contains("1:5"));
assert!(errors[1].contains("1:10"));
assert!(errors[2].contains("1:14"));
assert!(errors[3].contains("1:16"));
assert!(errors[4].contains("1:30"));
}
}
}
#[test]
fn parse_parses_single_variable_declaration_with_constant_value_correctly() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool) { let a:int = 5; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parse_parses_multiple_variable_declarations_with_constant_values_correctly() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool)
{ let a:int = 5; let b:double = 0.434; let c:float = .343f;
let d:string = \"dasdad\"; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parse_errors_on_variable_declaration_with_missing_semicolon() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool) {\n let a:int = 5 }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("2:17"));
}
}
}
#[test]
fn parse_errors_on_variable_declaration_with_missing_type() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool) {\n let a = 5; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("2:9"));
}
}
}
#[test]
fn parse_errors_on_variable_declaration_with_missing_name() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool) {\n let :int = 5; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("2:7"));
}
}
}
#[test]
fn parse_errors_on_variable_declaration_with_missing_colon() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool) {\n let aint = 5; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),<|fim▁hole|> Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("2:12"));
}
}
}
#[test]
fn parse_errors_on_variable_declaration_with_missing_let() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool) {\n a:int = 5; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(errors) => {
assert_eq!(1, errors.len());
assert!(errors[0].contains("2:4"));
}
}
}
#[test]
fn parser_parses_blocks_inside_blocks_correctly() {
let tokens = tokenize("fn func (a:int, b:double, c:float, d:bool) { let a:int = 5; { let b:double = \"as\"; { } { let c:float=.232; }}}").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_gives_corret_error_messages_on_two_different_invalid_function_declarations() {
let tokens = tokenize("fn invalid_dec(b:int, ) {\n let a:int = 5; }\nfn (a:int) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(2, err.len());
assert!(err[0].contains("1:23"));
assert!(err[1].contains("3:4"));
}
}
}
#[test]
fn parser_gives_correct_error_message_on_invalid_function_argument_definition_and_invalid_variable_declaration() {
let tokens = tokenize("fn invalid_dec(b:int, ) {\n let a:= 5; }\nfn func (a:int) { }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(2, err.len());
assert!(err[0].contains("1:23"));
assert!(err[1].contains("2:8"));
}
}
}
#[test]
fn parser_gives_correct_error_message_on_invalid_function_definition_and_invalid_variable_declaration_in_next_function() {
let tokens = tokenize("invalid_dec(b:int) {\n }\nfn func (a:int) {\n let a; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(2, err.len());
assert!(err[0].contains("1:1"));
assert!(err[1].contains("4:7"));
}
}
}
#[test]
fn parser_accepts_arithmetic_expression() {
let tokens = tokenize("fn foo() { let a:int = 4 + +2 - -5 + 6*(7+1) - b; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(err) => assert!(false),
}
}
#[test]
fn parser_errors_on_arithmetic_expression_with_missing_operator() {
let tokens = tokenize("fn foo() { let a:int = 5 6*(7+1) - b; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
println!("{}", err[0]);
assert_eq!(1, err.len());
assert!(err[0].contains("1:26"));
}
}
}
#[test]
fn parser_errors_on_arithmetic_expression_with_too_many_left_parenthesis() {
let tokens = tokenize("fn foo() { let a:int = 5 + 6*((7+1) - b; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("1:40"));
}
}
}
#[test]
fn parser_errors_on_arithmetic_expression_with_too_many_right_parenthesis() {
let tokens = tokenize("fn foo() { let a:int = 5 + 6*(7+1)) - b; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("1:35"));
}
}
}
#[test]
fn parser_gives_correct_error_messages_on_two_different_arithmetic_expression_with_errors() {
let tokens = tokenize("fn foo() { let a:int = 5 + 6*(7+1)) - b;\nlet b:int = 5 }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(2, err.len());
assert!(err[0].contains("1:35"));
assert!(err[1].contains("2:15"));
}
}
}
#[test]
fn parser_accepts_variable_assignments() {
let tokens = tokenize("fn foo() { \na = 5;\nb = 4*a-5*(3*(7-4)); } ").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_gives_correct_error_messages_on_invalid_assignments() {
let tokens = tokenize("fn foo() { \ninvalid = ;\ncorrect=23;\ninvalid 4*a-5*(3*(7-4));\ncorrect=321;\ninvalid=23 }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(3, err.len());
assert!(err[0].contains("2:11"));
assert!(err[1].contains("4:9"));
assert!(err[2].contains("6:12"));
}
}
}
#[test]
fn parser_accepts_expression_with_equality_operator() {
let tokens = tokenize("fn foo() { b = a + 5*(2+3) == y - 3;}").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_accepts_expression_with_greater_than_operator() {
let tokens = tokenize("fn foo() { b = a + 5*(2+3) > y - 3;}").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_accepts_expression_with_lesser_than_operator() {
let tokens = tokenize("fn foo() { b = a + 5*(2+3) < y - 3;}").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_accepts_expression_with_greater_or_equal_operator() {
let tokens = tokenize("fn foo() { b = a + 5*(2+3) >= y - 3;}").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_accepts_expression_with_smaller_or_equal_operator() {
let tokens = tokenize("fn foo() { b = a + 5*(2+3) >= y - 3;}").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_accepts_expression_with_order_comparison_and_equality_operator() {
let tokens = tokenize("fn foo() { b = (a + 5*(2+3) >= y - 3) == false;}").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_errors_correctly_when_syntax_error_is_after_equality_operator() {
let tokens = tokenize("fn foo() {\na = 5 == 3*7+; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:14"));
}
}
}
#[test]
fn parser_errors_correctly_when_syntax_error_is_after_greater_than_operator() {
let tokens = tokenize("fn foo() {\na = 5 > 3*7+; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:13"));
}
}
}
#[test]
fn parser_accepts_function_call_syntax() {
let tokens = tokenize("fn foo() { bar(); bar(1); bar(5, 6, 7, 8); bar(5*5+a-b/C, 2); }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false),
}
}
#[test]
fn parser_errors_on_missing_left_parenthesis_with_function_call() {
let tokens = tokenize("fn foo() { \nbar5); }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:5"));
}
}
}
#[test]
fn parser_errors_on_missing_right_parenthesis_with_function_call() {
let tokens = tokenize("fn foo() { \nbar(5; }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:6"));
}
}
}
#[test]
fn parser_errors_on_missing_identifier_with_function_call() {
let tokens = tokenize("fn foo() { \n(5); }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:1"));
}
}
}
#[test]
fn parser_errors_on_missing_parameter_with_function_call() {
let tokens = tokenize("fn foo() { \nbar(5,); }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:7"));
}
}
}
#[test]
fn parser_errors_when_only_comma_present_with_function_call() {
let tokens = tokenize("fn foo() { \nbar(,); }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(2, err.len());
assert!(err[0].contains("2:5"));
assert!(err[1].contains("2:6"));
}
}
}
#[test]
fn parser_gives_error_messages_for_multiple_issues_with_arguments() {
let tokens = tokenize("fn foo() { \nbar(a+*3, a-b+, vava+,); }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(4, err.len());
assert!(err[0].contains("2:7"));
assert!(err[1].contains("2:15"));
assert!(err[2].contains("2:22"));
assert!(err[3].contains("2:23"));
}
}
}
#[test]
fn parser_accepts_for_loop() {
let tokens = tokenize("fn foo() { for (let a:int = 0; a < 10; a = a + 1) { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_for_loop_without_initialization_or_expressions() {
let tokens = tokenize("fn foo() { for (;;) { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_for_loop_with_variable_assignment() {
let tokens = tokenize("fn foo() { for (a=5;;) { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn for_loop_with_various_parse_errors_is_reported_correctly() {
let tokens = tokenize("fn foo() {\nfor (let a:int 5; a < 5+; a=a+) {\n let a = 5; } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(4, err.len());
assert!(err[0].contains("2:16"));
assert!(err[1].contains("2:25"));
assert!(err[2].contains("2:31"));
assert!(err[3].contains("3:8"));
}
}
}
#[test]
fn parser_accepts_if_statement() {
let tokens = tokenize("fn foo() { if (a == 5) { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_if_elseif_statement() {
let tokens = tokenize("fn foo() { if (a == 5) { } elif(a == 6) { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_if_else_statement() {
let tokens = tokenize("fn foo() { if (a == 5) { } else { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_if_elseif_else_statement() {
let tokens = tokenize("fn foo() { if (a == 5) { } elif(a == 6) { } else { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_accepts_if_multiple_elseif_else_statement() {
let tokens = tokenize("fn foo() { if (a == 5) { } elif(a == 6) { } elif(a == 7) { } else { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(true),
Err(..) => assert!(false)
}
}
#[test]
fn parser_errors_on_else_with_condition() {
let tokens = tokenize("fn foo() {\nif (a == 5) { } else(a == 6) { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:21"));
}
}
}
#[test]
fn parser_errors_on_multiple_else_blocks() {
let tokens = tokenize("fn foo() {\nif (a == 5) { } else { } else { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:26"));
}
}
}
#[test]
fn parser_errors_if_else_if_and_else_blocks_are_in_wrong_order() {
let tokens = tokenize("fn foo() {\nif (a == 5) { } else { } elif(a==6) { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:26"));
}
}
}
#[test]
fn parser_errors_on_missing_expression_with_if_block() {
let tokens = tokenize("fn foo() {\nif { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(2, err.len());
assert!(err[0].contains("2:4"));
assert!(err[1].contains("2:4"));
}
}
}
#[test]
fn parser_errors_on_missing_expression_with_elif_block() {
let tokens = tokenize("fn foo() {\nif (a == 5) { } elif { } else { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert!(err.len() > 0);
assert!(err[0].contains("2:22"));
}
}
}
#[test]
fn parser_errors_on_missing_if_block_portion() {
let tokens = tokenize("fn foo() {\nif (a == 5) elif(a == 5) { } else { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:13"));
}
}
}
#[test]
fn parser_errors_on_missing_elif_block_portion() {
let tokens = tokenize("fn foo() {if (a == 5) { } \nelif(a == 5) else { } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:14"));
}
}
}
#[test]
fn parser_errors_on_missing_else_block_portion() {
let tokens = tokenize("fn foo() {if (a == 5) { } elif(a == 6) { } \nelse }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => {
assert_eq!(1, err.len());
assert!(err[0].contains("2:6"));
}
}
}
// this test is kinda questionable; error messages are all over the place
#[test]
fn parser_detects_multiple_errors_correctly_on_if_elif_else_statement() {
let tokens = tokenize("fn foo() {\nif (a == ) \n{ let a = 5; } \nelif(== 6) \n{ let b = 4; } \nelif (a == elif(b == 6) \n{ let f = 4; }\nelse } }").unwrap();
match parse(tokens) {
Ok(..) => assert!(false),
Err(err) => assert!(true),
}
}<|fim▁end|>
| |
<|file_name|>bonus.py<|end_file_name|><|fim▁begin|>import os
import common_pygame
import random
pygame = common_pygame.pygame
screen = common_pygame.screen
<|fim▁hole|>class Bonus():
def __init__(self, sounds, menu):
self.menu = menu
self.sounds = sounds
self.bonusType = 0
self.bonusAnim = 0
self.font = pygame.font.Font(None, 64)
self.bonusList = list()
self.bonusList.append(self.font.render(
str("plasma gun !"), True, (255, 255, 0)))
self.score = 0
self.bonuscount = 1
def ProcessBonus(self, ship):
# if ship.score %200 ==0 and ship.weapon==1 and ship.score>0:
if ship.score > 400 * self.bonuscount and self.score < 400 * self.bonuscount:
self.menu.play_sound(self.sounds["plasmagun.wav"])
ship.setWeapon(2)
self.bonusType = 0
self.bonusAnim = 30
self.score = ship.score
self.bonuscount = self.bonuscount + 1
if self.bonusAnim > 0:
self.bonusAnim = self.bonusAnim - 1
# show bonus for the plasma weapon
if self.bonusType == 0:
screen.blit(self.bonusList[0], (250, 250))<|fim▁end|>
| |
<|file_name|>video.rs<|end_file_name|><|fim▁begin|>use sdl2;
pub fn main() {
sdl2::init(sdl2::InitVideo);
let window = match sdl2::video::Window::new("rust-sdl2 demo: Video", sdl2::video::PosCentered, sdl2::video::PosCentered, 800, 600, sdl2::video::OpenGL) {
Ok(window) => window,
Err(err) => fail!(format!("failed to create window: {}", err))
};
let renderer = match sdl2::render::Renderer::from_window(window, sdl2::render::DriverAuto, sdl2::render::Accelerated) {
Ok(renderer) => renderer,
Err(err) => fail!(format!("failed to create renderer: {}", err))
};
let _ = renderer.set_draw_color(sdl2::pixels::RGB(255, 0, 0));
let _ = renderer.clear();
renderer.present();
'main : loop {
'event : loop {
match sdl2::event::poll_event() {
sdl2::event::QuitEvent(_) => break 'main,
sdl2::event::KeyDownEvent(_, _, key, _, _) => {
if key == sdl2::keycode::EscapeKey {
break 'main
}<|fim▁hole|> },
sdl2::event::NoEvent => break 'event,
_ => {}
}
}
}
sdl2::quit();
}<|fim▁end|>
| |
<|file_name|>dirichlet.py<|end_file_name|><|fim▁begin|>from __future__ import division
import math
import numpy as np
from time import time
import sympy as sp
import mpmath as mp
from mpmath.ctx_mp_python import mpf
from scipy.misc import factorial
from scipy.special import gamma
precision = 53
mp.prec = precision
mp.pretty = True
def calculate_factorial_ratio(n, i):
# This function calculates (n + i - 1)! / (n - i)!
mp.dps = 50
k = (n - i)
result = 1
for j in range(k + 2*i - 1, k, -1):
result = mp.fmul(result, j)
return result
def n_choose_k(n, k):
j = n - k
numerator = 1
for i in range(1, k + 1):
numerator *= (j + i)
denominator = factorial(k)
return numerator / denominator
def dirichlet_eta(s, N):
def calculate_d_n(n):
total = 0.0
for k in range(n + 1):
if k % 2 == 0:
alternating_factor = 1
else:
alternating_factor = -1
total += alternating_factor * n_choose_k(n, k) / ( k + 1)**s
return total
eta = 0.0
for n in range(N + 1):
d_n = calculate_d_n(n)
eta += d_n / (2**(n + 1))
return eta
<|fim▁hole|>
return zeta
def riemann_siegel_theta(t):
first_term = np.angle( gamma( (2.j*t + 1) / 4) )
second_term = t * np.log(np.pi) / 2
return first_term - second_term
def zeta_function(s, N):
z = alternating_series(s, N)
return z
def z_function(t, N=100000):
zeta = zeta_function(1/2 + (1.j)*t, N)
return mp.re( np.exp( 1.j * riemann_siegel_theta(t) ) * zeta )
def calculate_z(t): # Convenient wrapper to use for roots.py
return z_function(t, N=25)
if __name__ == '__main__':
# print zeta_function(s=1/2 + 25.j, N=1000)
# print z_function(t=18, N=100)
start = time()
eta = dirichlet_eta(1, N=25)
print eta
print abs(eta - np.log(2))
end = time()
print "Calculated using alternating series in {:.4f} seconds.".format(float(end - start))<|fim▁end|>
|
def alternating_series(s, N):
eta = dirichlet_eta(s, N)
denominator = 1 - 2**(1 - s)
zeta = eta / denominator
|
<|file_name|>tab_search_item_test.ts<|end_file_name|><|fim▁begin|>// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'chrome://webui-test/mojo_webui_test_support.js';
import {RecentlyClosedTab, Tab, TabAlertState, TabData, TabGroup, TabGroupColor, TabItemType, TabSearchItem} from 'chrome://tab-search.top-chrome/tab_search.js';
import {assertDeepEquals, assertEquals, assertNotEquals} from 'chrome://webui-test/chai_assert.js';
import {flushTasks} from 'chrome://webui-test/test_util.js';
import {createTab, sampleToken} from './tab_search_test_data.js';
suite('TabSearchItemTest', () => {
let tabSearchItem: TabSearchItem;
async function setupTest(data: TabData) {
tabSearchItem = document.createElement('tab-search-item');
tabSearchItem.data = data;
document.body.innerHTML = '';
document.body.appendChild(tabSearchItem);
await flushTasks();
}
async function assertTabSearchItemHighlights(
text: string,
fieldHighlightRanges: Array<{start: number, length: number}>|null,
expected: string[]) {
const data = new TabData(
createTab({
active: true,
isDefaultFavicon: true,
showIcon: true,
title: text,
}),
TabItemType.OPEN_TAB, text);
if (fieldHighlightRanges !== null) {
data.highlightRanges = {
'tab.title': fieldHighlightRanges,
hostname: fieldHighlightRanges,
};
}
await setupTest(data);
assertHighlight(tabSearchItem.$['primaryText'], expected);
assertHighlight(tabSearchItem.$['secondaryText'], expected);
}
function assertHighlight(node: HTMLElement, expected: string[]) {
assertDeepEquals(
expected,
Array.from(node.querySelectorAll('.search-highlight-hit'))
.map(e => e ? e.textContent : ''));
}
test('Highlight', async () => {
const text = 'Make work better';
await assertTabSearchItemHighlights(text, null, []);
await assertTabSearchItemHighlights(
text, [{start: 0, length: text.length}], ['Make work better']);
await assertTabSearchItemHighlights(
text, [{start: 0, length: 4}], ['Make']);
await assertTabSearchItemHighlights(
text, [{start: 0, length: 4}, {start: 10, length: 6}],
['Make', 'better']);
await assertTabSearchItemHighlights(
text, [{start: 5, length: 4}], ['work']);
});
test('CloseButtonPresence', async () => {
await setupTest(new TabData(<|fim▁hole|> }),
TabItemType.OPEN_TAB, 'example'));
let tabSearchItemCloseButton =
tabSearchItem.shadowRoot!.querySelector('cr-icon-button');
assertNotEquals(null, tabSearchItemCloseButton);
await setupTest(new TabData(
{
tabId: 0,
title: 'Example.com site',
url: {url: 'https://example.com'},
groupId: undefined,
lastActiveTime: {internalValue: BigInt(0)},
lastActiveElapsedText: '',
} as RecentlyClosedTab,
TabItemType.RECENTLY_CLOSED_TAB, 'example'));
tabSearchItemCloseButton =
tabSearchItem.shadowRoot!.querySelector('cr-icon-button');
assertEquals(null, tabSearchItemCloseButton);
});
test('GroupDetailsPresence', async () => {
const token = sampleToken(1n, 1n);
const tab: Tab = createTab({
active: true,
isDefaultFavicon: true,
showIcon: true,
groupId: token,
});
const tabGroup: TabGroup = {
id: token,
color: TabGroupColor.kBlue,
title: 'Examples',
};
const tabData = new TabData(tab, TabItemType.OPEN_TAB, 'example');
tabData.tabGroup = tabGroup;
await setupTest(tabData);
const groupDotElement =
tabSearchItem.shadowRoot!.querySelector('#groupDot')!;
assertNotEquals(null, groupDotElement);
const groupDotComputedStyle = getComputedStyle(groupDotElement!);
assertEquals(
groupDotComputedStyle.getPropertyValue('--tab-group-color-blue'),
groupDotComputedStyle.getPropertyValue('--group-dot-color'));
assertNotEquals(
null, tabSearchItem.shadowRoot!.querySelector('#groupTitle'));
});
test('MediaAlertIndicatorPresence', async () => {
const token = sampleToken(1n, 1n);
const tab: Tab = createTab({
active: true,
alertStates: [TabAlertState.kMediaRecording, TabAlertState.kAudioPlaying],
isDefaultFavicon: true,
showIcon: true,
groupId: token,
});
await setupTest(new TabData(tab, TabItemType.OPEN_TAB, 'example'));
const recordingMediaAlert =
tabSearchItem.shadowRoot!.querySelector<HTMLElement>('#mediaAlert');
assertNotEquals(null, recordingMediaAlert);
assertEquals('media-recording', recordingMediaAlert!.getAttribute('class'));
});
});<|fim▁end|>
|
createTab({
active: true,
isDefaultFavicon: true,
showIcon: true,
|
<|file_name|>script26_plot.py<|end_file_name|><|fim▁begin|>from matplotlib import pylab as plt
(x,y,yEA) = in_object
print(in_object)
plt.plot(x,y/y.max(),label="Fully coherent")
plt.plot(x,yEA/yEA.max(),label="Partial coherent")
plt.xlabel("Z [um]")
plt.ylabel("Intensity [Arbitrary Units]")<|fim▁hole|>plt.show()<|fim▁end|>
|
plt.legend()
|
<|file_name|>ProjectAuthorization.java<|end_file_name|><|fim▁begin|>package net.ontrack.core.security;
import lombok.Data;
import net.ontrack.core.model.AccountSummary;
@Data
public class ProjectAuthorization {<|fim▁hole|>
private final int project;
private final AccountSummary account;
private final ProjectRole role;
}<|fim▁end|>
| |
<|file_name|>test_http_headers.py<|end_file_name|><|fim▁begin|>import unittest
import copy
from scrapy.http import Headers
class HeadersTest(unittest.TestCase):
def test_basics(self):
h = Headers({'Content-Type': 'text/html', 'Content-Length': 1234})
assert h['Content-Type']
assert h['Content-Length']
self.assertRaises(KeyError, h.__getitem__, 'Accept')
self.assertEqual(h.get('Accept'), None)
self.assertEqual(h.getlist('Accept'), [])
self.assertEqual(h.get('Accept', '*/*'), '*/*')
self.assertEqual(h.getlist('Accept', '*/*'), ['*/*'])
self.assertEqual(h.getlist('Accept', ['text/html', 'images/jpeg']), ['text/html','images/jpeg'])
def test_single_value(self):
h = Headers()
h['Content-Type'] = 'text/html'
self.assertEqual(h['Content-Type'], 'text/html')
self.assertEqual(h.get('Content-Type'), 'text/html')
self.assertEqual(h.getlist('Content-Type'), ['text/html'])
def test_multivalue(self):
h = Headers()
h['X-Forwarded-For'] = hlist = ['ip1', 'ip2']
self.assertEqual(h['X-Forwarded-For'], 'ip2')
self.assertEqual(h.get('X-Forwarded-For'), 'ip2')
self.assertEqual(h.getlist('X-Forwarded-For'), hlist)
assert h.getlist('X-Forwarded-For') is not hlist
def test_encode_utf8(self):
h = Headers({u'key': u'\xa3'}, encoding='utf-8')
key, val = dict(h).items()[0]
assert isinstance(key, str), key
assert isinstance(val[0], str), val[0]
self.assertEqual(val[0], '\xc2\xa3')
def test_encode_latin1(self):
h = Headers({u'key': u'\xa3'}, encoding='latin1')
key, val = dict(h).items()[0]
self.assertEqual(val[0], '\xa3')
def test_encode_multiple(self):
h = Headers({u'key': [u'\xa3']}, encoding='utf-8')
key, val = dict(h).items()[0]
self.assertEqual(val[0], '\xc2\xa3')
def test_delete_and_contains(self):
h = Headers()
h['Content-Type'] = 'text/html'
assert 'Content-Type' in h
del h['Content-Type']
assert 'Content-Type' not in h
def test_setdefault(self):
h = Headers()
hlist = ['ip1', 'ip2']
olist = h.setdefault('X-Forwarded-For', hlist)
assert h.getlist('X-Forwarded-For') is not hlist
assert h.getlist('X-Forwarded-For') is olist
h = Headers()
olist = h.setdefault('X-Forwarded-For', 'ip1')
self.assertEqual(h.getlist('X-Forwarded-For'), ['ip1'])
assert h.getlist('X-Forwarded-For') is olist
def test_iterables(self):
idict = {'Content-Type': 'text/html', 'X-Forwarded-For': ['ip1', 'ip2']}
h = Headers(idict)
self.assertEqual(dict(h), {'Content-Type': ['text/html'], 'X-Forwarded-For': ['ip1', 'ip2']})
self.assertEqual(h.keys(), ['X-Forwarded-For', 'Content-Type'])
self.assertEqual(h.items(), [('X-Forwarded-For', ['ip1', 'ip2']), ('Content-Type', ['text/html'])])
self.assertEqual(list(h.iteritems()),
[('X-Forwarded-For', ['ip1', 'ip2']), ('Content-Type', ['text/html'])])
self.assertEqual(h.values(), ['ip2', 'text/html'])
def test_update(self):
h = Headers()
h.update({'Content-Type': 'text/html', 'X-Forwarded-For': ['ip1', 'ip2']})
self.assertEqual(h.getlist('Content-Type'), ['text/html'])
self.assertEqual(h.getlist('X-Forwarded-For'), ['ip1', 'ip2'])
def test_copy(self):
h1 = Headers({'header1': ['value1', 'value2']})
h2 = copy.copy(h1)
self.assertEqual(h1, h2)
self.assertEqual(h1.getlist('header1'), h2.getlist('header1'))
assert h1.getlist('header1') is not h2.getlist('header1')
assert isinstance(h2, Headers)
def test_appendlist(self):
h1 = Headers({'header1': 'value1'})
h1.appendlist('header1', 'value3')
self.assertEqual(h1.getlist('header1'), ['value1', 'value3'])
h1 = Headers()
h1.appendlist('header1', 'value1')
h1.appendlist('header1', 'value3')
self.assertEqual(h1.getlist('header1'), ['value1', 'value3'])
def test_setlist(self):
h1 = Headers({'header1': 'value1'})
self.assertEqual(h1.getlist('header1'), ['value1'])
h1.setlist('header1', ['value2', 'value3'])
self.assertEqual(h1.getlist('header1'), ['value2', 'value3'])
def test_setlistdefault(self):
h1 = Headers({'header1': 'value1'})
h1.setlistdefault('header1', ['value2', 'value3'])
h1.setlistdefault('header2', ['value2', 'value3'])
self.assertEqual(h1.getlist('header1'), ['value1'])
self.assertEqual(h1.getlist('header2'), ['value2', 'value3'])
def test_none_value(self):
h1 = Headers()<|fim▁hole|> h1.setdefault('foo', 'bar')
self.assertEqual(h1.get('foo'), None)
self.assertEqual(h1.getlist('foo'), [])<|fim▁end|>
|
h1['foo'] = 'bar'
h1['foo'] = None
|
<|file_name|>loadFile.py<|end_file_name|><|fim▁begin|>import os
import logging
import pandas as pd
from dataactvalidator.app import createApp
from dataactvalidator.scripts.loaderUtils import LoaderUtils
from dataactcore.interfaces.db import GlobalDB
from dataactcore.models.domainModels import CGAC, ObjectClass, ProgramActivity
from dataactcore.config import CONFIG_BROKER
logger = logging.getLogger(__name__)<|fim▁hole|>def loadCgac(filename):
"""Load CGAC (high-level agency names) lookup table."""
model = CGAC
with createApp().app_context():
sess = GlobalDB.db().session
# for CGAC, delete and replace values
sess.query(model).delete()
# read CGAC values from csv
data = pd.read_csv(filename, dtype=str)
# clean data
data = LoaderUtils.cleanData(
data,
model,
{"cgac": "cgac_code", "agency": "agency_name"},
{"cgac_code": {"pad_to_length": 3}}
)
# de-dupe
data.drop_duplicates(subset=['cgac_code'], inplace=True)
# insert to db
table_name = model.__table__.name
num = LoaderUtils.insertDataframe(data, table_name, sess.connection())
sess.commit()
logger.info('{} records inserted to {}'.format(num, table_name))
def loadObjectClass(filename):
"""Load object class lookup table."""
model = ObjectClass
with createApp().app_context():
sess = GlobalDB.db().session
# for object class, delete and replace values
sess.query(model).delete()
data = pd.read_csv(filename, dtype=str)
data = LoaderUtils.cleanData(
data,
model,
{"max_oc_code": "object_class_code",
"max_object_class_name": "object_class_name"},
{}
)
# de-dupe
data.drop_duplicates(subset=['object_class_code'], inplace=True)
# insert to db
table_name = model.__table__.name
num = LoaderUtils.insertDataframe(data, table_name, sess.connection())
sess.commit()
logger.info('{} records inserted to {}'.format(num, table_name))
def loadProgramActivity(filename):
"""Load program activity lookup table."""
model = ProgramActivity
with createApp().app_context():
sess = GlobalDB.db().session
# for program activity, delete and replace values??
sess.query(model).delete()
data = pd.read_csv(filename, dtype=str)
data = LoaderUtils.cleanData(
data,
model,
{"year": "budget_year",
"agency_id": "agency_id",
"alloc_id": "allocation_transfer_id",
"account": "account_number",
"pa_code": "program_activity_code",
"pa_name": "program_activity_name"},
{"program_activity_code": {"pad_to_length": 4},
"agency_id": {"pad_to_length": 3},
"allocation_transfer_id": {"pad_to_length": 3, "keep_null": True},
"account_number": {"pad_to_length": 4}
}
)
# because we're only loading a subset of program activity info,
# there will be duplicate records in the dataframe. this is ok,
# but need to de-duped before the db load.
data.drop_duplicates(inplace=True)
# insert to db
table_name = model.__table__.name
num = LoaderUtils.insertDataframe(data, table_name, sess.connection())
sess.commit()
logger.info('{} records inserted to {}'.format(num, table_name))
def loadDomainValues(basePath, localProgramActivity = None):
"""Load all domain value files.
Parameters
----------
basePath : directory that contains the domain values files.
localProgramActivity : optional location of the program activity file (None = use basePath)
"""
logger.info('Loading CGAC')
loadCgac(os.path.join(basePath,"cgac.csv"))
logger.info('Loading object class')
loadObjectClass(os.path.join(basePath,"object_class.csv"))
logger.info('Loading program activity')
if localProgramActivity is not None:
loadProgramActivity(localProgramActivity)
else:
loadProgramActivity(os.path.join(basePath, "program_activity.csv"))
if __name__ == '__main__':
loadDomainValues(
os.path.join(CONFIG_BROKER["path"], "dataactvalidator", "config")
)<|fim▁end|>
|
logging.basicConfig(level=logging.INFO)
|
<|file_name|>channels.js<|end_file_name|><|fim▁begin|>/**
* IRCAnywhere server/channels.js
*
* @title ChannelManager
* @copyright (c) 2013-2014 http://ircanywhere.com
* @license GPL v2
* @author Ricki Hastings
*/
var _ = require('lodash'),
hooks = require('hooks');
/**
* This object is responsible for managing everything related to channel records, such as
* the handling of joins/parts/mode changes/topic changes and such.
* As always these functions are extendable and can be prevented or extended by using hooks.
*
* @class ChannelManager
* @method ChannelManager
* @return void
*/
function ChannelManager() {
this.channel = {
network: '',
channel: '',
topic: {},
modes: ''
};
// a default channel object
}
/**
* Gets a tab record from the parameters passed in, strictly speaking this doesn't have to
* be a channel, a normal query window will also be returned. However this class doesn't
* need to work with anything other than channels.
*
* A new object is created but not inserted into the database if the channel doesn't exist.
*
* @method getChannel
* @param {String} network A network string such as 'freenode'
* @param {String} channel The name of a channel **with** the hash key '#ircanywhere'
* @return {Object} A channel object straight out of the database.
*/
ChannelManager.prototype.getChannel = function(network, channel) {
var chan = application.Tabs.sync.findOne({network: network, target: channel});
if (!chan) {
var chan = _.clone(this.channel);
chan.network = network;
chan.channel = channel;
}
return chan;
}
/**
* Inserts a user or an array of users into a channel record matching the network key
* network name and channel name, with the option to force an overwrite
*
* @method insertUsers
* @param {ObjectID} key A valid Mongo ObjectID for the networks collection
* @param {String} network The network name, such as 'freenode'
* @param {String} channel The channel name '#ircanywhere'
* @param {Array[Object]} users An array of valid user objects usually from a who/join output
* @param {Boolean} [force] Optional boolean whether to overwrite the contents of the channelUsers
* @return {Array} The final array of the users inserted
*/
ChannelManager.prototype.insertUsers = function(key, network, channel, users, force) {
var force = force || false,
channel = channel.toLowerCase(),
burst = (users.length > 1) ? true : false,
find = [],
chan = this.getChannel(key, channel),
finalArray = [];
for (var uid in users) {
var u = users[uid];
u.network = network;
u.channel = channel;
u._burst = burst;
find.push(u.nickname);
if (u.nickname == Clients[key].nick) {
application.Networks.sync.update({_id: key}, {$set: {hostname: u.hostname}});
}
// update hostname
}
// turn this into an array of nicknames
if (force) {
application.ChannelUsers.sync.remove({network: network, channel: channel});
} else {
application.ChannelUsers.sync.remove({network: network, channel: channel, nickname: {$in: find}});
}
// ok so here we've gotta remove any users in the channel already
// and all of them if we're being told to force the update
for (var uid in users) {
var u = users[uid],
prefix = eventManager.getPrefix(Clients[key], u);
u.sort = prefix.sort;
u.prefix = prefix.prefix;
finalArray.push(u);
}
// send the update out
if (finalArray.length > 0) {
return application.ChannelUsers.sync.insert(finalArray);
} else {
return [];
}
}
/**
* Removes a specific user from a channel, if users is omitted, channel should be equal to a nickname
* and that nickname will be removed from all channels records on that network.
*
* @method removeUsers
* @param {String} network A valid network name
* @param {String} [channel] A valid channel name
* @param {Array} users An array of users to remove from the network `or` channel
* @return void
*/
ChannelManager.prototype.removeUsers = function(network, channel, users) {
var channel = (_.isArray(channel)) ? channel : channel.toLowerCase(),
users = (_.isArray(channel)) ? channel : users;
// basically we check if channel is an array, if it is we're being told to
// just remove the user from the entire network (on quits etc)
if (users.length === 0) {
return false;
}
if (_.isArray(channel)) {
application.ChannelUsers.remove({network: network, nickname: {$in: users}}, {safe: false});
} else {
application.ChannelUsers.remove({network: network, channel: channel, nickname: {$in: users}}, {safe: false});
}
// send the update out
}
/**
* Updates a user or an array of users from the specific channel with the values passed in.
*
* @method updateUsers
* @param {ObjectID} key A valid Mongo ObjectID for the networks collection
* @param {String} network The name of the network
* @param {Array} users A valid users array
* @param {Object} values A hash of keys and values to be replaced in the users array
* @return void
*/
ChannelManager.prototype.updateUsers = function(key, network, users, values) {
var update = {};
for (var uid in users) {
var u = users[uid],
s = {network: network, nickname: u},
records = application.ChannelUsers.sync.find(s).sync.toArray();
for (var rid in records) {
var user = records[rid];
var updated = _.extend(user, values);
updated.sort = eventManager.getPrefix(Clients[key], updated).sort;
application.ChannelUsers.sync.update(s, _.omit(updated, '_id'));
// update the record
}
}
// this is hacky as hell I feel but it's getting done this way to
// comply with all the other functions in this class
}
/**
* Takes a mode string, parses it and handles any updates to any records relating to
* the specific channel. This handles user updates and such, it shouldn't really be called
* externally, however can be pre and post hooked like all other functions in this object.
*
* @method updateModes
* @param {ObjectID} key A valid Mongo ObjectID for the networks collection
* @param {Object} capab A valid capabilities object from the 'registered' event
* @param {String} network Network name
* @param {String} channel Channel name
* @param {String} mode Mode string
* @return void
*/
ChannelManager.prototype.updateModes = function(key, capab, network, channel, mode) {
var channel = channel.toLowerCase(),
chan = this.getChannel(key, channel),
us = {};
var users = application.ChannelUsers.sync.find({network: network, channel: channel}).sync.toArray(),
parsedModes = modeParser.sortModes(capab, mode);
// we're not arsed about the channel or network here
var modes = modeParser.changeModes(capab, chan.modes, parsedModes);
// we need to attempt to update the record now with the new info
application.Tabs.update({network: key, target: channel}, {$set: {modes: modes}}, {safe: false});
// update the record
users.forEach(function(u) {
delete u._id;
us[u.nickname] = u;
});
modeParser.handleParams(capab, us, parsedModes).forEach(function(u) {
var prefix = eventManager.getPrefix(Clients[key], u);
u.sort = prefix.sort;
u.prefix = prefix.prefix;
application.ChannelUsers.update({network: network, channel: channel, nickname: u.nickname}, u, {safe: false});
});
// update users now
}
/**
* Updates the specific channel's topic and setby in the internal records.
<|fim▁hole|> * @param {ObjectID} key A valid Mongo ObjectID for the networks collection
* @param {String} channel A valid channel name
* @param {String} topic The new topic
* @param {String} setby A setter string, usually in the format of 'nickname!username@hostname'
* @return void
*/
ChannelManager.prototype.updateTopic = function(key, channel, topic, setby) {
var channel = channel.toLowerCase(),
chan = this.getChannel(key, channel);
var topic = {
topic: topic,
setter: setby || ''
};
// updat the topic record
application.Tabs.update({network: key, target: channel}, {$set: {topic: topic}}, {safe: false});
// update the record
}
exports.ChannelManager = _.extend(ChannelManager, hooks);<|fim▁end|>
|
*
* @method updateTopic
|
<|file_name|>rdxml.py<|end_file_name|><|fim▁begin|>try:
from xml.etree import cElementTree as etree
except ImportError:
from xml.etree import ElementTree as etree
import xml2nrn
# module names derived from the namespace. Add new tags in proper namespace
import neuroml
import metadata
import morphml
import biophysics
class FileWrapper:
def __init__(self, source):
self.source = source
self.lineno = 0
def read(self, bytes):
s = self.source.readline()
self.lineno += 1
return s
# for each '{namespace}element' call the corresponding module.func
def handle(x2n, fw, event, node):
tag = node.tag.split('}')
# hopefully a namespace token corresponding to an imported module name
ns = tag[0].split('/')[-2]<|fim▁hole|> try:
if event == 'start':
f = eval(tag)
elif event == 'end':
f = eval(tag + '_end')
except:
pass
if f:
x2n.locator.lineno = fw.lineno
try:
f(x2n, node) # handle the element when it opens
except:
print tag,' failed at ', x2n.locator.getLineNumber()
elif event == 'start':
print 'ignore', node.tag # no function to handle the element
return 0
return 1
def rdxml(fname, ho = None):
f = FileWrapper(open(fname))
x2n = xml2nrn.XML2Nrn()
ig = None
for event, elem in etree.iterparse(f, events=("start", "end")):
if ig != elem:
if handle(x2n, f, event, elem) == 0:
ig = elem
if (ho):
ho.parsed(x2n)
if __name__ == '__main__':
rdxml('temp.xml')<|fim▁end|>
|
tag = ns+'.'+tag[1] #namespace.element should correspond to module.func
f = None
|
<|file_name|>tabs.py<|end_file_name|><|fim▁begin|>"""
This module is essentially a broker to xmodule/tabs.py -- it was originally introduced to
perform some LMS-specific tab display gymnastics for the Entrance Exams feature
"""
from django.conf import settings
from django.utils.translation import ugettext as _, ugettext_noop
from courseware.access import has_access
from courseware.entrance_exams import user_must_complete_entrance_exam
from student.models import UserProfile
from openedx.core.lib.course_tabs import CourseTabPluginManager
from student.models import CourseEnrollment
from xmodule.tabs import CourseTab, CourseTabList, key_checker
from xmodule.tabs import StaticTab
class EnrolledTab(CourseTab):
"""
A base class for any view types that require a user to be enrolled.
"""
@classmethod
def is_enabled(cls, course, user=None):
if user is None:
return True
return bool(CourseEnrollment.is_enrolled(user, course.id) or has_access(user, 'staff', course, course.id))
class CoursewareTab(EnrolledTab):
"""
The main courseware view.
"""
type = 'courseware'
title = ugettext_noop('Courseware')
priority = 10
view_name = 'courseware'
is_movable = False
is_default = False
is_visible_to_sneak_peek = True
class CourseInfoTab(CourseTab):
"""
The course info view.
"""
type = 'course_info'
title = ugettext_noop('Course Info')
priority = 20
view_name = 'info'
tab_id = 'info'
is_movable = False
is_default = False
is_visible_to_sneak_peek = True
@classmethod
def is_enabled(cls, course, user=None):
return True
class SyllabusTab(EnrolledTab):
"""
A tab for the course syllabus.
"""
type = 'syllabus'
title = ugettext_noop('Syllabus')
priority = 30
view_name = 'syllabus'
allow_multiple = True
is_default = False
is_visible_to_sneak_peek = True
@classmethod
def is_enabled(cls, course, user=None):
if not super(SyllabusTab, cls).is_enabled(course, user=user):
return False
return getattr(course, 'syllabus_present', False)
class ProgressTab(EnrolledTab):
"""
The course progress view.
"""
type = 'progress'
title = ugettext_noop('Progress')
priority = 40
view_name = 'progress'
is_hideable = True
is_default = False
@classmethod
def is_enabled(cls, course, user=None): # pylint: disable=unused-argument
if not super(ProgressTab, cls).is_enabled(course, user=user):
return False
return not course.hide_progress_tab
class TextbookTabsBase(CourseTab):
"""
Abstract class for textbook collection tabs classes.
"""
# Translators: 'Textbooks' refers to the tab in the course that leads to the course' textbooks
title = ugettext_noop("Textbooks")
is_collection = True
is_default = False
@classmethod
def is_enabled(cls, course, user=None): # pylint: disable=unused-argument
return user is None or user.is_authenticated()
@classmethod
def items(cls, course):
"""
A generator for iterating through all the SingleTextbookTab book objects associated with this
collection of textbooks.
"""
raise NotImplementedError()
class TextbookTabs(TextbookTabsBase):
"""
A tab representing the collection of all textbook tabs.
"""
type = 'textbooks'
priority = None
view_name = 'book'
@classmethod
def is_enabled(cls, course, user=None): # pylint: disable=unused-argument
parent_is_enabled = super(TextbookTabs, cls).is_enabled(course, user)
return settings.FEATURES.get('ENABLE_TEXTBOOK') and parent_is_enabled
@classmethod
def items(cls, course):
for index, textbook in enumerate(course.textbooks):
yield SingleTextbookTab(
name=textbook.title,
tab_id='textbook/{0}'.format(index),
view_name=cls.view_name,
index=index
)
class PDFTextbookTabs(TextbookTabsBase):
"""
A tab representing the collection of all PDF textbook tabs.
"""
type = 'pdf_textbooks'
priority = None
view_name = 'pdf_book'
@classmethod
def items(cls, course):
for index, textbook in enumerate(course.pdf_textbooks):
yield SingleTextbookTab(
name=textbook['tab_title'],
tab_id='pdftextbook/{0}'.format(index),
view_name=cls.view_name,
index=index
)
class HtmlTextbookTabs(TextbookTabsBase):
"""
A tab representing the collection of all Html textbook tabs.
"""
type = 'html_textbooks'
priority = None
view_name = 'html_book'
@classmethod
def items(cls, course):
for index, textbook in enumerate(course.html_textbooks):
yield SingleTextbookTab(
name=textbook['tab_title'],
tab_id='htmltextbook/{0}'.format(index),
view_name=cls.view_name,
index=index
)
class LinkTab(CourseTab):
"""
Abstract class for tabs that contain external links.
"""
link_value = ''
def __init__(self, tab_dict=None, name=None, link=None):
self.link_value = tab_dict['link'] if tab_dict else link
def link_value_func(_course, _reverse_func):
""" Returns the link_value as the link. """
return self.link_value
self.type = tab_dict['type']
tab_dict['link_func'] = link_value_func
super(LinkTab, self).__init__(tab_dict)
def __getitem__(self, key):
if key == 'link':
return self.link_value
else:
return super(LinkTab, self).__getitem__(key)
def __setitem__(self, key, value):
if key == 'link':
self.link_value = value
else:
super(LinkTab, self).__setitem__(key, value)
def to_json(self):
to_json_val = super(LinkTab, self).to_json()
to_json_val.update({'link': self.link_value})
return to_json_val
def __eq__(self, other):
if not super(LinkTab, self).__eq__(other):
return False
return self.link_value == other.get('link')
@classmethod
def is_enabled(cls, course, user=None): # pylint: disable=unused-argument
return True
class ExternalDiscussionCourseTab(LinkTab):
"""
A course tab that links to an external discussion service.
"""
type = 'external_discussion'
# Translators: 'Discussion' refers to the tab in the courseware that leads to the discussion forums
title = ugettext_noop('Discussion')
priority = None
is_default = False
@classmethod
def validate(cls, tab_dict, raise_error=True):
""" Validate that the tab_dict for this course tab has the necessary information to render. """
return (super(ExternalDiscussionCourseTab, cls).validate(tab_dict, raise_error) and
key_checker(['link'])(tab_dict, raise_error))
@classmethod
def is_enabled(cls, course, user=None): # pylint: disable=unused-argument
if not super(ExternalDiscussionCourseTab, cls).is_enabled(course, user=user):
return False
return course.discussion_link
class ExternalLinkCourseTab(LinkTab):
"""
A course tab containing an external link.
"""
type = 'external_link'
priority = None
is_default = False # An external link tab is not added to a course by default
allow_multiple = True
@classmethod
def validate(cls, tab_dict, raise_error=True):
""" Validate that the tab_dict for this course tab has the necessary information to render. """
return (super(ExternalLinkCourseTab, cls).validate(tab_dict, raise_error) and
key_checker(['link', 'name'])(tab_dict, raise_error))
class SingleTextbookTab(CourseTab):
"""
A tab representing a single textbook. It is created temporarily when enumerating all textbooks within a
Textbook collection tab. It should not be serialized or persisted.
"""
type = 'single_textbook'
is_movable = False
is_collection_item = True
priority = None
def __init__(self, name, tab_id, view_name, index):
def link_func(course, reverse_func, index=index):
""" Constructs a link for textbooks from a view name, a course, and an index. """
return reverse_func(view_name, args=[unicode(course.id), index])
tab_dict = dict()
tab_dict['name'] = name
tab_dict['tab_id'] = tab_id
tab_dict['link_func'] = link_func
super(SingleTextbookTab, self).__init__(tab_dict)
def to_json(self):
raise NotImplementedError('SingleTextbookTab should not be serialized.')
def get_course_tab_list(request, course):
"""
Retrieves the course tab list from xmodule.tabs and manipulates the set as necessary
"""
user = request.user
is_user_enrolled = user.is_authenticated() and CourseEnrollment.is_enrolled(user, course.id)
xmodule_tab_list = CourseTabList.iterate_displayable(
course,
user=user,<|fim▁hole|> is_user_staff=has_access(user, 'staff', course, course.id),
is_user_enrolled=is_user_enrolled,
is_user_sneakpeek=not UserProfile.has_registered(user),
)
# Now that we've loaded the tabs for this course, perform the Entrance Exam work.
# If the user has to take an entrance exam, we'll need to hide away all but the
# "Courseware" tab. The tab is then renamed as "Entrance Exam".
course_tab_list = []
for tab in xmodule_tab_list:
if user_must_complete_entrance_exam(request, user, course):
# Hide all of the tabs except for 'Courseware'
# Rename 'Courseware' tab to 'Entrance Exam'
if tab.type is not 'courseware':
continue
tab.name = _("Entrance Exam")
course_tab_list.append(tab)
# Add in any dynamic tabs, i.e. those that are not persisted
course_tab_list += _get_dynamic_tabs(course, user)
return course_tab_list
def _get_dynamic_tabs(course, user):
"""
Returns the dynamic tab types for the current user.
Note: dynamic tabs are those that are not persisted in the course, but are
instead added dynamically based upon the user's role.
"""
dynamic_tabs = list()
for tab_type in CourseTabPluginManager.get_tab_types():
if getattr(tab_type, "is_dynamic", False):
tab = tab_type(dict())
if tab.is_enabled(course, user=user):
dynamic_tabs.append(tab)
dynamic_tabs.sort(key=lambda dynamic_tab: dynamic_tab.name)
return dynamic_tabs<|fim▁end|>
|
settings=settings,
is_user_authenticated=user.is_authenticated(),
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Config file handling module
# Copyright (C) 2014 Yury Gavrilov <[email protected]>
# This file is part of VKBuddy.
# VKBuddy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by<|fim▁hole|># the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# VKBuddy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with VKBuddy. If not, see <http://www.gnu.org/licenses/>.
import yaml
class IncorrectConfig(Exception): pass
class BareConfig:
def __init__(self):
self.config = {}
self.required_list = []
def add_parameter(self, name, required=False, description='',
default=None, typ=str):
if required:
self.required_list.append(name)
self.config[name] = {
'description': description,
'default': default,
'type': typ
}
class Config:
def __init__(self, filename, bare):
cfile = open(filename, 'r')
self.__config = yaml.load(cfile)
cfile.close()
self.bare = bare
if not self.__config:
self.__config = {}
for param in bare.required_list:
if not param in self.__config:
raise IncorrectConfig(
'Required parameter \'{}\' not found'.format(param)
)
def __getitem__(self, item):
if item in self.__config:
if item in self.bare.config:
return self.bare.config[item]['type'](self.__config[item])
else:
return self.__config[item]
elif item in self.bare.config:
return self.bare.config[item]['default']
else:
raise KeyError(item)<|fim▁end|>
| |
<|file_name|>vpcmpub.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};<|fim▁hole|>use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vpcmpub_1() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K1)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM3)), operand4: Some(Literal8(17)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 243, 109, 14, 62, 203, 17], OperandSize::Dword)
}
fn vpcmpub_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K2)), operand2: Some(Direct(XMM3)), operand3: Some(IndirectScaledIndexedDisplaced(EAX, EBX, Four, 27899211, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(56)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 243, 101, 10, 62, 148, 152, 75, 181, 169, 1, 56], OperandSize::Dword)
}
fn vpcmpub_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K2)), operand2: Some(Direct(XMM2)), operand3: Some(Direct(XMM14)), operand4: Some(Literal8(52)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 211, 109, 10, 62, 214, 52], OperandSize::Qword)
}
fn vpcmpub_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K2)), operand2: Some(Direct(XMM19)), operand3: Some(IndirectScaledDisplaced(RSI, Two, 1131204265, Some(OperandSize::Xmmword), None)), operand4: Some(Literal8(93)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 243, 101, 3, 62, 20, 117, 169, 206, 108, 67, 93], OperandSize::Qword)
}
fn vpcmpub_5() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K7)), operand2: Some(Direct(YMM4)), operand3: Some(Direct(YMM4)), operand4: Some(Literal8(14)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 243, 93, 44, 62, 252, 14], OperandSize::Dword)
}
fn vpcmpub_6() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K4)), operand2: Some(Direct(YMM3)), operand3: Some(IndirectScaledIndexedDisplaced(EDI, EDI, Two, 2073979736, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(0)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 243, 101, 42, 62, 164, 127, 88, 107, 158, 123, 0], OperandSize::Dword)
}
fn vpcmpub_7() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K2)), operand2: Some(Direct(YMM15)), operand3: Some(Direct(YMM31)), operand4: Some(Literal8(15)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 147, 5, 45, 62, 215, 15], OperandSize::Qword)
}
fn vpcmpub_8() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K5)), operand2: Some(Direct(YMM19)), operand3: Some(Indirect(RDX, Some(OperandSize::Ymmword), None)), operand4: Some(Literal8(43)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 243, 101, 37, 62, 42, 43], OperandSize::Qword)
}
fn vpcmpub_9() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K1)), operand2: Some(Direct(ZMM7)), operand3: Some(Direct(ZMM3)), operand4: Some(Literal8(119)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 243, 69, 73, 62, 203, 119], OperandSize::Dword)
}
fn vpcmpub_10() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K1)), operand2: Some(Direct(ZMM1)), operand3: Some(IndirectScaledDisplaced(EDI, Eight, 996199483, Some(OperandSize::Zmmword), None)), operand4: Some(Literal8(88)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 243, 117, 79, 62, 12, 253, 59, 204, 96, 59, 88], OperandSize::Dword)
}
fn vpcmpub_11() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K7)), operand2: Some(Direct(ZMM12)), operand3: Some(Direct(ZMM22)), operand4: Some(Literal8(113)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 179, 29, 73, 62, 254, 113], OperandSize::Qword)
}
fn vpcmpub_12() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPUB, operand1: Some(Direct(K7)), operand2: Some(Direct(ZMM20)), operand3: Some(IndirectScaledDisplaced(RDI, Four, 479754913, Some(OperandSize::Zmmword), None)), operand4: Some(Literal8(87)), lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 243, 93, 71, 62, 60, 189, 161, 122, 152, 28, 87], OperandSize::Qword)
}<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! `session_types`
//!
//! This is an implementation of *session types* in Rust.
//!
//! The channels in Rusts standard library are useful for a great many things,
//! but they're restricted to a single type. Session types allows one to use a
//! single channel for transferring values of different types, depending on the
//! context in which it is used. Specifically, a session typed channel always
//! carry a *protocol*, which dictates how communication is to take place.
//!
//! For example, imagine that two threads, `A` and `B` want to communicate with
//! the following pattern:
//!
//! 1. `A` sends an integer to `B`.
//! 2. `B` sends a boolean to `A` depending on the integer received.
//!
//! With session types, this could be done by sharing a single channel. From
//! `A`'s point of view, it would have the type `int ! (bool ? eps)` where `t ! r`
//! is the protocol "send something of type `t` then proceed with
//! protocol `r`", the protocol `t ? r` is "receive something of type `t` then proceed
//! with protocol `r`, and `eps` is a special marker indicating the end of a
//! communication session.
//!
//! Our session type library allows the user to create channels that adhere to a
//! specified protocol. For example, a channel like the above would have the type
//! `Chan<(), Send<i64, Recv<bool, Eps>>>`, and the full program could look like this:
//!
//! ```
//! extern crate session_types;
//! use session_types::*;
//!
//! type Server = Recv<i64, Send<bool, Eps>>;
//! type Client = Send<i64, Recv<bool, Eps>>;
//!
//! fn srv(c: Chan<(), Server>) {
//! let (c, n) = c.recv();
//! if n % 2 == 0 {
//! c.send(true).close()
//! } else {
//! c.send(false).close()
//! }
//! }
//!
//! fn cli(c: Chan<(), Client>) {
//! let n = 42;
//! let c = c.send(n);
//! let (c, b) = c.recv();
//!
//! if b {
//! println!("{} is even", n);
//! } else {
//! println!("{} is odd", n);
//! }
//!
//! c.close();
//! }
//!
//! fn main() {
//! connect(srv, cli);
//! }
//! ```
#![cfg_attr(feature = "cargo-clippy", allow(clippy::double_must_use))]
#![cfg_attr(feature = "cargo-clippy", allow(clippy::type_complexity))]
extern crate crossbeam_channel;
use std::marker::PhantomData;
use std::thread::spawn;
use std::{marker, mem, ptr};
use std::collections::HashMap;
use crossbeam_channel::{unbounded, Receiver, Sender};
use crossbeam_channel::Select;
pub use Branch::*;
/// A session typed channel. `P` is the protocol and `E` is the environment,
/// containing potential recursion targets
#[must_use]
pub struct Chan<E, P>(Sender<*mut u8>, Receiver<*mut u8>, PhantomData<(E, P)>);
unsafe impl<E: marker::Send, P: marker::Send> marker::Send for Chan<E, P> {}
unsafe fn write_chan<A: marker::Send + 'static, E, P>(&Chan(ref tx, _, _): &Chan<E, P>, x: A) {
tx.send(Box::into_raw(Box::new(x)) as *mut _).unwrap()
}
unsafe fn read_chan<A: marker::Send + 'static, E, P>(&Chan(_, ref rx, _): &Chan<E, P>) -> A {
*Box::from_raw(rx.recv().unwrap() as *mut A)
}
unsafe fn try_read_chan<A: marker::Send + 'static, E, P>(
&Chan(_, ref rx, _): &Chan<E, P>,
) -> Option<A> {
match rx.try_recv() {
Ok(a) => Some(*Box::from_raw(a as *mut A)),
Err(_) => None,
}
}
/// Peano numbers: Zero
#[allow(missing_copy_implementations)]
pub struct Z;
/// Peano numbers: Increment
pub struct S<N>(PhantomData<N>);
/// End of communication session (epsilon)
#[allow(missing_copy_implementations)]
pub struct Eps;
/// Receive `A`, then `P`
pub struct Recv<A, P>(PhantomData<(A, P)>);
/// Send `A`, then `P`
pub struct Send<A, P>(PhantomData<(A, P)>);
/// Active choice between `P` and `Q`
pub struct Choose<P, Q>(PhantomData<(P, Q)>);
/// Passive choice (offer) between `P` and `Q`
pub struct Offer<P, Q>(PhantomData<(P, Q)>);
/// Enter a recursive environment
pub struct Rec<P>(PhantomData<P>);
/// Recurse. N indicates how many layers of the recursive environment we recurse
/// out of.
pub struct Var<N>(PhantomData<N>);
/// The HasDual trait defines the dual relationship between protocols.
///
/// Any valid protocol has a corresponding dual.
///
/// This trait is sealed and cannot be implemented outside of session-types
pub trait HasDual: private::Sealed {
type Dual;
}
impl HasDual for Eps {
type Dual = Eps;
}
impl<A, P: HasDual> HasDual for Send<A, P> {
type Dual = Recv<A, P::Dual>;
}
impl<A, P: HasDual> HasDual for Recv<A, P> {
type Dual = Send<A, P::Dual>;
}
impl<P: HasDual, Q: HasDual> HasDual for Choose<P, Q> {
type Dual = Offer<P::Dual, Q::Dual>;
}
impl<P: HasDual, Q: HasDual> HasDual for Offer<P, Q> {
type Dual = Choose<P::Dual, Q::Dual>;
}
impl HasDual for Var<Z> {
type Dual = Var<Z>;
}
impl<N> HasDual for Var<S<N>> {
type Dual = Var<S<N>>;
}
impl<P: HasDual> HasDual for Rec<P> {
type Dual = Rec<P::Dual>;
}
pub enum Branch<L, R> {
Left(L),
Right(R),
}
impl<E, P> Drop for Chan<E, P> {
fn drop(&mut self) {
panic!("Session channel prematurely dropped");
}
}
impl<E> Chan<E, Eps> {
/// Close a channel. Should always be used at the end of your program.
pub fn close(self) {
// This method cleans up the channel without running the panicky destructor
// In essence, it calls the drop glue bypassing the `Drop::drop` method
let this = mem::ManuallyDrop::new(self);
let sender = unsafe { ptr::read(&(this).0 as *const _) };
let receiver = unsafe { ptr::read(&(this).1 as *const _) };
drop(sender);
drop(receiver); // drop them
}
}
impl<E, P> Chan<E, P> {
unsafe fn cast<E2, P2>(self) -> Chan<E2, P2> {
let this = mem::ManuallyDrop::new(self);
Chan(
ptr::read(&(this).0 as *const _),
ptr::read(&(this).1 as *const _),
PhantomData,
)
}
}
impl<E, P, A: marker::Send + 'static> Chan<E, Send<A, P>> {
/// Send a value of type `A` over the channel. Returns a channel with
/// protocol `P`
#[must_use]
pub fn send(self, v: A) -> Chan<E, P> {
unsafe {
write_chan(&self, v);
self.cast()
}
}
}
impl<E, P, A: marker::Send + 'static> Chan<E, Recv<A, P>> {
/// Receives a value of type `A` from the channel. Returns a tuple
/// containing the resulting channel and the received value.
#[must_use]
pub fn recv(self) -> (Chan<E, P>, A) {
unsafe {
let v = read_chan(&self);
(self.cast(), v)
}
}
/// Non-blocking receive.
#[must_use]
pub fn try_recv(self) -> Result<(Chan<E, P>, A), Self> {
unsafe {
if let Some(v) = try_read_chan(&self) {
Ok((self.cast(), v))
} else {
Err(self)
}
}
}
}
impl<E, P, Q> Chan<E, Choose<P, Q>> {
/// Perform an active choice, selecting protocol `P`.
#[must_use]
pub fn sel1(self) -> Chan<E, P> {
unsafe {
write_chan(&self, true);
self.cast()
}
}
/// Perform an active choice, selecting protocol `Q`.
#[must_use]
pub fn sel2(self) -> Chan<E, Q> {
unsafe {
write_chan(&self, false);
self.cast()
}
}
}
/// Convenience function. This is identical to `.sel2()`
impl<Z, A, B> Chan<Z, Choose<A, B>> {
#[must_use]
pub fn skip(self) -> Chan<Z, B> {
self.sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2()`
impl<Z, A, B, C> Chan<Z, Choose<A, Choose<B, C>>> {
#[must_use]
pub fn skip2(self) -> Chan<Z, C> {
self.sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2()`
impl<Z, A, B, C, D> Chan<Z, Choose<A, Choose<B, Choose<C, D>>>> {
#[must_use]
pub fn skip3(self) -> Chan<Z, D> {
self.sel2().sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2().sel2()`
impl<Z, A, B, C, D, E> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, E>>>>> {
#[must_use]
pub fn skip4(self) -> Chan<Z, E> {
self.sel2().sel2().sel2().sel2()
}
}
/// Convenience function. This is identical to `.sel2().sel2().sel2().sel2().sel2()`
impl<Z, A, B, C, D, E, F> Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, F>>>>>> {
#[must_use]
pub fn skip5(self) -> Chan<Z, F> {
self.sel2().sel2().sel2().sel2().sel2()
}
}
/// Convenience function.
impl<Z, A, B, C, D, E, F, G>
Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, Choose<F, G>>>>>>>
{
#[must_use]
pub fn skip6(self) -> Chan<Z, G> {
self.sel2().sel2().sel2().sel2().sel2().sel2()
}
}
/// Convenience function.
impl<Z, A, B, C, D, E, F, G, H>
Chan<Z, Choose<A, Choose<B, Choose<C, Choose<D, Choose<E, Choose<F, Choose<G, H>>>>>>>>
{
#[must_use]
pub fn skip7(self) -> Chan<Z, H> {
self.sel2().sel2().sel2().sel2().sel2().sel2().sel2()
}
}
impl<E, P, Q> Chan<E, Offer<P, Q>> {
/// Passive choice. This allows the other end of the channel to select one
/// of two options for continuing the protocol: either `P` or `Q`.
#[must_use]
pub fn offer(self) -> Branch<Chan<E, P>, Chan<E, Q>> {
unsafe {
let b = read_chan(&self);
if b {
Left(self.cast())
} else {
Right(self.cast())
}
}
}
/// Poll for choice.
#[must_use]
pub fn try_offer(self) -> Result<Branch<Chan<E, P>, Chan<E, Q>>, Self> {
unsafe {
if let Some(b) = try_read_chan(&self) {
if b {
Ok(Left(self.cast()))
} else {
Ok(Right(self.cast()))
}
} else {
Err(self)
}
}
}
}
impl<E, P> Chan<E, Rec<P>> {
/// Enter a recursive environment, putting the current environment on the
/// top of the environment stack.
#[must_use]
pub fn enter(self) -> Chan<(P, E), P> {
unsafe { self.cast() }
}
}
impl<E, P> Chan<(P, E), Var<Z>> {
/// Recurse to the environment on the top of the environment stack.
#[must_use]
pub fn zero(self) -> Chan<(P, E), P> {
unsafe { self.cast() }
}
}
impl<E, P, N> Chan<(P, E), Var<S<N>>> {
/// Pop the top environment from the environment stack.
#[must_use]
pub fn succ(self) -> Chan<E, Var<N>> {
unsafe { self.cast() }
}
}
/// Homogeneous select. We have a vector of channels, all obeying the same
/// protocol (and in the exact same point of the protocol), wait for one of them
/// to receive. Removes the receiving channel from the vector and returns both
/// the channel and the new vector.
#[must_use]
pub fn hselect<E, P, A>(
mut chans: Vec<Chan<E, Recv<A, P>>>,
) -> (Chan<E, Recv<A, P>>, Vec<Chan<E, Recv<A, P>>>) {
let i = iselect(&chans);
let c = chans.remove(i);
(c, chans)
}
/// An alternative version of homogeneous select, returning the index of the Chan
/// that is ready to receive.
pub fn iselect<E, P, A>(chans: &[Chan<E, Recv<A, P>>]) -> usize {
let mut map = HashMap::new();
let id = {
let mut sel = Select::new();
let mut handles = Vec::with_capacity(chans.len()); // collect all the handles
for (i, chan) in chans.iter().enumerate() {
let &Chan(_, ref rx, _) = chan;
let handle = sel.recv(rx);
map.insert(handle, i);
handles.push(handle);
}
sel.ready()
};
map.remove(&id).unwrap()
}
/// Heterogeneous selection structure for channels
///
/// This builds a structure of channels that we wish to select over. This is
/// structured in a way such that the channels selected over cannot be
/// interacted with (consumed) as long as the borrowing ChanSelect object
/// exists. This is necessary to ensure memory safety.
///
/// The type parameter T is a return type, ie we store a value of some type T
/// that is returned in case its associated channels is selected on `wait()`
pub struct ChanSelect<'c> {
receivers: Vec<&'c Receiver<*mut u8>>,
}
impl<'c> ChanSelect<'c> {
pub fn new() -> ChanSelect<'c> {
ChanSelect {
receivers: Vec::new(),
}
}
/// Add a channel whose next step is `Recv`
///
/// Once a channel has been added it cannot be interacted with as long as it
/// is borrowed here (by virtue of borrow checking and lifetimes).
pub fn add_recv<E, P, A: marker::Send>(&mut self, chan: &'c Chan<E, Recv<A, P>>) {
let &Chan(_, ref rx, _) = chan;
let _ = self.receivers.push(rx);
}
pub fn add_offer<E, P, Q>(&mut self, chan: &'c Chan<E, Offer<P, Q>>) {
let &Chan(_, ref rx, _) = chan;
let _ = self.receivers.push(rx);
}
/// Find a Receiver (and hence a Chan) that is ready to receive.
///
/// This method consumes the ChanSelect, freeing up the borrowed Receivers
/// to be consumed.
pub fn wait(self) -> usize {
let mut sel = Select::new();
for rx in self.receivers.into_iter() {
sel.recv(rx);
}
sel.ready()
}
/// How many channels are there in the structure?
pub fn len(&self) -> usize {
self.receivers.len()
}
pub fn is_empty(&self) -> bool {
self.receivers.is_empty()
}
}
impl<'c> Default for ChanSelect<'c> {
fn default() -> Self {
Self::new()
}
}
/// Returns two session channels
#[must_use]
pub fn session_channel<P: HasDual>() -> (Chan<(), P>, Chan<(), P::Dual>) {
let (tx1, rx1) = unbounded();
let (tx2, rx2) = unbounded();
let c1 = Chan(tx1, rx2, PhantomData);
let c2 = Chan(tx2, rx1, PhantomData);
(c1, c2)
}
/// Connect two functions using a session typed channel.
pub fn connect<F1, F2, P>(srv: F1, cli: F2)
where
F1: Fn(Chan<(), P>) + marker::Send + 'static,
F2: Fn(Chan<(), P::Dual>) + marker::Send,
P: HasDual + marker::Send + 'static,
P::Dual: HasDual + marker::Send + 'static,
{
let (c1, c2) = session_channel();
let t = spawn(move || srv(c1));
cli(c2);
t.join().unwrap();
}
mod private {
use super::*;
pub trait Sealed {}
// Impl for all exported protocol types
impl Sealed for Eps {}
impl<A, P> Sealed for Send<A, P> {}
impl<A, P> Sealed for Recv<A, P> {}
impl<P, Q> Sealed for Choose<P, Q> {}
impl<P, Q> Sealed for Offer<P, Q> {}
impl<Z> Sealed for Var<Z> {}
impl<P> Sealed for Rec<P> {}
}
/// This macro is convenient for server-like protocols of the form:
///
/// `Offer<A, Offer<B, Offer<C, ... >>>`
///
/// # Examples
///
/// Assume we have a protocol `Offer<Recv<u64, Eps>, Offer<Recv<String, Eps>,Eps>>>`
/// we can use the `offer!` macro as follows:
///
/// ```rust
/// extern crate session_types;
/// use session_types::*;
/// use std::thread::spawn;
///
/// fn srv(c: Chan<(), Offer<Recv<u64, Eps>, Offer<Recv<String, Eps>, Eps>>>) {
/// offer! { c,
/// Number => {
/// let (c, n) = c.recv();
/// assert_eq!(42, n);
/// c.close();
/// },
/// String => {
/// c.recv().0.close();
/// },
/// Quit => {
/// c.close();
/// }
/// }
/// }
///
/// fn cli(c: Chan<(), Choose<Send<u64, Eps>, Choose<Send<String, Eps>, Eps>>>) {
/// c.sel1().send(42).close();
/// }
///
/// fn main() {
/// let (s, c) = session_channel();
/// spawn(move|| cli(c));
/// srv(s);
/// }
/// ```
///
/// The identifiers on the left-hand side of the arrows have no semantic
/// meaning, they only provide a meaningful name for the reader.
#[macro_export]
macro_rules! offer {
(
$id:ident, $branch:ident => $code:expr, $($t:tt)+
) => (
match $id.offer() {
$crate::Left($id) => $code,
$crate::Right($id) => offer!{ $id, $($t)+ }
}
);
(
$id:ident, $branch:ident => $code:expr
) => (
$code
)
}
/// Returns the channel unchanged on `TryRecvError::Empty`.
#[macro_export]
macro_rules! try_offer {
(
$id:ident, $branch:ident => $code:expr, $($t:tt)+
) => (
match $id.try_offer() {
Ok($crate::Left($id)) => $code,
Ok($crate::Right($id)) => try_offer!{ $id, $($t)+ },
Err($id) => Err($id)
}
);
(
$id:ident, $branch:ident => $code:expr
) => (
$code
)
}
/// This macro plays the same role as the `select!` macro does for `Receiver`s.
///
/// It also supports a second form with `Offer`s (see the example below).
///
/// # Examples
///
/// ```rust
/// extern crate session_types;
/// use session_types::*;
/// use std::thread::spawn;
///
/// fn send_str(c: Chan<(), Send<String, Eps>>) {
/// c.send("Hello, World!".to_string()).close();
/// }
///
/// fn send_usize(c: Chan<(), Send<usize, Eps>>) {
/// c.send(42).close();
/// }
///
/// fn main() {
/// let (tcs, rcs) = session_channel();
/// let (tcu, rcu) = session_channel();
///
/// // Spawn threads
/// spawn(move|| send_str(tcs));
/// spawn(move|| send_usize(tcu));
///
/// chan_select! {
/// (c, s) = rcs.recv() => {
/// assert_eq!("Hello, World!".to_string(), s);
/// c.close();
/// rcu.recv().0.close();
/// },
/// (c, i) = rcu.recv() => {
/// assert_eq!(42, i);
/// c.close();
/// rcs.recv().0.close();
/// }
/// }
/// }
/// ```
///
/// ```rust
/// extern crate session_types;
/// extern crate rand;
///
/// use std::thread::spawn;
/// use session_types::*;
///
/// type Igo = Choose<Send<String, Eps>, Send<u64, Eps>>;
/// type Ugo = Offer<Recv<String, Eps>, Recv<u64, Eps>>;
///
/// fn srv(chan_one: Chan<(), Ugo>, chan_two: Chan<(), Ugo>) {
/// let _ign;
/// chan_select! {
/// _ign = chan_one.offer() => {
/// String => {
/// let (c, s) = chan_one.recv();
/// assert_eq!("Hello, World!".to_string(), s);
/// c.close();
/// match chan_two.offer() {
/// Left(c) => c.recv().0.close(),
/// Right(c) => c.recv().0.close(),
/// }
/// },
/// Number => {
/// chan_one.recv().0.close();
/// match chan_two.offer() {
/// Left(c) => c.recv().0.close(),
/// Right(c) => c.recv().0.close(),
/// }
/// }
/// },
/// _ign = chan_two.offer() => {
/// String => {
/// chan_two.recv().0.close();
/// match chan_one.offer() {
/// Left(c) => c.recv().0.close(),
/// Right(c) => c.recv().0.close(),
/// }
/// },
/// Number => {
/// chan_two.recv().0.close();
/// match chan_one.offer() {
/// Left(c) => c.recv().0.close(),
/// Right(c) => c.recv().0.close(),
/// }
/// }
/// }
/// }
/// }
///
/// fn cli(c: Chan<(), Igo>) {
/// c.sel1().send("Hello, World!".to_string()).close();
/// }
///
/// fn main() {
/// let (ca1, ca2) = session_channel();
/// let (cb1, cb2) = session_channel();
///
/// cb2.sel2().send(42).close();
///
/// spawn(move|| cli(ca2));
///
/// srv(ca1, cb1);
/// }
/// ```
#[macro_export]
macro_rules! chan_select {
(
$(($c:ident, $name:pat) = $rx:ident.recv() => $code:expr),+
) => ({
let index = {<|fim▁hole|> $( sel.add_recv(&$rx); )+
sel.wait()
};
let mut i = 0;
$( if index == { i += 1; i - 1 } { let ($c, $name) = $rx.recv(); $code }
else )+
{ unreachable!() }
});
(
$($res:ident = $rx:ident.offer() => { $($t:tt)+ }),+
) => ({
let index = {
let mut sel = $crate::ChanSelect::new();
$( sel.add_offer(&$rx); )+
sel.wait()
};
let mut i = 0;
$( if index == { i += 1; i - 1 } { $res = offer!{ $rx, $($t)+ } } else )+
{ unreachable!() }
})
}<|fim▁end|>
|
let mut sel = $crate::ChanSelect::new();
|
<|file_name|>associated-const-dead-code.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>struct MyFoo;
impl MyFoo {
const BAR: u32 = 1;
//~^ ERROR associated const is never used: `BAR`
}
fn main() {
let _: MyFoo = MyFoo;
}<|fim▁end|>
|
#![feature(associated_consts)]
#![deny(dead_code)]
|
<|file_name|>StreamCollectorsTest.java<|end_file_name|><|fim▁begin|>/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.common.util;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Author: Michal Szynkiewicz, [email protected]
* Date: 9/15/16
* Time: 1:37 PM
*/
public class StreamCollectorsTest {
@Test
public void shouldFlattenTwoLists() {
List<String> listOne = Arrays.asList("one-1", "one-2", "one-3");
List<String> listTwo = Arrays.asList("two-1", "two-2");
List<String> actual = Stream.of(listOne, listTwo).collect(StreamCollectors.toFlatList());
List<String> expected = new ArrayList<>(listOne);
expected.addAll(listTwo);
assertThat(actual).hasSameElementsAs(expected);
}<|fim▁hole|> public void shouldFlattenOneList() {
List<String> listOne = Arrays.asList("one-1", "one-2", "one-3");
List<String> actual = Stream.of(listOne).collect(StreamCollectors.toFlatList());
assertThat(actual).hasSameElementsAs(listOne);
}
@Test
public void shouldFlattenNoList() {
List<String> actual = Stream.<List<String>>of().collect(StreamCollectors.toFlatList());
assertThat(actual).isNotNull().isEmpty();
}
}<|fim▁end|>
|
@Test
|
<|file_name|>either.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A type that represents one of two alternatives
use cmp::Eq;
use kinds::Copy;
use result::Result;
use result;
use vec;
/// The either type
#[deriving(Clone, Eq)]
pub enum Either<T, U> {
Left(T),
Right(U)
}
#[inline(always)]
pub fn either<T, U, V>(f_left: &fn(&T) -> V,
f_right: &fn(&U) -> V, value: &Either<T, U>) -> V {
/*!
* Applies a function based on the given either value
*
* If `value` is left(T) then `f_left` is applied to its contents, if
* `value` is right(U) then `f_right` is applied to its contents, and the
* result is returned.
*/
match *value {
Left(ref l) => f_left(l),
Right(ref r) => f_right(r)
}
}
pub fn lefts<T:Copy,U>(eithers: &[Either<T, U>]) -> ~[T] {
//! Extracts from a vector of either all the left values
do vec::build_sized(eithers.len()) |push| {
for vec::each(eithers) |elt| {
match *elt {
Left(ref l) => { push(*l); }
_ => { /* fallthrough */ }
}
}
}
}
pub fn rights<T, U: Copy>(eithers: &[Either<T, U>]) -> ~[U] {
//! Extracts from a vector of either all the right values
do vec::build_sized(eithers.len()) |push| {
for vec::each(eithers) |elt| {
match *elt {
Right(ref r) => { push(*r); }
_ => { /* fallthrough */ }
}
}
}
}
pub fn partition<T, U>(eithers: ~[Either<T, U>])
-> (~[T], ~[U]) {
/*!
* Extracts from a vector of either all the left values and right values
*
* Returns a structure containing a vector of left values and a vector of
* right values.
*/
let mut lefts: ~[T] = ~[];
let mut rights: ~[U] = ~[];
do vec::consume(eithers) |_i, elt| {
match elt {
Left(l) => lefts.push(l),
Right(r) => rights.push(r)
}
}
return (lefts, rights);
}
#[inline(always)]
pub fn flip<T, U>(eith: Either<T, U>) -> Either<U, T> {
//! Flips between left and right of a given either
match eith {
Right(r) => Left(r),
Left(l) => Right(l)
}
}
#[inline(always)]
pub fn to_result<T, U>(eith: Either<T, U>)
-> Result<U, T> {
/*!
* Converts either::t to a result::t
*
* Converts an `either` type to a `result` type, making the "right" choice
* an ok result, and the "left" choice a fail
*/
match eith {
Right(r) => result::Ok(r),
Left(l) => result::Err(l)
}
}
#[inline(always)]
pub fn is_left<T, U>(eith: &Either<T, U>) -> bool {
//! Checks whether the given value is a left
match *eith { Left(_) => true, _ => false }
}
#[inline(always)]
pub fn is_right<T, U>(eith: &Either<T, U>) -> bool {
//! Checks whether the given value is a right
match *eith { Right(_) => true, _ => false }
}
#[inline(always)]
pub fn unwrap_left<T,U>(eith: Either<T,U>) -> T {
//! Retrieves the value in the left branch. Fails if the either is Right.
match eith {
Left(x) => x,
Right(_) => fail!(~"either::unwrap_left Right")
}
}
#[inline(always)]
pub fn unwrap_right<T,U>(eith: Either<T,U>) -> U {
//! Retrieves the value in the right branch. Fails if the either is Left.
match eith {
Right(x) => x,
Left(_) => fail!(~"either::unwrap_right Left")
}
}
pub impl<T, U> Either<T, U> {
#[inline(always)]
fn either<V>(&self, f_left: &fn(&T) -> V, f_right: &fn(&U) -> V) -> V {
either(f_left, f_right, self)
}
#[inline(always)]
fn flip(self) -> Either<U, T> { flip(self) }
#[inline(always)]
fn to_result(self) -> Result<U, T> { to_result(self) }
#[inline(always)]
fn is_left(&self) -> bool { is_left(self) }
#[inline(always)]
fn is_right(&self) -> bool { is_right(self) }
#[inline(always)]
fn unwrap_left(self) -> T { unwrap_left(self) }
#[inline(always)]
fn unwrap_right(self) -> U { unwrap_right(self) }
}
#[test]
fn test_either_left() {
let val = Left(10);
fn f_left(x: &int) -> bool { *x == 10 }
fn f_right(_x: &uint) -> bool { false }
assert!((either(f_left, f_right, &val)));
}
#[test]
fn test_either_right() {
let val = Right(10u);
fn f_left(_x: &int) -> bool { false }
fn f_right(x: &uint) -> bool { *x == 10u }
assert!((either(f_left, f_right, &val)));
}
#[test]
fn test_lefts() {
let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];<|fim▁hole|>}
#[test]
fn test_lefts_none() {
let input: ~[Either<int, int>] = ~[Right(10), Right(10)];
let result = lefts(input);
assert_eq!(vec::len(result), 0u);
}
#[test]
fn test_lefts_empty() {
let input: ~[Either<int, int>] = ~[];
let result = lefts(input);
assert_eq!(vec::len(result), 0u);
}
#[test]
fn test_rights() {
let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];
let result = rights(input);
assert_eq!(result, ~[11, 13]);
}
#[test]
fn test_rights_none() {
let input: ~[Either<int, int>] = ~[Left(10), Left(10)];
let result = rights(input);
assert_eq!(vec::len(result), 0u);
}
#[test]
fn test_rights_empty() {
let input: ~[Either<int, int>] = ~[];
let result = rights(input);
assert_eq!(vec::len(result), 0u);
}
#[test]
fn test_partition() {
let input = ~[Left(10), Right(11), Left(12), Right(13), Left(14)];
let (lefts, rights) = partition(input);
assert_eq!(lefts[0], 10);
assert_eq!(lefts[1], 12);
assert_eq!(lefts[2], 14);
assert_eq!(rights[0], 11);
assert_eq!(rights[1], 13);
}
#[test]
fn test_partition_no_lefts() {
let input: ~[Either<int, int>] = ~[Right(10), Right(11)];
let (lefts, rights) = partition(input);
assert_eq!(vec::len(lefts), 0u);
assert_eq!(vec::len(rights), 2u);
}
#[test]
fn test_partition_no_rights() {
let input: ~[Either<int, int>] = ~[Left(10), Left(11)];
let (lefts, rights) = partition(input);
assert_eq!(vec::len(lefts), 2u);
assert_eq!(vec::len(rights), 0u);
}
#[test]
fn test_partition_empty() {
let input: ~[Either<int, int>] = ~[];
let (lefts, rights) = partition(input);
assert_eq!(vec::len(lefts), 0u);
assert_eq!(vec::len(rights), 0u);
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// End:
//<|fim▁end|>
|
let result = lefts(input);
assert_eq!(result, ~[10, 12, 14]);
|
<|file_name|>formats-data.ts<|end_file_name|><|fim▁begin|>export const FormatsData: {[k: string]: ModdedSpeciesFormatsData} = {
bulbasaur: {
tier: "LC",
},
ivysaur: {
tier: "NFE",
},
venusaur: {
randomBattleMoves: ["curse", "earthquake", "hiddenpowerrock", "leechseed", "sleeppowder", "sludgebomb", "swordsdance", "synthesis"],
tier: "UUBL",
},
charmander: {
tier: "LC",
},
charmeleon: {
tier: "NFE",
},
charizard: {
randomBattleMoves: ["bellydrum", "dragondance", "earthquake", "fireblast", "hiddenpowerflying", "substitute"],
tier: "UUBL",
},
squirtle: {
tier: "LC",
},
wartortle: {
tier: "NFE",
},
blastoise: {
randomBattleMoves: ["earthquake", "icebeam", "mirrorcoat", "rest", "roar", "sleeptalk", "surf", "toxic"],
tier: "UU",
},
caterpie: {
tier: "LC",
},
metapod: {
tier: "NFE",
},
butterfree: {
randomBattleMoves: ["gigadrain", "hiddenpowerfire", "morningsun", "psychic", "sleeppowder", "stunspore", "toxic"],
tier: "NU",
},
weedle: {
tier: "LC",
},
kakuna: {
tier: "NFE",
},
beedrill: {
randomBattleMoves: ["brickbreak", "doubleedge", "endure", "hiddenpowerbug", "sludgebomb", "swordsdance"],
tier: "NU",
},
pidgey: {
tier: "LC",
},
pidgeotto: {
tier: "NFE",
},
pidgeot: {
randomBattleMoves: ["aerialace", "hiddenpowerground", "quickattack", "return", "substitute", "toxic"],
tier: "NU",
},
rattata: {
tier: "LC",
},
raticate: {
randomBattleMoves: ["endeavor", "hiddenpowerground", "quickattack", "return", "reversal", "shadowball", "substitute"],
tier: "NU",
},
spearow: {
tier: "LC",
},
fearow: {
randomBattleMoves: ["agility", "batonpass", "drillpeck", "hiddenpowerground", "quickattack", "return", "substitute"],
tier: "UU",
},
ekans: {
tier: "LC",
},
arbok: {
randomBattleMoves: ["doubleedge", "earthquake", "hiddenpowerfire", "rest", "rockslide", "sleeptalk", "sludgebomb"],
tier: "NU",
},
pichu: {
tier: "LC",
},
pikachu: {
randomBattleMoves: ["hiddenpowerice", "substitute", "surf", "thunderbolt"],
tier: "NU",
},
raichu: {
randomBattleMoves: ["encore", "focuspunch", "hiddenpowergrass", "hiddenpowerice", "substitute", "surf", "thunderbolt", "thunderwave"],
tier: "UU",
},
sandshrew: {
tier: "LC",
},
sandslash: {
randomBattleMoves: ["earthquake", "hiddenpowerbug", "rapidspin", "rockslide", "swordsdance", "toxic"],
tier: "UU",
},
nidoranf: {
tier: "LC",
},
nidorina: {
tier: "NFE",
},
nidoqueen: {
randomBattleMoves: ["earthquake", "fireblast", "icebeam", "shadowball", "sludgebomb", "superpower"],
tier: "UU",
},
nidoranm: {
tier: "LC",
},
nidorino: {
tier: "NFE",
},
nidoking: {
randomBattleMoves: ["earthquake", "fireblast", "icebeam", "megahorn", "sludgebomb", "substitute", "thunderbolt"],
tier: "UU",
},
cleffa: {
tier: "LC",
},
clefairy: {
tier: "NFE",
},
clefable: {
randomBattleMoves: ["calmmind", "counter", "icebeam", "return", "shadowball", "softboiled", "thunderbolt", "thunderwave"],
tier: "UU",
},
vulpix: {
tier: "LC",
},
ninetales: {
randomBattleMoves: ["fireblast", "flamethrower", "hiddenpowergrass", "hypnosis", "substitute", "toxic", "willowisp"],
tier: "UU",
},
igglybuff: {
tier: "LC",
},
jigglypuff: {
tier: "NFE",
},
wigglytuff: {
randomBattleMoves: ["fireblast", "icebeam", "protect", "return", "thunderbolt", "toxic", "wish"],
tier: "NU",
},
zubat: {
tier: "LC",
},
golbat: {
tier: "NU",
},
crobat: {
randomBattleMoves: ["aerialace", "haze", "hiddenpowerground", "shadowball", "sludgebomb", "taunt", "toxic"],
tier: "UUBL",
},
oddish: {
tier: "LC",
},
gloom: {
tier: "NFE",
},
vileplume: {
randomBattleMoves: ["aromatherapy", "hiddenpowerfire", "sleeppowder", "sludgebomb", "solarbeam", "sunnyday", "synthesis"],
tier: "UU",
},
bellossom: {
randomBattleMoves: ["hiddenpowergrass", "leechseed", "moonlight", "sleeppowder", "sludgebomb", "stunspore"],
tier: "NU",
},
paras: {
tier: "LC",
},
parasect: {
randomBattleMoves: ["aromatherapy", "gigadrain", "hiddenpowerbug", "return", "spore", "stunspore", "swordsdance"],
tier: "NU",
},
venonat: {
tier: "LC",
},
venomoth: {
randomBattleMoves: ["batonpass", "hiddenpowerground", "signalbeam", "sleeppowder", "sludgebomb", "substitute"],
tier: "NU",
},
diglett: {
tier: "NU",
},
dugtrio: {
randomBattleMoves: ["aerialace", "earthquake", "hiddenpowerbug", "rockslide", "substitute"],
tier: "OU",
},
meowth: {
tier: "LC",
},
persian: {
randomBattleMoves: ["fakeout", "hiddenpowerground", "hypnosis", "irontail", "return", "shadowball", "substitute"],
tier: "UU",
},
psyduck: {
tier: "LC",
},
golduck: {
randomBattleMoves: ["calmmind", "hiddenpowergrass", "hydropump", "hypnosis", "icebeam", "substitute", "surf"],
tier: "UU",
},
mankey: {
tier: "LC",
},
primeape: {
randomBattleMoves: ["bulkup", "crosschop", "earthquake", "hiddenpowerghost", "rockslide", "substitute"],
tier: "UU",
},
growlithe: {
tier: "LC",
},
arcanine: {
randomBattleMoves: ["fireblast", "flamethrower", "extremespeed", "hiddenpowergrass", "rest", "sleeptalk", "toxic"],
tier: "UUBL",
},
poliwag: {
tier: "LC",
},
poliwhirl: {
tier: "NFE",
},
poliwrath: {
randomBattleMoves: ["brickbreak", "bulkup", "hiddenpowerghost", "hydropump", "hypnosis", "icebeam", "substitute"],
tier: "UU",
},
politoed: {
randomBattleMoves: ["counter", "hiddenpowergrass", "hypnosis", "icebeam", "surf", "toxic"],
tier: "UU",
},
abra: {
tier: "NU",
},
kadabra: {
tier: "UUBL",
},
alakazam: {
randomBattleMoves: ["calmmind", "encore", "firepunch", "icepunch", "psychic", "recover", "substitute"],
tier: "UUBL",
},
machop: {
tier: "LC",
},
machoke: {
tier: "NFE",
},
machamp: {
randomBattleMoves: ["bulkup", "crosschop", "earthquake", "hiddenpowerghost", "rest", "rockslide", "sleeptalk"],
tier: "UUBL",
},
bellsprout: {
tier: "LC",
},
weepinbell: {
tier: "NFE",
},
victreebel: {
randomBattleMoves: ["hiddenpowerfire", "sleeppowder", "sludgebomb", "solarbeam", "sunnyday"],
tier: "UU",
},
tentacool: {
tier: "LC",
},
tentacruel: {
randomBattleMoves: ["gigadrain", "haze", "hydropump", "icebeam", "rapidspin", "surf", "toxic"],
tier: "UU",
},
geodude: {
tier: "LC",
},
graveler: {
tier: "NU",
},
golem: {
randomBattleMoves: ["doubleedge", "earthquake", "explosion", "hiddenpowerbug", "rockslide", "toxic"],
tier: "UU",
},
ponyta: {
tier: "LC",
},
rapidash: {
randomBattleMoves: ["fireblast", "hiddenpowergrass", "hiddenpowerrock", "substitute", "toxic"],
tier: "UU",
},
slowpoke: {
tier: "LC",
},
slowbro: {
randomBattleMoves: ["calmmind", "fireblast", "icebeam", "psychic", "rest", "sleeptalk", "surf", "thunderwave"],
tier: "UUBL",
},
slowking: {
randomBattleMoves: ["calmmind", "flamethrower", "icebeam", "psychic", "surf", "rest", "sleeptalk", "thunderwave"],
tier: "UU",
},
magnemite: {
tier: "LC",
},
magneton: {
randomBattleMoves: ["hiddenpowergrass", "hiddenpowerice", "rest", "sleeptalk", "substitute", "thunderbolt", "thunderwave", "toxic"],
tier: "OU",
},
farfetchd: {
randomBattleMoves: ["agility", "batonpass", "hiddenpowerflying", "slash", "swordsdance"],
tier: "NU",
},
doduo: {
tier: "LC",
},
dodrio: {
randomBattleMoves: ["drillpeck", "flail", "hiddenpowerground", "quickattack", "return", "substitute"],
tier: "UUBL",
},
seel: {
tier: "LC",
},
dewgong: {
randomBattleMoves: ["encore", "hiddenpowergrass", "icebeam", "rest", "sleeptalk", "surf", "toxic"],
tier: "NU",
},
grimer: {
tier: "LC",
},
muk: {
randomBattleMoves: ["curse", "brickbreak", "explosion", "fireblast", "hiddenpowerghost", "rest", "sludgebomb"],
tier: "UU",
},
shellder: {
tier: "LC",
},
cloyster: {
randomBattleMoves: ["explosion", "icebeam", "surf", "rapidspin", "spikes", "toxic"],
tier: "OU",
},
gastly: {
tier: "LC",
},
haunter: {
tier: "NU",
},
gengar: {
randomBattleMoves: ["destinybond", "explosion", "firepunch", "hypnosis", "icepunch", "substitute", "thunderbolt", "willowisp"],
tier: "OU",
},
onix: {
tier: "LC",
},
steelix: {
randomBattleMoves: ["doubleedge", "earthquake", "explosion", "hiddenpowerrock", "irontail", "rest", "roar", "toxic"],
tier: "UUBL",
},
drowzee: {
tier: "LC",
},
hypno: {
randomBattleMoves: ["batonpass", "calmmind", "firepunch", "hypnosis", "protect", "psychic", "toxic", "wish"],
tier: "UU",
},
krabby: {
tier: "LC",
},
kingler: {
randomBattleMoves: ["doubleedge", "hiddenpowerghost", "hiddenpowerground", "surf", "swordsdance"],
tier: "NU",
},
voltorb: {
tier: "LC",
},
electrode: {
randomBattleMoves: ["explosion", "hiddenpowergrass", "hiddenpowerice", "substitute", "thunderbolt", "thunderwave", "toxic"],
tier: "UU",
},
exeggcute: {
tier: "LC",
},
exeggutor: {
randomBattleMoves: ["explosion", "gigadrain", "hiddenpowerfire", "hiddenpowerice", "leechseed", "psychic", "sleeppowder", "solarbeam", "sunnyday"],
tier: "UUBL",
},
cubone: {
tier: "LC",
},
marowak: {
randomBattleMoves: ["bonemerang", "doubleedge", "earthquake", "rockslide", "swordsdance"],
tier: "UUBL",
},
tyrogue: {
tier: "LC",
},
hitmonlee: {
randomBattleMoves: ["bulkup", "earthquake", "hiddenpowerghost", "highjumpkick", "machpunch", "rockslide", "substitute"],
tier: "UU",
},
hitmonchan: {
randomBattleMoves: ["bulkup", "earthquake", "hiddenpowerghost", "machpunch", "rapidspin", "skyuppercut", "toxic"],
tier: "NU",
},
hitmontop: {
randomBattleMoves: ["bulkup", "earthquake", "hiddenpowerghost", "highjumpkick", "machpunch", "rockslide", "toxic"],
tier: "UU",
},
lickitung: {
randomBattleMoves: ["counter", "healbell", "protect", "return", "seismictoss", "toxic", "wish"],
tier: "NU",
},
koffing: {
tier: "LC",
},
weezing: {
randomBattleMoves: ["explosion", "fireblast", "flamethrower", "haze", "painsplit", "sludgebomb", "toxic", "willowisp"],
tier: "UUBL",
},
rhyhorn: {
tier: "LC",
},
rhydon: {
randomBattleMoves: ["doubleedge", "earthquake", "megahorn", "rockslide", "substitute", "swordsdance"],
tier: "UUBL",
},
chansey: {
tier: "UUBL",
},
blissey: {
randomBattleMoves: ["aromatherapy", "calmmind", "icebeam", "seismictoss", "softboiled", "thunderbolt", "thunderwave", "toxic"],
tier: "OU",
},
tangela: {
randomBattleMoves: ["hiddenpowergrass", "leechseed", "morningsun", "sleeppowder", "stunspore"],
tier: "NU",
},
kangaskhan: {
randomBattleMoves: ["earthquake", "fakeout", "focuspunch", "rest", "return", "shadowball", "substitute", "toxic"],
tier: "UU",
},
horsea: {
tier: "LC",
},
seadra: {
tier: "NU",
},
kingdra: {
randomBattleMoves: ["hiddenpowergrass", "hydropump", "icebeam", "raindance", "substitute", "surf"],
tier: "UUBL",
},
goldeen: {
tier: "LC",
},
seaking: {
randomBattleMoves: ["hiddenpowergrass", "hydropump", "icebeam", "megahorn", "raindance"],
tier: "NU",
},
staryu: {
tier: "LC",
},
starmie: {
randomBattleMoves: ["hydropump", "icebeam", "psychic", "recover", "surf", "thunderbolt"],
tier: "OU",
},
mrmime: {
randomBattleMoves: ["barrier", "batonpass", "calmmind", "encore", "firepunch", "hypnosis", "psychic", "substitute", "thunderbolt"],
tier: "UU",
},
scyther: {
randomBattleMoves: ["aerialace", "batonpass", "hiddenpowerground", "hiddenpowerrock", "quickattack", "silverwind", "swordsdance"],
tier: "UU",
},
scizor: {
randomBattleMoves: ["agility", "batonpass", "hiddenpowerground", "hiddenpowerrock", "morningsun", "silverwind", "steelwing", "swordsdance"],
tier: "UUBL",
},
smoochum: {
tier: "LC",
},
jynx: {
randomBattleMoves: ["calmmind", "hiddenpowerfire", "icebeam", "lovelykiss", "psychic", "substitute"],
tier: "UUBL",
},
elekid: {
tier: "LC",
},
electabuzz: {
randomBattleMoves: ["crosschop", "firepunch", "focuspunch", "hiddenpowergrass", "icepunch", "substitute", "thunderbolt"],
tier: "UU",
},
magby: {
tier: "NU",
},
magmar: {
randomBattleMoves: ["crosschop", "fireblast", "flamethrower", "hiddenpowergrass", "psychic", "substitute", "thunderpunch"],
tier: "UU",
},
pinsir: {
randomBattleMoves: ["earthquake", "hiddenpowerbug", "return", "rockslide", "swordsdance"],
tier: "UU",
},
tauros: {
randomBattleMoves: ["doubleedge", "earthquake", "hiddenpowerghost", "hiddenpowerrock", "return"],
tier: "UUBL",
},
magikarp: {
tier: "LC",
},
gyarados: {
randomBattleMoves: ["doubleedge", "dragondance", "earthquake", "hiddenpowerflying", "hydropump", "taunt"],
tier: "OU",
},
lapras: {
randomBattleMoves: ["healbell", "icebeam", "rest", "sleeptalk", "surf", "thunderbolt", "toxic"],
tier: "UUBL",
},
ditto: {
randomBattleMoves: ["transform"],
tier: "NU",
},
eevee: {
tier: "LC",
},
vaporeon: {
randomBattleMoves: ["icebeam", "protect", "surf", "toxic", "wish"],
tier: "UUBL",
},
jolteon: {
randomBattleMoves: ["batonpass", "hiddenpowerice", "substitute", "thunderbolt", "toxic", "wish"],
tier: "OU",
},
flareon: {
randomBattleMoves: ["doubleedge", "fireblast", "hiddenpowergrass", "protect", "shadowball", "toxic", "wish"],
tier: "NU",
},
espeon: {
randomBattleMoves: ["batonpass", "calmmind", "hiddenpowerfire", "morningsun", "psychic", "reflect"],
tier: "UUBL",
},
umbreon: {
randomBattleMoves: ["batonpass", "hiddenpowerdark", "meanlook", "protect", "toxic", "wish"],
tier: "UUBL",
},
porygon: {
tier: "LC",
},
porygon2: {
randomBattleMoves: ["icebeam", "recover", "return", "thunderbolt", "thunderwave", "toxic"],
tier: "OU",
},
omanyte: {
tier: "LC",
},
omastar: {
randomBattleMoves: ["hiddenpowergrass", "hydropump", "icebeam", "raindance", "spikes", "surf"],
tier: "UU",
},
kabuto: {
tier: "LC",
},
kabutops: {
randomBattleMoves: ["brickbreak", "doubleedge", "hiddenpowerground", "rockslide", "surf", "swordsdance"],
tier: "UU",
},
aerodactyl: {
randomBattleMoves: ["doubleedge", "earthquake", "hiddenpowerflying", "rockslide", "substitute"],
tier: "OU",
},
snorlax: {
randomBattleMoves: ["bodyslam", "curse", "earthquake", "return", "rest", "selfdestruct", "shadowball", "sleeptalk"],
tier: "OU",
},
articuno: {
randomBattleMoves: ["healbell", "hiddenpowerfire", "icebeam", "protect", "rest", "roar", "sleeptalk", "toxic"],
tier: "UUBL",
},
zapdos: {
randomBattleMoves: ["agility", "batonpass", "hiddenpowerice", "substitute", "thunderbolt", "thunderwave", "toxic"],
tier: "OU",
},
moltres: {
randomBattleMoves: ["fireblast", "flamethrower", "hiddenpowergrass", "morningsun", "substitute", "toxic", "willowisp"],
tier: "OU",
},
dratini: {
tier: "LC",
},
dragonair: {
tier: "NFE",
},
dragonite: {
randomBattleMoves: ["doubleedge", "dragondance", "earthquake", "flamethrower", "healbell", "hiddenpowerflying", "icebeam", "substitute"],
tier: "UUBL",
},
mewtwo: {
randomBattleMoves: ["calmmind", "flamethrower", "icebeam", "psychic", "recover", "selfdestruct", "substitute", "thunderbolt"],
tier: "Uber",
},
mew: {
randomBattleMoves: ["calmmind", "explosion", "flamethrower", "icebeam", "psychic", "softboiled", "thunderbolt", "thunderwave", "transform"],
tier: "Uber",
},
chikorita: {
tier: "LC",
},
bayleef: {
tier: "NFE",
},
meganium: {
randomBattleMoves: ["bodyslam", "hiddenpowergrass", "leechseed", "synthesis", "toxic"],
tier: "UU",
},
cyndaquil: {
tier: "LC",
},
quilava: {
tier: "NFE",
},
typhlosion: {
randomBattleMoves: ["fireblast", "flamethrower", "focuspunch", "hiddenpowergrass", "hiddenpowerice", "substitute", "thunderpunch"],
tier: "UUBL",
},
totodile: {
tier: "LC",
},
croconaw: {
tier: "NFE",
},
feraligatr: {
randomBattleMoves: ["earthquake", "hiddenpowerflying", "hydropump", "rockslide", "swordsdance"],
tier: "UU",
},
sentret: {
tier: "LC",
},
furret: {
randomBattleMoves: ["doubleedge", "quickattack", "return", "reversal", "shadowball", "substitute", "trick"],
tier: "NU",
},
hoothoot: {
tier: "LC",
},
noctowl: {
randomBattleMoves: ["hypnosis", "psychic", "reflect", "toxic", "whirlwind"],
tier: "NU",
},
ledyba: {
tier: "LC",
},
ledian: {
randomBattleMoves: ["agility", "batonpass", "lightscreen", "reflect", "silverwind", "swordsdance", "toxic"],
tier: "NU",
},
spinarak: {
tier: "LC",
},
ariados: {
randomBattleMoves: ["agility", "batonpass", "signalbeam", "sludgebomb", "spiderweb", "toxic"],
tier: "NU",
},
chinchou: {
tier: "LC",
},
lanturn: {
randomBattleMoves: ["confuseray", "icebeam", "rest", "sleeptalk", "surf", "thunderbolt", "thunderwave", "toxic"],
tier: "UU",
},
togepi: {
tier: "LC",
},
togetic: {
randomBattleMoves: ["charm", "encore", "flamethrower", "seismictoss", "softboiled", "thunderwave", "toxic"],
tier: "NU",
},
natu: {
tier: "LC",
},
xatu: {
randomBattleMoves: ["batonpass", "calmmind", "hiddenpowerfire", "psychic", "reflect", "wish"],
tier: "UU",
},
mareep: {
tier: "LC",
},
flaaffy: {
tier: "NFE",
},
ampharos: {
randomBattleMoves: ["firepunch", "healbell", "hiddenpowergrass", "hiddenpowerice", "thunderbolt", "toxic"],
tier: "UU",
},
azurill: {
tier: "LC",
},
marill: {
tier: "NFE",
},
azumarill: {
randomBattleMoves: ["encore", "focuspunch", "hiddenpowerghost", "hydropump", "return", "substitute"],
tier: "UU",
},
sudowoodo: {
randomBattleMoves: ["brickbreak", "doubleedge", "earthquake", "explosion", "rockslide", "toxic"],
tier: "NU",
},
hoppip: {
tier: "LC",
},
skiploom: {
tier: "NFE",
},
jumpluff: {
randomBattleMoves: ["encore", "hiddenpowerflying", "leechseed", "sleeppowder", "substitute", "toxic"],
tier: "UUBL",
},
aipom: {
randomBattleMoves: ["batonpass", "doubleedge", "focuspunch", "shadowball", "substitute", "thunderwave"],
tier: "NU",
},
sunkern: {
tier: "LC",
},
sunflora: {
randomBattleMoves: ["hiddenpowerfire", "leechseed", "razorleaf", "solarbeam", "sunnyday", "synthesis", "toxic"],
tier: "NU",
},
yanma: {
randomBattleMoves: ["hiddenpowerflying", "hypnosis", "reversal", "shadowball", "substitute"],
tier: "NU",
},
wooper: {
tier: "LC",
},
quagsire: {
randomBattleMoves: ["counter", "curse", "earthquake", "hiddenpowerrock", "icebeam", "rest", "surf", "toxic"],
tier: "UU",
},
murkrow: {
randomBattleMoves: ["doubleedge", "drillpeck", "hiddenpowerfighting", "hiddenpowerground", "meanlook", "perishsong", "protect", "shadowball", "substitute"],
tier: "NU",
},
misdreavus: {
randomBattleMoves: ["calmmind", "hiddenpowerice", "meanlook", "perishsong", "protect", "substitute", "thunderbolt", "toxic"],
tier: "UU",
},
unown: {
randomBattleMoves: ["hiddenpowerpsychic"],
tier: "NU",
},
wynaut: {
tier: "Uber",
},
wobbuffet: {
randomBattleMoves: ["counter", "destinybond", "encore", "mirrorcoat"],
tier: "Uber",
},
girafarig: {
randomBattleMoves: ["agility", "batonpass", "calmmind", "psychic", "substitute", "thunderbolt", "thunderwave", "wish"],
tier: "UU",
},
pineco: {
tier: "LC",
},
forretress: {
randomBattleMoves: ["earthquake", "explosion", "hiddenpowerbug", "rapidspin", "spikes", "toxic"],
tier: "OU",
},
dunsparce: {
randomBattleMoves: ["bodyslam", "curse", "headbutt", "rockslide", "rest", "shadowball", "thunderwave"],
tier: "NU",
},
gligar: {
randomBattleMoves: ["earthquake", "hiddenpowerflying", "irontail", "quickattack", "rockslide", "substitute", "swordsdance"],
tier: "UU",
},
snubbull: {
tier: "LC",
},
granbull: {
randomBattleMoves: ["bulkup", "earthquake", "healbell", "overheat", "rest", "return", "shadowball", "thunderwave"],
tier: "UU",
},
qwilfish: {
randomBattleMoves: ["destinybond", "hydropump", "selfdestruct", "shadowball", "sludgebomb", "spikes", "swordsdance"],
tier: "UU",
},
shuckle: {
randomBattleMoves: ["encore", "protect", "rest", "toxic", "wrap"],
tier: "NU",
},
heracross: {
randomBattleMoves: ["brickbreak", "focuspunch", "megahorn", "rest", "rockslide", "sleeptalk", "substitute", "swordsdance"],
tier: "OU",
},
sneasel: {
randomBattleMoves: ["brickbreak", "doubleedge", "hiddenpowerflying", "shadowball", "substitute", "swordsdance"],
tier: "UU",
},
teddiursa: {
tier: "LC",
},
ursaring: {
randomBattleMoves: ["earthquake", "focuspunch", "hiddenpowerghost", "return", "swordsdance"],
tier: "UUBL",
},
slugma: {
tier: "LC",
},
magcargo: {
randomBattleMoves: ["fireblast", "hiddenpowergrass", "rest", "sleeptalk", "toxic", "yawn"],
tier: "NU",
},
swinub: {
tier: "LC",
},
piloswine: {
randomBattleMoves: ["doubleedge", "earthquake", "icebeam", "protect", "rockslide", "toxic"],
tier: "NU",
},
corsola: {
randomBattleMoves: ["calmmind", "confuseray", "icebeam", "recover", "surf", "toxic"],
tier: "NU",
},
remoraid: {
tier: "LC",
},
octillery: {
randomBattleMoves: ["fireblast", "hiddenpowergrass", "icebeam", "rockblast", "surf", "thunderwave"],
tier: "NU",
},
delibird: {
randomBattleMoves: ["aerialace", "focuspunch", "hiddenpowerground", "icebeam", "quickattack"],
tier: "NU",
},
mantine: {
randomBattleMoves: ["haze", "hiddenpowergrass", "icebeam", "surf", "raindance", "rest", "sleeptalk", "toxic"],
tier: "UU",
},
skarmory: {
randomBattleMoves: ["drillpeck", "protect", "rest", "roar", "sleeptalk", "spikes", "toxic"],
tier: "OU",
},
houndour: {
tier: "LC",
},
houndoom: {
randomBattleMoves: ["crunch", "fireblast", "flamethrower", "hiddenpowergrass", "pursuit", "willowisp"],
tier: "UUBL",
},
phanpy: {
tier: "LC",
},
donphan: {
randomBattleMoves: ["earthquake", "hiddenpowerbug", "rapidspin", "rest", "roar", "rockslide", "sleeptalk", "toxic"],
tier: "UUBL",
},
stantler: {
randomBattleMoves: ["earthquake", "hypnosis", "return", "shadowball", "thunderbolt"],
tier: "UU",
},
smeargle: {
randomBattleMoves: ["encore", "explosion", "spikes", "spore"],
tier: "UUBL",
},
miltank: {
randomBattleMoves: ["bodyslam", "curse", "earthquake", "healbell", "milkdrink", "toxic"],
tier: "UUBL",
},
raikou: {
randomBattleMoves: ["calmmind", "crunch", "hiddenpowergrass", "hiddenpowerice", "rest", "sleeptalk", "substitute", "thunderbolt"],
tier: "UUBL",
},
entei: {
randomBattleMoves: ["bodyslam", "calmmind", "fireblast", "flamethrower", "hiddenpowergrass", "hiddenpowerice", "solarbeam", "substitute", "sunnyday"],
tier: "UUBL",
},
suicune: {
randomBattleMoves: ["calmmind", "icebeam", "rest", "sleeptalk", "substitute", "surf", "toxic"],
tier: "OU",
},
larvitar: {
tier: "LC",
},
pupitar: {
tier: "NU",
},
tyranitar: {
randomBattleMoves: ["dragondance", "earthquake", "fireblast", "focuspunch", "hiddenpowerbug", "icebeam", "pursuit", "rockslide", "substitute"],
tier: "OU",
},
lugia: {
randomBattleMoves: ["aeroblast", "calmmind", "earthquake", "icebeam", "recover", "substitute", "thunderbolt", "toxic"],
tier: "Uber",
},
hooh: {
randomBattleMoves: ["calmmind", "earthquake", "sacredfire", "thunderbolt", "recover", "substitute", "toxic"],
tier: "Uber",
},
celebi: {
randomBattleMoves: ["batonpass", "calmmind", "healbell", "hiddenpowergrass", "leechseed", "psychic", "recover"],
tier: "OU",
},
treecko: {
tier: "LC",
},
grovyle: {
tier: "NFE",
},
sceptile: {
randomBattleMoves: ["focuspunch", "hiddenpowerice", "leafblade", "leechseed", "substitute", "thunderpunch"],
tier: "UUBL",
},
torchic: {
tier: "LC",
},
combusken: {
tier: "NFE",
},
blaziken: {
randomBattleMoves: ["endure", "fireblast", "hiddenpowerice", "reversal", "rockslide", "skyuppercut", "swordsdance", "thunderpunch"],
tier: "UUBL",
},
mudkip: {
tier: "LC",
},
marshtomp: {
tier: "NFE",
},
swampert: {
randomBattleMoves: ["earthquake", "hydropump", "icebeam", "protect", "rest", "rockslide", "sleeptalk", "surf", "toxic"],
tier: "OU",
},
poochyena: {
tier: "LC",
},
mightyena: {
randomBattleMoves: ["crunch", "doubleedge", "healbell", "hiddenpowerfighting", "protect", "shadowball", "toxic"],
tier: "NU",
},
zigzagoon: {
tier: "LC",
},
linoone: {
randomBattleMoves: ["bellydrum", "extremespeed", "flail", "hiddenpowerground", "shadowball", "substitute"],
tier: "UU",
},
wurmple: {
tier: "LC",
},
silcoon: {
tier: "NFE",
},
beautifly: {
randomBattleMoves: ["hiddenpowerbug", "hiddenpowerflying", "morningsun", "stunspore", "substitute", "toxic"],
tier: "NU",
},
cascoon: {
tier: "NFE",
},
dustox: {
randomBattleMoves: ["hiddenpowerground", "lightscreen", "moonlight", "sludgebomb", "toxic", "whirlwind"],
tier: "NU",
},
lotad: {
tier: "LC",
},
lombre: {
tier: "NFE",
},
ludicolo: {
randomBattleMoves: ["hiddenpowergrass", "icebeam", "leechseed", "raindance", "substitute", "surf"],
tier: "UUBL",
},
seedot: {
tier: "LC",
},
nuzleaf: {
tier: "NFE",
},
shiftry: {
randomBattleMoves: ["brickbreak", "explosion", "hiddenpowerfire", "shadowball", "solarbeam", "sunnyday", "swordsdance"],
tier: "UU",
},
taillow: {
tier: "LC",
},
swellow: {
randomBattleMoves: ["aerialace", "doubleedge", "hiddenpowerfighting", "hiddenpowerground", "quickattack", "return"],
tier: "UUBL",
},
wingull: {
tier: "LC",
},
pelipper: {
randomBattleMoves: ["icebeam", "protect", "rest", "sleeptalk", "surf", "toxic"],
tier: "NU",
},
ralts: {
tier: "LC",
},
kirlia: {
tier: "NFE",
},
gardevoir: {
randomBattleMoves: ["calmmind", "firepunch", "hypnosis", "psychic", "substitute", "thunderbolt", "willowisp"],
tier: "UUBL",
},
surskit: {
tier: "LC",
},
masquerain: {
randomBattleMoves: ["hydropump", "icebeam", "stunspore", "substitute", "toxic"],
tier: "NU",
},
shroomish: {
tier: "LC",
},
breloom: {
randomBattleMoves: ["focuspunch", "hiddenpowerghost", "hiddenpowerrock", "leechseed", "machpunch", "skyuppercut", "spore", "substitute", "swordsdance"],
tier: "OU",
},
slakoth: {
tier: "LC",
},
vigoroth: {
randomBattleMoves: ["brickbreak", "bulkup", "earthquake", "return", "shadowball", "slackoff"],
tier: "NU",
},
slaking: {
randomBattleMoves: ["doubleedge", "earthquake", "focuspunch", "return", "shadowball"],
tier: "UUBL",
},
nincada: {
tier: "LC",
},
ninjask: {
randomBattleMoves: ["aerialace", "batonpass", "hiddenpowerrock", "protect", "silverwind", "substitute", "swordsdance"],
tier: "UUBL",
},
shedinja: {
randomBattleMoves: ["agility", "batonpass", "hiddenpowerground", "shadowball", "silverwind", "toxic"],
tier: "NU",
},
whismur: {
tier: "LC",
},
loudred: {
tier: "NFE",
},
exploud: {
randomBattleMoves: ["earthquake", "flamethrower", "icebeam", "overheat", "return", "shadowball", "substitute"],
tier: "UU",
},
makuhita: {
tier: "LC",
},
hariyama: {
randomBattleMoves: ["bulkup", "crosschop", "fakeout", "focuspunch", "hiddenpowerghost", "rockslide", "substitute", "rest", "sleeptalk"],
tier: "UUBL",
},
nosepass: {
randomBattleMoves: ["earthquake", "explosion", "rockslide", "thunderbolt", "thunderwave", "toxic"],
tier: "NU",
},
skitty: {
tier: "LC",
},
delcatty: {
randomBattleMoves: ["batonpass", "doubleedge", "healbell", "thunderwave", "wish"],
tier: "NU",
},
sableye: {
randomBattleMoves: ["knockoff", "recover", "seismictoss", "shadowball", "toxic"],
tier: "NU",
},
mawile: {
randomBattleMoves: ["batonpass", "brickbreak", "focuspunch", "hiddenpowersteel", "rockslide", "substitute", "swordsdance", "toxic"],
tier: "NU",
},
aron: {
tier: "LC",
},
lairon: {
tier: "NFE",
},
aggron: {
randomBattleMoves: ["counter", "doubleedge", "earthquake", "focuspunch", "irontail", "rockslide", "substitute", "thunderwave", "toxic"],
tier: "UU",
},
meditite: {
tier: "LC",
},
medicham: {
randomBattleMoves: ["brickbreak", "bulkup", "fakeout", "recover", "rockslide", "shadowball", "substitute"],
tier: "UUBL",
},
electrike: {
tier: "LC",
},
manectric: {
randomBattleMoves: ["crunch", "hiddenpowergrass", "hiddenpowerice", "substitute", "thunderbolt", "thunderwave"],
tier: "UU",
},
plusle: {
randomBattleMoves: ["agility", "batonpass", "encore", "hiddenpowergrass", "substitute", "thunderbolt", "toxic"],
tier: "NU",
},
minun: {
randomBattleMoves: ["batonpass", "encore", "hiddenpowerice", "lightscreen", "substitute", "thunderbolt", "wish"],
tier: "NU",
},
volbeat: {
randomBattleMoves: ["batonpass", "icepunch", "tailglow", "thunderbolt"],
tier: "NU",
},
illumise: {
randomBattleMoves: ["batonpass", "encore", "icepunch", "substitute", "thunderwave", "wish"],
tier: "NU",
},
roselia: {
randomBattleMoves: ["aromatherapy", "gigadrain", "hiddenpowerfire", "spikes", "stunspore", "synthesis"],
tier: "NU",
},
gulpin: {
tier: "LC",
},
swalot: {
randomBattleMoves: ["encore", "explosion", "hiddenpowerground", "icebeam", "sludgebomb", "toxic", "yawn"],
tier: "NU",
},
carvanha: {
tier: "LC",
},
sharpedo: {
randomBattleMoves: ["crunch", "earthquake", "endure", "hiddenpowerflying", "hydropump", "icebeam", "return"],
tier: "UU",
},
wailmer: {
tier: "LC",
},
wailord: {
randomBattleMoves: ["hiddenpowergrass", "icebeam", "rest", "selfdestruct", "sleeptalk", "surf", "toxic"],
tier: "NU",
},
numel: {
tier: "LC",
},
camerupt: {
randomBattleMoves: ["earthquake", "explosion", "fireblast", "rest", "rockslide", "sleeptalk", "toxic"],
tier: "UU",
},
torkoal: {
randomBattleMoves: ["explosion", "fireblast", "flamethrower", "hiddenpowergrass", "rest", "toxic", "yawn"],
tier: "NU",
},
spoink: {
tier: "LC",
},
grumpig: {
randomBattleMoves: ["calmmind", "firepunch", "icywind", "psychic", "substitute", "taunt"],
tier: "UU",
},
spinda: {
randomBattleMoves: ["bodyslam", "encore", "focuspunch", "shadowball", "substitute", "teeterdance", "toxic"],
tier: "NU",
},
trapinch: {
tier: "LC",
},
vibrava: {
tier: "NFE",
},
flygon: {
randomBattleMoves: ["dragonclaw", "earthquake", "fireblast", "hiddenpowerbug", "rockslide", "substitute", "toxic"],
tier: "OU",
},
cacnea: {
tier: "LC",
},
cacturne: {
randomBattleMoves: ["focuspunch", "hiddenpowerdark", "leechseed", "needlearm", "spikes", "substitute", "thunderpunch"],
tier: "NU",
},
swablu: {
tier: "LC",
},
altaria: {
randomBattleMoves: ["dragonclaw", "dragondance", "earthquake", "fireblast", "flamethrower", "haze", "hiddenpowerflying", "rest", "toxic"],
tier: "UU",
},
zangoose: {
randomBattleMoves: ["brickbreak", "quickattack", "return", "shadowball", "swordsdance"],
tier: "UUBL",
},
seviper: {
randomBattleMoves: ["crunch", "doubleedge", "earthquake", "flamethrower", "hiddenpowergrass", "sludgebomb"],
tier: "NU",
},
lunatone: {
randomBattleMoves: ["batonpass", "calmmind", "explosion", "hypnosis", "icebeam", "psychic"],
tier: "UU",<|fim▁hole|> tier: "UU",
},
barboach: {
tier: "LC",
},
whiscash: {
randomBattleMoves: ["earthquake", "hiddenpowerbug", "icebeam", "rockslide", "rest", "sleeptalk", "spark", "surf", "toxic"],
tier: "NU",
},
corphish: {
tier: "LC",
},
crawdaunt: {
randomBattleMoves: ["brickbreak", "crunch", "doubleedge", "hiddenpowerghost", "icebeam", "surf"],
tier: "NU",
},
baltoy: {
tier: "LC",
},
claydol: {
randomBattleMoves: ["earthquake", "explosion", "icebeam", "psychic", "rapidspin", "toxic"],
tier: "OU",
},
lileep: {
tier: "LC",
},
cradily: {
randomBattleMoves: ["barrier", "earthquake", "hiddenpowergrass", "mirrorcoat", "recover", "rockslide", "toxic"],
tier: "UU",
},
anorith: {
tier: "LC",
},
armaldo: {
randomBattleMoves: ["doubleedge", "earthquake", "hiddenpowerbug", "rockslide", "swordsdance"],
tier: "UUBL",
},
feebas: {
tier: "LC",
},
milotic: {
randomBattleMoves: ["icebeam", "mirrorcoat", "recover", "rest", "sleeptalk", "surf", "toxic"],
tier: "OU",
},
castform: {
randomBattleMoves: ["flamethrower", "icebeam", "substitute", "thunderbolt", "thunderwave"],
tier: "NU",
},
castformsunny: {
tier: "NU",
},
castformrainy: {
tier: "NU",
},
castformsnowy: {
tier: "NU",
},
kecleon: {
randomBattleMoves: ["brickbreak", "return", "shadowball", "thunderwave", "trick"],
tier: "NU",
},
shuppet: {
tier: "LC",
},
banette: {
randomBattleMoves: ["destinybond", "endure", "hiddenpowerfighting", "knockoff", "shadowball", "willowisp"],
tier: "UU",
},
duskull: {
tier: "LC",
},
dusclops: {
randomBattleMoves: ["focuspunch", "icebeam", "painsplit", "rest", "shadowball", "sleeptalk", "substitute", "willowisp"],
tier: "UUBL",
},
tropius: {
randomBattleMoves: ["hiddenpowerfire", "solarbeam", "sunnyday", "synthesis"],
tier: "NU",
},
chimecho: {
randomBattleMoves: ["calmmind", "healbell", "hiddenpowerfire", "lightscreen", "psychic", "reflect", "toxic", "yawn"],
tier: "NU",
},
absol: {
randomBattleMoves: ["batonpass", "hiddenpowerfighting", "quickattack", "shadowball", "swordsdance"],
tier: "UU",
},
snorunt: {
tier: "LC",
},
glalie: {
randomBattleMoves: ["earthquake", "explosion", "icebeam", "spikes", "toxic"],
tier: "NU",
},
spheal: {
tier: "LC",
},
sealeo: {
tier: "NFE",
},
walrein: {
randomBattleMoves: ["encore", "hiddenpowergrass", "icebeam", "rest", "sleeptalk", "surf", "toxic"],
tier: "UU",
},
clamperl: {
tier: "LC",
},
huntail: {
randomBattleMoves: ["doubleedge", "hiddenpowergrass", "hydropump", "icebeam", "raindance", "surf"],
tier: "NU",
},
gorebyss: {
randomBattleMoves: ["hiddenpowerelectric", "hiddenpowergrass", "hydropump", "icebeam", "raindance", "surf"],
tier: "UU",
},
relicanth: {
randomBattleMoves: ["doubleedge", "earthquake", "hiddenpowerflying", "rest", "rockslide", "sleeptalk", "toxic"],
tier: "NU",
},
luvdisc: {
randomBattleMoves: ["icebeam", "protect", "substitute", "surf", "sweetkiss", "toxic"],
tier: "NU",
},
bagon: {
tier: "LC",
},
shelgon: {
tier: "NU",
},
salamence: {
randomBattleMoves: ["brickbreak", "dragondance", "earthquake", "fireblast", "hiddenpowerflying", "rockslide"],
tier: "OU",
},
beldum: {
tier: "LC",
},
metang: {
tier: "NU",
},
metagross: {
randomBattleMoves: ["agility", "earthquake", "explosion", "meteormash", "psychic", "rockslide"],
tier: "OU",
},
regirock: {
randomBattleMoves: ["curse", "earthquake", "explosion", "rest", "rockslide", "superpower", "thunderwave"],
tier: "UUBL",
},
regice: {
randomBattleMoves: ["explosion", "icebeam", "rest", "sleeptalk", "thunderbolt", "thunderwave", "toxic"],
tier: "UUBL",
},
registeel: {
randomBattleMoves: ["protect", "rest", "seismictoss", "sleeptalk", "toxic"],
tier: "UUBL",
},
latias: {
randomBattleMoves: ["calmmind", "dragonclaw", "hiddenpowerfire", "recover", "refresh", "toxic"],
tier: "Uber",
},
latios: {
randomBattleMoves: ["calmmind", "dragonclaw", "hiddenpowerfire", "psychic", "recover", "refresh", "thunderbolt"],
tier: "Uber",
},
kyogre: {
randomBattleMoves: ["calmmind", "icebeam", "rest", "sleeptalk", "surf", "thunder", "waterspout"],
tier: "Uber",
},
groudon: {
randomBattleMoves: ["earthquake", "hiddenpowerghost", "overheat", "rockslide", "substitute", "swordsdance", "thunderwave"],
tier: "Uber",
},
rayquaza: {
randomBattleMoves: ["dragondance", "earthquake", "extremespeed", "hiddenpowerflying", "overheat", "rockslide"],
tier: "Uber",
},
jirachi: {
randomBattleMoves: ["bodyslam", "calmmind", "firepunch", "icepunch", "protect", "psychic", "substitute", "thunderbolt", "wish"],
tier: "OU",
},
deoxys: {
randomBattleMoves: ["extremespeed", "firepunch", "icebeam", "psychoboost", "shadowball", "superpower"],
tier: "Uber",
},
deoxysattack: {
randomBattleMoves: ["extremespeed", "firepunch", "psychoboost", "shadowball", "superpower"],
tier: "Uber",
},
deoxysdefense: {
randomBattleMoves: ["nightshade", "recover", "spikes", "taunt", "toxic"],
tier: "Uber",
},
deoxysspeed: {
randomBattleMoves: ["calmmind", "icebeam", "psychic", "recover", "spikes", "taunt", "toxic"],
tier: "Uber",
},
};<|fim▁end|>
|
},
solrock: {
randomBattleMoves: ["earthquake", "explosion", "overheat", "reflect", "rockslide", "shadowball"],
|
<|file_name|>0020_userprofile_last_transaction.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0019_auto_20160518_0048'),
]
operations = [
migrations.AddField(
model_name='userprofile',<|fim▁hole|> ]<|fim▁end|>
|
name='last_transaction',
field=models.DateTimeField(null=True, blank=True),
),
|
<|file_name|>age.js<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2017 3D Repo Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.<|fim▁hole|> * along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
"use strict";
(() => {
// calulate how many weeks an issue has been created.
// 0 - <=1 = week 0
// >1 - <=2 = week 1
// ...
module.exports = {
"$ceil": {
"$divide": [
{"$subtract": [new Date().valueOf(), "$$CURRENT.created"]},
// one week in milliseconds
604800000
]
}
};
})();<|fim▁end|>
|
*
* You should have received a copy of the GNU Affero General Public License
|
<|file_name|>0027_auto_20180116_1012.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-01-16 10:12
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('patients', '0026_clinicianother_user'),<|fim▁hole|> model_name='clinicianother',
name='use_other',
field=models.BooleanField(
default=False),
),
migrations.AlterField(
model_name='clinicianother',
name='user',
field=models.ForeignKey(
blank=True,
null=True,
on_delete=models.CASCADE,
to=settings.AUTH_USER_MODEL),
),
]<|fim▁end|>
|
]
operations = [
migrations.AddField(
|
<|file_name|>test_drivers.py<|end_file_name|><|fim▁begin|>import logging
import os.path
import shutil
import sys
import tempfile
import fiona
def test_options(tmpdir=None):
"""Test that setting CPL_DEBUG=ON works"""
if tmpdir is None:
tempdir = tempfile.mkdtemp()
logfile = os.path.join(tempdir, 'example.log')
else:
logfile = str(tmpdir.join('example.log'))
logger = logging.getLogger('Fiona')
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
logger.addHandler(fh)
with fiona.drivers(CPL_DEBUG=True):
c = fiona.open("docs/data/test_uk.shp")
c.close()
log = open(logfile).read()<|fim▁hole|>
if tempdir and tmpdir is None:
shutil.rmtree(tempdir)<|fim▁end|>
|
assert "Option CPL_DEBUG" in log
|
<|file_name|>app.py<|end_file_name|><|fim▁begin|>import os
from dotenv import load_dotenv
dotenv_path = os.path.join(os.path.dirname(__file__), '../.env')
load_dotenv(dotenv_path)
<|fim▁hole|># pylama:ignore=E402
from flask import Flask
from flask.ext.cors import CORS
from flask.ext.session import Session
from werkzeug.contrib.fixers import ProxyFix
from raven.contrib.flask import Sentry
from .blueprints import datastore, package, user, search
from .blueprints.logger import logger
def create():
"""Create application.
"""
# Create application
app = Flask('service', static_folder=None)
app.config['DEBUG'] = True
# Respect X-Forwarding-* headers
app.wsgi_app = ProxyFix(app.wsgi_app)
# CORS support
CORS(app, supports_credentials=True)
# Exception logging
Sentry(app, dsn=os.environ.get('SENTRY_DSN', ''))
# Session
sess = Session()
app.config['SESSION_TYPE'] = 'filesystem'
app.config['SECRET_KEY'] = 'openspending rocks'
sess.init_app(app)
# Register blueprints
logger.info("Creating Datastore Blueprint")
app.register_blueprint(datastore.create(), url_prefix='/datastore/')
logger.info("Creating Package Blueprint")
app.register_blueprint(package.create(), url_prefix='/package/')
logger.info("Creating Authentication Blueprint")
app.register_blueprint(user.oauth_create(), url_prefix='/oauth/')
logger.info("Creating Users Blueprint")
app.register_blueprint(user.create(), url_prefix='/user/')
logger.info("Creating Search Blueprint")
app.register_blueprint(search.create(), url_prefix='/search/')
# Return application
return app<|fim▁end|>
| |
<|file_name|>TestSin.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#pragma version(1)
#pragma rs java_package_name(android.renderscript.cts)
// Don't edit this file! It is auto-generated by frameworks/rs/api/gen_runtime.
float __attribute__((kernel)) testSinFloatFloat(float in) {
return sin(in);
}
float2 __attribute__((kernel)) testSinFloat2Float2(float2 in) {
return sin(in);
}
<|fim▁hole|>float4 __attribute__((kernel)) testSinFloat4Float4(float4 in) {
return sin(in);
}<|fim▁end|>
|
float3 __attribute__((kernel)) testSinFloat3Float3(float3 in) {
return sin(in);
}
|
<|file_name|>meetup.js<|end_file_name|><|fim▁begin|>var https = require('https'),
q = require('q'),
cache = require('./cache').cache;
var API_KEY = process.env.TF_MEETUP_API_KEY;
var fetch_events = function () {
var deferred = q.defer();
var options = {
host: 'api.meetup.com',
path: '/2/events?&sign=true&photo-host=public&group_urlname=GOTO-Night-Stockholm&page=20&key=' + API_KEY
};
var callback = function (response) {
var str = '';
response.on('data', function (chunk) {
str += chunk;
});
response.on('end', function () {
var json = JSON.parse(str);
deferred.resolve(json.results);
});
};
var req = https.request(options, callback);
req.on('error', function (e) {
deferred.reject(e);
});
req.end();
return deferred.promise;
};<|fim▁hole|><|fim▁end|>
|
module.exports.fetch_events = cache(fetch_events, 10000, true, []);
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.