prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>build.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const config_1 = require("../models/config");
const version_1 = require("../upgrade/version");
const common_tags_1 = require("common-tags");
const Command = require('../ember-cli/lib/models/command');
const config = config_1.CliConfig.fromProject() || config_1.CliConfig.fromGlobal();
const pollDefault = config.config.defaults && config.config.defaults.poll;
// defaults for BuildOptions
exports.baseBuildCommandOptions = [
{
name: 'target',
type: String,
default: 'development',
aliases: ['t', { 'dev': 'development' }, { 'prod': 'production' }],
description: 'Defines the build target.'
},
{
name: 'environment',
type: String,
aliases: ['e'],
description: 'Defines the build environment.'
},
{
name: 'output-path',
type: 'Path',
aliases: ['op'],
description: 'Path where output will be placed.'
},
{
name: 'aot',
type: Boolean,
description: 'Build using Ahead of Time compilation.'
},
{
name: 'sourcemaps',
type: Boolean,
aliases: ['sm', 'sourcemap'],
description: 'Output sourcemaps.'
},
{
name: 'vendor-chunk',
type: Boolean,
default: true,
aliases: ['vc'],
description: 'Use a separate bundle containing only vendor libraries.'
},
{
name: 'base-href',
type: String,
aliases: ['bh'],
description: 'Base url for the application being built.'
},
{
name: 'deploy-url',
type: String,
aliases: ['d'],
description: 'URL where files will be deployed.'
},
{
name: 'verbose',
type: Boolean,
default: false,<|fim▁hole|> description: 'Adds more details to output logging.'
},
{
name: 'progress',
type: Boolean,
default: true,
aliases: ['pr'],
description: 'Log progress to the console while building.'
},
{
name: 'i18n-file',
type: String,
description: 'Localization file to use for i18n.'
},
{
name: 'i18n-format',
type: String,
description: 'Format of the localization file specified with --i18n-file.'
},
{
name: 'locale',
type: String,
description: 'Locale to use for i18n.'
},
{
name: 'extract-css',
type: Boolean,
aliases: ['ec'],
description: 'Extract css from global styles onto css files instead of js ones.'
},
{
name: 'watch',
type: Boolean,
default: false,
aliases: ['w'],
description: 'Run build when files change.'
},
{
name: 'output-hashing',
type: String,
values: ['none', 'all', 'media', 'bundles'],
description: 'Define the output filename cache-busting hashing mode.',
aliases: ['oh']
},
{
name: 'poll',
type: Number,
default: pollDefault,
description: 'Enable and define the file watching poll time period (milliseconds).'
},
{
name: 'app',
type: String,
aliases: ['a'],
description: 'Specifies app name or index to use.'
},
{
name: 'delete-output-path',
type: Boolean,
default: true,
aliases: ['dop'],
description: 'Delete output path before build.'
},
{
name: 'preserve-symlinks',
type: Boolean,
default: false,
description: 'Do not use the real path when resolving modules.'
},
{
name: 'extract-licenses',
type: Boolean,
default: true,
description: 'Extract all licenses in a separate file, in the case of production builds only.'
}
];
const BuildCommand = Command.extend({
name: 'build',
description: 'Builds your app and places it into the output path (dist/ by default).',
aliases: ['b'],
availableOptions: exports.baseBuildCommandOptions.concat([
{
name: 'stats-json',
type: Boolean,
default: false,
description: common_tags_1.oneLine `Generates a \`stats.json\` file which can be analyzed using tools
such as: \`webpack-bundle-analyzer\` or https://webpack.github.io/analyse.`
}
]),
run: function (commandOptions) {
// Check angular version.
version_1.Version.assertAngularVersionIs2_3_1OrHigher(this.project.root);
const BuildTask = require('../tasks/build').default;
const buildTask = new BuildTask({
project: this.project,
ui: this.ui,
});
return buildTask.run(commandOptions);
}
});
BuildCommand.overrideCore = true;
exports.default = BuildCommand;
//# sourceMappingURL=/users/arick/angular-cli/commands/build.js.map<|fim▁end|>
|
aliases: ['v'],
|
<|file_name|>Mon_Wash_3_temp.py<|end_file_name|><|fim▁begin|>import sys
# where RobotControl.py, etc lives
sys.path.append('/home/pi/Desktop/ADL/YeastRobot/PythonLibrary')
from RobotControl import *
#################################
### Define Deck Layout
#################################
deck="""\
DW96W DW96W DW96W BLANK BLANK BLANK BLANK
DW96W DW96W DW96W BLANK BLANK BLANK BLANK
DW96W DW96W DW96W BLANK BLANK BLANK BLANK
BLANK BLANK BLANK BLANK BLANK BLANK BLANK
"""
# 2 3 4 5 6
# note the 1st user defined column is "2" not zero or one, since tips are at 0 & 1
##################################
OffsetDict={0: 'UL', 1: 'UR', 2: 'LL', 3: 'LR'}
# read in deck, etc
DefineDeck(deck)
printDeck()
InitializeRobot()
CurrentTipPosition = 2
<|fim▁hole|>for row in [0,1,2]:
for offset in [0,1,2,3]:
# get tips
CurrentTipPosition = retrieveTips(CurrentTipPosition)
extraSeatTips()
adjusted_depth = 94 + row
# pick up 300ul of YPD from C4, add to C2
position(row,4,position = OffsetDict[offset])
aspirate(300,depth=adjusted_depth + 2,speed=50, mix=3)
position(row,2,position = OffsetDict[offset])
dispense(300, depth=adjusted_depth, speed=50)
aspirate(300, depth=adjusted_depth, speed=50, mix = 3)
dispense(300, depth=adjusted_depth, speed=50)
# discard tips
#disposeTips()
manualDisposeTips()
position(0,0)
ShutDownRobot()
quit()<|fim▁end|>
|
# eventually row in 0,1,2,3
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>// Break out the application running from the configuration definition to
// assist with testing.
require(['config'], function() {
// Kick off the application.
require(['app', 'router'], function(app, Router) {
// Define your master router on the application namespace and trigger all
// navigation from this instance.
app.router = new Router();
app.router.bind("all",function(route, router) {
$('#wrap').css('background-image', 'none');
$('.navbar').removeClass('bg').removeClass('bg-black');
$('.footer').removeClass('transparent');
$('.dropdown-menu').removeClass('bg-inverse');
});
// Trigger the initial route and enable HTML5 History API support, set the
// root folder to '/' by default. Change in app.js.
Backbone.history.start();<|fim▁hole|><|fim▁end|>
|
});
});
|
<|file_name|>UIManager3x.java<|end_file_name|><|fim▁begin|>package org.emdev.ui.uimanager;
<|fim▁hole|>import android.app.ActivityManager;
import android.app.ActivityManager.RunningServiceInfo;
import android.content.ComponentName;
import android.content.Context;
import android.view.View;
import android.view.Window;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import org.emdev.BaseDroidApp;
@TargetApi(11)
public class UIManager3x implements IUIManager {
private static final String SYS_UI_CLS = "com.android.systemui.SystemUIService";
private static final String SYS_UI_PKG = "com.android.systemui";
private static final ComponentName SYS_UI = new ComponentName(SYS_UI_PKG, SYS_UI_CLS);
private static final String SU_PATH1 = "/system/bin/su";
private static final String SU_PATH2 = "/system/xbin/su";
private static final String AM_PATH = "/system/bin/am";
private static final Map<ComponentName, Data> data = new HashMap<ComponentName, Data>() {
/**
*
*/
private static final long serialVersionUID = -6627308913610357179L;
@Override
public Data get(final Object key) {
Data existing = super.get(key);
if (existing == null) {
existing = new Data();
put((ComponentName) key, existing);
}
return existing;
}
};
@Override
public void setTitleVisible(final Activity activity, final boolean visible, final boolean firstTime) {
if (firstTime) {
try {
final Window window = activity.getWindow();
window.requestFeature(Window.FEATURE_ACTION_BAR);
window.requestFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
activity.setProgressBarIndeterminate(true);
activity.setProgressBarIndeterminateVisibility(true);
window.setFeatureInt(Window.FEATURE_INDETERMINATE_PROGRESS, 1);
} catch (final Throwable th) {
LCTX.e("Error on requestFeature call: " + th.getMessage());
}
}
try {
if (visible) {
activity.getActionBar().show();
} else {
activity.getActionBar().hide();
}
data.get(activity.getComponentName()).titleVisible = visible;
} catch (final Throwable th) {
LCTX.e("Error on requestFeature call: " + th.getMessage());
}
}
@Override
public boolean isTitleVisible(final Activity activity) {
return data.get(activity.getComponentName()).titleVisible;
}
@Override
public void setProgressSpinnerVisible(Activity activity, boolean visible) {
activity.setProgressBarIndeterminateVisibility(visible);
activity.getWindow().setFeatureInt(Window.FEATURE_INDETERMINATE_PROGRESS, visible ? 1 : 0);
}
@Override
public void setFullScreenMode(final Activity activity, final View view, final boolean fullScreen) {
data.get(activity.getComponentName()).fullScreen = fullScreen;
if (fullScreen) {
stopSystemUI(activity);
} else {
startSystemUI(activity);
}
}
@Override
public void openOptionsMenu(final Activity activity, final View view) {
activity.openOptionsMenu();
}
@Override
public void invalidateOptionsMenu(final Activity activity) {
activity.invalidateOptionsMenu();
}
@Override
public void onMenuOpened(final Activity activity) {
if (data.get(activity.getComponentName()).fullScreen
&& data.get(activity.getComponentName()).fullScreenState.get()) {
startSystemUI(activity);
}
}
@Override
public void onMenuClosed(final Activity activity) {
if (data.get(activity.getComponentName()).fullScreen
&& !data.get(activity.getComponentName()).fullScreenState.get()) {
stopSystemUI(activity);
}
}
@Override
public void onPause(final Activity activity) {
if (data.get(activity.getComponentName()).fullScreen
&& data.get(activity.getComponentName()).fullScreenState.get()) {
startSystemUI(activity);
}
}
@Override
public void onResume(final Activity activity) {
if (data.get(activity.getComponentName()).fullScreen
&& !data.get(activity.getComponentName()).fullScreenState.get()) {
stopSystemUI(activity);
}
}
@Override
public void onDestroy(final Activity activity) {
if (data.get(activity.getComponentName()).fullScreen
&& data.get(activity.getComponentName()).fullScreenState.get()) {
startSystemUI(activity);
}
}
@Override
public boolean isTabletUi(final Activity activity) {
return true;
}
protected void startSystemUI(final Activity activity) {
if (isSystemUIRunning()) {
data.get(activity.getComponentName()).fullScreenState.set(false);
return;
}
exec(false, activity, AM_PATH, "startservice", "-n", SYS_UI.flattenToString());
}
protected void stopSystemUI(final Activity activity) {
if (!isSystemUIRunning()) {
data.get(activity.getComponentName()).fullScreenState.set(true);
return;
}
final String su = getSuPath();
if (su == null) {
data.get(activity.getComponentName()).fullScreenState.set(false);
return;
}
exec(true, activity, su, "-c", "service call activity 79 s16 " + SYS_UI_PKG);
}
protected boolean isSystemUIRunning() {
final Context ctx = BaseDroidApp.context;
final ActivityManager am = (ActivityManager) ctx.getSystemService(Context.ACTIVITY_SERVICE);
final List<RunningServiceInfo> rsiList = am.getRunningServices(1000);
for (final RunningServiceInfo rsi : rsiList) {
LCTX.d("Service: " + rsi.service);
if (SYS_UI.equals(rsi.service)) {
LCTX.e("System UI service found");
return true;
}
}
return false;
}
protected void exec(final boolean expected, final Activity activity, final String... as) {
(new Thread(new Runnable() {
@Override
public void run() {
try {
final boolean result = execImpl(as);
data.get(activity.getComponentName()).fullScreenState.set(result ? expected : !expected);
} catch (final Throwable th) {
LCTX.e("Changing full screen mode failed: " + th.getCause());
data.get(activity.getComponentName()).fullScreenState.set(!expected);
}
}
})).start();
}
private boolean execImpl(final String... as) {
try {
LCTX.d("Execute: " + Arrays.toString(as));
final Process process = Runtime.getRuntime().exec(as);
final InputStreamReader r = new InputStreamReader(process.getInputStream());
final StringWriter w = new StringWriter();
final char ac[] = new char[8192];
int i = 0;
do {
i = r.read(ac);
if (i > 0) {
w.write(ac, 0, i);
}
} while (i != -1);
r.close();
process.waitFor();
final int exitValue = process.exitValue();
final String text = w.toString();
LCTX.d("Result code: " + exitValue);
LCTX.d("Output:\n" + text);
return 0 == exitValue;
} catch (final IOException e) {
throw new IllegalStateException(e);
} catch (final InterruptedException e) {
throw new IllegalStateException(e);
}
}
private static String getSuPath() {
final File su1 = new File(SU_PATH1);
if (su1.exists() && su1.isFile() && su1.canExecute()) {
return SU_PATH1;
}
final File su2 = new File(SU_PATH2);
if (su2.exists() && su2.isFile() && su2.canExecute()) {
return SU_PATH2;
}
return null;
}
private static class Data {
boolean fullScreen = false;
boolean titleVisible = true;
final AtomicBoolean fullScreenState = new AtomicBoolean();
}
}<|fim▁end|>
|
import android.annotation.TargetApi;
import android.app.Activity;
|
<|file_name|>clipboards.py<|end_file_name|><|fim▁begin|>import sys
import subprocess
from .exceptions import PyperclipException
EXCEPT_MSG = """
Pyperclip could not find a copy/paste mechanism for your system.
For more information, please visit https://pyperclip.readthedocs.org """
PY2 = sys.version_info[0] == 2
text_type = str if PY2 else str
def init_osx_clipboard():
def copy_osx(text):
p = subprocess.Popen(['pbcopy', 'w'],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=text)
def paste_osx():
p = subprocess.Popen(['pbpaste', 'r'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return stdout
return copy_osx, paste_osx
def init_gtk_clipboard():
import gtk
def copy_gtk(text):
global cb
cb = gtk.Clipboard()
cb.set_text(text)
cb.store()
def paste_gtk():
clipboardContents = gtk.Clipboard().wait_for_text()
# for python 2, returns None if the clipboard is blank.
if clipboardContents is None:
return ''
else:
return clipboardContents
return copy_gtk, paste_gtk
def init_qt_clipboard():
# $DISPLAY should exist
from PyQt4.QtGui import QApplication
app = QApplication([])
def copy_qt(text):
cb = app.clipboard()
cb.setText(text)
def paste_qt():
cb = app.clipboard()
return text_type(cb.text())
return copy_qt, paste_qt
def init_xclip_clipboard():
def copy_xclip(text):
p = subprocess.Popen(['xclip', '-selection', 'c'],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=text)
def paste_xclip():
p = subprocess.Popen(['xclip', '-selection', 'c', '-o'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return stdout
return copy_xclip, paste_xclip
def init_xsel_clipboard():
def copy_xsel(text):
p = subprocess.Popen(['xsel', '-b', '-i'],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=text)
def paste_xsel():
p = subprocess.Popen(['xsel', '-b', '-o'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
return stdout
return copy_xsel, paste_xsel
def init_klipper_clipboard():
def copy_klipper(text):
p = subprocess.Popen(
<|fim▁hole|> ['qdbus', 'org.kde.klipper', '/klipper', 'setClipboardContents',
text],
stdin=subprocess.PIPE, close_fds=True)
p.communicate(input=None)
def paste_klipper():
p = subprocess.Popen(
['qdbus', 'org.kde.klipper', '/klipper', 'getClipboardContents'],
stdout=subprocess.PIPE, close_fds=True)
stdout, stderr = p.communicate()
# Workaround for https://bugs.kde.org/show_bug.cgi?id=342874
# TODO: https://github.com/asweigart/pyperclip/issues/43
clipboardContents = stdout
# even if blank, Klipper will append a newline at the end
assert len(clipboardContents) > 0
# make sure that newline is there
assert clipboardContents.endswith('\n')
if clipboardContents.endswith('\n'):
clipboardContents = clipboardContents[:-1]
return clipboardContents
return copy_klipper, paste_klipper
def init_no_clipboard():
class ClipboardUnavailable(object):
def __call__(self, *args, **kwargs):
raise PyperclipException(EXCEPT_MSG)
if PY2:
def __nonzero__(self):
return False
else:
def __bool__(self):
return False
return ClipboardUnavailable(), ClipboardUnavailable()<|fim▁end|>
| |
<|file_name|>GenModel.java<|end_file_name|><|fim▁begin|>package com.baicai.util.help;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import org.beetl.core.Configuration;
<|fim▁hole|>/**
*
* @Description: 使用beetl来生成model类,未完成
* @author 猪肉有毒 [email protected]
* @date 2016年1月16日 下午11:31:04
* @version V1.0 我只为你回眸一笑,即使不够倾国倾城,我只为你付出此生,换来生再次相守
*/
public class GenModel {
public static final String RESOURCEPATH = "F:/data/eclipse/p2p/src/test/resources";
public static void generetor(String tableName) throws IOException {
FileResourceLoader resourceLo = new FileResourceLoader(RESOURCEPATH, "utf-8");
resourceLo.setRoot(RESOURCEPATH);
Configuration config = Configuration.defaultConfiguration();
config.getResourceMap().put("root", null);// 这里需要重置root,否则会去找配置文件
GroupTemplate gt = new GroupTemplate(resourceLo, config);
Template t = gt.getTemplate("model.htm");
TableBean tb = Generator.getTable(tableName);
String date = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss").format(new Date());
t.binding("table", tb);
t.binding("package", Generator.PACKAGE_BASE);
t.binding("createDate", date);
String str = t.render();
System.out.println(str);
File f = new File(Generator.OUTPUT_PATH+tb.getTableNameCapitalized()+".java");// 新建一个文件对象
FileWriter fw;
try {
fw = new FileWriter(f);// 新建一个FileWriter
fw.write(str);// 将字符串写入到指定的路径下的文件中
fw.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static void main(String[] args) throws IOException {
generetor("p2p_project");
}
}<|fim▁end|>
|
import org.beetl.core.GroupTemplate;
import org.beetl.core.Template;
import org.beetl.core.resource.FileResourceLoader;
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|>"""
A simple client to query a TensorFlow Serving instance.
Example:
$ python client.py \
--images IMG_0932_sm.jpg \
--num_results 10 \
--model_name inception \
--host localhost \
--port 9000 \
--timeout 10
Author: Grant Van Horn
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import time
import tfserver
def parse_args():
parser = argparse.ArgumentParser(description='Command line classification client. Sorts and prints the classification results.')
parser.add_argument('--images', dest='image_paths',
help='Path to one or more images to classify (jpeg or png).',
type=str, nargs='+', required=True)
parser.add_argument('--num_results', dest='num_results',
help='The number of results to print. Set to 0 to print all classes.',
required=False, type=int, default=0)
parser.add_argument('--model_name', dest='model_name',
help='The name of the model to query.',
required=False, type=str, default='inception')
<|fim▁hole|> parser.add_argument('--host', dest='host',
help='Machine host where the TensorFlow Serving model is.',
required=False, type=str, default='localhost')
parser.add_argument('--port', dest='port',
help='Port that the TensorFlow Server is listening on.',
required=False, type=int, default=9000)
parser.add_argument('--timeout', dest='timeout',
help='Amount of time to wait before failing.',
required=False, type=int, default=10)
args = parser.parse_args()
return args
def main():
args = parse_args()
# Read in the image bytes
image_data = []
for fp in args.image_paths:
with open(fp) as f:
data = f.read()
image_data.append(data)
# Get the predictions
t = time.time()
predictions = tfserver.predict(image_data, model_name=args.model_name,
host=args.host, port=args.port, timeout=args.timeout
)
dt = time.time() - t
print("Prediction call took %0.4f seconds" % (dt,))
# Process the results
results = tfserver.process_classification_prediction(predictions, max_classes=args.num_results)
# Print the results
for i, fp in enumerate(args.image_paths):
print("Results for image: %s" % (fp,))
for name, score in results[i]:
print("%s: %0.3f" % (name, score))
print()
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>tables.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tables<|fim▁hole|>
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class DeleteSubnet(tables.DeleteAction):
data_type_singular = _("Subnet")
data_type_plural = _("Subnets")
def delete(self, request, obj_id):
try:
api.neutron.subnet_delete(request, obj_id)
except:
msg = _('Failed to delete subnet %s') % obj_id
LOG.info(msg)
network_id = self.table.kwargs['network_id']
redirect = reverse('horizon:admin:networks:detail',
args=[network_id])
exceptions.handle(request, msg, redirect=redirect)
class CreateSubnet(tables.LinkAction):
name = "create"
verbose_name = _("Create Subnet")
url = "horizon:admin:networks:addsubnet"
classes = ("ajax-modal", "btn-create")
def get_link_url(self, datum=None):
network_id = self.table.kwargs['network_id']
return reverse(self.url, args=(network_id,))
class UpdateSubnet(tables.LinkAction):
name = "update"
verbose_name = _("Edit Subnet")
url = "horizon:admin:networks:editsubnet"
classes = ("ajax-modal", "btn-edit")
def get_link_url(self, subnet):
network_id = self.table.kwargs['network_id']
return reverse(self.url, args=(network_id, subnet.id))
class SubnetsTable(tables.DataTable):
name = tables.Column("name", verbose_name=_("Name"),
link='horizon:admin:networks:subnets:detail')
cidr = tables.Column("cidr", verbose_name=_("CIDR"))
ip_version = tables.Column("ipver_str", verbose_name=_("IP Version"))
gateway_ip = tables.Column("gateway_ip", verbose_name=_("Gateway IP"))
def get_object_display(self, subnet):
return subnet.id
class Meta:
name = "subnets"
verbose_name = _("Subnets")
table_actions = (CreateSubnet, DeleteSubnet)
row_actions = (UpdateSubnet, DeleteSubnet,)<|fim▁end|>
| |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>import os
from copy import deepcopy
bids_schema = {
# BIDS identification bits
'modality': {
'type': 'string',
'required': True
},
'subject_id': {
'type': 'string',
'required': True
},
'session_id': {'type': 'string'},
'run_id': {'type': 'string'},
'acq_id': {'type': 'string'},
'task_id': {'type': 'string'},
'run_id': {'type': 'string'},
# BIDS metadata
'AccelNumReferenceLines': {'type': 'integer'},
'AccelerationFactorPE': {'type': 'integer'},
'AcquisitionMatrix': {'type': 'string'},
'CogAtlasID': {'type': 'string'},
'CogPOID': {'type': 'string'},
'CoilCombinationMethod': {'type': 'string'},
'ContrastBolusIngredient': {'type': 'string'},
'ConversionSoftware': {'type': 'string'},
'ConversionSoftwareVersion': {'type': 'string'},
'DelayTime': {'type': 'float'},
'DeviceSerialNumber': {'type': 'string'},
'EchoTime': {'type': 'float'},
'EchoTrainLength': {'type': 'integer'},
'EffectiveEchoSpacing': {'type': 'float'},
'FlipAngle': {'type': 'integer'},
'GradientSetType': {'type': 'string'},
'HardcopyDeviceSoftwareVersion': {'type': 'string'},
'ImagingFrequency': {'type': 'integer'},
'InPlanePhaseEncodingDirection': {'type': 'string'},
'InstitutionAddress': {'type': 'string'},
'InstitutionName': {'type': 'string'},
'Instructions': {'type': 'string'},
'InversionTime': {'type': 'float'},
'MRAcquisitionType': {'type': 'string'},
'MRTransmitCoilSequence': {'type': 'string'},
'MagneticFieldStrength': {'type': 'float'},
'Manufacturer': {'type': 'string'},
'ManufacturersModelName': {'type': 'string'},
'MatrixCoilMode': {'type': 'string'},
'MultibandAccelerationFactor': {'type': 'float'},
'NumberOfAverages': {'type': 'integer'},
'NumberOfPhaseEncodingSteps': {'type': 'integer'},
'NumberOfVolumesDiscardedByScanner': {'type': 'float'},
'NumberOfVolumesDiscardedByUser': {'type': 'float'},
'NumberShots': {'type': 'integer'},
'ParallelAcquisitionTechnique': {'type': 'string'},
'ParallelReductionFactorInPlane': {'type': 'float'},
'PartialFourier': {'type': 'boolean'},
'PartialFourierDirection': {'type': 'string'},
'PatientPosition': {'type': 'string'},
'PercentPhaseFieldOfView': {'type': 'integer'},
'PercentSampling': {'type': 'integer'},
'PhaseEncodingDirection': {'type': 'string'},
'PixelBandwidth': {'type': 'integer'},
'ProtocolName': {'type': 'string'},
'PulseSequenceDetails': {'type': 'string'},
'PulseSequenceType': {'type': 'string'},
'ReceiveCoilName': {'type': 'string'},
'RepetitionTime': {'type': 'float'},
'ScanOptions': {'type': 'string'},
'ScanningSequence': {'type': 'string'},
'SequenceName': {'type': 'string'},
'SequenceVariant': {'type': 'string'},
'SliceEncodingDirection': {'type': 'string'},
'SoftwareVersions': {'type': 'string'},
'TaskDescription': {'type': 'string'},
'TotalReadoutTime': {'type': 'float'},
'TotalScanTimeSec': {'type': 'integer'},
'TransmitCoilName': {'type': 'string'},
'VariableFlipAngleFlag': {'type': 'string'},
}
prov_schema = {
'version': {
'type': 'string',
'required': True
},
'md5sum': {
'type': 'string',
'required': True
},
'software': {
'type': 'string',
'required': True
},
'settings': {
'type': 'dict',
'schema': {
'fd_thres': {'type': 'float'},
'hmc_fsl': {'type': 'boolean'},
'testing': {'type': 'boolean'}
},
},
'mriqc_pred': {'type': 'integer'},
'email': {'type': 'string'},
}
bold_iqms_schema = {
'aor': {
'type': 'float',
'required': True
},
'aqi': {
'type': 'float',
'required': True
},
'dummy_trs': {'type': 'integer'},
'dvars_nstd': {
'type': 'float',
'required': True
},
'dvars_std': {
'type': 'float',
'required': True
},
'dvars_vstd': {
'type': 'float',
'required': True
},
'efc': {
'type': 'float',
'required': True
},
'fber': {
'type': 'float',
'required': True
},
'fd_mean': {
'type': 'float',
'required': True
},
'fd_num': {
'type': 'float',
'required': True
},
'fd_perc': {
'type': 'float',
'required': True
},
'fwhm_avg': {
'type': 'float',
'required': True
},
'fwhm_x': {
'type': 'float',
'required': True
},
'fwhm_y': {
'type': 'float',
'required': True
},
'fwhm_z': {
'type': 'float',
'required': True
},
'gcor': {
'type': 'float',
'required': True
},
'gsr_x': {
'type': 'float',
'required': True
},
'gsr_y': {
'type': 'float',
'required': True
},
'size_t': {
'type': 'float',
'required': True
},
'size_x': {
'type': 'float',
'required': True
},
'size_y': {
'type': 'float',
'required': True
},
'size_z': {
'type': 'float',
'required': True
},
'snr': {
'type': 'float',
'required': True
},
'spacing_tr': {
'type': 'float',
'required': True
},
'spacing_x': {
'type': 'float',
'required': True
},
'spacing_y': {
'type': 'float',
'required': True
},
'spacing_z': {
'type': 'float',
'required': True
},
'summary_bg_k': {
'type': 'float',
'required': True
},
'summary_bg_mean': {
'type': 'float',
'required': True
},
'summary_bg_median': {
'type': 'float',
'required': True
},
'summary_bg_mad': {
'type': 'float',
'required': True
},
'summary_bg_p05': {
'type': 'float',
'required': True
},
'summary_bg_p95': {
'type': 'float',
'required': True
},
'summary_bg_stdv': {
'type': 'float',
'required': True
},
'summary_bg_n': {
'type': 'float',
'required': True
},
'summary_fg_k': {
'type': 'float',
'required': True
},
'summary_fg_mean': {
'type': 'float',
'required': True
},
'summary_fg_median': {
'type': 'float',
'required': True
},
'summary_fg_mad': {
'type': 'float',
'required': True
},
'summary_fg_p05': {
'type': 'float',
'required': True
},
'summary_fg_p95': {
'type': 'float',
'required': True
},
'summary_fg_stdv': {
'type': 'float',
'required': True
},
'summary_fg_n': {
'type': 'float',
'required': True
},
'tsnr': {
'type': 'float',
'required': True
},
}
struct_iqms_schema = {
'cjv': {
'type': 'float',
'required': True
},
'cnr': {
'type': 'float',
'required': True
},
'efc': {
'type': 'float',
'required': True
},
'fber': {
'type': 'float',
'required': True
},
'fwhm_avg': {
'type': 'float',
'required': True
},
'fwhm_x': {
'type': 'float',
'required': True
},
'fwhm_y': {
'type': 'float',
'required': True
},
'fwhm_z': {
'type': 'float',
'required': True
},
'icvs_csf': {
'type': 'float',
'required': True<|fim▁hole|> 'type': 'float',
'required': True
},
'icvs_wm': {
'type': 'float',
'required': True
},
'inu_med': {
'type': 'float',
'required': True
},
'inu_range': {
'type': 'float',
'required': True
},
'qi_1': {
'type': 'float',
'required': True
},
'qi_2': {
'type': 'float',
'required': True
},
'rpve_csf': {
'type': 'float',
'required': True
},
'rpve_gm': {
'type': 'float',
'required': True
},
'rpve_wm': {
'type': 'float',
'required': True
},
'size_x': {
'type': 'integer',
'required': True
},
'size_y': {
'type': 'integer',
'required': True
},
'size_z': {
'type': 'integer',
'required': True
},
'snr_csf': {
'type': 'float',
'required': True
},
'snr_gm': {
'type': 'float',
'required': True
},
'snr_total': {
'type': 'float',
'required': True
},
'snr_wm': {
'type': 'float',
'required': True
},
'snrd_csf': {
'type': 'float',
'required': True
},
'snrd_gm': {
'type': 'float',
'required': True
},
'snrd_total': {
'type': 'float',
'required': True
},
'snrd_wm': {
'type': 'float',
'required': True
},
'spacing_x': {
'type': 'float',
'required': True
},
'spacing_y': {
'type': 'float',
'required': True
},
'spacing_z': {
'type': 'float',
'required': True
},
'summary_bg_k': {
'type': 'float',
'required': True
},
'summary_bg_mean': {
'type': 'float',
'required': True
},
'summary_bg_median': {
'type': 'float'
},
'summary_bg_mad': {
'type': 'float'
},
'summary_bg_p05': {
'type': 'float',
'required': True
},
'summary_bg_p95': {
'type': 'float',
'required': True
},
'summary_bg_stdv': {
'type': 'float',
'required': True
},
'summary_bg_n': {
'type': 'float'
},
'summary_csf_k': {
'type': 'float',
'required': True
},
'summary_csf_mean': {
'type': 'float',
'required': True
},
'summary_csf_median': {
'type': 'float'
},
'summary_csf_mad': {
'type': 'float'
},
'summary_csf_p05': {
'type': 'float',
'required': True
},
'summary_csf_p95': {
'type': 'float',
'required': True
},
'summary_csf_stdv': {
'type': 'float',
'required': True
},
'summary_csf_n': {
'type': 'float'
},
'summary_gm_k': {
'type': 'float',
'required': True
},
'summary_gm_mean': {
'type': 'float',
'required': True
},
'summary_gm_median': {
'type': 'float'
},
'summary_gm_mad': {
'type': 'float'
},
'summary_gm_p05': {
'type': 'float',
'required': True
},
'summary_gm_p95': {
'type': 'float',
'required': True
},
'summary_gm_stdv': {
'type': 'float',
'required': True
},
'summary_gm_n': {
'type': 'float'
},
'summary_wm_k': {
'type': 'float',
'required': True
},
'summary_wm_mean': {
'type': 'float',
'required': True
},
'summary_wm_median': {
'type': 'float'
},
'summary_wm_mad': {
'type': 'float'
},
'summary_wm_p05': {
'type': 'float',
'required': True
},
'summary_wm_p95': {
'type': 'float',
'required': True
},
'summary_wm_stdv': {
'type': 'float',
'required': True
},
'summary_wm_n': {
'type': 'float'
},
'tpm_overlap_csf': {
'type': 'float',
'required': True
},
'tpm_overlap_gm': {
'type': 'float',
'required': True
},
'tpm_overlap_wm': {
'type': 'float',
'required': True
},
'wm2max': {
'type': 'float',
'required': True
},
}
settings = {
'URL_PREFIX': 'api',
'API_VERSION': 'v1',
'ALLOWED_FILTERS': ['*'],
'MONGO_HOST': os.environ.get('MONGODB_HOST', ''),
'MONGO_PORT': int(os.environ.get('MONGODB_PORT', 27017)),
'MONGO_DBNAME': 'mriqc_api',
'PUBLIC_METHODS': ['GET'],
'PUBLIC_ITEM_METHODS': ['GET'],
'RESOURCE_METHODS': ['GET', 'POST'],
'ITEM_METHODS': ['GET'],
'X_DOMAINS': '*',
'X_HEADERS': ['Authorization', 'Content-Type'],
'DOMAIN': {
'bold': {
'item_title': 'bold',
},
'T1w': {
'item_title': 'T1w',
},
'T2w': {
'item_title': 'T2w',
}
}
}
rating_schema = {
'rating': {
'type': 'string',
'required': True
},
'name': {
'type': 'string',
'required': False
},
'comment': {
'type': 'string',
'required': False
},
'md5sum': {
'type': 'string',
'required': True
}
}
nipype_schema = {
'interface_class_name': {
'type': 'string',
'required': True
},
'version': {
'type': 'string',
'required': True
},
'mem_peak_gb': {
'type': 'float',
'required': True
},
'duration_sec': {
'type': 'float',
'required': True
},
'inputs': {
'type': 'dict',
'required': True
}
}
settings['DOMAIN']['nipype_telemetry'] = {
'type': 'dict',
'required': False,
'schema': deepcopy(nipype_schema)
}
settings['DOMAIN']['rating'] ={
'type': 'dict',
'required': False,
'schema': deepcopy(rating_schema)
}
settings['DOMAIN']['rating_counts'] = {
'datasource': {
'source': 'rating',
'aggregation': {
'pipeline': [
{"$match": {"md5sum": "$value"}},
{"$unwind": "$rating"},
{"$group": {"_id": "$rating", "count": {"$sum": 1}}},
],
}
}
}
settings['DOMAIN']['bold']['schema'] = deepcopy(bold_iqms_schema)
settings['DOMAIN']['bold']['schema'].update(
{
'bids_meta': {
'type': 'dict',
'required': True,
'allow_unknown': True,
'schema': deepcopy(bids_schema)
},
'provenance': {
'type': 'dict',
'required': True,
'schema': deepcopy(prov_schema)
},
'rating': {
'type': 'dict',
'required': False,
'schema': deepcopy(rating_schema)
},
}
)
settings['DOMAIN']['bold']['schema']['bids_meta']['schema'].update({
'TaskName': {
'type': 'string',
'required': True
},
})
settings['DOMAIN']['T1w']['schema'] = deepcopy(struct_iqms_schema)
settings['DOMAIN']['T1w']['schema'].update(
{
'bids_meta': {
'type': 'dict',
'required': True,
'allow_unknown': True,
'schema': deepcopy(bids_schema)
},
'provenance': {
'type': 'dict',
'required': True,
'schema': deepcopy(prov_schema)
},
}
)
settings['DOMAIN']['T2w']['schema'] = deepcopy(settings['DOMAIN']['T1w']['schema'])<|fim▁end|>
|
},
'icvs_gm': {
|
<|file_name|>pathvector2.cc<|end_file_name|><|fim▁begin|>/* A RapidNet application. Generated by RapidNet compiler. */
#include "pathvector2.h"
#include <cstdlib>
#include "ns3/nstime.h"
#include "ns3/simulator.h"
#include "ns3/type-ids.h"
#include "ns3/rapidnet-types.h"
#include "ns3/rapidnet-utils.h"
#include "ns3/assignor.h"
#include "ns3/selector.h"
#include "ns3/rapidnet-functions.h"
using namespace std;
using namespace ns3;
using namespace ns3::rapidnet;
using namespace ns3::rapidnet::pathvector2;
const string Pathvector2::BESTPATH = "bestPath";
const string Pathvector2::LINK = "link";
const string Pathvector2::PATH = "path";
const string Pathvector2::PATHDELETE = "pathDelete";
const string Pathvector2::R2LOCAL1R2LINKZSEND = "r2Local1r2linkZsend";
const string Pathvector2::R2LOCAL2PATHSEND = "r2Local2pathsend";
const string Pathvector2::R2LINKZ = "r2linkZ";
NS_LOG_COMPONENT_DEFINE ("Pathvector2");
NS_OBJECT_ENSURE_REGISTERED (Pathvector2);
// Registers this generated RapidNet application with the ns-3 type
// system: parent class Discovery, plus a default constructor.
TypeId
Pathvector2::GetTypeId (void)
{
  static TypeId tid = TypeId ("ns3::rapidnet::pathvector2::Pathvector2")
    .SetParent<Discovery> ()
    .AddConstructor<Pathvector2> ()
  ;
  return tid;
}
// Default constructor (required by AddConstructor in GetTypeId).
Pathvector2::Pathvector2()
{
  NS_LOG_FUNCTION_NOARGS ();
}
// Destructor; resource teardown happens in DoDispose, not here.
Pathvector2::~Pathvector2()
{
  NS_LOG_FUNCTION_NOARGS ();
}
// ns-3 disposal hook; chains up to the Discovery base class.
void
Pathvector2::DoDispose (void)
{
  NS_LOG_FUNCTION_NOARGS ();
  Discovery::DoDispose ();
}
void
Pathvector2::StartApplication (void)
{
NS_LOG_FUNCTION_NOARGS ();
Discovery::StartApplication ();
RAPIDNET_LOG_INFO("Pathvector2 Application Started");
}
<|fim▁hole|>Pathvector2::StopApplication ()
{
NS_LOG_FUNCTION_NOARGS ();
Discovery::StopApplication ();
RAPIDNET_LOG_INFO("Pathvector2 Application Stopped");
}
// Creates the application's relational tables and aggregates.
void
Pathvector2::InitDatabase ()
{
  //Discovery::InitDatabase ();

  // bestPath: keyed on the destination attribute (one best route each).
  AddRelationWithKeys (BESTPATH, attrdeflist (
    attrdef ("bestPath_attr2", IPV4)));

  // link: soft-state neighbor table keyed on (attr1, attr2); tuples
  // expire after 11 seconds unless refreshed.
  AddRelationWithKeys (LINK, attrdeflist (
    attrdef ("link_attr1", IPV4),
    attrdef ("link_attr2", IPV4)),
    Seconds (11));

  AddRelationWithKeys (PATH, attrdeflist (
    attrdef ("path_attr4", LIST)));

  AddRelationWithKeys (R2LINKZ, attrdeflist (
    attrdef ("r2linkZ_attr1", IPV4),
    attrdef ("r2linkZ_attr2", IPV4)));

  // MIN-aggregate over bestPath, aggregating on attribute index 3 —
  // presumably the path-cost column (bestPath_attr3); confirm against
  // the AggrMin API.
  m_aggr_bestpathMinC = AggrMin::New (BESTPATH,
    this,
    attrdeflist (
      attrdeftype ("bestPath_attr1", ANYTYPE),
      attrdeftype ("bestPath_attr2", ANYTYPE),
      attrdeftype ("bestPath_attr3", ANYTYPE),
      attrdeftype ("bestPath_attr4", ANYTYPE)),
    3);
}
// Event demultiplexer generated from the ECA rules: routes each
// insert/refresh/delete/recv event to every rule handler it triggers.
// Note that a single event may fire several rules — e.g. a `link`
// insert drives both R1Eca0Ins and R2Local1Eca0Ins below.
void
Pathvector2::DemuxRecv (Ptr<Tuple> tuple)
{
  Discovery::DemuxRecv (tuple);

  if (IsInsertEvent (tuple, LINK))
    {
      R1Eca0Ins (tuple);
    }
  if (IsRefreshEvent (tuple, LINK))
    {
      R1Eca0Ref (tuple);
    }
  if (IsRecvEvent (tuple, R2LOCAL1R2LINKZSEND))
    {
      R2Local1Eca0RemoteIns (tuple);
    }
  if (IsInsertEvent (tuple, LINK))
    {
      R2Local1Eca0Ins (tuple);
    }
  if (IsRefreshEvent (tuple, LINK))
    {
      R2Local1Eca0Ref (tuple);
    }
  if (IsRecvEvent (tuple, R2LOCAL2PATHSEND))
    {
      R2Local2Eca0RemoteIns (tuple);
    }
  if (IsRecvEvent (tuple, PATHDELETE))
    {
      R2Local2Eca0RemoteDel (tuple);
    }
  if (IsInsertEvent (tuple, R2LINKZ))
    {
      R2Local2Eca0Ins (tuple);
    }
  if (IsDeleteEvent (tuple, R2LINKZ))
    {
      R2Local2Eca0Del (tuple);
    }
  if (IsInsertEvent (tuple, BESTPATH))
    {
      R2Local2Eca1Ins (tuple);
    }
  if (IsDeleteEvent (tuple, BESTPATH))
    {
      R2Local2Eca1Del (tuple);
    }
  if (IsInsertEvent (tuple, PATH))
    {
      R3eca (tuple);
    }
  if (IsDeleteEvent (tuple, PATH))
    {
      R3eca2 (tuple);
    }
}
// Rule r1 (link insert): derive the one-hop path
//   path(S, D, C, P) :- link(S, D, C), P = [S, D].
// The path list P is built by appending each endpoint into a
// singleton list and concatenating the two.
void
Pathvector2::R1Eca0Ins (Ptr<Tuple> link)
{
  RAPIDNET_LOG_INFO ("R1Eca0Ins triggered");
  Ptr<Tuple> result = link;

  // P1 = [link_attr1]
  result->Assign (Assignor::New ("P1",
    FAppend::New (
      VarExpr::New ("link_attr1"))));
  // P2 = [link_attr2]
  result->Assign (Assignor::New ("P2",
    FAppend::New (
      VarExpr::New ("link_attr2"))));
  // P = P1 ++ P2
  result->Assign (Assignor::New ("P",
    FConcat::New (
      VarExpr::New ("P1"),
      VarExpr::New ("P2"))));

  result = result->Project (
    PATH,
    strlist ("link_attr1",
      "link_attr2",
      "link_attr3",
      "P"),
    strlist ("path_attr1",
      "path_attr2",
      "path_attr3",
      "path_attr4"));
  Insert (result);
}
void
Pathvector2::R1Eca0Ref (Ptr<Tuple> link)
{
RAPIDNET_LOG_INFO ("R1Eca0Ref triggered");
Ptr<Tuple> result = link;
result->Assign (Assignor::New ("P1",
FAppend::New (
VarExpr::New ("link_attr1"))));
result->Assign (Assignor::New ("P2",
FAppend::New (
VarExpr::New ("link_attr2"))));
result->Assign (Assignor::New ("P",
FConcat::New (
VarExpr::New ("P1"),
VarExpr::New ("P2"))));
result = result->Project (
PATH,
strlist ("link_attr1",
"link_attr2",
"link_attr3",
"P"),
strlist ("path_attr1",
"path_attr2",
"path_attr3",
"path_attr4"));
Insert (result);
}
void
Pathvector2::R2Local1Eca0RemoteIns (Ptr<Tuple> r2Local1r2linkZsend)
{
RAPIDNET_LOG_INFO ("R2Local1Eca0RemoteIns triggered");
Ptr<Tuple> result = r2Local1r2linkZsend;
result = result->Project (
R2LINKZ,
strlist ("r2Local1r2linkZsend_attr1",
"r2Local1r2linkZsend_attr2",
"r2Local1r2linkZsend_attr3"),
strlist ("r2linkZ_attr1",
"r2linkZ_attr2",
"r2linkZ_attr3"));
Insert (result);
}
void
Pathvector2::R2Local1Eca0Ins (Ptr<Tuple> link)
{
RAPIDNET_LOG_INFO ("R2Local1Eca0Ins triggered");
Ptr<Tuple> result = link;
result = result->Project (
R2LOCAL1R2LINKZSEND,
strlist ("link_attr1",
"link_attr2",
"link_attr3",
"link_attr2"),
strlist ("r2Local1r2linkZsend_attr1",
"r2Local1r2linkZsend_attr2",
"r2Local1r2linkZsend_attr3",
RN_DEST));
Send (result);
}
void
Pathvector2::R2Local1Eca0Ref (Ptr<Tuple> link)
{
RAPIDNET_LOG_INFO ("R2Local1Eca0Ref triggered");
Ptr<Tuple> result = link;
result = result->Project (
R2LOCAL1R2LINKZSEND,
strlist ("link_attr1",
"link_attr2",
"link_attr3",
"link_attr2"),
strlist ("r2Local1r2linkZsend_attr1",
"r2Local1r2linkZsend_attr2",
"r2Local1r2linkZsend_attr3",
RN_DEST));
Send (result);
}
void
Pathvector2::R2Local2Eca0RemoteIns (Ptr<Tuple> r2Local2pathsend)
{
RAPIDNET_LOG_INFO ("R2Local2Eca0RemoteIns triggered");
Ptr<Tuple> result = r2Local2pathsend;
result = result->Project (
PATH,
strlist ("r2Local2pathsend_attr1",
"r2Local2pathsend_attr2",
"r2Local2pathsend_attr3",
"r2Local2pathsend_attr4"),
strlist ("path_attr1",
"path_attr2",
"path_attr3",
"path_attr4"));
Insert (result);
}
void
Pathvector2::R2Local2Eca0RemoteDel (Ptr<Tuple> pathDelete)
{
RAPIDNET_LOG_INFO ("R2Local2Eca0RemoteDel triggered");
Ptr<Tuple> result = pathDelete;
result = result->Project (
PATH,
strlist ("pathDelete_attr1",
"pathDelete_attr2",
"pathDelete_attr3",
"pathDelete_attr4"),
strlist ("path_attr1",
"path_attr2",
"path_attr3",
"path_attr4"));
Delete (result);
}
// Path-extension rule, insert case: join the newly inserted r2linkZ
// tuple against bestPath entries whose source matches the link's far
// end (bestPath_attr1 == r2linkZ_attr2). For each match:
//   C = r2linkZ_attr3 + bestPath_attr3   (summed cost)
//   P = [r2linkZ_attr1] ++ bestPath_attr4 (path prepended with Z)
// The f_member(...) == 0 selection drops candidates whose best path
// already contains the node — loop avoidance. Surviving tuples are
// sent to r2linkZ_attr1 (RN_DEST) as r2Local2pathsend events.
void
Pathvector2::R2Local2Eca0Ins (Ptr<Tuple> r2linkZ)
{
  RAPIDNET_LOG_INFO ("R2Local2Eca0Ins triggered");
  Ptr<RelationBase> result;

  result = GetRelation (BESTPATH)->Join (
    r2linkZ,
    strlist ("bestPath_attr1"),
    strlist ("r2linkZ_attr2"));

  result->Assign (Assignor::New ("C",
    Operation::New (RN_PLUS,
      VarExpr::New ("r2linkZ_attr3"),
      VarExpr::New ("bestPath_attr3"))));
  result->Assign (Assignor::New ("P1",
    FAppend::New (
      VarExpr::New ("r2linkZ_attr1"))));
  result->Assign (Assignor::New ("P",
    FConcat::New (
      VarExpr::New ("P1"),
      VarExpr::New ("bestPath_attr4"))));

  result = result->Select (Selector::New (
    Operation::New (RN_EQ,
      FMember::New (
        VarExpr::New ("bestPath_attr4"),
        VarExpr::New ("r2linkZ_attr1")),
      ValueExpr::New (Int32Value::New (0)))));

  result = result->Project (
    R2LOCAL2PATHSEND,
    strlist ("r2linkZ_attr1",
      "bestPath_attr2",
      "C",
      "P",
      "r2linkZ_attr1"),
    strlist ("r2Local2pathsend_attr1",
      "r2Local2pathsend_attr2",
      "r2Local2pathsend_attr3",
      "r2Local2pathsend_attr4",
      RN_DEST));
  Send (result);
}
void
Pathvector2::R2Local2Eca0Del (Ptr<Tuple> r2linkZ)
{
RAPIDNET_LOG_INFO ("R2Local2Eca0Del triggered");
Ptr<RelationBase> result;
result = GetRelation (BESTPATH)->Join (
r2linkZ,
strlist ("bestPath_attr1"),
strlist ("r2linkZ_attr2"));
result->Assign (Assignor::New ("C",
Operation::New (RN_PLUS,
VarExpr::New ("r2linkZ_attr3"),
VarExpr::New ("bestPath_attr3"))));
result->Assign (Assignor::New ("P1",
FAppend::New (
VarExpr::New ("r2linkZ_attr1"))));
result->Assign (Assignor::New ("P",
FConcat::New (
VarExpr::New ("P1"),
VarExpr::New ("bestPath_attr4"))));
result = result->Select (Selector::New (
Operation::New (RN_EQ,
FMember::New (
VarExpr::New ("bestPath_attr4"),
VarExpr::New ("r2linkZ_attr1")),
ValueExpr::New (Int32Value::New (0)))));
result = result->Project (
PATHDELETE,
strlist ("r2linkZ_attr1",
"bestPath_attr2",
"C",
"P",
"r2linkZ_attr1"),
strlist ("pathDelete_attr1",
"pathDelete_attr2",
"pathDelete_attr3",
"pathDelete_attr4",
RN_DEST));
Send (result);
}
void
Pathvector2::R2Local2Eca1Ins (Ptr<Tuple> bestPath)
{
RAPIDNET_LOG_INFO ("R2Local2Eca1Ins triggered");
Ptr<RelationBase> result;
result = GetRelation (R2LINKZ)->Join (
bestPath,
strlist ("r2linkZ_attr2"),
strlist ("bestPath_attr1"));
result->Assign (Assignor::New ("C",
Operation::New (RN_PLUS,
VarExpr::New ("r2linkZ_attr3"),
VarExpr::New ("bestPath_attr3"))));
result->Assign (Assignor::New ("P1",
FAppend::New (
VarExpr::New ("r2linkZ_attr1"))));
result->Assign (Assignor::New ("P",
FConcat::New (
VarExpr::New ("P1"),
VarExpr::New ("bestPath_attr4"))));
result = result->Select (Selector::New (
Operation::New (RN_EQ,
FMember::New (
VarExpr::New ("bestPath_attr4"),
VarExpr::New ("r2linkZ_attr1")),
ValueExpr::New (Int32Value::New (0)))));
result = result->Project (
R2LOCAL2PATHSEND,
strlist ("r2linkZ_attr1",
"bestPath_attr2",
"C",
"P",
"r2linkZ_attr1"),
strlist ("r2Local2pathsend_attr1",
"r2Local2pathsend_attr2",
"r2Local2pathsend_attr3",
"r2Local2pathsend_attr4",
RN_DEST));
Send (result);
}
void
Pathvector2::R2Local2Eca1Del (Ptr<Tuple> bestPath)
{
RAPIDNET_LOG_INFO ("R2Local2Eca1Del triggered");
Ptr<RelationBase> result;
result = GetRelation (R2LINKZ)->Join (
bestPath,
strlist ("r2linkZ_attr2"),
strlist ("bestPath_attr1"));
result->Assign (Assignor::New ("C",
Operation::New (RN_PLUS,
VarExpr::New ("r2linkZ_attr3"),
VarExpr::New ("bestPath_attr3"))));
result->Assign (Assignor::New ("P1",
FAppend::New (
VarExpr::New ("r2linkZ_attr1"))));
result->Assign (Assignor::New ("P",
FConcat::New (
VarExpr::New ("P1"),
VarExpr::New ("bestPath_attr4"))));
result = result->Select (Selector::New (
Operation::New (RN_EQ,
FMember::New (
VarExpr::New ("bestPath_attr4"),
VarExpr::New ("r2linkZ_attr1")),
ValueExpr::New (Int32Value::New (0)))));
result = result->Project (
PATHDELETE,
strlist ("r2linkZ_attr1",
"bestPath_attr2",
"C",
"P",
"r2linkZ_attr1"),
strlist ("pathDelete_attr1",
"pathDelete_attr2",
"pathDelete_attr3",
"pathDelete_attr4",
RN_DEST));
Send (result);
}
void
Pathvector2::R3eca (Ptr<Tuple> path)
{
RAPIDNET_LOG_INFO ("R3eca triggered");
Ptr<Tuple> result = path;
result = result->Project (
BESTPATH,
strlist ("path_attr1",
"path_attr2",
"path_attr3",
"path_attr4"),
strlist ("bestPath_attr1",
"bestPath_attr2",
"bestPath_attr3",
"bestPath_attr4"));
m_aggr_bestpathMinC->Insert (result);
}
void
Pathvector2::R3eca2 (Ptr<Tuple> path)
{
RAPIDNET_LOG_INFO ("R3eca2 triggered");
Ptr<Tuple> result = path;
result = result->Project (
BESTPATH,
strlist ("path_attr1",
"path_attr2",
"path_attr3",
"path_attr4"),
strlist ("bestPath_attr1",
"bestPath_attr2",
"bestPath_attr3",
"bestPath_attr4"));
m_aggr_bestpathMinC->Delete (result);
}<|fim▁end|>
|
void
|
<|file_name|>logistic_regression.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
# -*- coding:utf-8 -*-
import sklearn.datasets as skds
import numpy as np
import random
import theano.tensor as T
import theano
import matplotlib.pyplot as plt
import math
#I don't know what the jesus 'housing.data' means so I used self-generated dataset
x = np.arange(-50., 50., 1)
y = np.array(map(lambda tmp: 1.0/(1 + math.exp(-3 * tmp + 5.0)), x))
noise = np.random.uniform(-0.1, .1, size=len(x))
y += noise
print x
print y
#declarations
theta = theano.shared(np.random.uniform(-0.1, 0.1))
omega = theano.shared(np.random.uniform(-0.1, 0.1))<|fim▁hole|>Y = T.dscalar('Y')
#functions
prediction = 1/(1 + T.exp(-omega * X + theta))
loss1 = -Y * T.log(prediction)
loss2 = 1/2.0 * (prediction - Y) ** 2
predict = theano.function([X], prediction)
calculate_loss = theano.function([X, Y], loss2)
print predict(1.0)
#derivatives
dX = T.grad(loss2, X)
dtheta = T.grad(loss2, theta)
domega = T.grad(loss2, omega)
epsilon = .01
#gradient function
gradient_step = theano.function(
[X, Y],
updates=((omega, omega - epsilon * domega),
(theta, theta - epsilon * dtheta)))
#optimization
for i in range(100):
loss = 0
for j in range(len(x)):
gradient_step(x[j], y[j])
loss += calculate_loss(x[j], y[j])
print 'loss after' + str(i) + 'iterations.' + str(loss)
print x
print y
mul = 1 - 1/len(x)
plt.xlim(x.min() * mul, x.max() * mul)
plt.ylim(y.min() * mul, y.max() * mul)
plt.xlabel('x')
plt.ylabel('y')
plt.title('lr test')
plt.plot(x, y, 'ro')
xx = np.arange(x.min(), x.max(), 0.1)
yy = map(lambda abc: predict(abc), xx)
plt.plot(xx, yy, 'b')
plt.show()
# vim: ts=4 sw=4 sts=4 expandtab<|fim▁end|>
|
X = T.dscalar('X')
|
<|file_name|>test_quantile.py<|end_file_name|><|fim▁begin|>import numpy as np
import pytest
import pandas as pd
from pandas import DataFrame, Index
import pandas._testing as tm
@pytest.mark.parametrize(
    "interpolation", ["linear", "lower", "higher", "nearest", "midpoint"]
)
@pytest.mark.parametrize(
    "a_vals,b_vals",
    [
        # Ints
        ([1, 2, 3, 4, 5], [5, 4, 3, 2, 1]),
        ([1, 2, 3, 4], [4, 3, 2, 1]),
        ([1, 2, 3, 4, 5], [4, 3, 2, 1]),
        # Floats
        ([1.0, 2.0, 3.0, 4.0, 5.0], [5.0, 4.0, 3.0, 2.0, 1.0]),
        # Missing data
        ([1.0, np.nan, 3.0, np.nan, 5.0], [5.0, np.nan, 3.0, np.nan, 1.0]),
        ([np.nan, 4.0, np.nan, 2.0, np.nan], [np.nan, 4.0, np.nan, 2.0, np.nan]),
        # Timestamps
        (
            list(pd.date_range("1/1/18", freq="D", periods=5)),
            list(pd.date_range("1/1/18", freq="D", periods=5))[::-1],
        ),
        # All NA
        ([np.nan] * 5, [np.nan] * 5),
    ],
)
@pytest.mark.parametrize("q", [0, 0.25, 0.5, 0.75, 1])
def test_quantile(interpolation, a_vals, b_vals, q):
    """GroupBy.quantile must agree with Series.quantile computed per group."""
    if interpolation == "nearest" and q == 0.5 and b_vals == [4, 3, 2, 1]:
        pytest.skip(
            "Unclear numpy expectation for nearest result with equidistant data"
        )
    # Reference values computed directly on each group's raw values.
    a_expected = pd.Series(a_vals).quantile(q, interpolation=interpolation)
    b_expected = pd.Series(b_vals).quantile(q, interpolation=interpolation)

    df = DataFrame(
        {"key": ["a"] * len(a_vals) + ["b"] * len(b_vals), "val": a_vals + b_vals}
    )
    expected = DataFrame(
        [a_expected, b_expected], columns=["val"], index=Index(["a", "b"], name="key")
    )
    result = df.groupby("key").quantile(q, interpolation=interpolation)
    tm.assert_frame_equal(result, expected)
def test_quantile_array():
    # https://github.com/pandas-dev/pandas/issues/27526
    # List-like q with a list-like grouper: the result is indexed by
    # (group, q) pairs.
    frame = DataFrame({"A": [0, 1, 2, 3, 4]})
    result = frame.groupby([0, 0, 1, 1, 1]).quantile([0.25])
    expected = DataFrame(
        {"A": [0.25, 2.50]},
        index=pd.MultiIndex.from_product([[0, 1], [0.25]]),
    )
    tm.assert_frame_equal(result, expected)

    # Same check with two value columns and two quantiles.
    frame = DataFrame({"A": [0, 1, 2, 3], "B": [4, 5, 6, 7]})
    result = frame.groupby([0, 0, 1, 1]).quantile([0.25, 0.75])
    expected = DataFrame(
        {"A": [0.25, 0.75, 2.25, 2.75], "B": [4.25, 4.75, 6.25, 6.75]},
        index=pd.MultiIndex.from_product([[0, 1], [0.25, 0.75]]),
    )
    tm.assert_frame_equal(result, expected)
def test_quantile_array2():
    # https://github.com/pandas-dev/pandas/pull/28085#issuecomment-524066959
    # Seeded RNG keeps the hard-coded expectations below reproducible.
    df = DataFrame(
        np.random.RandomState(0).randint(0, 5, size=(10, 3)), columns=list("ABC")
    )
    result = df.groupby("A").quantile([0.3, 0.7])
    expected = DataFrame(
        {
            "B": [0.9, 2.1, 2.2, 3.4, 1.6, 2.4, 2.3, 2.7, 0.0, 0.0],
            "C": [1.2, 2.8, 1.8, 3.0, 0.0, 0.0, 1.9, 3.1, 3.0, 3.0],
        },
        index=pd.MultiIndex.from_product(
            [[0, 1, 2, 3, 4], [0.3, 0.7]], names=["A", None]
        ),
    )
    tm.assert_frame_equal(result, expected)
def test_quantile_array_no_sort():
    # sort=False must keep group order as first seen ([1, 0]) in the
    # result's outer index level, for either ordering of q.
    df = DataFrame({"A": [0, 1, 2], "B": [3, 4, 5]})
    gb = df.groupby([1, 0, 1], sort=False)

    result = gb.quantile([0.25, 0.5, 0.75])
    expected = DataFrame(
        {"A": [0.5, 1.0, 1.5, 1.0, 1.0, 1.0], "B": [3.5, 4.0, 4.5, 4.0, 4.0, 4.0]},
        index=pd.MultiIndex.from_product([[1, 0], [0.25, 0.5, 0.75]]),
    )
    tm.assert_frame_equal(result, expected)

    result = gb.quantile([0.75, 0.25])
    expected = DataFrame(
        {"A": [1.5, 0.5, 1.0, 1.0], "B": [4.5, 3.5, 4.0, 4.0]},
        index=pd.MultiIndex.from_product([[1, 0], [0.75, 0.25]]),
    )
    tm.assert_frame_equal(result, expected)
def test_quantile_array_multiple_levels():
    # List-like q combined with a multi-key groupby: the q values become
    # the innermost, unnamed index level.
    df = DataFrame(
        {"A": [0, 1, 2], "B": [3, 4, 5], "c": ["a", "a", "a"], "d": ["a", "a", "b"]}
    )
    result = df.groupby(["c", "d"]).quantile([0.25, 0.75])
    index = pd.MultiIndex.from_tuples(
        [("a", "a", 0.25), ("a", "a", 0.75), ("a", "b", 0.25), ("a", "b", 0.75)],
        names=["c", "d", None],
    )
    expected = DataFrame(
        {"A": [0.25, 0.75, 2.0, 2.0], "B": [3.25, 3.75, 5.0, 5.0]}, index=index
    )
    tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize("frame_size", [(2, 3), (100, 10)])
@pytest.mark.parametrize("groupby", [[0], [0, 1]])
@pytest.mark.parametrize("q", [[0.5, 0.6]])
def test_groupby_quantile_with_arraylike_q_and_int_columns(frame_size, groupby, q):
    # GH30289
    # Integer column labels combined with a list-like q; the expected
    # MultiIndex is assembled by hand from levels/codes. Values repeat
    # with period 4 (`_ % 4`), hence the min(nrow, 4) group count.
    nrow, ncol = frame_size
    df = DataFrame(np.array([ncol * [_ % 4] for _ in range(nrow)]), columns=range(ncol))

    idx_levels = [list(range(min(nrow, 4)))] * len(groupby) + [q]
    idx_codes = [[x for x in range(min(nrow, 4)) for _ in q]] * len(groupby) + [
        list(range(len(q))) * min(nrow, 4)
    ]
    expected_index = pd.MultiIndex(
        levels=idx_levels, codes=idx_codes, names=groupby + [None]
    )
    expected_values = [
        [float(x)] * (ncol - len(groupby)) for x in range(min(nrow, 4)) for _ in q
    ]
    # Grouping columns are consumed by the groupby and drop out of the result.
    expected_columns = [x for x in range(ncol) if x not in groupby]
    expected = DataFrame(
        expected_values, index=expected_index, columns=expected_columns
    )
    result = df.groupby(groupby).quantile(q)
    tm.assert_frame_equal(result, expected)
def test_quantile_raises():
    # Quantile is undefined for object-dtype values; expect a TypeError.
    df = DataFrame([["foo", "a"], ["foo", "b"], ["foo", "c"]], columns=["key", "val"])
    with pytest.raises(TypeError, match="cannot be performed against 'object' dtypes"):
        df.groupby("key").quantile()
def test_quantile_out_of_bounds_q_raises():
    # https://github.com/pandas-dev/pandas/issues/27470
    # q must lie in [0, 1]; the error message echoes the offending value.
    df = DataFrame({"a": [0, 0, 0, 1, 1, 1], "b": range(6)})
    g = df.groupby([0, 0, 0, 1, 1, 1])
    with pytest.raises(ValueError, match="Got '50.0' instead"):
        g.quantile(50)
    with pytest.raises(ValueError, match="Got '-1.0' instead"):
        g.quantile(-1)
def test_quantile_missing_group_values_no_segfaults():
    # GH 28662: NaN in the grouping key used to cause random segfaults.
    keys = np.array([1.0, np.nan, 1.0])
    frame = DataFrame({"key": keys, "val": range(3)})

    # Exercise repeatedly — the failure mode was a crash, not a wrong
    # value, and it did not reproduce deterministically.
    grouped = frame.groupby("key")
    for _ in range(100):
        grouped.quantile()
@pytest.mark.parametrize(
"key, val, expected_key, expected_val",
[
([1.0, np.nan, 3.0, np.nan], range(4), [1.0, 3.0], [0.0, 2.0]),
([1.0, np.nan, 2.0, 2.0], range(4), [1.0, 2.0], [0.0, 2.5]),<|fim▁hole|> ([0], [42], [0], [42.0]),
([], [], np.array([], dtype="float64"), np.array([], dtype="float64")),
],
)
def test_quantile_missing_group_values_correct_results(
key, val, expected_key, expected_val
):
# GH 28662, GH 33200, GH 33569
df = DataFrame({"key": key, "val": val})
expected = DataFrame(
expected_val, index=Index(expected_key, name="key"), columns=["val"]
)
grp = df.groupby("key")
result = grp.quantile(0.5)
tm.assert_frame_equal(result, expected)
result = grp.quantile()
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
    "values",
    [
        pd.array([1, 0, None] * 2, dtype="Int64"),
        pd.array([True, False, None] * 2, dtype="boolean"),
    ],
)
@pytest.mark.parametrize("q", [0.5, [0.0, 0.5, 1.0]])
def test_groupby_quantile_nullable_array(values, q):
    # https://github.com/pandas-dev/pandas/issues/33136
    # Masked (nullable) arrays: pd.NA entries must be skipped like NaN,
    # for both scalar and list-like q.
    df = DataFrame({"a": ["x"] * 3 + ["y"] * 3, "b": values})
    result = df.groupby("a")["b"].quantile(q)

    if isinstance(q, list):
        idx = pd.MultiIndex.from_product((["x", "y"], q), names=["a", None])
        true_quantiles = [0.0, 0.5, 1.0]
    else:
        idx = Index(["x", "y"], name="a")
        true_quantiles = [0.5]

    expected = pd.Series(true_quantiles * 2, index=idx, name="b")
    tm.assert_series_equal(result, expected)
@pytest.mark.parametrize("q", [0.5, [0.0, 0.5, 1.0]])
def test_groupby_quantile_skips_invalid_dtype(q):
    # Non-numeric columns are dropped from the result rather than raising.
    df = DataFrame({"a": [1], "b": [2.0], "c": ["x"]})
    result = df.groupby("a").quantile(q)
    expected = df.groupby("a")[["b"]].quantile(q)
    tm.assert_frame_equal(result, expected)
def test_groupby_timedelta_quantile():
    # GH: 29485 — quantile must interpolate timedelta64 values instead
    # of failing on the dtype.
    df = DataFrame(
        {"value": pd.to_timedelta(np.arange(4), unit="s"), "group": [1, 1, 2, 2]}
    )
    result = df.groupby("group").quantile(0.99)
    expected = DataFrame(
        {
            "value": [
                pd.Timedelta("0 days 00:00:00.990000"),
                pd.Timedelta("0 days 00:00:02.990000"),
            ]
        },
        index=Index([1, 2], name="group"),
    )
    tm.assert_frame_equal(result, expected)
def test_columns_groupby_quantile():
    # GH 33795 — grouping along axis=1 (by column labels) with a
    # list-like q; quantiles land in a second column-index level.
    df = DataFrame(
        np.arange(12).reshape(3, -1),
        index=list("XYZ"),
        columns=pd.Series(list("ABAB"), name="col"),
    )
    result = df.groupby("col", axis=1).quantile(q=[0.8, 0.2])
    expected = DataFrame(
        [
            [1.6, 0.4, 2.6, 1.4],
            [5.6, 4.4, 6.6, 5.4],
            [9.6, 8.4, 10.6, 9.4],
        ],
        index=list("XYZ"),
        columns=pd.MultiIndex.from_tuples(
            [("A", 0.8), ("A", 0.2), ("B", 0.8), ("B", 0.2)], names=["col", None]
        ),
    )
    tm.assert_frame_equal(result, expected)<|fim▁end|>
|
(["a", "b", "b", np.nan], range(4), ["a", "b"], [0, 1.5]),
|
<|file_name|>BlockFlag.java<|end_file_name|><|fim▁begin|>package mcid.anubisset.letsmodreboot.block;
import mcid.anubisset.letsmodreboot.creativetab.CreativeTabLMRB;<|fim▁hole|> * Created by Luke on 30/08/2014.
*/
public class BlockFlag extends BlockLMRB
{
    public BlockFlag()
    {
        super();
        // Same id ("flag") is used for both the unlocalized block name
        // and the texture lookup. NOTE(review): BlockLMRB presumably
        // prefixes these with the mod id — confirm in the base class.
        this.setBlockName("flag");
        this.setBlockTextureName("flag");
    }
}<|fim▁end|>
|
/**
|
<|file_name|>linkedList.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { IIterator } from 'vs/base/common/iterator';
/** Internal doubly-linked list node: the payload plus prev/next wiring. */
class Node<E> {
	element: E;
	next: Node<E>;
	prev: Node<E>;

	constructor(element: E) {
		this.element = element;
	}
}
export class LinkedList<E> {
private _first: Node<E>;
private _last: Node<E>;
	/** True when the list holds no elements. */
	isEmpty(): boolean {
		return !this._first;
	}
	/** Drops all elements; the old nodes become garbage. */
	clear(): void {
		this._first = undefined;
		this._last = undefined;
	}
	/** Prepends `element`; returns a function that removes it again. */
	unshift(element: E) {
		return this.insert(element, false);
	}
	/** Appends `element`; returns a function that removes it again. */
	push(element: E) {
		return this.insert(element, true);
	}
	/**
	 * Links a new node at the tail (`atTheEnd`) or head of the list and
	 * returns a disposer that unlinks exactly that node again.
	 */
	private insert(element: E, atTheEnd: boolean) {
		const newNode = new Node(element);
		if (!this._first) {
			// empty list: the new node is both head and tail
			this._first = newNode;
			this._last = newNode;

		} else if (atTheEnd) {
			// push
			const oldLast = this._last;
			this._last = newNode;
			newNode.prev = oldLast;
			oldLast.next = newNode;

		} else {
			// unshift
			const oldFirst = this._first;
			this._first = newNode;
			newNode.next = oldFirst;
			oldFirst.prev = newNode;
		}

		return () => {
			// Linear scan for the node; a second call finds nothing and
			// is therefore a safe no-op.
			for (let candidate = this._first; candidate instanceof Node; candidate = candidate.next) {
				if (candidate !== newNode) {
					continue;
				}
				if (candidate.prev && candidate.next) {
					// middle
					let anchor = candidate.prev;
					anchor.next = candidate.next;
					candidate.next.prev = anchor;

				} else if (!candidate.prev && !candidate.next) {
					// only node
					this._first = undefined;
					this._last = undefined;

				} else if (!candidate.next) {
					// last
					this._last = this._last.prev;
					this._last.next = undefined;

				} else if (!candidate.prev) {
					// first
					this._first = this._first.next;
					this._first.prev = undefined;
				}

				// done
				break;
			}
		};
	}
iterator(): IIterator<E> {
let _done: boolean;
let _value: E;
let element = {
get done() { return _done; },
get value() { return _value; }
};
let node = this._first;
return {
next(): { done: boolean; value: E } {
if (!node) {
_done = true;
_value = undefined;
} else {
_done = false;
_value = node.element;
node = node.next;<|fim▁hole|> }
	/** Copies the list contents, head to tail, into a fresh array. */
	toArray(): E[] {
		let result: E[] = [];
		for (let node = this._first; node instanceof Node; node = node.next) {
			result.push(node.element);
		}
		return result;
	}
}<|fim▁end|>
|
}
return element;
}
};
|
<|file_name|>translateFromPosition.js<|end_file_name|><|fim▁begin|>import { SplashEffect } from "./effect.js";
import { Linear } from "../interpolation.js";
export class TranslateFromPosition extends SplashEffect {
	/**
	 * @param {*} element - target the effect is applied to (handled by SplashEffect)
	 * @param {Object} [options]
	 * @param {number} [options.x=0] - horizontal offset to translate from
	 * @param {number} [options.y=0] - vertical offset to translate from
	 * @param {string} [options.unit="px"] - CSS unit appended to x/y
	 * @param {string} [options.translationType="transform"] - "transform" or "position"
	 * @param {Object} [options.interpolation=new Linear()] - easing curve with in()/out()
	 */
	constructor(element, options) {
		super(element);
		options = options || {};
		this.x = options.x || 0;
		this.y = options.y || 0;
		// Can be whatever, but the effect won't do
		// anything if it isn't a valid css unit.
		this.unit = options.unit || "px";
		// Can be either "transform" or "position"
		this.translationType = options.translationType || "transform";
		this.interpolation = options.interpolation || new Linear();
	}
in(value) {<|fim▁hole|> }
	/**
	 * Outro step: offset scales from (0, 0) toward (x, y) as `value`
	 * goes 0 -> 1, shaped by the interpolation's out() curve.
	 * @param {number} value - animation progress in [0, 1]
	 */
	out(value) {
		this._set(
			this.interpolation.out(value) * this.x,
			this.interpolation.out(value) * this.y
		);
	}
_set(x, y) {
if (this.translationType = "transform") {
this.setTransform("translateX", x + this.unit);
this.setTransform("translateY", y + this.unit);
} else if (this.translationType = "position") {
this.setStyle("left", x + this.unit);
this.setStyle("top", y + this.unit);
} else {
console.error("Unknown translation type: " + this.translationType);
}
}
}<|fim▁end|>
|
this._set(
this.interpolation.in(value * -1 + 1) * this.x,
this.interpolation.in(value * -1 + 1) * this.y
);
|
<|file_name|>Todo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import vim
import itertools as it
from orgmode._vim import echom, ORGMODE, apply_count, repeat, realign_tags
from orgmode import settings
from orgmode.liborgmode.base import Direction
from orgmode.menu import Submenu, ActionEntry
from orgmode.keybinding import Keybinding, Plug
from orgmode.exceptions import PluginError
# temporary todo states for differnent orgmode buffers
ORGTODOSTATES = {}
from orgmode.py3compat.xrange_compatibility import *
from orgmode.py3compat.encode_compatibility import *
from orgmode.py3compat.unicode_compatibility import *
from orgmode.py3compat.py_py3_string import *<|fim▁hole|>def split_access_key(t, sub=None):
	u""" Split a todo state into the state name and its access key.

	Args:
		t (unicode): Todo state, e.g. u'TODO(t)'
		sub: Value returned in place of the access key when ``t``
			contains none

	Returns:
		tuple: (state, access_key); (None, None) when ``t`` is not a
		unicode string

	Example:
		>>> split_access_key(u'TODO(t)')
		(u'TODO', u't')
		>>> split_access_key(u'WANT', sub=u'(hi)')
		(u'WANT', u'(hi)')
	"""
	if type(t) != unicode:
		echom("String must be unicode")
		return (None, None)
	idx = t.find(u'(')
	# Default: the whole string is the state, `sub` stands in for the key.
	v, k = (t, sub)
	if idx != -1 and t[idx + 1:-1]:
		# 'STATE(k)' -> ('STATE', 'k'); assumes the key is terminated by
		# a trailing ')' — the closing paren itself is not validated.
		v, k = (t[:idx], t[idx + 1:-1])
	return (v, k)
class Todo(object):
u"""
Todo plugin.
Description taken from orgmode.org:
You can use TODO keywords to indicate different sequential states in the
process of working on an item, for example:
["TODO", "FEEDBACK", "VERIFY", "|", "DONE", "DELEGATED"]
The vertical bar separates the TODO keywords (states that need action) from
the DONE states (which need no further action). If you don't provide the
separator bar, the last state is used as the DONE state. With this setup,
the command ``,d`` will cycle an entry from TODO to FEEDBACK, then to
VERIFY, and finally to DONE and DELEGATED.
"""
	def __init__(self):
		u""" Initialize plugin: register the menu root and keybindings. """
		object.__init__(self)
		# menu entries this plugin should create
		self.menu = ORGMODE.orgmenu + Submenu(u'&TODO Lists')

		# key bindings for this plugin
		# key bindings are also registered through the menu so only additional
		# bindings should be put in this variable
		self.keybindings = []
	@classmethod
	def _process_all_states(cls, all_states):
		u""" Validate the user-defined todo state sets.

		Returns:
			tuple: (cleaned_todos, flattened_todos) — the nested keyword
			lists with access keys stripped plus a trailing [None]
			sentinel (the "no keyword" state), and the same data
			flattened into a single list.

		Raises:
			PluginError: if two states share the same name anywhere in
			the configuration.
		"""
		# TODO Write tests. -- Ron89
		cleaned_todos = [[
			split_access_key(todo)[0] for todo in it.chain.from_iterable(x)]
			for x in all_states] + [[None]]
		flattened_todos = list(it.chain.from_iterable(cleaned_todos))
		# Duplicate names would make the state -> position lookup ambiguous.
		if len(flattened_todos) != len(set(flattened_todos)):
			raise PluginError(u"Duplicate names detected in TODO keyword list. Please examine `g/b:org_todo_keywords`")
		# TODO This is the case when there are 2 todo states with the same
		# name. It should be handled by making a simple class to hold TODO
		# states, which would avoid mixing 2 todo states with the same name
		# since they would have a different reference (but same content),
		# albeit this can fail because python optimizes short strings (i.e.
		# they hold the same ref) so care should be taken in implementation
		return (cleaned_todos, flattened_todos)
	@classmethod
	def _get_next_state(
		cls, current_state, all_states, direction=Direction.FORWARD,
		next_set=False):
		u""" Get the next todo state

		Args:
			current_state (str): The current todo state
			all_states (list): A list containing all todo states within
				sublists. The todo states may contain access keys
			direction: Direction of state or keyword set change (forward or
				backward)
			next_set: Advance to the next keyword set in defined direction.

		Returns:
			str or None: next todo state, or None if there is no next state
			(the trailing "no keyword" sentinel).

		Note: all_states should have the form of:
			[(['TODO(t)'], ['DONE(d)']),
			(['REPORT(r)', 'BUG(b)', 'KNOWNCAUSE(k)'], ['FIXED(f)']),
			([], ['CANCELED(c)'])]
		"""
		cleaned_todos, flattened_todos = cls._process_all_states(all_states)

		# backward direction should really be -1 not 2
		next_dir = -1 if direction == Direction.BACKWARD else 1
		# work only with top level index
		if next_set:
			# Jump between whole keyword sets: find the set containing the
			# current state, step to the neighboring set (wrapping), and
			# return its first keyword.
			top_set = next((
				todo_set[0] for todo_set in enumerate(cleaned_todos)
				if current_state in todo_set[1]), -1)
			ind = (top_set + next_dir) % len(cleaned_todos)
			if ind != len(cleaned_todos) - 1:
				echom("Using set: %s" % str(all_states[ind]))
			else:
				# Landed on the trailing [None] sentinel: keyword cleared.
				echom("Keyword removed.")
			return cleaned_todos[ind][0]
		# No next set, cycle around everything
		else:
			# An unknown current_state yields index -1, so the cycle simply
			# restarts at the first (or last, going backward) keyword.
			ind = next((
				todo_iter[0] for todo_iter in enumerate(flattened_todos)
				if todo_iter[1] == current_state), -1)
			return flattened_todos[(ind + next_dir) % len(flattened_todos)]
	@classmethod
	@realign_tags
	@repeat
	@apply_count
	def toggle_todo_state(
		cls, direction=Direction.FORWARD, interactive=False, next_set=False):
		u""" Toggle the TODO state of the heading under the cursor.

		In interactive mode a chooser buffer (org:todo/<bufnr>) is opened
		and this method returns without changing the state itself.

		:returns: The plug name (for repeat support)
		"""
		d = ORGMODE.get_document(allow_dirty=True)

		# get heading
		heading = d.find_current_heading()
		if not heading:
			# Not on a heading: replay '^' so the cursor motion isn't lost.
			vim.eval(u'feedkeys("^", "n")')
			return

		todo_states = d.get_todo_states(strip_access_key=False)
		# get todo states
		if not todo_states:
			echom(u'No todo keywords configured.')
			return

		current_state = heading.todo

		# get new state interactively
		if interactive:
			# determine position of the interactive prompt
			prompt_pos = settings.get(u'org_todo_prompt_position', u'botright')
			if prompt_pos not in [u'botright', u'topleft']:
				prompt_pos = u'botright'

			# pass todo states to new window
			ORGTODOSTATES[d.bufnr] = todo_states
			settings.set(
				u'org_current_state_%d' % d.bufnr,
				current_state if current_state is not None else u'', overwrite=True)
			todo_buffer_exists = bool(int(vim.eval(u_encode(
				u'bufexists("org:todo/%d")' % (d.bufnr, )))))
			if todo_buffer_exists:
				# if the buffer already exists, reuse it
				vim.command(u_encode(
					u'%s sbuffer org:todo/%d' % (prompt_pos, d.bufnr, )))
			else:
				# create a new window
				vim.command(u_encode(
					u'keepalt %s %dsplit org:todo/%d' % (prompt_pos, len(todo_states), d.bufnr)))
		else:
			new_state = Todo._get_next_state(
				current_state, todo_states, direction=direction,
				next_set=next_set)
			cls.set_todo_state(new_state)

		# plug (mapping name; presumably consumed by @repeat — confirm)
		plug = u'OrgTodoForward'
		if direction == Direction.BACKWARD:
			plug = u'OrgTodoBackward'

		return plug
	@classmethod
	def set_todo_state(cls, state):
		u""" Set the todo state of the heading under the cursor.

		:state: The new todo state (None removes the keyword)
		"""
		lineno, colno = vim.current.window.cursor

		d = ORGMODE.get_document(allow_dirty=True)
		heading = d.find_current_heading()
		if not heading:
			return

		current_state = heading.todo

		# set new headline
		heading.todo = state
		d.write_heading(heading)

		# move cursor along with the inserted state only when current position
		# is in the heading; otherwise do nothing
		if heading.start_vim == lineno and colno > heading.level:
			if current_state is not None and \
				colno <= heading.level + len(current_state):
				# the cursor is actually on the todo keyword
				# move it back to the beginning of the keyword in that case
				vim.current.window.cursor = (lineno, heading.level + 1)
			else:
				# the cursor is somewhere in the text, move it along by the
				# difference between the old and the new keyword length
				if current_state is None and state is None:
					offset = 0
				elif current_state is None and state is not None:
					offset = len(state) + 1
				elif current_state is not None and state is None:
					offset = -len(current_state) - 1
				else:
					offset = len(state) - len(current_state)
				vim.current.window.cursor = (lineno, colno + offset)
@classmethod
def init_org_todo(cls):
u""" Initialize org todo selection window.
"""
bufnr = int(vim.current.buffer.name.split('/')[-1])
all_states = ORGTODOSTATES.get(bufnr, None)
vim_commands = [
u'let g:org_sav_timeoutlen=&timeoutlen',
u'au orgmode BufEnter <buffer> :if ! exists("g:org_sav_timeoutlen")|let g:org_sav_timeoutlen=&timeoutlen|set timeoutlen=1|endif',
u'au orgmode BufLeave <buffer> :if exists("g:org_sav_timeoutlen")|let &timeoutlen=g:org_sav_timeoutlen|unlet g:org_sav_timeoutlen|endif',
u'setlocal nolist tabstop=16 buftype=nofile timeout timeoutlen=1 winfixheight',
u'setlocal statusline=Org\\ todo\\ (%s)' % vim.eval(u_encode(u'fnameescape(fnamemodify(bufname(%d), ":t"))' % bufnr)),
u'nnoremap <silent> <buffer> <Esc> :%sbw<CR>' % vim.eval(u_encode(u'bufnr("%")')),
u'nnoremap <silent> <buffer> <CR> :let g:org_state = fnameescape(expand("<cword>"))<Bar>bw<Bar>exec "%s ORGMODE.plugins[u\'Todo\'].set_todo_state(\'".g:org_state."\')"<Bar>unlet! g:org_state<CR>' % VIM_PY_CALL,
]
# because timeoutlen can only be set globally it needs to be stored and
# restored later
# make window a scratch window and set the statusline differently
for cmd in vim_commands:
vim.command(u_encode(cmd))
if all_states is None:
vim.command(u_encode(u'bw'))
echom(u'No todo states avaiable for buffer %s' % vim.current.buffer.name)
for idx, state in enumerate(all_states):
pairs = [split_access_key(x, sub=u' ') for x in it.chain(*state)]
line = u'\t'.join(u''.join((u'[%s] ' % x[1], x[0])) for x in pairs)
vim.current.buffer.append(u_encode(line))
for todo, key in pairs:
# FIXME if double key is used for access modified this doesn't work
vim.command(u_encode(u'nnoremap <silent> <buffer> %s :bw<CR><c-w><c-p>%s ORGMODE.plugins[u"Todo"].set_todo_state("%s")<CR>' % (key, VIM_PY_CALL, u_decode(todo))))
# position the cursor of the current todo item
vim.command(u_encode(u'normal! G'))
current_state = settings.unset(u'org_current_state_%d' % bufnr)
if current_state is not None and current_state != '':
for i, buf in enumerate(vim.current.buffer):
idx = buf.find(current_state)
if idx != -1:
vim.current.window.cursor = (i + 1, idx)
break
else:
vim.current.window.cursor = (2, 4)
# finally make buffer non modifiable
vim.command(u_encode(u'setfiletype orgtodo'))
vim.command(u_encode(u'setlocal nomodifiable'))
# remove temporary todo states for the current buffer
del ORGTODOSTATES[bufnr]
def register(self):
u"""
Registration of plugin. Key bindings and other initialization should be done.
"""
self.keybindings.append(Keybinding(u'<localleader>ct', Plug(
u'OrgTodoToggleNonInteractive',
u'%s ORGMODE.plugins[u"Todo"].toggle_todo_state(interactive=False)<CR>' % VIM_PY_CALL)))
self.menu + ActionEntry(u'&TODO/DONE/-', self.keybindings[-1])
self.keybindings.append(Keybinding(u'<localleader>d', Plug(
u'OrgTodoToggleInteractive',
u'%s ORGMODE.plugins[u"Todo"].toggle_todo_state(interactive=True)<CR>' % VIM_PY_CALL)))
self.menu + ActionEntry(u'&TODO/DONE/- (interactiv)', self.keybindings[-1])
# add submenu
submenu = self.menu + Submenu(u'Select &keyword')
self.keybindings.append(Keybinding(u'<S-Right>', Plug(
u'OrgTodoForward',
u'%s ORGMODE.plugins[u"Todo"].toggle_todo_state()<CR>' % VIM_PY_CALL)))
submenu + ActionEntry(u'&Next keyword', self.keybindings[-1])
self.keybindings.append(Keybinding(u'<S-Left>', Plug(
u'OrgTodoBackward',
u'%s ORGMODE.plugins[u"Todo"].toggle_todo_state(direction=2)<CR>' % VIM_PY_CALL)))
submenu + ActionEntry(u'&Previous keyword', self.keybindings[-1])
self.keybindings.append(Keybinding(u'<C-S-Right>', Plug(
u'OrgTodoSetForward',
u'%s ORGMODE.plugins[u"Todo"].toggle_todo_state(next_set=True)<CR>' % VIM_PY_CALL)))
submenu + ActionEntry(u'Next keyword &set', self.keybindings[-1])
self.keybindings.append(Keybinding(u'<C-S-Left>', Plug(
u'OrgTodoSetBackward',
u'%s ORGMODE.plugins[u"Todo"].toggle_todo_state(direction=2, next_set=True)<CR>' % VIM_PY_CALL)))
submenu + ActionEntry(u'Previous &keyword set', self.keybindings[-1])
settings.set(u'org_todo_keywords', [u_encode(u'TODO'), u_encode(u'|'), u_encode(u'DONE')])
settings.set(u'org_todo_prompt_position', u'botright')
vim.command(u_encode(u'au orgmode BufReadCmd org:todo/* %s ORGMODE.plugins[u"Todo"].init_org_todo()' % VIM_PY_CALL))
# vim: set noexpandtab:<|fim▁end|>
| |
<|file_name|>uncompress.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2004 Conectiva, Inc.
#
# Written by Gustavo Niemeyer <[email protected]>
#
# This file is part of Smart Package Manager.
#
# Smart Package Manager is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# Smart Package Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Smart Package Manager; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
from smart.const import BLOCKSIZE
from smart import *
class Uncompressor(object):
_handlers = []
def addHandler(self, handler):
self._handlers.append(handler())
addHandler = classmethod(addHandler)
def getHandler(self, localpath):
for handler in self._handlers:<|fim▁hole|>
def uncompress(self, localpath):
for handler in self._handlers:
if handler.query(localpath):
return handler.uncompress(localpath)
else:
raise Error, _("Unknown compressed file: %s") % localpath
class UncompressorHandler(object):
def query(self, localpath):
return None
def getTargetPath(self, localpath):
return None
def uncompress(self, localpath):
raise Error, _("Unsupported file type")
class BZ2Handler(UncompressorHandler):
def query(self, localpath):
if localpath.endswith(".bz2"):
return True
def getTargetPath(self, localpath):
return localpath[:-4]
def uncompress(self, localpath):
import bz2
try:
input = bz2.BZ2File(localpath)
output = open(self.getTargetPath(localpath), "w")
data = input.read(BLOCKSIZE)
while data:
output.write(data)
data = input.read(BLOCKSIZE)
except (IOError, OSError), e:
raise Error, "%s: %s" % (localpath, e)
Uncompressor.addHandler(BZ2Handler)
class GZipHandler(UncompressorHandler):
def query(self, localpath):
if localpath.endswith(".gz"):
return True
def getTargetPath(self, localpath):
return localpath[:-3]
def uncompress(self, localpath):
import gzip
try:
input = gzip.GzipFile(localpath)
output = open(self.getTargetPath(localpath), "w")
data = input.read(BLOCKSIZE)
while data:
output.write(data)
data = input.read(BLOCKSIZE)
except (IOError, OSError), e:
raise Error, "%s: %s" % (localpath, e)
Uncompressor.addHandler(GZipHandler)<|fim▁end|>
|
if handler.query(localpath):
return handler
getHandler = classmethod(getHandler)
|
<|file_name|>wordfreq-morph.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Скрипт извлекает слова из текстового файла и сортирует их по частоте.
# С помощью модуля pymorphy2 можно привести слова к начальной форме (единственное число, именительный падеж).
# Нужен pymorphy2 и русскоязычный словарь для него!
# pip install --user pymorphy2
# Примеры:
# ./wordfreq-morph.py ./text-file.txt | less
# xclip -o | ./wordfreq-morph.py -m
# Проверялся на интерпретаторе:
# Python 3.6.1 on linux
import sys
import sqlite3
import os
import re
import argparse
# Сортировка вывода словарей:
from collections import OrderedDict
#------------------------------------------------------------------------------
# Опции:
# Проверочный морфологический словарь (в каталоге скрипта):
NORMAL_DICT_PATH = 'dict.opencorpora-sing-nom.txt'
NORMAL_DICT_DIR = 'word-length-dicts'
database_name = 'opencorpora-sing-nom.sqlite'
#-------------------------------------------------------------------------
# Аргументы командной строки:
def create_parser():
"""Список доступных параметров скрипта."""
parser = argparse.ArgumentParser()
parser.add_argument('file',
nargs='*',
help='Русскоязычный текстовый файл в UTF-8'
)
parser.add_argument('-m', '--morph',
action='store_true', default='False',
help='Преобразование слов в начальную форму (нужен pymorphy2)'
)
return parser
#-------------------------------------------------------------------------
# Функции:
def metadict_path (metadict_dir):
"""Возвращает абсолютный путь к каталогу словарей."""
# Получаем абсолютный путь к каталогу скрипта:
script_path = os.path.dirname(os.path.abspath(__file__))
# Добавляем к пути каталог словарей:
metadict_path = script_path + '/' + metadict_dir
return metadict_path
def find_files (directory):
"""Возвращает список путей ко всем файлам каталога, включая подкаталоги."""
path_f = []
for d, dirs, files in os.walk(directory):
for f in files:
# Формирование адреса:
path = os.path.join(d,f)
# Добавление адреса в список:
path_f.append(path)
return path_f
def lowercase (text):
"""Создаёт из текста список слов в нижнем регистре"""
# Переводим текст в нижний регистр:
text = str(text.lower())
# Регексп вытаскивает из текста слова:
words = re.findall(r"(\w+)", text, re.UNICODE)
# Восстанавливаются ссылки:
urls = re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', text)
words = words + urls
return words
def wordfreq_old (words):
"""Создаёт словарь с частотой слов"""
stats = {}
# Слово -- ключ словаря, значение, это его частота:
for word in words:
stats[word] = stats.get(word, 0) + 1
return stats
def word_test_slow (word):
"""Светяет слово со словарём, выбирая словарь по длине слова."""
# Определяем длину слова:
search_string = '-' + str(len(word)) + '.txt'
dicts_list = find_files(metadict_path(NORMAL_DICT_DIR))
test = False
# Подключаем словарь для проверки:
for dict in dicts_list:
if search_string in dict:
normal_dict_file = open(dict, "r")
normal_dict = normal_dict_file.read()
normal_dict_file.close()
if word in normal_dict:
return True
else:
return False
def word_test_sql (word,cursor):
"""Проверяет, есть ли слово в базе данных"""
# Номер таблицы, это длина слова:
word_lenght = len(word)
# А вот не нужно хардкодить (число таблиц в базе данных может измениться)
if word_lenght > 32:
word_lenght = 32
table_name = 'opencorpora' + str(word_lenght)
#database = sqlite3.connect(metadict_path(database_name))
#cursor = database.cursor()
cursor.execute("SELECT words FROM "+table_name+" WHERE words=?",(word,))
result = cursor.fetchall()
#database.close()
if result:
return True
else:
return False
def wordfreq_morph (words):
"""Создаёт словарь с частотой слов (в начальной форме)"""
# Морфологический анализатор:
import pymorphy2
stats = {}
n_stats = {}
for word in words:
stats[word] = stats.get(word, 0) + 1
morph = pymorphy2.MorphAnalyzer()
for item in stats:
# Слово приводится к начальной форме:
n_word = morph.parse(item)[0].normal_form
# Неологизмы оставляем без изменений:
if word_test_sql(n_word,cursor) is not True:
n_word = item
# Создаётся новый ключ, или прибавляется значение к существующему:
if n_word not in n_stats:
n_stats[n_word] = stats[item]
else:
n_stats[n_word] = n_stats[n_word] + stats[item]
return n_stats
def dict_sort (stats):
"""Сортировка словаря по частоте и алфавиту"""
stats_sort = OrderedDict(sorted(stats.items(), key=lambda x: x[0], reverse=False))
stats_list = OrderedDict(sorted(stats_sort.items(), key=lambda x: x[1], reverse=True))
return stats_list
#-------------------------------------------------------------------------
# Тело программы:<|fim▁hole|># Создаётся список аргументов скрипта:
parser = create_parser()
namespace = parser.parse_args()
# Проверяем, существует ли указанный файл:
file_patch = ' '.join(namespace.file)
if namespace.file is not None and os.path.exists(file_patch):
file = open(file_patch, "r")
text = file.read()
file.close()
# Если нет, читаем стандартный ввод:
else:
text = sys.stdin.read()
# Извлекаем из текста слова:
words = lowercase(text)
# Подключение к базе данных:
database = sqlite3.connect(metadict_path(database_name))
cursor = database.cursor()
# Если указано преобразование слов:
if namespace.morph is True:
wordfreq = wordfreq_morph(words)
else:
wordfreq = wordfreq_old(words)
# Отключаемся от базы данных:
database.close()
# Вывод словаря:
wordfreq_sort=dict_sort(wordfreq)
for word, count in wordfreq_sort.items():
print (count, word)<|fim▁end|>
| |
<|file_name|>Colleague1.java<|end_file_name|><|fim▁begin|>package gof.behaviour.mediator;
public class Colleague1 extends Colleague {
public Colleague1(Mediator m){
super(m);
}
@Override
public void action() {
<|fim▁hole|>
}<|fim▁end|>
|
System.out.println("Colleague1");
}
|
<|file_name|>Taowa.py<|end_file_name|><|fim▁begin|>user_input = raw_input()
class Taowa:
def __init__(self):
return
def setSize(self, m1, m2):
self.width = m1
self.height = m2
self.area = m1 * m2
class Sorter:
def __init__(self):
self.taowas = []
self.lastWidth = 0
self.lastHeight = 0
return
def setArray(self, str):
self.arr = str.split(' ')
for idx in range(len(self.arr)):
self.arr[idx] = int(self.arr[idx])
def makeTaowa(self):
l = len(self.arr) / 2<|fim▁hole|> m2 = self.arr[idx*2+1]
taowa = Taowa()
taowa.setSize(m1, m2)
self.taowas.append(taowa)
def sortTaowa(self):
self.taowas.sort(key=lambda taowa:taowa.width)
def calculate(self):
l = len(self.taowas)
self.lastHeight = self.taowas[0].height
self.lastWidth = self.taowas[0].width
m = 1
for idx in range(1, l, 1):
taowa = self.taowas[idx]
w = taowa.width
h = taowa.height
if w > self.lastWidth and h > self.lastHeight :
m = m+1
self.lastWidth = w
self.lastHeight = h
return m
sorter = Sorter()
sorter.setArray(user_input)
sorter.makeTaowa()
sorter.sortTaowa()
user_input = sorter.calculate()
print user_input<|fim▁end|>
|
for idx in range(l):
m1 = self.arr[idx*2]
|
<|file_name|>webjournal_tests.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##
## This file is part of CDS Invenio.
## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008 CERN.
##
## CDS Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## CDS Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with CDS Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Unit tests for WebJournal."""
__revision__ = \
"$Id$"
# pylint cds-invenio/modules/webjournal/lib/webjournal_tests.py
import unittest
from invenio.webjournal_utils import compare_issues
from invenio.webjournal import issue_is_later_than
#from invenio import webjournal_utils
from invenio.testutils import make_test_suite, run_test_suite
#from invenio.config import CFG_SITE_URL
class TestCompareIssues(unittest.TestCase):
"""Tests for comparing issues."""
def test_compare_issues(self):
"""webjournal - tests comparing issues"""
issue1 = '06/2009'
issue2 = '07/2009'
self.assertEqual(compare_issues(issue1, issue2), -1)
issue1 = '07/2009'
issue2 = '06/2009'
self.assertEqual(compare_issues(issue1, issue2), 1)
issue1 = '07/2009'
issue2 = '07/2009'
self.assertEqual(compare_issues(issue1, issue2), 0)
issue1 = '07/2009'<|fim▁hole|> self.assertEqual(compare_issues(issue1, issue2), 1)
issue1 = '07/2008'
issue2 = '07/2009'
self.assertEqual(compare_issues(issue1, issue2), -1)
def test_issue1_is_later_than(self):
"""webjournal - tests comparing issue1 is later than issue2 """
issue1 = '07/2009'
issue2 = '07/2008'
self.assertEqual(issue_is_later_than(issue1, issue2), True)
issue1 = '07/2008'
issue2 = '07/2009'
self.assertEqual(issue_is_later_than(issue1, issue2), False)
issue1 = '07/2009'
issue2 = '06/2009'
self.assertEqual(issue_is_later_than(issue1, issue2), True)
issue1 = '06/2009'
issue2 = '07/2009'
self.assertEqual(issue_is_later_than(issue1, issue2), False)
issue1 = '07/2009'
issue2 = '07/2009'
self.assertEqual(issue_is_later_than(issue1, issue2), False)
TEST_SUITE = make_test_suite(TestCompareIssues)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)<|fim▁end|>
|
issue2 = '07/2008'
|
<|file_name|>receiver.js<|end_file_name|><|fim▁begin|>let connectionIdx = 0;
let messageIdx = 0;
function addConnection(connection) {
connection.connectionId = ++connectionIdx;
addMessage('New connection #' + connectionIdx);
connection.addEventListener('message', function(event) {
messageIdx++;
const data = JSON.parse(event.data);
const logString = 'Message ' + messageIdx + ' from connection #' +
connection.connectionId + ': ' + data.message;
addMessage(logString, data.lang);
maybeSetFruit(data.message);
connection.send('Received message ' + messageIdx);
});
connection.addEventListener('close', function(event) {
addMessage('Connection #' + connection.connectionId + ' closed, reason = ' +
event.reason + ', message = ' + event.message);
});
};
/* Utils */
const fruitEmoji = {
'grapes': '\u{1F347}',
'watermelon': '\u{1F349}',
'melon': '\u{1F348}',
'tangerine': '\u{1F34A}',
'lemon': '\u{1F34B}',
'banana': '\u{1F34C}',
'pineapple': '\u{1F34D}',
'green apple': '\u{1F35F}',
'apple': '\u{1F34E}',
'pear': '\u{1F350}',
'peach': '\u{1F351}',
'cherries': '\u{1F352}',
'strawberry': '\u{1F353}'
};
function addMessage(content, language) {
const listItem = document.createElement("li");
if (language) {
listItem.lang = language;
}
listItem.textContent = content;<|fim▁hole|> const fruit = message.toLowerCase();
if (fruit in fruitEmoji) {
document.querySelector('#main').textContent = fruitEmoji[fruit];
}
};
document.addEventListener('DOMContentLoaded', function() {
if (navigator.presentation.receiver) {
navigator.presentation.receiver.connectionList.then(list => {
list.connections.map(connection => addConnection(connection));
list.addEventListener('connectionavailable', function(event) {
addConnection(event.connection);
});
});
}
});<|fim▁end|>
|
document.querySelector("#message-list").appendChild(listItem);
};
function maybeSetFruit(message) {
|
<|file_name|>acetic.rs<|end_file_name|><|fim▁begin|>#![feature(env)]
#![feature(core)]
#![feature(os)]
extern crate acetic;
#[macro_use] extern crate log;
use std::env;
fn main() {
for arg in env::args().skip(1) {
debug!("Parsing: {:?}", arg);
let krate = acetic::parse_crate(&arg.into_string().unwrap()[]);<|fim▁hole|><|fim▁end|>
|
acetic::compile_crate(krate)
}
}
|
<|file_name|>p0016.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
<|fim▁hole|>def sum_digits(n):
return sum(int(d) for d in str(n))
print(sum_digits(2**1000))<|fim▁end|>
| |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# pkpgcounter : a generic Page Description Language parser
#
# (c) 2003-2009 Jerome Alet <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# $Id$
#
#
import sys
import glob
import os
import shutil
try :
from distutils.core import setup
except ImportError as msg :
sys.stderr.write("%s\n" % msg)
sys.stderr.write("You need the DistUtils Python module.\nunder Debian, you may have to install the python-dev package.\nOf course, YMMV.\n")
sys.exit(-1)
try :
from PIL import Image
except ImportError :
sys.stderr.write("You need the Python Imaging Library (aka PIL).\nYou can grab it from http://www.pythonware.com\n")
sys.exit(-1)
sys.path.insert(0, "pkpgpdls")
from pkpgpdls.version import __version__, __doc__
data_files = []
mofiles = glob.glob(os.sep.join(["po", "*", "*.mo"]))
for mofile in mofiles :<|fim▁hole|> data_files.append((directory, [ mofile ]))
docdir = "share/doc/pkpgcounter"
docfiles = ["README", "COPYING", "BUGS", "CREDITS", "AUTHORS", "TODO"]
data_files.append((docdir, docfiles))
if os.path.exists("ChangeLog") :
data_files.append((docdir, ["ChangeLog"]))
directory = os.sep.join(["share", "man", "man1"])
manpages = glob.glob(os.sep.join(["man", "*.1"]))
data_files.append((directory, manpages))
setup(name = "pkpgcounter", version = __version__,
license = "GNU GPL",
description = __doc__,
author = "Jerome Alet",
author_email = "[email protected]",
url = "http://www.pykota.com/software/pkpgcounter/",
packages = [ "pkpgpdls" ],
scripts = [ "bin/pkpgcounter" ],
data_files = data_files)<|fim▁end|>
|
lang = mofile.split(os.sep)[1]
directory = os.sep.join(["share", "locale", lang, "LC_MESSAGES"])
|
<|file_name|>localip.js<|end_file_name|><|fim▁begin|>"use strict";
var dns = require('dns'),
os = require('os');
// Because localtunnel doesn't work reliably enough,
// hack the config to point at the local ip.
module.exports = function(ctx, done){
if(ctx.environment !== 'development'){<|fim▁hole|> return done();
}
dns.lookup(os.hostname(), function (err, add, fam){
ctx.url = 'http://' + add + ':8080';
done();
});
};<|fim▁end|>
|
// Only run this hack in dev for real.
|
<|file_name|>cycle-projection-based-on-where-clause.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Example cycle where a bound on `T` uses a shorthand for `T`. This
// creates a cycle because we have to know the bounds on `T` to figure
// out what trait defines `Item`, but we can't know the bounds on `T`
// without knowing how to handle `T::Item`.
//
// Note that in the future cases like this could perhaps become legal,
// if we got more fine-grained about our cycle detection or changed
// how we handle `T::Item` resolution.
use std::ops::Add;
// Preamble.
trait Trait { type Item; }
struct A<T><|fim▁hole|>{
data: T
}
fn main() {
}<|fim▁end|>
|
where T : Trait,
T : Add<T::Item>
//~^ ERROR unsupported cyclic reference between types/traits detected
|
<|file_name|>modal.spec.ts<|end_file_name|><|fim▁begin|>import {CommonModule} from '@angular/common';
import {Component, Injectable, Injector, NgModule, OnDestroy, ViewChild} from '@angular/core';
import {ComponentFixture, fakeAsync, TestBed, tick} from '@angular/core/testing';
import {NgbModalConfig} from './modal-config';
import {NgbActiveModal, NgbModal, NgbModalModule, NgbModalRef} from './modal.module';
import {createKeyEvent, isBrowser, isBrowserVisible} from '../test/common';
import {NgbConfig} from '..';
import {NgbConfigAnimation} from '../test/ngb-config-animation';
import createSpy = jasmine.createSpy;
import {Key} from 'src/util/key';
const NOOP = () => {};
@Injectable()
class SpyService {
called = false;
}
@Injectable()
class CustomSpyService {
called = false;
}
describe('ngb-modal', () => {
let fixture: ComponentFixture<TestComponent>;
beforeEach(() => {
jasmine.addMatchers({
toHaveModal: function(util, customEqualityTests) {
return {
compare: function(actual, content?, selector?) {
const allModalsContent = document.querySelector(selector || 'body').querySelectorAll('.modal-content');
let pass = true;
let errMsg;
if (!content) {
pass = allModalsContent.length > 0;
errMsg = 'at least one modal open but found none';
} else if (Array.isArray(content)) {
pass = allModalsContent.length === content.length;
errMsg = `${content.length} modals open but found ${allModalsContent.length}`;
} else {
pass = allModalsContent.length === 1 && allModalsContent[0].textContent.trim() === content;
errMsg = `exactly one modal open but found ${allModalsContent.length}`;
}
return {pass: pass, message: `Expected ${actual.outerHTML} to have ${errMsg}`};
},
negativeCompare: function(actual) {
const allOpenModals = actual.querySelectorAll('ngb-modal-window');
return {
pass: allOpenModals.length === 0,
message: `Expected ${actual.outerHTML} not to have any modals open but found ${allOpenModals.length}`
};
}
};
}
});
jasmine.addMatchers({
toHaveBackdrop: function(util, customEqualityTests) {
return {
compare: function(actual) {
return {
pass: document.querySelectorAll('ngb-modal-backdrop').length === 1,
message: `Expected ${actual.outerHTML} to have exactly one backdrop element`
};
},
negativeCompare: function(actual) {
const allOpenModals = document.querySelectorAll('ngb-modal-backdrop');
return {
pass: allOpenModals.length === 0,
message: `Expected ${actual.outerHTML} not to have any backdrop elements`
};
}
};
}
});
});
afterEach(() => {
// detect left-over modals and report errors when found
const remainingModalWindows = document.querySelectorAll('ngb-modal-window');
if (remainingModalWindows.length) {
fail(`${remainingModalWindows.length} modal windows were left in the DOM.`);
}
const remainingModalBackdrops = document.querySelectorAll('ngb-modal-backdrop');
if (remainingModalBackdrops.length) {
fail(`${remainingModalBackdrops.length} modal backdrops were left in the DOM.`);
}
});
describe('default configuration', () => {
beforeEach(() => {
TestBed.configureTestingModule({imports: [NgbModalTestModule]});
fixture = TestBed.createComponent(TestComponent);
});
describe('basic functionality', () => {
it('should open and close modal with default options', () => {
const modalInstance = fixture.componentInstance.open('foo');
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
const modalEl = document.querySelector('ngb-modal-window') as HTMLElement;
expect(modalEl).not.toHaveClass('fade');
expect(modalEl).toHaveClass('show');
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should open and close modal from a TemplateRef content', () => {
const modalInstance = fixture.componentInstance.openTpl();
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('Hello, World!');
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should properly destroy TemplateRef content', () => {
const spyService = fixture.debugElement.injector.get(SpyService);
const modalInstance = fixture.componentInstance.openDestroyableTpl();
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('Some content');
expect(spyService.called).toBeFalsy();
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(spyService.called).toBeTruthy();
});
it('should open and close modal from a component type', () => {
const spyService = fixture.debugElement.injector.get(SpyService);
const modalInstance = fixture.componentInstance.openCmpt(DestroyableCmpt);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('Some content');
expect(spyService.called).toBeFalsy();
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(spyService.called).toBeTruthy();
});
it('should inject active modal ref when component is used as content', () => {
fixture.componentInstance.openCmpt(WithActiveModalCmpt);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('Close');
(<HTMLElement>document.querySelector('button.closeFromInside')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should expose component used as modal content', () => {
const modalInstance = fixture.componentInstance.openCmpt(WithActiveModalCmpt);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('Close');
expect(modalInstance.componentInstance instanceof WithActiveModalCmpt).toBeTruthy();
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(modalInstance.componentInstance).toBe(undefined);
});
it('should open and close modal from inside', () => {
fixture.componentInstance.openTplClose();
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#close')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should open and dismiss modal from inside', () => {
fixture.componentInstance.openTplDismiss().result.catch(NOOP);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should open and close modal from template implicit context', () => {
fixture.componentInstance.openTplImplicitContext();
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#close')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should open and dismiss modal from template implicit context', () => {
fixture.componentInstance.openTplImplicitContext().result.catch(NOOP);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
// --- close / dismiss notification tests ---------------------------------
// Each test opens a modal from a template, triggers close or dismissal via
// a button inside the content, and verifies both the DOM state and the
// observable / promise notifications exposed on NgbModalRef.
it(`should emit 'closed' on close`, () => {
const closedSpy = createSpy();
fixture.componentInstance.openTplClose().closed.subscribe(closedSpy);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#close')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
// The close button passes 'myResult' through to the 'closed' stream.
expect(closedSpy).toHaveBeenCalledWith('myResult');
});
it(`should emit 'dismissed' on dismissal`, () => {
const dismissSpy = createSpy();
fixture.componentInstance.openTplDismiss().dismissed.subscribe(dismissSpy);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(dismissSpy).toHaveBeenCalledWith('myReason');
});
it('should resolve result promise on close', () => {
let resolvedResult;
fixture.componentInstance.openTplClose().result.then((result) => resolvedResult = result);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#close')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
// Promise resolution is asynchronous; assert only once the fixture settles.
fixture.whenStable().then(() => { expect(resolvedResult).toBe('myResult'); });
});
it('should reject result promise on dismiss', () => {
let rejectReason;
fixture.componentInstance.openTplDismiss().result.catch((reason) => rejectReason = reason);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
fixture.whenStable().then(() => { expect(rejectReason).toBe('myReason'); });
});
it(`should emit 'shown' and 'hidden' events`, () => {
const shownSpy = createSpy();
const hiddenSpy = createSpy();
const modalRef = fixture.componentInstance.openTplClose();
modalRef.shown.subscribe(shownSpy);
modalRef.hidden.subscribe(hiddenSpy);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
// shown/hidden are void events, hence emitted with 'undefined'.
expect(shownSpy).toHaveBeenCalledWith(undefined);
(<HTMLElement>document.querySelector('button#close')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(hiddenSpy).toHaveBeenCalledWith(undefined);
});
it('should add / remove "modal-open" class to body when modal is open', fakeAsync(() => {
const modalRef = fixture.componentInstance.open('bar');
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
expect(document.body).toHaveCssClass('modal-open');
modalRef.close('bar result');
fixture.detectChanges();
tick();
expect(fixture.nativeElement).not.toHaveModal();
expect(document.body).not.toHaveCssClass('modal-open');
}));
// Closing an already-closed modal must be a silent no-op.
it('should not throw when close called multiple times', () => {
const modalInstance = fixture.componentInstance.open('foo');
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should dismiss with dismissAll', () => {
fixture.componentInstance.open('foo');
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
fixture.componentInstance.dismissAll('dismissAllArg');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should not throw when dismissAll called with no active modal', () => {
expect(fixture.nativeElement).not.toHaveModal();
fixture.componentInstance.dismissAll();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
// Dismissing twice must be a no-op; the rejection is swallowed via NOOP
// so the unhandled-rejection detector does not trip.
it('should not throw when dismiss called multiple times', () => {
const modalRef = fixture.componentInstance.open('foo');
modalRef.result.catch(NOOP);
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
modalRef.dismiss('some reason');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
modalRef.dismiss('some reason');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should indicate if there are open modal windows', fakeAsync(() => {
fixture.componentInstance.open('foo');
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(fixture.componentInstance.modalService.hasOpenModals()).toBeTruthy();
fixture.componentInstance.dismissAll();
fixture.detectChanges();
tick();
expect(fixture.nativeElement).not.toHaveModal();
expect(fixture.componentInstance.modalService.hasOpenModals()).toBeFalsy();
}));
});
// Tests for the 'backdrop' option of NgbModal.open().
describe('backdrop options', () => {
// Without any option a backdrop element is rendered alongside the window.
it('should have backdrop by default', () => {
const modalInstance = fixture.componentInstance.open('foo');
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(fixture.nativeElement).toHaveBackdrop();
modalInstance.close('some reason');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(fixture.nativeElement).not.toHaveBackdrop();
});
// backdrop: false suppresses the backdrop for string content.
it('should open and close modal without backdrop', () => {
const modalInstance = fixture.componentInstance.open('foo', {backdrop: false});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(fixture.nativeElement).not.toHaveBackdrop();
modalInstance.close('some reason');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(fixture.nativeElement).not.toHaveBackdrop();
});
// Same as above but with TemplateRef-based content.
it('should open and close modal without backdrop from template content', () => {
const modalInstance = fixture.componentInstance.openTpl({backdrop: false});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('Hello, World!');
expect(fixture.nativeElement).not.toHaveBackdrop();
modalInstance.close('some reason');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
expect(fixture.nativeElement).not.toHaveBackdrop();
});
// Clicking a button that removes itself (via *ngIf) must not be mistaken
// for an outside click that would dismiss the modal.
it('should not dismiss on clicks that result in detached elements', () => {
const modalInstance = fixture.componentInstance.openTplIf({});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#if')).click();
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// Tests for the 'beforeDismiss' guard callback: returning false (directly
// or via a resolved promise) or rejecting must veto the dismissal.
describe('beforeDismiss options', () => {
it('should not dismiss when the callback returns false', () => {
const modalInstance = fixture.componentInstance.openTplDismiss({beforeDismiss: () => { return false; }});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
// Dismissal was vetoed; an explicit close() still works.
expect(fixture.nativeElement).toHaveModal();
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
// Any return value other than (a promise of) false lets dismissal proceed,
// including 'undefined' from a void callback.
it('should dismiss when the callback does not return false', () => {
fixture.componentInstance.openTplDismiss({beforeDismiss: () => {}});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should not dismiss when the returned promise is resolved with false', fakeAsync(() => {
const modalInstance =
fixture.componentInstance.openTplDismiss({beforeDismiss: () => Promise.resolve(false)});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
// tick() flushes the guard promise before asserting the veto took effect.
tick();
expect(fixture.nativeElement).toHaveModal();
modalInstance.close();
fixture.detectChanges();
tick();
expect(fixture.nativeElement).not.toHaveModal();
}));
// A rejected guard promise also vetoes dismissal.
it('should not dismiss when the returned promise is rejected', fakeAsync(() => {
const modalInstance =
fixture.componentInstance.openTplDismiss({beforeDismiss: () => Promise.reject('error')});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
tick();
expect(fixture.nativeElement).toHaveModal();
modalInstance.close();
fixture.detectChanges();
tick();
expect(fixture.nativeElement).not.toHaveModal();
}));
it('should dismiss when the returned promise is not resolved with false', fakeAsync(() => {
fixture.componentInstance.openTplDismiss({beforeDismiss: () => Promise.resolve()});
fixture.detectChanges();<|fim▁hole|> (<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
tick();
expect(fixture.nativeElement).not.toHaveModal();
}));
// Omitting 'beforeDismiss' entirely means no guard: dismissal proceeds.
it('should dismiss when the callback is not defined', () => {
fixture.componentInstance.openTplDismiss({});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal();
(<HTMLElement>document.querySelector('button#dismiss')).click();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// Tests for the 'container' option, which relocates the modal window and
// backdrop from document.body into an application-chosen element.
describe('container options', () => {
// Container given as a CSS selector string.
it('should attach window and backdrop elements to the specified container', () => {
const modalInstance = fixture.componentInstance.open('foo', {container: '#testContainer'});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo', '#testContainer');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
// Container given directly as a DOM element reference.
it('should attach window and backdrop elements to the specified container DOM element', () => {
const containerDomEl = document.querySelector('div#testContainer');
const modalInstance = fixture.componentInstance.open('foo', {container: containerDomEl});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo', '#testContainer');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
// A selector that matches nothing must fail fast with a descriptive error.
it('should throw when the specified container element doesn\'t exist', () => {
const brokenSelector = '#notInTheDOM';
expect(() => {
fixture.componentInstance.open('foo', {container: brokenSelector});
}).toThrowError(`The specified modal container "${brokenSelector}" was not found in the DOM.`);
});
});
// Tests for the 'size' option, mapped onto a 'modal-<size>' CSS class.
describe('size options', () => {
it('should render modals with specified size', () => {
const modalInstance = fixture.componentInstance.open('foo', {size: 'sm'});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('.modal-dialog')).toHaveCssClass('modal-sm');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
// Size strings are not validated against a whitelist; any value becomes
// a 'modal-*' class, allowing custom application-defined sizes.
it('should accept any strings as modal size', () => {
const modalInstance = fixture.componentInstance.open('foo', {size: 'ginormous'});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('.modal-dialog')).toHaveCssClass('modal-ginormous');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// The 'windowClass' option adds a custom CSS class to <ngb-modal-window>.
describe('window custom class options', () => {
it('should render modals with the correct window custom classes', () => {
const modalInstance = fixture.componentInstance.open('foo', {windowClass: 'bar'});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('ngb-modal-window')).toHaveCssClass('bar');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// The 'backdropClass' option adds a custom CSS class to <ngb-modal-backdrop>.
describe('backdrop custom class options', () => {
it('should render modals with the correct backdrop custom classes', () => {
const modalInstance = fixture.componentInstance.open('foo', {backdropClass: 'my-fancy-backdrop'});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('ngb-modal-backdrop')).toHaveCssClass('my-fancy-backdrop');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// The 'injector' option supplies a custom Injector used to instantiate
// component-based modal content (here providing CustomSpyService).
describe('custom injector option', () => {
it('should render modal with a custom injector', () => {
const customInjector =
Injector.create({providers: [{provide: CustomSpyService, useClass: CustomSpyService, deps: []}]});
const modalInstance = fixture.componentInstance.openCmpt(CustomInjectorCmpt, {injector: customInjector});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('Some content');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// Tests for where keyboard focus lands when a modal opens.
describe('focus management', () => {
describe('initial focus', () => {
// [ngbAutofocus] wins over document order.
it('should focus the proper specified element when [ngbAutofocus] is used', () => {
fixture.detectChanges();
const modal = fixture.componentInstance.openCmpt(WithAutofocusModalCmpt);
fixture.detectChanges();
expect(document.activeElement).toBe(document.querySelector('button.withNgbAutofocus'));
modal.close();
});
// Without [ngbAutofocus], the first focusable element receives focus.
it('should focus the first focusable element when [ngbAutofocus] is not used', () => {
fixture.detectChanges();
const modal = fixture.componentInstance.openCmpt(WithFirstFocusableModalCmpt);
fixture.detectChanges();
expect(document.activeElement).toBe(document.querySelector('button.firstFocusable'));
modal.close();
fixture.detectChanges();
});
// Elements opted out with tabindex="-1" are not considered focusable.
it('should skip element with tabindex=-1 when finding the first focusable element', () => {
fixture.detectChanges();
const modal = fixture.componentInstance.openCmpt(WithSkipTabindexFirstFocusableModalCmpt);
fixture.detectChanges();
expect(document.activeElement).toBe(document.querySelector('button.other'));
modal.close();
fixture.detectChanges();
});
// With no focusable content at all, the window element itself is focused.
it('should focus modal window as a default fallback option', () => {
fixture.detectChanges();
const modal = fixture.componentInstance.open('content');
fixture.detectChanges();
expect(document.activeElement).toBe(document.querySelector('ngb-modal-window'));
modal.close();
fixture.detectChanges();
});
});
});
// Tests for DOM ordering of stacked modals and the activeInstances stream.
describe('window element ordering', () => {
// Later modals are appended after earlier ones, so they stack on top.
it('should place newer windows on top of older ones', () => {
const modalInstance1 = fixture.componentInstance.open('foo', {windowClass: 'window-1'});
fixture.detectChanges();
const modalInstance2 = fixture.componentInstance.open('bar', {windowClass: 'window-2'});
fixture.detectChanges();
let windows = document.querySelectorAll('ngb-modal-window');
expect(windows.length).toBe(2);
expect(windows[0]).toHaveCssClass('window-1');
expect(windows[1]).toHaveCssClass('window-2');
modalInstance2.close();
modalInstance1.close();
fixture.detectChanges();
});
// activeInstances emits the current list of open NgbModalRefs on every
// open/close, ending with an empty list after dismissAll().
it('should iterate over multiple modal instances', fakeAsync(() => {
let n;
const observable = fixture.componentInstance.activeInstances;
observable.subscribe(list => { n = list.length; });
// No emission before the first modal is opened.
expect(n).toBeUndefined();
fixture.componentInstance.open('foo', {windowClass: 'window-1'});
fixture.detectChanges();
expect(n).toBe(1);
fixture.componentInstance.open('bar', {windowClass: 'window-2'});
fixture.detectChanges();
expect(n).toBe(2);
let windows = document.querySelectorAll('ngb-modal-window');
expect(windows.length).toBe(2);
expect(windows[0]).toHaveCssClass('window-1');
expect(windows[1]).toHaveCssClass('window-2');
fixture.componentInstance.dismissAll();
fixture.detectChanges();
tick();
expect(fixture.nativeElement).not.toHaveModal();
expect(n).toBe(0);
}));
});
// The 'centered' option maps onto the 'modal-dialog-centered' CSS class.
describe('vertically centered', () => {
it('should render modals vertically centered', () => {
const modalInstance = fixture.componentInstance.open('foo', {centered: true});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('.modal-dialog')).toHaveCssClass('modal-dialog-centered');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// The 'scrollable' option maps onto 'modal-dialog-scrollable' and also adds
// layout styling to the host element of component-based content.
describe('scrollable content', () => {
it('should render scrollable content modals', () => {
const modalInstance = fixture.componentInstance.open('foo', {scrollable: true});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('.modal-dialog')).toHaveCssClass('modal-dialog-scrollable');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should add specific styling to content component host', () => {
const modalInstance = fixture.componentInstance.openCmpt(DestroyableCmpt, {scrollable: true});
fixture.detectChanges();
expect(document.querySelector('destroyable-cmpt')).toHaveCssClass('component-host-scrollable');
modalInstance.close();
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
});
// Accessibility tests: aria-labelledby / aria-describedby / aria-modal
// attributes, plus aria-hidden management of sibling elements while a
// modal is open (and restoration of pre-existing values on close).
describe('accessibility', () => {
it('should support aria-labelledby', () => {
const id = 'aria-labelledby-id';
const modalInstance = fixture.componentInstance.open('foo', {ariaLabelledBy: id});
fixture.detectChanges();
const modalElement = <HTMLElement>document.querySelector('ngb-modal-window');
expect(modalElement.getAttribute('aria-labelledby')).toBe(id);
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should support aria-describedby', () => {
const id = 'aria-describedby-id';
const modalInstance = fixture.componentInstance.open('foo', {ariaDescribedBy: id});
fixture.detectChanges();
const modalElement = <HTMLElement>document.querySelector('ngb-modal-window');
expect(modalElement.getAttribute('aria-describedby')).toBe(id);
modalInstance.close('some result');
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should have aria-modal attribute', () => {
const a11yFixture = TestBed.createComponent(TestA11yComponent);
const modalInstance = a11yFixture.componentInstance.open();
a11yFixture.detectChanges();
const modalElement = <HTMLElement>document.querySelector('ngb-modal-window');
expect(modalElement.getAttribute('aria-modal')).toBe('true');
modalInstance.close();
// NOTE(review): the remaining lines use 'fixture' although the modal was
// opened via 'a11yFixture' — presumably they should use a11yFixture; the
// test still passes because the modal is attached to document.body. Verify.
fixture.detectChanges();
expect(fixture.nativeElement).not.toHaveModal();
});
it('should add aria-hidden attributes to siblings when attached to body', fakeAsync(() => {
const a11yFixture = TestBed.createComponent(TestA11yComponent);
const modalInstance = a11yFixture.componentInstance.open();
a11yFixture.detectChanges();
const modal = document.querySelector('ngb-modal-window') !;
const backdrop = document.querySelector('ngb-modal-backdrop') !;
const application = document.querySelector('div[ng-version]') !;
let ariaHidden = document.querySelectorAll('[aria-hidden]');
expect(ariaHidden.length).toBeGreaterThan(2); // 2 exist in the DOM initially
// The body itself and the modal window stay visible to assistive tech;
// everything alongside the modal (app root, backdrop) is hidden.
expect(document.body.hasAttribute('aria-hidden')).toBe(false);
expect(application.getAttribute('aria-hidden')).toBe('true');
expect(backdrop.getAttribute('aria-hidden')).toBe('true');
expect(modal.hasAttribute('aria-hidden')).toBe(false);
modalInstance.close();
// NOTE(review): 'fixture' vs 'a11yFixture' — see note above; verify.
fixture.detectChanges();
tick();
ariaHidden = document.querySelectorAll('[aria-hidden]');
expect(ariaHidden.length).toBe(2); // 2 exist in the DOM initially
expect(a11yFixture.nativeElement).not.toHaveModal();
}));
it('should add aria-hidden attributes to siblings when attached to a container', fakeAsync(() => {
const a11yFixture = TestBed.createComponent(TestA11yComponent);
const modalInstance = a11yFixture.componentInstance.open({container: '#container'});
a11yFixture.detectChanges();
const modal = document.querySelector('ngb-modal-window') !;
const backdrop = document.querySelector('ngb-modal-backdrop') !;
const application = document.querySelector('div[ng-version]') !;
// Elements whose pre-existing aria-hidden values must be restored on close.
const ariaRestoreTrue = document.querySelector('.to-restore-true') !;
const ariaRestoreFalse = document.querySelector('.to-restore-false') !;
expect(document.body.hasAttribute('aria-hidden')).toBe(false);
expect(application.hasAttribute('aria-hidden')).toBe(false);
expect(modal.hasAttribute('aria-hidden')).toBe(false);
expect(backdrop.getAttribute('aria-hidden')).toBe('true');
expect(ariaRestoreTrue.getAttribute('aria-hidden')).toBe('true');
expect(ariaRestoreFalse.getAttribute('aria-hidden')).toBe('true');
Array.from(document.querySelectorAll('.to-hide')).forEach(element => {
expect(element.getAttribute('aria-hidden')).toBe('true');
});
Array.from(document.querySelectorAll('.not-to-hide')).forEach(element => {
expect(element.hasAttribute('aria-hidden')).toBe(false);
});
modalInstance.close();
// NOTE(review): 'fixture' vs 'a11yFixture' — see note above; verify.
fixture.detectChanges();
tick();
const ariaHidden = document.querySelectorAll('[aria-hidden]');
expect(ariaHidden.length).toBe(2); // 2 exist in the DOM initially
// Pre-existing aria-hidden values were restored, not simply removed.
expect(ariaRestoreTrue.getAttribute('aria-hidden')).toBe('true');
expect(ariaRestoreFalse.getAttribute('aria-hidden')).toBe('false');
expect(a11yFixture.nativeElement).not.toHaveModal();
}));
it('should add aria-hidden attributes with modal stacks', fakeAsync(() => {
const a11yFixture = TestBed.createComponent(TestA11yComponent);
const firstModalInstance = a11yFixture.componentInstance.open();
const secondModalInstance = a11yFixture.componentInstance.open();
a11yFixture.detectChanges();
let modals = document.querySelectorAll('ngb-modal-window');
let backdrops = document.querySelectorAll('ngb-modal-backdrop');
let ariaHidden = document.querySelectorAll('[aria-hidden]');
const hiddenElements = ariaHidden.length;
expect(hiddenElements).toBeGreaterThan(2); // 2 exist in the DOM initially
expect(modals.length).toBe(2);
expect(backdrops.length).toBe(2);
// Only the topmost modal window is exposed; all backdrops and the lower
// modal are hidden from assistive tech.
expect(modals[0].hasAttribute('aria-hidden')).toBe(true);
expect(backdrops[0].hasAttribute('aria-hidden')).toBe(true);
expect(modals[1].hasAttribute('aria-hidden')).toBe(false);
expect(backdrops[1].hasAttribute('aria-hidden')).toBe(true);
secondModalInstance.close();
// NOTE(review): 'fixture' vs 'a11yFixture' — see note above; verify.
fixture.detectChanges();
tick();
ariaHidden = document.querySelectorAll('[aria-hidden]');
expect(document.querySelectorAll('ngb-modal-window').length).toBe(1);
expect(document.querySelectorAll('ngb-modal-backdrop').length).toBe(1);
expect(ariaHidden.length).toBe(hiddenElements - 2);
expect(modals[0].hasAttribute('aria-hidden')).toBe(false);
expect(backdrops[0].hasAttribute('aria-hidden')).toBe(true);
firstModalInstance.close();
fixture.detectChanges();
tick();
ariaHidden = document.querySelectorAll('[aria-hidden]');
expect(ariaHidden.length).toBe(2); // 2 exist in the DOM initially
expect(a11yFixture.nativeElement).not.toHaveModal();
}));
});
});
// Tests for global defaults supplied via the NgbModalConfig DI token, and
// their override by per-call options.
describe('custom global configuration', () => {
beforeEach(() => {
TestBed.configureTestingModule(
{imports: [NgbModalTestModule], providers: [{provide: NgbModalConfig, useValue: {size: 'sm'}}]});
fixture = TestBed.createComponent(TestComponent);
});
it('should accept global configuration under the NgbModalConfig token', () => {
const modalInstance = fixture.componentInstance.open('foo');
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('.modal-dialog')).toHaveCssClass('modal-sm');
modalInstance.close('some reason');
// NOTE(review): unlike sibling tests, there is no final
// 'not.toHaveModal()' assertion after closing here — possibly an omission.
fixture.detectChanges();
});
// Per-call options take precedence over the injected global config.
it('should override global configuration with local options', () => {
const modalInstance = fixture.componentInstance.open('foo', {size: 'lg'});
fixture.detectChanges();
expect(fixture.nativeElement).toHaveModal('foo');
expect(document.querySelector('.modal-dialog')).toHaveCssClass('modal-lg');
expect(document.querySelector('.modal-dialog')).not.toHaveCssClass('modal-sm');
modalInstance.close('some reason');
fixture.detectChanges();
});
});
// Animation tests only run in a visible browser (they rely on real CSS
// transitions, which headless/invisible runs cannot exercise).
if (isBrowserVisible('ngb-modal animations')) {
describe('ngb-modal animations', () => {
// Minimal host component exposing a template with a close button and a
// plain div, used to distinguish backdrop clicks from content clicks.
@Component({
template: `
<ng-template #content let-close="close" let-dismiss="dismiss">
<div id="inside-div">Bla bla</div>
<button class="btn btn-primary" id="close" (click)="close('myResult')">Close me</button>
</ng-template>
`
})
class TestAnimationComponent {
@ViewChild('content', {static: true}) content;
constructor(private modalService: NgbModal) {}
// Opens the template content; 'backdrop' may be true, false or 'static'.
open(backdrop: boolean | 'static' = true, keyboard = true) {
return this.modalService.open(this.content, {backdrop, keyboard});
}
}
beforeEach(() => {
TestBed.configureTestingModule({
declarations: [TestAnimationComponent],
imports: [NgbModalModule],
// NgbConfigAnimation enables animations that the test module
// otherwise disables.
providers: [{provide: NgbConfig, useClass: NgbConfigAnimation}]
});
});
// Each test may force reduced motion via this body class; always clean up.
afterEach(() => document.body.classList.remove('ngb-reduce-motion'));
// Run the transition tests both with and without forced reduced motion.
[true, false].forEach(reduceMotion => {
// this test is flaky in IE in CI
if (!isBrowser('ie')) {
it(`should run fade transition when opening/closing modal (force-reduced-motion = ${reduceMotion})`,
(done) => {
if (reduceMotion) {
document.body.classList.add('ngb-reduce-motion');
}
const component = TestBed.createComponent(TestAnimationComponent);
component.detectChanges();
const modalRef = component.componentInstance.open();
let modalEl: HTMLElement | null = null;
// Once fully shown: opacity is 1 and the Bootstrap fade classes are on.
modalRef.shown.subscribe(() => {
modalEl = document.querySelector('ngb-modal-window') as HTMLElement;
const closeButton = document.querySelector('button#close') as HTMLButtonElement;
expect(window.getComputedStyle(modalEl).opacity).toBe('1');
expect(modalEl).toHaveClass('fade');
expect(modalEl).toHaveClass('show');
closeButton.click();
});
// After hiding, the window element must be removed from the DOM.
modalRef.hidden.subscribe(() => {
modalEl = document.querySelector('ngb-modal-window');
expect(modalEl).toBeNull();
done();
});
component.detectChanges();
modalEl = document.querySelector('ngb-modal-window');
// if reducedMotion is true, modal would be opened and closed already at this point
if (modalEl) {
expect(window.getComputedStyle(modalEl).opacity).toBe('0');
}
});
}
// With a static backdrop, clicking outside "bumps" the modal (the
// 'modal-static' pulse) instead of dismissing it — unless motion is reduced.
it(`should bump modal window if backdrop is static (force-reduced-motion = ${reduceMotion})`, (done) => {
if (reduceMotion) {
document.body.classList.add('ngb-reduce-motion');
}
const component = TestBed.createComponent(TestAnimationComponent);
component.detectChanges();
const modalRef = component.componentInstance.open('static');
let modalEl: HTMLElement | null = null;
modalRef.shown.subscribe(() => {
modalEl = document.querySelector('ngb-modal-window') as HTMLElement;
modalEl.click();
component.detectChanges();
if (reduceMotion) {
expect(modalEl).not.toHaveClass('modal-static');
} else {
expect(modalEl).toHaveClass('modal-static');
}
const closeButton = document.querySelector('button#close') as HTMLButtonElement;
closeButton.click();
});
modalRef.hidden.subscribe(() => { done(); });
component.detectChanges();
});
});
// With a normal (non-static) backdrop, an outside click dismisses instead
// of bumping, so 'modal-static' must never appear.
it(`should not bump modal window on click if backdrop is not static`, (done) => {
const component = TestBed.createComponent(TestAnimationComponent);
component.detectChanges();
const modalRef = component.componentInstance.open();
let modalEl: HTMLElement | null = null;
modalRef.shown.subscribe(() => {
modalEl = document.querySelector('ngb-modal-window') as HTMLElement;
modalEl.click();
component.detectChanges();
expect(modalEl).not.toHaveClass('modal-static');
const closeButton = document.querySelector('button#close') as HTMLButtonElement;
closeButton.click();
});
modalRef.hidden.subscribe(() => { done(); });
component.detectChanges();
});
// Clicks landing on content *inside* the modal window must not trigger the
// static-backdrop "bump" animation — only clicks on the backdrop/window
// itself should. (Fixed an unbalanced ')' typo in the original test title.)
it(`should not bump modal window if backdrop is static and modal itself is clicked`, (done) => {
  const component = TestBed.createComponent(TestAnimationComponent);
  component.detectChanges();
  const modalRef = component.componentInstance.open('static');
  let modalEl: HTMLElement | null = null;
  modalRef.shown.subscribe(() => {
    modalEl = document.querySelector('ngb-modal-window') as HTMLElement;
    // Click a div inside the modal content, not the window/backdrop.
    const insideDiv = document.querySelector('#inside-div') as HTMLElement;
    insideDiv.click();
    component.detectChanges();
    expect(modalEl).not.toHaveClass('modal-static');
    const closeButton = document.querySelector('button#close') as HTMLButtonElement;
    closeButton.click();
  });
  modalRef.hidden.subscribe(() => { done(); });
  component.detectChanges();
});
// Escape with a static backdrop bumps the modal instead of closing it
// (keyboard is disabled here so Escape cannot dismiss).
it(`should bump modal window on Escape if backdrop is static`, (done) => {
const component = TestBed.createComponent(TestAnimationComponent);
component.detectChanges();
// currently, to keep backward compatibility, the modal is closed on escape if keyboard is true,
// even if backdrop is static. This will be fixed in the future.
const modalRef = component.componentInstance.open('static', false);
let modalEl: HTMLElement | null = null;
modalRef.shown.subscribe(() => {
modalEl = document.querySelector('ngb-modal-window') as HTMLElement;
const event = createKeyEvent(Key.Escape, {type: 'keydown'});
modalEl.dispatchEvent(event);
component.detectChanges();
expect(modalEl).toHaveClass('modal-static');
const closeButton = document.querySelector('button#close') as HTMLButtonElement;
closeButton.click();
});
modalRef.hidden.subscribe(() => { done(); });
component.detectChanges();
});
// Escape with a normal backdrop dismisses the modal, so no bump class.
it(`should not bump modal window on Escape if backdrop is not static`, (done) => {
const component = TestBed.createComponent(TestAnimationComponent);
component.detectChanges();
const modalRef = component.componentInstance.open();
let modalEl: HTMLElement | null = null;
modalRef.shown.subscribe(() => {
modalEl = document.querySelector('ngb-modal-window') as HTMLElement;
const event = createKeyEvent(Key.Escape, {type: 'keydown'});
modalEl.dispatchEvent(event);
component.detectChanges();
expect(modalEl).not.toHaveClass('modal-static');
const closeButton = document.querySelector('button#close') as HTMLButtonElement;
closeButton.click();
});
modalRef.hidden.subscribe(() => { done(); });
component.detectChanges();
});
});
}
});
/**
 * Modal content component used by the 'custom injector option' tests.
 * Flags the CustomSpyService (resolved from the custom injector) when the
 * component is destroyed, so the test can verify which injector was used.
 */
@Component({selector: 'custom-injector-cmpt', template: 'Some content'})
export class CustomInjectorCmpt implements OnDestroy {
  constructor(private readonly spyService: CustomSpyService) {}

  ngOnDestroy(): void {
    this.spyService.called = true;
  }
}
/**
 * Modal content component whose destruction is observable through the
 * shared SpyService, letting tests assert that closing a modal destroys
 * its component-based content.
 */
@Component({selector: 'destroyable-cmpt', template: 'Some content'})
export class DestroyableCmpt implements OnDestroy {
  constructor(private readonly spyService: SpyService) {}

  ngOnDestroy(): void {
    this.spyService.called = true;
  }
}
/**
 * Modal content component that closes itself from the inside via the
 * injected NgbActiveModal, passing 'from inside' as the close result.
 */
@Component(
    {selector: 'modal-content-cmpt', template: '<button class="closeFromInside" (click)="close()">Close</button>'})
export class WithActiveModalCmpt {
  constructor(public activeModal: NgbActiveModal) {}

  /** Closes the hosting modal with a fixed result value. */
  close() {
    this.activeModal.close('from inside');
  }
}
// Content component with an [ngbAutofocus] button; used to verify that the
// marked element receives initial focus when the modal opens.
@Component(
{selector: 'modal-autofocus-cmpt', template: `<button class="withNgbAutofocus" ngbAutofocus>Click Me</button>`})
export class WithAutofocusModalCmpt {
}
// Content component without [ngbAutofocus]; the first focusable element
// ('.firstFocusable') should receive initial focus.
@Component({
selector: 'modal-firstfocusable-cmpt',
template: `
<button class="firstFocusable close">Close</button>
<button class="other">Other button</button>
`
})
export class WithFirstFocusableModalCmpt {
}
// Content component whose first button opts out of focus via tabindex="-1";
// initial focus should land on the '.other' button instead.
@Component({
selector: 'modal-skip-tabindex-firstfocusable-cmpt',
template: `
<button tabindex="-1" class="firstFocusable close">Close</button>
<button class="other">Other button</button>
`
})
export class WithSkipTabindexFirstFocusableModalCmpt {
}
// Main test host: exposes a set of <ng-template>s covering the content
// variants exercised by the suite (plain, destroyable, close/dismiss
// buttons, implicit context, *ngIf-detaching content) plus helper methods
// that delegate to NgbModal.
@Component({
selector: 'test-cmpt',
template: `
<div id="testContainer"></div>
<ng-template #content>Hello, {{name}}!</ng-template>
<ng-template #destroyableContent><destroyable-cmpt></destroyable-cmpt></ng-template>
<ng-template #contentWithClose let-close="close">
<button id="close" (click)="close('myResult')">Close me</button>
</ng-template>
<ng-template #contentWithDismiss let-dismiss="dismiss">
<button id="dismiss" (click)="dismiss('myReason')">Dismiss me</button>
</ng-template>
<ng-template #contentWithImplicitContext let-modal>
<button id="close" (click)="modal.close('myResult')">Close me</button>
<button id="dismiss" (click)="modal.dismiss('myReason')">Dismiss me</button>
</ng-template>
<ng-template #contentWithIf>
<ng-template [ngIf]="show">
<button id="if" (click)="show = false">Click me</button>
</ng-template>
</ng-template>
<button id="open" (click)="open('from button')">Open</button>
<div id="open-no-focus" (click)="open('from non focusable element')">Open</div>
<div
id="open-no-focus-ie"
(click)="open('from non focusable element but stored as activeElement on IE')"
style="display: inline-block;"
>Open</div>
`
})
class TestComponent {
// Interpolated into the #content template ("Hello, World!").
name = 'World';
// Last modal opened via open(); used by close().
openedModal: NgbModalRef;
// Controls the [ngIf] inside #contentWithIf.
show = true;
@ViewChild('content', {static: true}) tplContent;
@ViewChild('destroyableContent', {static: true}) tplDestroyableContent;
@ViewChild('contentWithClose', {static: true}) tplContentWithClose;
@ViewChild('contentWithDismiss', {static: true}) tplContentWithDismiss;
@ViewChild('contentWithImplicitContext', {static: true}) tplContentWithImplicitContext;
@ViewChild('contentWithIf', {static: true}) tplContentWithIf;
constructor(public modalService: NgbModal) {}
// Opens a modal with arbitrary (here: string) content and remembers it.
open(content: string, options?: Object) {
this.openedModal = this.modalService.open(content, options);
return this.openedModal;
}
// Closes the last opened modal, if any, with a fixed 'ok' result.
close() {
if (this.openedModal) {
this.openedModal.close('ok');
}
}
dismissAll(reason?: any) { this.modalService.dismissAll(reason); }
// Convenience openers for each template / component content variant.
openTpl(options?: Object) { return this.modalService.open(this.tplContent, options); }
openCmpt(cmptType: any, options?: Object) { return this.modalService.open(cmptType, options); }
openDestroyableTpl(options?: Object) { return this.modalService.open(this.tplDestroyableContent, options); }
openTplClose(options?: Object) { return this.modalService.open(this.tplContentWithClose, options); }
openTplDismiss(options?: Object) { return this.modalService.open(this.tplContentWithDismiss, options); }
openTplImplicitContext(options?: Object) {
return this.modalService.open(this.tplContentWithImplicitContext, options);
}
openTplIf(options?: Object) { return this.modalService.open(this.tplContentWithIf, options); }
// Exposes the service's activeInstances stream to the ordering tests.
get activeInstances() { return this.modalService.activeInstances; }
}
// Host for the accessibility tests. The template builds a tree of elements
// marked with marker classes describing the expected aria-hidden outcome:
// '.to-hide' siblings must get aria-hidden="true" while a modal is open,
// '.not-to-hide' (ancestors of the container and descendants of hidden
// nodes) must not, and '.to-restore-*' carry pre-existing aria-hidden
// values that must be restored on close.
@Component({
selector: 'test-a11y-cmpt',
template: `
<div class="to-hide to-restore-true" aria-hidden="true">
<div class="not-to-hide"></div>
</div>
<div class="not-to-hide">
<div class="to-hide">
<div class="not-to-hide"></div>
</div>
<div class="not-to-hide" id="container"></div>
<div class="to-hide">
<div class="not-to-hide"></div>
</div>
</div>
<div class="to-hide to-restore-false" aria-hidden="false">
<div class="not-to-hide"></div>
</div>
`
})
class TestA11yComponent {
constructor(private modalService: NgbModal) {}
// Opens a simple string-content modal with the given options.
open(options?: any) { return this.modalService.open('foo', options); }
}
@NgModule({
declarations: [
TestComponent, CustomInjectorCmpt, DestroyableCmpt, WithActiveModalCmpt, WithAutofocusModalCmpt,
WithFirstFocusableModalCmpt, WithSkipTabindexFirstFocusableModalCmpt, TestA11yComponent
],
exports: [TestComponent, DestroyableCmpt],
imports: [CommonModule, NgbModalModule],
entryComponents: [
CustomInjectorCmpt, DestroyableCmpt, WithActiveModalCmpt, WithAutofocusModalCmpt, WithFirstFocusableModalCmpt,
WithSkipTabindexFirstFocusableModalCmpt
],
providers: [SpyService]
})
class NgbModalTestModule {
}<|fim▁end|>
|
expect(fixture.nativeElement).toHaveModal();
|
<|file_name|>incremental.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use flow::{self, AFFECTS_COUNTERS, Flow, HAS_COUNTER_AFFECTING_CHILDREN, IS_ABSOLUTELY_POSITIONED};
use std::fmt;
use std::sync::Arc;
use style::computed_values::{display, float};
use style::dom::TRestyleDamage;
use style::properties::{ComputedValues, ServoComputedValues};
bitflags! {
#[doc = "Individual layout actions that may be necessary after restyling."]
flags RestyleDamage: u8 {
#[doc = "Repaint the node itself."]
#[doc = "Currently unused; need to decide how this propagates."]
const REPAINT = 0x01,
#[doc = "Recompute the overflow regions (bounding box of object and all descendants)."]
#[doc = "Propagates down the flow tree because the computation is bottom-up."]
const STORE_OVERFLOW = 0x02,
#[doc = "Recompute intrinsic inline_sizes (minimum and preferred)."]
#[doc = "Propagates down the flow tree because the computation is"]
#[doc = "bottom-up."]
const BUBBLE_ISIZES = 0x04,
#[doc = "Recompute actual inline-sizes and block-sizes, only taking out-of-flow children \
into account. \
Propagates up the flow tree because the computation is top-down."]
const REFLOW_OUT_OF_FLOW = 0x08,
#[doc = "Recompute actual inline_sizes and block_sizes."]
#[doc = "Propagates up the flow tree because the computation is"]
#[doc = "top-down."]
const REFLOW = 0x10,
#[doc = "Re-resolve generated content. \
Propagates up the flow tree because the computation is inorder."]
const RESOLVE_GENERATED_CONTENT = 0x20,
#[doc = "The entire flow needs to be reconstructed."]
const RECONSTRUCT_FLOW = 0x40
}
}
bitflags! {
flags SpecialRestyleDamage: u8 {
#[doc = "If this flag is set, we need to reflow the entire document. This is more or less a \
temporary hack to deal with cases that we don't handle incrementally yet."]
const REFLOW_ENTIRE_DOCUMENT = 0x01,
}
}
impl TRestyleDamage for RestyleDamage {
type ConcreteComputedValues = ServoComputedValues;
fn compute(old: Option<&Arc<ServoComputedValues>>, new: &ServoComputedValues) ->
RestyleDamage { compute_damage(old, new) }
/// Returns a bitmask that represents a flow that needs to be rebuilt and reflowed.
///
/// Use this instead of `RestyleDamage::all()` because `RestyleDamage::all()` will result in
/// unnecessary sequential resolution of generated content.
fn rebuild_and_reflow() -> RestyleDamage {
REPAINT | STORE_OVERFLOW | BUBBLE_ISIZES | REFLOW_OUT_OF_FLOW | REFLOW | RECONSTRUCT_FLOW
}
}
impl RestyleDamage {
/// Supposing a flow has the given `position` property and this damage, returns the damage that
/// we should add to the *parent* of this flow.
pub fn damage_for_parent(self, child_is_absolutely_positioned: bool) -> RestyleDamage {
if child_is_absolutely_positioned {
self & (REPAINT | STORE_OVERFLOW | REFLOW_OUT_OF_FLOW | RESOLVE_GENERATED_CONTENT)
} else {
self & (REPAINT | STORE_OVERFLOW | REFLOW | REFLOW_OUT_OF_FLOW |
RESOLVE_GENERATED_CONTENT)
}
}
/// Supposing the *parent* of a flow with the given `position` property has this damage,
/// returns the damage that we should add to this flow.
pub fn damage_for_child(self,
parent_is_absolutely_positioned: bool,
child_is_absolutely_positioned: bool)
-> RestyleDamage {
match (parent_is_absolutely_positioned, child_is_absolutely_positioned) {
(false, true) => {
// Absolute children are out-of-flow and therefore insulated from changes.
//
// FIXME(pcwalton): Au contraire, if the containing block dimensions change!
self & REPAINT
}
(true, false) => {
// Changing the position of an absolutely-positioned block requires us to reflow
// its kids.
if self.contains(REFLOW_OUT_OF_FLOW) {
self | REFLOW
} else {
self
}
}
_ => {
// TODO(pcwalton): Take floatedness into account.
self & (REPAINT | REFLOW)
}
}
}
}
impl fmt::Display for RestyleDamage {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
let mut first_elem = true;
let to_iter =
[ (REPAINT, "Repaint")
, (STORE_OVERFLOW, "StoreOverflow")
, (BUBBLE_ISIZES, "BubbleISizes")
, (REFLOW_OUT_OF_FLOW, "ReflowOutOfFlow")
, (REFLOW, "Reflow")
, (RESOLVE_GENERATED_CONTENT, "ResolveGeneratedContent")
, (RECONSTRUCT_FLOW, "ReconstructFlow")
];
for &(damage, damage_str) in &to_iter {
if self.contains(damage) {
if !first_elem { try!(write!(f, " | ")); }
try!(write!(f, "{}", damage_str));
first_elem = false;
}
}
if first_elem {
try!(write!(f, "NoDamage"));
}
Ok(())
}
}
// NB: We need the braces inside the RHS due to Rust #8012. This particular
// version of this macro might be safe anyway, but we want to avoid silent
// breakage on modifications.
macro_rules! add_if_not_equal(
($old:ident, $new:ident, $damage:ident,
[ $($effect:ident),* ], [ $($style_struct_getter:ident.$name:ident),* ]) => ({
if $( ($old.$style_struct_getter().$name != $new.$style_struct_getter().$name) )||* {
$damage.insert($($effect)|*);
true
} else {
false
}
})
);
pub fn compute_damage(old: Option<&Arc<ServoComputedValues>>, new: &ServoComputedValues) -> RestyleDamage {
let old: &ServoComputedValues = match old {
None => return RestyleDamage::rebuild_and_reflow(),
Some(cv) => &**cv,
};
let mut damage = RestyleDamage::empty();
// This should check every CSS property, as enumerated in the fields of
// http://doc.servo.org/style/properties/struct.ServoComputedValues.html
// FIXME: Test somehow that every property is included.
add_if_not_equal!(old, new, damage,
[
REPAINT,
STORE_OVERFLOW,
BUBBLE_ISIZES,
REFLOW_OUT_OF_FLOW,
REFLOW,
RECONSTRUCT_FLOW
], [
get_box.float, get_box.display, get_box.position, get_counters.content,
get_counters.counter_reset, get_counters.counter_increment,
get_list.quotes, get_list.list_style_type,
// If these text or font properties change, we need to reconstruct the flow so that
// text shaping is re-run.
get_inheritedtext.letter_spacing, get_inheritedtext.text_rendering,
get_inheritedtext.text_transform, get_inheritedtext.word_spacing,
get_inheritedtext.overflow_wrap, get_inheritedtext.text_justify,
get_inheritedtext.white_space, get_inheritedtext.word_break, get_text.text_overflow,<|fim▁hole|> get_inheritedbox.text_orientation,
get_text.text_decoration, get_text.unicode_bidi,
get_inheritedtable.empty_cells, get_inheritedtable.caption_side,
get_column.column_width, get_column.column_count
]) || (new.get_box().display == display::T::inline &&
add_if_not_equal!(old, new, damage,
[REPAINT, STORE_OVERFLOW, BUBBLE_ISIZES, REFLOW_OUT_OF_FLOW, REFLOW,
RECONSTRUCT_FLOW], [
// For inline boxes only, border/padding styles are used in flow construction (to decide
// whether to create fragments for empty flows).
get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left
])) || add_if_not_equal!(old, new, damage,
[ REPAINT, STORE_OVERFLOW, BUBBLE_ISIZES, REFLOW_OUT_OF_FLOW, REFLOW ],
[get_border.border_top_width, get_border.border_right_width,
get_border.border_bottom_width, get_border.border_left_width,
get_margin.margin_top, get_margin.margin_right,
get_margin.margin_bottom, get_margin.margin_left,
get_padding.padding_top, get_padding.padding_right,
get_padding.padding_bottom, get_padding.padding_left,
get_box.width, get_box.height,
get_inheritedtext.line_height,
get_inheritedtext.text_align, get_inheritedtext.text_indent,
get_table.table_layout,
get_inheritedtable.border_collapse,
get_inheritedtable.border_spacing,
get_column.column_gap,
get_position.flex_direction,
get_position.flex_basis,
get_position.order
]) || add_if_not_equal!(old, new, damage,
[ REPAINT, STORE_OVERFLOW, REFLOW_OUT_OF_FLOW ], [
get_position.top, get_position.left,
get_position.right, get_position.bottom
]) || add_if_not_equal!(old, new, damage,
[ REPAINT ], [
get_color.color, get_background.background_color,
get_background.background_image, get_background.background_position,
get_background.background_repeat, get_background.background_attachment,
get_background.background_clip, get_background.background_origin,
get_background.background_size,
get_border.border_top_color, get_border.border_right_color,
get_border.border_bottom_color, get_border.border_left_color,
get_border.border_top_style, get_border.border_right_style,
get_border.border_bottom_style, get_border.border_left_style,
get_border.border_top_left_radius, get_border.border_top_right_radius,
get_border.border_bottom_left_radius, get_border.border_bottom_right_radius,
get_position.z_index, get_box._servo_overflow_clip_box,
get_inheritedtext._servo_text_decorations_in_effect,
get_pointing.cursor, get_pointing.pointer_events,
get_effects.box_shadow, get_effects.clip, get_inheritedtext.text_shadow, get_effects.filter,
get_effects.transform, get_effects.backface_visibility, get_effects.transform_style,
get_effects.transform_origin, get_effects.perspective, get_effects.perspective_origin,
get_effects.mix_blend_mode, get_inheritedbox.image_rendering,
// Note: May require REFLOW et al. if `visibility: collapse` is implemented.
get_inheritedbox.visibility
]);
// If the layer requirements of this flow have changed due to the value
// of the transform, then reflow is required to rebuild the layers.
if old.transform_requires_layer() != new.transform_requires_layer() {
damage.insert(RestyleDamage::rebuild_and_reflow());
}
damage
}
pub trait LayoutDamageComputation {
fn compute_layout_damage(self) -> SpecialRestyleDamage;
fn reflow_entire_document(self);
}
impl<'a> LayoutDamageComputation for &'a mut Flow {
fn compute_layout_damage(self) -> SpecialRestyleDamage {
let mut special_damage = SpecialRestyleDamage::empty();
let is_absolutely_positioned = flow::base(self).flags.contains(IS_ABSOLUTELY_POSITIONED);
// In addition to damage, we use this phase to compute whether nodes affect CSS counters.
let mut has_counter_affecting_children = false;
{
let self_base = flow::mut_base(self);
for kid in self_base.children.iter_mut() {
let child_is_absolutely_positioned =
flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED);
flow::mut_base(kid).restyle_damage
.insert(self_base.restyle_damage.damage_for_child(
is_absolutely_positioned,
child_is_absolutely_positioned));
{
let kid: &mut Flow = kid;
special_damage.insert(kid.compute_layout_damage());
}
self_base.restyle_damage
.insert(flow::base(kid).restyle_damage.damage_for_parent(
child_is_absolutely_positioned));
has_counter_affecting_children = has_counter_affecting_children ||
flow::base(kid).flags.intersects(AFFECTS_COUNTERS |
HAS_COUNTER_AFFECTING_CHILDREN);
}
}
let self_base = flow::mut_base(self);
if self_base.flags.float_kind() != float::T::none &&
self_base.restyle_damage.intersects(REFLOW) {
special_damage.insert(REFLOW_ENTIRE_DOCUMENT);
}
if has_counter_affecting_children {
self_base.flags.insert(HAS_COUNTER_AFFECTING_CHILDREN)
} else {
self_base.flags.remove(HAS_COUNTER_AFFECTING_CHILDREN)
}
special_damage
}
fn reflow_entire_document(self) {
let self_base = flow::mut_base(self);
self_base.restyle_damage.insert(RestyleDamage::rebuild_and_reflow());
self_base.restyle_damage.remove(RECONSTRUCT_FLOW);
for kid in self_base.children.iter_mut() {
kid.reflow_entire_document();
}
}
}<|fim▁end|>
|
get_font.font_family, get_font.font_style, get_font.font_variant, get_font.font_weight,
get_font.font_size, get_font.font_stretch,
get_inheritedbox.direction, get_inheritedbox.writing_mode,
|
<|file_name|>palindromes_test.py<|end_file_name|><|fim▁begin|>import unittest
from palindromes import is_palindrome
cases = (
('lsdkjfskf', False),
('radar', True),
('racecar', True),
)
<|fim▁hole|> for word, expectation in cases:
self.assertEqual(is_palindrome(word), expectation)<|fim▁end|>
|
class TestCorrectness(unittest.TestCase):
def test_identifies_palindromes(self):
|
<|file_name|>GruposCallback.java<|end_file_name|><|fim▁begin|>package br.ufpe.sabertecnologias.acervoapp.ui.callbacks;
import java.util.ArrayList;
import br.ufpe.sabertecnologias.acervoapp.modelo.dados.Grupo;
/**<|fim▁hole|> * Created by joaotrindade on 25/10/16.
*/
public interface GruposCallback{
public void abrirGrupo(Grupo g);
public void exit();
void notifyGrupoController(ArrayList<Grupo> mGrupos);
}<|fim▁end|>
| |
<|file_name|>ignore-scripts.js<|end_file_name|><|fim▁begin|>var test = require("tap").test
var npm = require.resolve("../../bin/npm-cli.js")
var spawn = require("child_process").spawn
var node = process.execPath
// ignore-scripts/package.json has scripts that always exit with non-zero error
// codes. The "install" script is omitted so that npm tries to run node-gyp,
// which should also fail.
var pkg = __dirname + "/ignore-scripts"
test("ignore-scripts: install using the option", function(t) {
createChild([npm, "install", "--ignore-scripts"]).on("close", function(code) {
t.equal(code, 0)
t.end()
})
})
test("ignore-scripts: install NOT using the option", function(t) {
createChild([npm, "install"]).on("close", function(code) {
t.notEqual(code, 0)
t.end()
})
})
var scripts = [
"prepublish", "publish", "postpublish",
"preinstall", "install", "postinstall",
"preuninstall", "uninstall", "postuninstall",
"preupdate", "update", "postupdate",
"pretest", "test", "posttest",
"prestop", "stop", "poststop",
"prestart", "start", "poststart",
"prerestart", "restart", "postrestart"
]
scripts.forEach(function(script) {
test("ignore-scripts: run-script"+script+" using the option", function(t) {
createChild([npm, "--ignore-scripts", "run-script", script])
.on("close", function(code) {
t.equal(code, 0)
t.end()
})
})
})
scripts.forEach(function(script) {
test("ignore-scripts: run-script "+script+" NOT using the option", function(t) {
createChild([npm, "run-script", script]).on("close", function(code) {
t.notEqual(code, 0)
t.end()
})
})
})
function createChild (args) {
var env = {<|fim▁hole|> PATH: process.env.PATH
}
if (process.platform === "win32")
env.npm_config_cache = "%APPDATA%\\npm-cache"
return spawn(node, args, {
cwd: pkg,
stdio: "inherit",
env: env
})
}<|fim▁end|>
|
HOME: process.env.HOME,
Path: process.env.PATH,
|
<|file_name|>capabilities_test.go<|end_file_name|><|fim▁begin|>package agouti_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/sclevine/agouti"
)
var _ = Describe("Capabilities", func() {
var capabilities Capabilities
BeforeEach(func() {
capabilities = NewCapabilities("firstEnabled", "secondEnabled")
})
It("should successfully encode all provided options into JSON", func() {
capabilities.Browser("some-browser").Version("v100").Platform("some-os")
capabilities.With("withEnabled").Without("withoutDisabled")
capabilities.Set("deviceName", "some-device-name").Set("udid", "some-udid")
capabilities.Proxy(ProxyConfig{
ProxyType: "manual",
HTTPProxy: "some-http-proxy",
SSLProxy: "some-http-proxy",
})
Expect(capabilities.JSON()).To(MatchJSON(`{
"browserName": "some-browser",
"version": "v100",
"platform": "some-os",
"withEnabled": true,
"withoutDisabled": false,
"deviceName": "some-device-name",
"udid": "some-udid",<|fim▁hole|> "httpProxy": "some-http-proxy",
"sslProxy": "some-http-proxy"
}
}`))
})
Context("when the provided options cannot be converted to JSON", func() {
It("should return an error", func() {
capabilities["some-feature"] = func() {}
_, err := capabilities.JSON()
Expect(err).To(MatchError("json: unsupported type: func()"))
})
})
})<|fim▁end|>
|
"firstEnabled": true,
"secondEnabled": true,
"proxy": {
"proxyType": "manual",
|
<|file_name|>const.py<|end_file_name|><|fim▁begin|>"""Provides the constants needed for component."""
from typing import Final
SUPPORT_ALARM_ARM_HOME: Final = 1
SUPPORT_ALARM_ARM_AWAY: Final = 2
SUPPORT_ALARM_ARM_NIGHT: Final = 4
SUPPORT_ALARM_TRIGGER: Final = 8<|fim▁hole|>CONDITION_TRIGGERED: Final = "is_triggered"
CONDITION_DISARMED: Final = "is_disarmed"
CONDITION_ARMED_HOME: Final = "is_armed_home"
CONDITION_ARMED_AWAY: Final = "is_armed_away"
CONDITION_ARMED_NIGHT: Final = "is_armed_night"
CONDITION_ARMED_VACATION: Final = "is_armed_vacation"
CONDITION_ARMED_CUSTOM_BYPASS: Final = "is_armed_custom_bypass"<|fim▁end|>
|
SUPPORT_ALARM_ARM_CUSTOM_BYPASS: Final = 16
SUPPORT_ALARM_ARM_VACATION: Final = 32
|
<|file_name|>SwtFileFolderMenu.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Namihiko Matsumura (https://github.com/n-i-e/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.n_i_e.deepfolderview;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.StringSelection;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Date;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.beans.PojoProperties;
import org.eclipse.core.databinding.observable.Realm;
import org.eclipse.core.databinding.observable.value.IObservableValue;
import org.eclipse.jface.databinding.swt.SWTObservables;
import org.eclipse.jface.databinding.swt.WidgetProperties;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.DisposeEvent;
import org.eclipse.swt.events.DisposeListener;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.ProgressBar;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.wb.swt.SWTResourceManager;
import com.github.n_i_e.dirtreedb.Assertion;
import com.github.n_i_e.dirtreedb.DBPathEntry;
import com.github.n_i_e.dirtreedb.PathEntry;
import com.github.n_i_e.dirtreedb.lazy.LazyRunnable;
import com.github.n_i_e.dirtreedb.lazy.LazyUpdater;
import com.github.n_i_e.dirtreedb.lazy.LazyUpdater.Dispatcher;
import com.ibm.icu.text.NumberFormat;
import com.ibm.icu.text.SimpleDateFormat;
public class SwtFileFolderMenu extends SwtCommonFileFolderMenu {
@SuppressWarnings("unused")
private DataBindingContext m_bindingContext;
protected Shell shell;
private FormToolkit formToolkit = new FormToolkit(Display.getDefault());
private Text txtLocation;
private Composite compositeToolBar;
private Table table;
private Label lblStatusBar;
private Composite compositeStatusBar;
private ProgressBar progressBar;
@Override protected Shell getShell() { return shell; }
@Override protected Table getTable() { return table; }
@Override protected Label getLblStatusBar() { return lblStatusBar; }
@Override protected ProgressBar getProgressBar() { return progressBar; }
public static void main(String[] args) {
final Display display = Display.getDefault();
Realm.runWithDefault(SWTObservables.getRealm(display), new Runnable() {
public void run() {
try {
final SwtFileFolderMenu window = new SwtFileFolderMenu();
window.open();
/*
display.asyncExec(new Runnable() {
public void run() {
TableItem tableItem = new TableItem(window.table, SWT.NONE);
tableItem.setText(new String[] {"C:\\", "2015-01-01 00:00:00", "1", "2", "3"});
TableItem tableItem_1 = new TableItem(window.table, SWT.NONE);
tableItem_1.setText(new String[] {"D:\\", "2014-01-01 00:00:00", "100", "200", "1"});
}
});*/
} catch (Exception e) {
e.printStackTrace();
}
}
});
}
public void open() {
Display display = Display.getDefault();
//createContents();
//shell.open();
//shell.layout();
while (!shell.isDisposed()) {
if (!display.readAndDispatch()) {
display.sleep();
}
}
}
public SwtFileFolderMenu() {
createContents();
shell.open();
shell.layout();
location = new NavigatableList<Location>();
location.add(new Location());
}
/**
* Create contents of the window.
*/
private void createContents() {
shell = new Shell();
shell.addDisposeListener(new DisposeListener() {
public void widgetDisposed(DisposeEvent arg0) {
Point p = shell.getSize();
PreferenceRW.setSwtFileFolderMenuWindowWidth(p.x);
PreferenceRW.setSwtFileFolderMenuWindowHeight(p.y);
}
});
shell.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/drive-harddisk.png"));
shell.setMinimumSize(new Point(300, 200));
shell.setSize(PreferenceRW.getSwtFileFolderMenuWindowWidth(), PreferenceRW.getSwtFileFolderMenuWindowHeight());
GridLayout gl_shell = new GridLayout(1, false);
gl_shell.verticalSpacing = 6;
gl_shell.marginWidth = 3;
gl_shell.marginHeight = 3;
gl_shell.horizontalSpacing = 6;
shell.setLayout(gl_shell);
Menu menu = new Menu(shell, SWT.BAR);
shell.setMenuBar(menu);
MenuItem mntmFile = new MenuItem(menu, SWT.CASCADE);
mntmFile.setText(Messages.mntmFile_text);
Menu menuFile = new Menu(mntmFile);
mntmFile.setMenu(menuFile);
MenuItem mntmOpen_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpen_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
mntmOpen_1.setText(Messages.mntmOpen_text);
MenuItem mntmOpenInNew_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpenInNew_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenInNewWindowSelected(e);
}
});
mntmOpenInNew_1.setText(Messages.mntmOpenInNewWindow_text);
MenuItem mntmOpenDuplicateDetails_1 = new MenuItem(menuFile, SWT.NONE);
mntmOpenDuplicateDetails_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenDuplicateDetailsSelected(e);
}
});
mntmOpenDuplicateDetails_1.setText(Messages.mntmOpenDuplicateDetails_text);
MenuItem mntmCopyTo_2 = new MenuItem(menuFile, SWT.NONE);
mntmCopyTo_2.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo_2.setText(Messages.mntmCopyTo_text);
MenuItem mntmClose = new MenuItem(menuFile, SWT.NONE);
mntmClose.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCloseSelected();
}
});
mntmClose.setText(Messages.mntmClose_text);
MenuItem mntmQuit = new MenuItem(menuFile, SWT.NONE);
mntmQuit.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onQuitSelected();
}
});
mntmQuit.setText(Messages.mntmQuit_text);
MenuItem mntmEdit = new MenuItem(menu, SWT.CASCADE);
mntmEdit.setText(Messages.mntmEdit_text);
Menu menuEdit = new Menu(mntmEdit);
mntmEdit.setMenu(menuEdit);
MenuItem mntmRun_1 = new MenuItem(menuEdit, SWT.NONE);
mntmRun_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onRunSelected();
}
});
mntmRun_1.setText(Messages.mntmRun_text);
MenuItem mntmCopyAsString_1 = new MenuItem(menuEdit, SWT.NONE);
mntmCopyAsString_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyAsStringSelected();
}
});
mntmCopyAsString_1.setText(Messages.mntmCopyAsString_text);
MenuItem mntmCopyTo_1 = new MenuItem(menuEdit, SWT.NONE);
mntmCopyTo_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo_1.setText(Messages.mntmCopyTo_text);
MenuItem mntmVisibility = new MenuItem(menu, SWT.CASCADE);
mntmVisibility.setText(Messages.mntmVisibility_text);
Menu menuVisibility = new Menu(mntmVisibility);
mntmVisibility.setMenu(menuVisibility);
final MenuItem mntmFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmFoldersVisible.setSelection(true);
mntmFoldersVisible.setText(Messages.mntmFoldersVisible_text);
final MenuItem mntmFilesVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmFilesVisible.setSelection(true);
mntmFilesVisible.setText(Messages.mntmFilesVisible_text);
final MenuItem mntmCompressedFoldersVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmCompressedFoldersVisible.setSelection(true);
mntmCompressedFoldersVisible.setText(Messages.mntmCompressedFoldersVisible_text);
final MenuItem mntmCompressedFilesVisible = new MenuItem(menuVisibility, SWT.CHECK);
mntmCompressedFilesVisible.setSelection(true);
mntmCompressedFilesVisible.setText(Messages.mntmCompressedFilesVisible_text);
MenuItem mntmHelp = new MenuItem(menu, SWT.CASCADE);
mntmHelp.setText(Messages.mntmHelp_text);
Menu menuHelp = new Menu(mntmHelp);
mntmHelp.setMenu(menuHelp);
MenuItem mntmOpenSourceLicenses = new MenuItem(menuHelp, SWT.NONE);
mntmOpenSourceLicenses.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
new SwtOpenSourceLicenses(shell, SWT.TITLE|SWT.MIN|SWT.MAX|SWT.CLOSE).open();
}
});
mntmOpenSourceLicenses.setText(Messages.mntmOpenSourceLicenses_text);
compositeToolBar = new Composite(shell, SWT.NONE);
compositeToolBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
compositeToolBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
compositeToolBar.setFont(SWTResourceManager.getFont("Meiryo UI", 12, SWT.NORMAL));
GridLayout gl_compositeToolBar = new GridLayout(5, false);
gl_compositeToolBar.horizontalSpacing = 0;
<|fim▁hole|> gl_compositeToolBar.marginHeight = 0;
compositeToolBar.setLayout(gl_compositeToolBar);
formToolkit.adapt(compositeToolBar);
formToolkit.paintBordersFor(compositeToolBar);
Button btnLeft = new Button(compositeToolBar, SWT.NONE);
btnLeft.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-previous.png"));
btnLeft.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onNavigatePreviousSelected(e);
}
});
btnLeft.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(btnLeft, true, true);
Button btnRight = new Button(compositeToolBar, SWT.NONE);
btnRight.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-next.png"));
btnRight.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onNavigateNextSelected(e);
}
});
btnRight.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(btnRight, true, true);
Button btnUp = new Button(compositeToolBar, SWT.NONE);
btnUp.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/go-up.png"));
btnUp.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onUpperFolderSelected(e);
}
});
formToolkit.adapt(btnUp, true, true);
txtLocation = new Text(compositeToolBar, SWT.BORDER);
txtLocation.addModifyListener(new ModifyListener() {
public void modifyText(ModifyEvent arg0) {
onLocationModified(arg0);
}
});
GridData gd_txtLocation = new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1);
gd_txtLocation.widthHint = 200;
txtLocation.setLayoutData(gd_txtLocation);
txtLocation.setFont(SWTResourceManager.getFont("Meiryo UI", 11, SWT.NORMAL));
formToolkit.adapt(txtLocation, true, true);
Button btnRefresh = new Button(compositeToolBar, SWT.NONE);
btnRefresh.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
refresh();
}
});
btnRefresh.setImage(SWTResourceManager.getImage(SwtFileFolderMenu.class, "/com/github/n_i_e/deepfolderview/icon/view-refresh.png"));
formToolkit.adapt(btnRefresh, true, true);
final TableViewer tableViewer = new TableViewer(shell, SWT.MULTI | SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL);
table = tableViewer.getTable();
table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
//table = new Table(scrolledComposite, SWT.BORDER | SWT.FULL_SELECTION | SWT.VIRTUAL);
table.setHeaderVisible(true);
table.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onTableSelected(e);
}
@Override
public void widgetDefaultSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
formToolkit.adapt(table);
formToolkit.paintBordersFor(table);
final TableColumn tblclmnPath = new TableColumn(table, SWT.LEFT);
tblclmnPath.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnPath);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnPathSelected(tblclmnPath, e);
}
});
tblclmnPath.setWidth(230);
tblclmnPath.setText(Messages.tblclmnPath_text);
setTableSortDirection(tblclmnPath, "path", order);
final TableColumn tblclmnDateLastModified = new TableColumn(table, SWT.LEFT);
tblclmnDateLastModified.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDateLastModified);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDateLastModifiedSelected(tblclmnDateLastModified, e);
}
});
tblclmnDateLastModified.setWidth(136);
tblclmnDateLastModified.setText(Messages.tblclmnDateLastModified_text);
setTableSortDirection(tblclmnDateLastModified, "datelastmodified", order);
final TableColumn tblclmnSize = new TableColumn(table, SWT.RIGHT);
tblclmnSize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnSize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnSizeSelected(tblclmnSize, e);
}
});
tblclmnSize.setWidth(110);
tblclmnSize.setText(Messages.tblclmnSize_text);
setTableSortDirection(tblclmnSize, "size", order);
final TableColumn tblclmnCompressedsize = new TableColumn(table, SWT.RIGHT);
tblclmnCompressedsize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnCompressedsize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnCompressedsizeSelected(tblclmnCompressedsize, e);
}
});
tblclmnCompressedsize.setWidth(110);
tblclmnCompressedsize.setText(Messages.tblclmnCompressedesize_text);
setTableSortDirection(tblclmnCompressedsize, "compressedsize", order);
final TableColumn tblclmnDuplicate = new TableColumn(table, SWT.NONE);
tblclmnDuplicate.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDuplicate);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDuplicateSelected(tblclmnDuplicate, e);
}
});
tblclmnDuplicate.setWidth(35);
tblclmnDuplicate.setText(Messages.tblclmnDuplicate_text);
setTableSortDirection(tblclmnDuplicate, "duplicate", order);
final TableColumn tblclmnDedupablesize = new TableColumn(table, SWT.RIGHT);
tblclmnDedupablesize.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
table.setSortColumn(tblclmnDedupablesize);
if (table.getSortDirection() == SWT.UP) {
table.setSortDirection(SWT.DOWN);
} else {
table.setSortDirection(SWT.UP);
}
onTblclmnDedupablesizeSelected(tblclmnDedupablesize, e);
}
});
tblclmnDedupablesize.setWidth(110);
tblclmnDedupablesize.setText(Messages.tblclmnDedupablesize_text);
setTableSortDirection(tblclmnDedupablesize, "dedupablesize", order);
Menu popupMenu = new Menu(table);
table.setMenu(popupMenu);
MenuItem mntmRun = new MenuItem(popupMenu, SWT.NONE);
mntmRun.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onRunSelected();
}
});
mntmRun.setText(Messages.mntmRun_text);
MenuItem mntmOpen = new MenuItem(popupMenu, SWT.NONE);
mntmOpen.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenSelected(e);
}
});
mntmOpen.setText(Messages.mntmOpen_text);
MenuItem mntmOpenInNew = new MenuItem(popupMenu, SWT.NONE);
mntmOpenInNew.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenInNewWindowSelected(e);
}
});
mntmOpenInNew.setText(Messages.mntmOpenInNewWindow_text);
MenuItem mntmOpenDuplicateDetails = new MenuItem(popupMenu, SWT.NONE);
mntmOpenDuplicateDetails.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onOpenDuplicateDetailsSelected(e);
}
});
mntmOpenDuplicateDetails.setText(Messages.mntmOpenDuplicateDetails_text);
MenuItem mntmCopyAsString = new MenuItem(popupMenu, SWT.NONE);
mntmCopyAsString.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyAsStringSelected();
}
});
mntmCopyAsString.setText(Messages.mntmCopyAsString_text);
MenuItem mntmCopyTo = new MenuItem(popupMenu, SWT.NONE);
mntmCopyTo.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
onCopyToSelected();
}
});
mntmCopyTo.setText(Messages.mntmCopyTo_text);
MenuItem menuItem = new MenuItem(popupMenu, SWT.SEPARATOR);
menuItem.setText("Visibility");
final MenuItem mntmFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmFoldersVisible_1.setSelection(true);
mntmFoldersVisible_1.setText(Messages.mntmFoldersVisible_text);
mntmFoldersVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFoldersVisible.setSelection(mntmFoldersVisible_1.getSelection());
onFoldersVisibleChecked(mntmFoldersVisible.getSelection());
}
});
final MenuItem mntmFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmFilesVisible_1.setSelection(true);
mntmFilesVisible_1.setText(Messages.mntmFilesVisible_text);
mntmFilesVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFilesVisible.setSelection(mntmFilesVisible_1.getSelection());
onFilesVisibleChecked(mntmFilesVisible.getSelection());
}
});
final MenuItem mntmCompressedFoldersVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmCompressedFoldersVisible_1.setSelection(true);
mntmCompressedFoldersVisible_1.setText(Messages.mntmCompressedFoldersVisible_text);
mntmCompressedFoldersVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFoldersVisible.setSelection(mntmCompressedFoldersVisible_1.getSelection());
onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection());
}
});
final MenuItem mntmCompressedFilesVisible_1 = new MenuItem(popupMenu, SWT.CHECK);
mntmCompressedFilesVisible_1.setSelection(true);
mntmCompressedFilesVisible_1.setText(Messages.mntmCompressedFilesVisible_text);
mntmCompressedFilesVisible_1.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFilesVisible.setSelection(mntmCompressedFilesVisible_1.getSelection());
onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection());
}
});
mntmFoldersVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFoldersVisible_1.setSelection(mntmFoldersVisible.getSelection());
onFoldersVisibleChecked(mntmFoldersVisible.getSelection());
}
});
mntmFilesVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmFilesVisible_1.setSelection(mntmFilesVisible.getSelection());
onFilesVisibleChecked(mntmFilesVisible.getSelection());
}
});
mntmCompressedFoldersVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFoldersVisible_1.setSelection(mntmCompressedFoldersVisible.getSelection());
onCompressedFoldersVisibleChecked(mntmCompressedFoldersVisible.getSelection());
}
});
mntmCompressedFilesVisible.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
mntmCompressedFilesVisible_1.setSelection(mntmCompressedFilesVisible.getSelection());
onCompressedFilesVisibleSelected(mntmCompressedFilesVisible.getSelection());
}
});
compositeStatusBar = new Composite(shell, SWT.NONE);
compositeStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
compositeStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
GridLayout gl_compositeStatusBar = new GridLayout(2, false);
gl_compositeStatusBar.marginWidth = 0;
gl_compositeStatusBar.marginHeight = 0;
compositeStatusBar.setLayout(gl_compositeStatusBar);
formToolkit.adapt(compositeStatusBar);
formToolkit.paintBordersFor(compositeStatusBar);
lblStatusBar = new Label(compositeStatusBar, SWT.NONE);
lblStatusBar.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
lblStatusBar.setBackground(SWTResourceManager.getColor(SWT.COLOR_WIDGET_BACKGROUND));
formToolkit.adapt(lblStatusBar, true, true);
lblStatusBar.setText("");
progressBar = new ProgressBar(compositeStatusBar, SWT.NONE);
formToolkit.adapt(progressBar, true, true);
m_bindingContext = initDataBindings();
}
/*
* event handlers
*/
/**
 * Copies the paths of all currently selected entries to the system
 * clipboard, one path per line.
 */
protected void onCopyAsStringSelected() {
	final StringBuilder joined = new StringBuilder();
	for (PathEntry entry : getSelectedPathEntries()) {
		if (joined.length() > 0) {
			joined.append("\n");
		}
		joined.append(entry.getPath());
	}
	StringSelection selection = new StringSelection(joined.toString());
	// The selection acts as its own clipboard owner.
	Toolkit.getDefaultToolkit().getSystemClipboard().setContents(selection, selection);
}
/**
 * Navigates to the path entry currently selected in the table.
 * Does nothing when no row is selected.
 * @param e selection event (unused)
 */
protected void onOpenSelected(SelectionEvent e) {
	final DBPathEntry selected = getSelectedPathEntry();
	if (selected == null) {
		return; // no selection; keep the current location
	}
	setLocationAndRefresh(selected);
}
/**
 * Opens a new file/folder window at the selected entry, falling back to
 * the current location's entry, path string, search string or path id,
 * in that order.  Opens nothing when no anchor can be determined.
 * @param e selection event (unused)
 */
protected void onOpenInNewWindowSelected(SelectionEvent e) {
	DBPathEntry entry = getSelectedPathEntry();
	final Location current = location.get();
	if (entry == null) {
		entry = current.getPathEntry();
	}
	if (entry != null) {
		new SwtFileFolderMenu().setLocationAndRefresh(entry);
	} else if (current.getPathString() != null) {
		new SwtFileFolderMenu().setLocationAndRefresh(current.getPathString());
	} else if (current.getSearchString() != null) {
		new SwtFileFolderMenu().setLocationAndRefresh(current.getSearchString());
	} else if (current.getPathId() != 0L) {
		new SwtFileFolderMenu().setLocationAndRefresh(current.getPathId());
	}
}
/**
 * Opens a duplicate-details window at the selected entry, falling back to
 * the current location's entry, path string, search string or path id,
 * in that order.  Opens nothing when no anchor can be determined.
 * @param e selection event (unused)
 */
protected void onOpenDuplicateDetailsSelected(SelectionEvent e) {
	DBPathEntry entry = getSelectedPathEntry();
	final Location current = location.get();
	if (entry == null) {
		entry = current.getPathEntry();
	}
	if (entry != null) {
		new SwtDuplicateMenu().setLocationAndRefresh(entry);
	} else if (current.getPathString() != null) {
		new SwtDuplicateMenu().setLocationAndRefresh(current.getPathString());
	} else if (current.getSearchString() != null) {
		new SwtDuplicateMenu().setLocationAndRefresh(current.getSearchString());
	} else if (current.getPathId() != 0L) {
		new SwtDuplicateMenu().setLocationAndRefresh(current.getPathId());
	}
}
/**
 * Steps back in the location history and shows the restored location.
 * @param e selection event (unused)
 */
protected void onNavigatePreviousSelected(SelectionEvent e) {
	location.navigatePrevious();
	final Location restored = location.get();
	setLocationAndRefresh(restored);
}
/**
 * Steps forward in the location history and shows the restored location.
 * @param e selection event (unused)
 */
protected void onNavigateNextSelected(SelectionEvent e) {
	location.navigateNext();
	final Location restored = location.get();
	setLocationAndRefresh(restored);
}
/**
 * Navigates to the parent of the current location's entry.  When the
 * entry is not resolved yet (or is a root), only reports via the status
 * bar.
 * @param e selection event (unused)
 */
protected void onUpperFolderSelected(SelectionEvent e) {
	final DBPathEntry current = location.get().getPathEntry();
	if (current == null || current.getParentId() == 0L) {
		writeStatusBar("Not ready for going up operation; be patient.");
		return;
	}
	setLocationAndRefresh(current.getParentId());
}
/**
 * Reacts to edits of the location text box: mirrors the text into the
 * shell title, updates (or pushes) the current Location accordingly and
 * triggers a refresh.
 * @param arg0 modify event (unused)
 */
protected void onLocationModified(ModifyEvent arg0) {
String newstring = txtLocation.getText();
Assertion.assertNullPointerException(newstring != null);
writeStatusBar(String.format("New path string is: %s", newstring));
shell.setText(newstring);
Location oldloc = location.get();
if (newstring.equals(oldloc.getPathString())) {
// noop: text already matches the current location's path
} else if (newstring.equals(oldloc.getSearchString())) {
// Text matches the active search string: demote the current location
// to a pure search location by clearing its path fields in place.
oldloc.setPathEntry(null);
oldloc.setPathId(0L);
oldloc.setPathString(null);
} else {
// Brand-new text: push a fresh path-string location onto the history.
Location newloc = new Location();
newloc.setPathString(newstring);
location.add(newloc);
}
refresh();
}
/** Hook for table row selection; intentionally empty. */
protected void onTableSelected(SelectionEvent e) {}
// Current ORDER BY clause (column name, optionally " DESC"), restored
// from and persisted to the user preferences.
private String order = PreferenceRW.getSwtFileFolderMenuSortOrder();
// Visibility filter state backing the type checkboxes (folders, files,
// compressed folders, compressed files); all shown by default.
private boolean isFolderChecked = true;
private boolean isFileChecked = true;
private boolean isCompressedFolderChecked = true;
private boolean isCompressedFileChecked = true;
/**
 * Applies and persists the "path" sort order after the column header was
 * clicked; the column's listener has already toggled the table's sort
 * direction.
 */
protected void onTblclmnPathSelected(TableColumn tblclmnPath, SelectionEvent e) {
	order = (table.getSortDirection() == SWT.UP) ? "path" : "path DESC";
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}
/**
 * Applies and persists the "datelastmodified" sort order after the column
 * header was clicked; the column's listener has already toggled the
 * table's sort direction.
 */
protected void onTblclmnDateLastModifiedSelected(TableColumn tblclmnDateLastModified, SelectionEvent e) {
	order = (table.getSortDirection() == SWT.UP) ? "datelastmodified" : "datelastmodified DESC";
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}
/**
 * Applies and persists the "size" sort order after the column header was
 * clicked; the column's listener has already toggled the table's sort
 * direction.
 */
protected void onTblclmnSizeSelected(TableColumn tblclmnSize, SelectionEvent e) {
	order = (table.getSortDirection() == SWT.UP) ? "size" : "size DESC";
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}
/**
 * Applies and persists the "compressedsize" sort order after the column
 * header was clicked; the column's listener has already toggled the
 * table's sort direction.
 */
protected void onTblclmnCompressedsizeSelected(TableColumn tblclmnCompressedesize, SelectionEvent e) {
	order = (table.getSortDirection() == SWT.UP) ? "compressedsize" : "compressedsize DESC";
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}
/**
 * Applies and persists the "duplicate" sort order after the column header
 * was clicked; the column's listener has already toggled the table's sort
 * direction.
 */
protected void onTblclmnDuplicateSelected(TableColumn tblclmnDuplicate, SelectionEvent e) {
	order = (table.getSortDirection() == SWT.UP) ? "duplicate" : "duplicate DESC";
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}
/**
 * Applies and persists the "dedupablesize" sort order after the column
 * header was clicked; the column's listener has already toggled the
 * table's sort direction.
 */
protected void onTblclmnDedupablesizeSelected(TableColumn tblclmnDedupablesize, SelectionEvent e) {
	order = (table.getSortDirection() == SWT.UP) ? "dedupablesize" : "dedupablesize DESC";
	PreferenceRW.setSwtFileFolderMenuSortOrder(order);
	refresh();
}
/** Applies the "show folders" filter state and re-runs the listing. */
protected void onFoldersVisibleChecked(boolean checked) {
isFolderChecked = checked;
refresh();
}
/** Applies the "show files" filter state and re-runs the listing. */
protected void onFilesVisibleChecked(boolean checked) {
isFileChecked = checked;
refresh();
}
/** Applies the "show compressed folders" filter state and re-runs the listing. */
protected void onCompressedFoldersVisibleChecked(boolean checked) {
isCompressedFolderChecked = checked;
refresh();
}
/** Applies the "show compressed files" filter state and re-runs the listing. */
protected void onCompressedFilesVisibleSelected(boolean checked) {
isCompressedFileChecked = checked;
refresh();
}
/**
 * Replaces the location-bar text on the UI thread; the resulting
 * ModifyEvent invokes onLocationModified(), which performs the actual
 * refresh.
 * @param text new location text
 */
public void setLocationAndRefresh(final String text) {
	Display.getDefault().asyncExec(() -> {
		txtLocation.setText(text); // fires onLocationModified()
	});
}
/*
* setLocationAndRefresh and related
*/
/**
 * Navigates to a concrete DBPathEntry.  Updates the current Location in
 * place when it already refers to the same path, otherwise pushes a new
 * fully-populated Location, then re-enters via the string-based overload.
 * @param entry resolved database entry to show (non-null)
 */
public void setLocationAndRefresh(final DBPathEntry entry) {
	Assertion.assertNullPointerException(entry != null);
	Assertion.assertNullPointerException(location != null);
	final Location current = location.get();
	final boolean alreadyCurrent = current.getPathEntry() != null
			&& current.getPathEntry().getPathId() == entry.getPathId();
	if (!alreadyCurrent) {
		if (current.getPathString() != null && current.getPathString().equals(entry.getPath())) {
			// Same path string: just attach the resolved entry in place.
			current.setPathEntry(entry);
			current.setPathId(entry.getPathId());
		} else {
			// Different target: push a new history entry.
			final Location added = new Location();
			added.setPathEntry(entry);
			added.setPathId(entry.getPathId());
			added.setPathString(entry.getPath());
			location.add(added);
		}
	}
	setLocationAndRefresh(entry.getPath());
}
/**
 * Navigates to a database path id.  Pushes a new Location when the id
 * differs from the current one, then resolves the id to a DBPathEntry on
 * the worker thread and re-enters via the string-based overload.
 * @param id database path id to show
 */
public void setLocationAndRefresh(long id) {
writeStatusBar(String.format("Starting query; new ID is: %d", id));
Location oldloc = location.get();
if (oldloc.getPathId() == id) {
// noop: already at this id
} else {
Location newloc = new Location();
newloc.setPathId(id);
location.add(newloc);
}
refresh(new LazyRunnable() {
@Override
public void run() throws SQLException, InterruptedException {
Debug.writelog("-- SwtFileFolderMenu SetLocationAndRefresh LOCAL PATTERN (id based) --");
Location loc = location.get();
DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
if (p != null) {
// Fill in the resolved entry and its path, then drive the UI
// update through the string-based overload.
loc.setPathEntry(p);
loc.setPathString(p.getPath());
loc.setSearchString(null);
setLocationAndRefresh(loc.getPathString());
}
}
});
}
/**
 * Navigates to an arbitrary Location by rendering it to text: path string
 * first, then the entry's path, then the search string, then empty.
 * @param loc location to show
 */
public void setLocationAndRefresh(final Location loc) {
	final String text;
	if (loc.getPathString() != null) {
		text = loc.getPathString();
	} else if (loc.getPathEntry() != null) {
		text = loc.getPathEntry().getPath();
	} else if (loc.getSearchString() != null) {
		text = loc.getSearchString();
	} else {
		text = "";
	}
	setLocationAndRefresh(text);
}
/*
* normal refresh
*/
// Single scenario instance reused by every standard refresh pass.
private Scenario scenario = new Scenario();
/** Re-runs the standard listing scenario; synchronized so refreshes do not interleave. */
protected synchronized void refresh() {
refresh(scenario);
}
/**
 * One refresh pass: resolves the current Location, queries the directory
 * table with the active type/search filters and streams the rows into the
 * SWT table.  Runs on the worker thread driven by refresh(); all widget
 * access is marshalled to the UI thread via Display.asyncExec().
 *
 * Fixes vs. previous revision: removed an always-false disjunct
 * (loc.getPathEntry() != null inside a branch guarded by
 * loc.getPathEntry() == null) and a stray double semicolon in
 * cleanupTable(); behavior is unchanged.
 */
class Scenario extends SwtCommonFileFolderMenu.Scenario {
	@Override
	public void run() throws SQLException, InterruptedException {
		writeProgress(10);
		Location loc = location.get();
		// Preprocess: when only a raw path string or a path id is known,
		// resolve it to a DBPathEntry (or demote the text to a search string).
		if (loc.getPathEntry() == null && loc.getSearchString() == null &&
				(loc.getPathId() != 0L
				|| (loc.getPathString() != null && !"".equals(loc.getPathString())))) {
			writeProgress(50);
			if (loc.getPathString() != null) {
				DBPathEntry p = getDB().getDBPathEntryByPath(loc.getPathString());
				if (p != null) {
					loc.setPathEntry(p);
					loc.setPathId(p.getPathId());
					Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 1 (path based entry detection) --");
				} else {
					// Unknown path: treat the typed text as a search string.
					loc.setSearchString(loc.getPathString());
					loc.setPathString(null);
					loc.setPathId(0L);
					loc.setPathEntry(null);
					Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 2 (searchstring=" + loc.getSearchString() + ") --");
				}
			} else if (loc.getPathId() != 0L) {
				Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 3 (id based) --");
				DBPathEntry p = getDB().getDBPathEntryByPathId(loc.getPathId());
				assert(p != null);
				// Re-enter through the entry-based overload, which triggers
				// a fresh refresh; this pass is abandoned.
				setLocationAndRefresh(p);
				return;
			} else {
				Debug.writelog("-- SwtFileFolderMenu PREPROCESS PATTERN 4 (show all paths) --");
			}
		}
		try {
			threadWait();
			cleanupTable();
			// Type filter from the visibility checkboxes
			// (0=folder, 1=file, 2=compressed folder, 3=compressed file).
			ArrayList<String> typelist = new ArrayList<String> ();
			if (isFolderChecked) {
				typelist.add("type=0");
			}
			if (isFileChecked) {
				typelist.add("type=1");
			}
			if (isCompressedFolderChecked) {
				typelist.add("type=2");
			}
			if (isCompressedFileChecked) {
				typelist.add("type=3");
			}
			// NOTE(review): when every checkbox is off this produces
			// "WHERE () ...", which is not valid SQL — confirm that state
			// is unreachable from the UI.
			String typeWhere = typelist.size() == 0 ? "" : String.join(" OR ", typelist);
			threadWait();
			writeStatusBar("Querying...");
			writeProgress(70);
			// Optional "path LIKE ?" clauses, one per whitespace-separated
			// search term; the bound values get surrounding wildcards below.
			String searchSubSQL;
			ArrayList<String> searchStringElement = new ArrayList<String> ();
			if (loc.getSearchString() == null || "".equals(loc.getSearchString())) {
				searchSubSQL = "";
			} else {
				ArrayList<String> p = new ArrayList<String> ();
				for (String s: loc.getSearchString().split(" ")) {
					if (! "".equals(s)) {
						p.add("path LIKE ?");
						searchStringElement.add(s);
					}
				}
				searchSubSQL = " AND (" + String.join(" AND ", p) + ")";
			}
			threadWait();
			DBPathEntry locationPathEntry = null;
			PreparedStatement ps;
			if (loc.getPathString() == null || "".equals(loc.getPathString())) {
				// No location at all: list every entry whose parent exists
				// (or which is a root).
				String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
						+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
						+ " ORDER BY " + order;
				Debug.writelog(sql);
				ps = getDB().prepareStatement(sql);
				int c = 1;
				for (String s: searchStringElement) {
					ps.setString(c, "%" + s + "%");
					Debug.writelog(c + " %" + s + "%");
					c++;
				}
			} else if ((locationPathEntry = loc.getPathEntry()) != null) {
				// Entry-based location: the entry itself plus everything
				// below it (via the upperlower closure table).
				String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
						+ " AND (pathid=? OR EXISTS (SELECT * FROM upperlower WHERE upper=? AND lower=pathid))"
						+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
						+ " ORDER BY " + order;
				Debug.writelog(sql);
				ps = getDB().prepareStatement(sql);
				int c = 1;
				for (String s: searchStringElement) {
					ps.setString(c, "%" + s + "%");
					Debug.writelog(c + " %" + s + "%");
					c++;
				}
				ps.setLong(c++, locationPathEntry.getPathId());
				ps.setLong(c++, locationPathEntry.getPathId());
				Debug.writelog(locationPathEntry.getPath());
			} else {
				// Path string without a resolved entry: prefix match.
				String sql = "SELECT * FROM directory AS d1 WHERE (" + typeWhere + ") " + searchSubSQL
						+ " AND path LIKE ?"
						+ " AND (parentid=0 OR EXISTS (SELECT * FROM directory AS d2 WHERE d1.parentid=d2.pathid))"
						+ " ORDER BY " + order;
				Debug.writelog(sql);
				ps = getDB().prepareStatement(sql);
				int c = 1;
				for (String s: searchStringElement) {
					ps.setString(c, "%" + s + "%");
					Debug.writelog(c + " %" + s + "%");
					c++;
				}
				ps.setString(c++, loc.getPathString() + "%");
				Debug.writelog(loc.getPathString());
			}
			try {
				LazyUpdater.Dispatcher disp = getDB().getDispatcher();
				disp.setList(Dispatcher.NONE);
				disp.setCsum(Dispatcher.NONE);
				ResultSet rs = ps.executeQuery();
				try {
					threadWait();
					Debug.writelog("QUERY FINISHED");
					writeStatusBar("Listing...");
					writeProgress(90);
					int count = 0;
					while (rs.next()) {
						threadWait();
						DBPathEntry p1 = getDB().rsToPathEntry(rs);
						Assertion.assertAssertionError(p1 != null);
						Assertion.assertAssertionError(p1.getPath() != null);
						if (locationPathEntry != null) {
							Assertion.assertAssertionError(p1.getPath().startsWith(locationPathEntry.getPath()),
									p1.getPath() + " does not start with " + locationPathEntry.getPath()
									);
						}
						// Compare the DB row against the live entry on disk.
						PathEntry p2;
						try {
							p2 = disp.dispatch(p1);
						} catch (IOException e) {
							p2 = null;
						}
						if (p2 == null) {
							// Not reachable on disk: gray the row out and
							// mark the parent dirty for the next crawl.
							addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), true);
							getDB().unsetClean(p1.getParentId());
						} else {
							Assertion.assertAssertionError(p1.getPath().equals(p2.getPath()),
									"!! " + p1.getPath() + " != " + p2.getPath());
							if (!PathEntry.dscMatch(p1, p2)) {
								// Stale row: take over on-disk date/size and
								// invalidate the checksum.
								p1.setDateLastModified(p2.getDateLastModified());
								p1.setSize(p2.getSize());
								p1.setCompressedSize(p2.getCompressedSize());
								p1.clearCsum();
								getDB().unsetClean(p1.getParentId());
							}
							addRow(p1, rs.getInt("duplicate"), rs.getLong("dedupablesize"), false);
						}
						count ++;
					}
					writeStatusBar(String.format("%d items", count));
				} finally {
					rs.close();
				}
			} finally {
				ps.close();
			}
			writeProgress(0);
		} catch (WindowDisposedException e) {}
		// WindowDisposedException is swallowed deliberately: the window is
		// gone, so there is nothing left to update.
	}

	/**
	 * Clears the backing entry list and all table rows on the UI thread.
	 * @throws WindowDisposedException when the table widget is already disposed
	 */
	protected void cleanupTable() throws WindowDisposedException {
		if (table.isDisposed()) {
			throw new WindowDisposedException("!! Window disposed at cleanupTable");
		}
		Display.getDefault().asyncExec(new Runnable() {
			public void run() {
				pathentrylist.clear();
				table.removeAll();
			}
		});
	}

	/**
	 * Appends one table row for the given entry on the UI thread.
	 * Foreground color: gray = not on disk, red = no access,
	 * blue = file whose size differs from its compressed size,
	 * black = everything else.
	 * @throws WindowDisposedException when the table widget is already disposed
	 */
	protected void addRow(final DBPathEntry entry, final int duplicate,
			final long dedupablesize, final boolean grayout) throws WindowDisposedException {
		if (table.isDisposed()) {
			throw new WindowDisposedException("!! Window disposed at addRow");
		}
		Display.getDefault().asyncExec(new Runnable() {
			public void run() {
				pathentrylist.add(entry);
				final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
				final NumberFormat numf = NumberFormat.getNumberInstance();
				Date d = new Date(entry.getDateLastModified());
				String[] row = {
						entry.getPath(),
						sdf.format(d),
						numf.format(entry.getSize()),
						numf.format(entry.getCompressedSize()),
						(duplicate > 0 ? numf.format(duplicate) : null),
						(dedupablesize > 0 ? numf.format(dedupablesize) : null),
				};
				final Display display = Display.getDefault();
				final Color blue = new Color(display, 0, 0, 255);
				final Color red = new Color(display, 255, 0, 0);
				final Color black = new Color(display, 0, 0, 0);
				final Color gray = new Color(display, 127, 127, 127);
				try {
					TableItem tableItem = new TableItem(table, SWT.NONE);
					tableItem.setText(row);
					if (grayout) {
						tableItem.setForeground(gray);
					} else if (entry.isNoAccess()) {
						tableItem.setForeground(red);
					} else if (entry.isFile() && entry.getSize() != entry.getCompressedSize()) {
						tableItem.setForeground(blue);
					} else {
						tableItem.setForeground(black);
					}
				} catch (Exception e) {
					// The widget may be disposed concurrently; only report
					// unexpected failures.
					if (!table.isDisposed()) {
						e.printStackTrace();
					}
				}
			}
		});
	}
}
/**
 * Builds the JFace data bindings that keep the toolbar composite, status
 * label and status bar composite backgrounds in sync with the shell
 * background.  Generated-style code; keep edits minimal.
 * @return the context owning the three background bindings
 */
protected DataBindingContext initDataBindings() {
DataBindingContext bindingContext = new DataBindingContext();
//
IObservableValue observeBackgroundCompositeObserveWidget = WidgetProperties.background().observe(compositeToolBar);
IObservableValue backgroundShellObserveValue = PojoProperties.value("background").observe(shell);
bindingContext.bindValue(observeBackgroundCompositeObserveWidget, backgroundShellObserveValue, null, null);
//
IObservableValue observeBackgroundLblStatusBarObserveWidget = WidgetProperties.background().observe(lblStatusBar);
bindingContext.bindValue(observeBackgroundLblStatusBarObserveWidget, backgroundShellObserveValue, null, null);
//
IObservableValue observeBackgroundCompositeStatusBarObserveWidget = WidgetProperties.background().observe(compositeStatusBar);
bindingContext.bindValue(observeBackgroundCompositeStatusBarObserveWidget, backgroundShellObserveValue, null, null);
//
return bindingContext;
}
}<|fim▁end|>
|
gl_compositeToolBar.verticalSpacing = 0;
gl_compositeToolBar.marginWidth = 0;
|
// ==== TrendsWrapperMixin.java ====
/*
* Copyright 2011 France Telecom R&D Beijing Co., Ltd 北京法国电信研发中心有限公司
*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.weibo.api.impl.json;
import java.util.Date;
import java.util.SortedSet;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import org.springframework.social.weibo.api.Trends.Trend;
/**
* Annotated mixin to add Jackson annotations to TrendsWrapper.
*
* @author edva8332
*/
@JsonIgnoreProperties(ignoreUnknown = true)
abstract class TrendsWrapperMixin {
@JsonProperty("trends")
@JsonDeserialize(using = TrendsDeserializer.class)
SortedSet<Trend> trends;
@JsonProperty("as_of")
@JsonDeserialize(using = DateInSecondsDeserializer.class)
Date asOf;
}<|fim▁end|>
|
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
|
// ==== vehicle-details-general.component.ts ====
import { Component, Input, OnInit, OnDestroy, ViewChild } from '@angular/core';
import { AbstractControl, FormArray, FormBuilder, FormGroup, Validators, ValidatorFn, FormControl } from '@angular/forms';
import { SharedValidatorsService } from '../../validators/shared-validators.service';
import { FormUtilsService } from '../../form-utils/form-utils.service';
import { TimeCalculationService } from '../../time-calculation/time-calculation.service';
import { Observable } from 'rxjs/Observable';
import { Subject } from 'rxjs/Subject';
import { Subscription } from 'rxjs/Subscription';
@Component({
selector: 'app-vehicle-details-general',
templateUrl: './vehicle-details-general.component.html',
styleUrls: ['./vehicle-details-general.component.scss']
})
export class VehicleDetailsGeneralComponent implements OnInit, OnDestroy {
// Stream of mapped raw form values; assigned in ngOnInit().
valueChangesRx: Observable<any>;
// Root reactive form for the vehicle-details-general section.
fform: FormGroup;
@Input() vehicle: any; // vehicle record whose fields seed the form
@Input() btity: any; // business entity: type lists and staff used for name/id mapping
@Input() isNew: boolean; // true when creating (adds vinConfirm, keeps vin editable)
// Facilities offered in the facility select; built in ngOnInit().
facilityObjList: any;
// @Input() checkMofcomValidityRxx: any;
// Emits the isDismantlingReady state so other controls can adjust validators.
updateVehicleControlValidatorsOnIsDismantlingReadyRxx = new Subject();
// Subscriptions collected for teardown (presumably unsubscribed in
// ngOnDestroy — TODO confirm).
subscriptions: Subscription[] = [];
// Options for the survey-rounds select.
surveyRoundsHash = [
{value: 'zero', displayValue: '0'},
{value: 'one', displayValue: '1'},
{value: 'two', displayValue: '2'},
]
// Injects form-building, validation, mapping and time helpers; no work
// happens here — all initialization is done in ngOnInit().
constructor(
private fb: FormBuilder,
private sv: SharedValidatorsService,
private fu: FormUtilsService,
private tc: TimeCalculationService
) { }
ngOnInit() {
this.facilityObjList = this.btity.types.facilities.filter(i => i.name !== '隆运总部');
this.fform = this.fb.group({
vin: [this.vehicle.vin, [Validators.required, this.sv.startedWithSpace()]],
batchId: [this.vehicle.batchId, this.sv.startedWithSpace()],
source: [this.fu.idToName(this.vehicle.source, this.btity.types['sources']), [
Validators.required,
this.sv.notListedButCanBeEmpty(this.btity.types.sources.map(t => t.name))
]],
// isToDeregister: [this.vehicle.isToDeregister], // replaced by consignmentType
mofcomRegisterType: [this.fu.idToName(this.vehicle.mofcomRegisterType, this.btity.types['mofcomRegisterTypes']), [
this.sv.notListedButCanBeEmpty(this.btity.types.mofcomRegisterTypes.map(t => t.name)),
Validators.required
]],
consignmentType: [this.fu.idToName(this.vehicle.consignmentType, this.btity.types['consignmentTypes']), [
Validators.required,
this.sv.notListedButCanBeEmpty(this.btity.types.consignmentTypes.map(t => t.name))
]],
mofcomRegisterRef: [this.vehicle.mofcomRegisterRef, this.sv.startedWithSpace()],
entranceDate: [this.vehicle.entranceDate || this.tc.getTodayDateBeijing(), [Validators.required]],
facility: [{
value: this.fu.idToName(this.vehicle.facility, this.btity.types.facilities),
disabled: this.vehicle.facility ? true : false
}, [Validators.required, this.sv.notListedButCanBeEmpty(this.btity.types.facilities.map(t => t.name))]],
metadata: this.fb.group({
isDeleted: [this.vehicle.metadata.isDeleted],
deletedFor: [this.vehicle.metadata.deletedFor],
deletedBy: [this.vehicle.metadata.deletedBy],
deletedAt: [this.vehicle.metadata.deletedAt],
}),
internalSurveyor: [
this.fu.idToName(this.vehicle.internalSurveyor,
this.btity.staffs, 'displayName'), [this.sv.startedWithSpace(), Validators.required]
],
isSurveyNecessary: [this.vehicle.isSurveyNecessary, [this.sv.shouldBeBoolean(), Validators.required]],
surveyRounds: [this.vehicle.surveyRounds, [Validators.required]],
status2: this.fb.group({
isSurveyReady: [this.vehicle.status2.isSurveyReady, [this.sv.shouldBeBoolean(), Validators.required]],
isSurveyNotReadyReason: [this.vehicle.status2.isSurveyNotReadyReason],
isSurveyNotReadySince: [this.vehicle.status2.isSurveyNotReadySince],
isSurveyNotReadyTill: [this.vehicle.status2.isSurveyNotReadyTill],
isDismantlingReady: [this.vehicle.status2.isDismantlingReady, [this.sv.shouldBeBoolean(), Validators.required]],
isDismantlingNotReadyReason: [this.vehicle.status2.isDismantlingNotReadyReason],
isDismantlingNotReadySince: [this.vehicle.status2.isDismantlingNotReadySince],
isDismantlingNotReadyTill: [this.vehicle.status2.isDismantlingNotReadyTill],
dismantlingOrderId: [this.vehicle.status2.dismantlingOrderId]
// dismantling: this.vehicle.status2.dismantling,
// auctioning: this.vehicle.status2.auctioning,
}),
estimatedSurveyDateFirst: [this.vehicle.estimatedSurveyDateFirst],
estimatedSurveyDateSecond: [this.vehicle.estimatedSurveyDateSecond],
// estimatedSurveyDatesUseDefault: ['']
});
/* setting up vinConfirm based on isNew*/
if (this.isNew) {
const vinConfirmCtrl = new FormControl('', [
Validators.required,
this.sv.notMatchingOtherControl(this.fform.get('vin'))
], [this.sv.duplicateVINAsync()]
);
this.fform.addControl('vinConfirm', vinConfirmCtrl);
} else {
this.fform.get('vin').disable();
}
const sub0_ = this.fform.get('status2.isDismantlingReady').valueChanges
.startWith(this.vehicle.status2.isDismantlingReady)
.subscribe(v => {
const isDismantlingNotReadyReasonCtrl = this.fform.get('status2.isDismantlingNotReadyReason');
const isDismantlingNotReadySinceCtrl = this.fform.get('status2.isDismantlingNotReadySince');
if (!v) { // when dismantling is not ready, set validators for isDismantlingNotReadyReason
// console.log('setting validators');
this.updateVehicleControlValidatorsOnIsDismantlingReadyRxx.next(false);
isDismantlingNotReadyReasonCtrl.setValidators([
this.sv.startedWithSpace(), Validators.required
]);
isDismantlingNotReadyReasonCtrl.markAsDirty();
isDismantlingNotReadyReasonCtrl.updateValueAndValidity();
if ((typeof isDismantlingNotReadySinceCtrl.value !== 'string') || !isDismantlingNotReadySinceCtrl.value) {
isDismantlingNotReadySinceCtrl.setValue((new Date()).toISOString().slice(0, 10));
}
} else {
this.updateVehicleControlValidatorsOnIsDismantlingReadyRxx.next(true);
isDismantlingNotReadyReasonCtrl.clearValidators();
isDismantlingNotReadyReasonCtrl.updateValueAndValidity();
}
});
this.subscriptions.push(sub0_);
const sub1_ = Observable.combineLatest(
this.fform.get('status2.isSurveyReady').valueChanges.startWith(this.vehicle.status2.isSurveyReady),
this.fform.get('surveyRounds').valueChanges.startWith(this.fform.get('surveyRounds').value),
).subscribe(combo => {
// console.log(combo);
const isSurveyReady = combo[0];
const surveyRounds = combo[1];
const isSurveyNotReadyReasonCtrl = this.fform.get('status2.isSurveyNotReadyReason');
const isSurveyNotReadySinceCtrl = this.fform.get('status2.isSurveyNotReadySince');
const firstSurveyDateCtrl = this.fform.get('estimatedSurveyDateFirst');
const secondSurveyDateCtrl = this.fform.get('estimatedSurveyDateSecond');
switch (true) {
case !isSurveyReady && surveyRounds !== 'zero':
isSurveyNotReadyReasonCtrl.setValidators([
this.sv.startedWithSpace(), Validators.required
]);
isSurveyNotReadyReasonCtrl.updateValueAndValidity();
if ((typeof isSurveyNotReadySinceCtrl.value !== 'string') || !isSurveyNotReadySinceCtrl.value) {
isSurveyNotReadySinceCtrl.setValue((new Date()).toISOString().slice(0, 10));
}
firstSurveyDateCtrl.clearValidators();
firstSurveyDateCtrl.updateValueAndValidity();
secondSurveyDateCtrl.clearValidators();
secondSurveyDateCtrl.updateValueAndValidity();
break;
case isSurveyReady && surveyRounds === 'one':
isSurveyNotReadyReasonCtrl.clearValidators();
isSurveyNotReadyReasonCtrl.updateValueAndValidity();
firstSurveyDateCtrl.setValidators(Validators.required);
firstSurveyDateCtrl.updateValueAndValidity();
secondSurveyDateCtrl.clearValidators();
secondSurveyDateCtrl.updateValueAndValidity();
break;
case isSurveyReady && surveyRounds === 'two':
isSurveyNotReadyReasonCtrl.clearValidators();
isSurveyNotReadyReasonCtrl.updateValueAndValidity();
firstSurveyDateCtrl.setValidators(Validators.required);
firstSurveyDateCtrl.updateValueAndValidity();
secondSurveyDateCtrl.setValidators(Validators.required);
secondSurveyDateCtrl.updateValueAndValidity();
break;
}
});
// .subscribe(v => {
// const isSurveyNotReadyReasonCtrl = this.fform.get('status2.isSurveyNotReadyReason');
// const isSurveyNotReadySinceCtrl = this.fform.get('status2.isSurveyNotReadySince');
// const firstSurveyDateCtrl = this.fform.get('estimatedSurveyDateFirst');
// const secondSurveyDateCtrl = this.fform.get('estimatedSurveyDateSecond');
// if (!v) { // when survey is not ready, set validators for isSurveyNotReadyReason
// isSurveyNotReadyReasonCtrl.setValidators([
// this.sv.startedWithSpace(), Validators.required
// ]);
// isSurveyNotReadyReasonCtrl.updateValueAndValidity();
// if ((typeof isSurveyNotReadySinceCtrl.value !== 'string') || !isSurveyNotReadySinceCtrl.value) {
// isSurveyNotReadySinceCtrl.setValue((new Date()).toISOString().slice(0, 10));
// }
// firstSurveyDateCtrl.clearValidators();
// firstSurveyDateCtrl.updateValueAndValidity();
// secondSurveyDateCtrl.clearValidators();
// secondSurveyDateCtrl.updateValueAndValidity();
// } else {
// isSurveyNotReadyReasonCtrl.clearValidators();
// isSurveyNotReadyReasonCtrl.updateValueAndValidity();
// firstSurveyDateCtrl.setValidators(Validators.required);
// firstSurveyDateCtrl.updateValueAndValidity();
// secondSurveyDateCtrl.setValidators(Validators.required);
// secondSurveyDateCtrl.updateValueAndValidity();
// }
// });
this.subscriptions.push(sub1_);
const sub2_ = this.fform.get('source').valueChanges
.startWith(this.vehicle.source)
.subscribe(v => {
if (v === '交警') {
this.fform.get('batchId').setValidators([Validators.required]);
this.fform.get('batchId').updateValueAndValidity();
} else {
this.fform.get('batchId').clearValidators();
this.fform.get('batchId').updateValueAndValidity();
}
});
this.valueChangesRx = this.fform.valueChanges
.startWith(null)
.map(v => {
// if (this.fform.valid) {
const allV = this.fform.getRawValue();
allV.source = this.fu.nameToId(allV.source, this.btity.types['sources']);
allV.mofcomRegisterType = this.fu.nameToId(allV.mofcomRegisterType, this.btity.types['mofcomRegisterTypes']);
allV.consignmentType = this.fu.nameToId(allV.consignmentType, this.btity.types['consignmentTypes']);
allV.facility = this.fu.nameToId(allV.facility, this.btity.types['facilities']);
allV.internalSurveyor = this.fu.nameToId(allV.internalSurveyor, this.btity.staffs, 'displayName');
return allV;
// }
});
// this.checkMofcomValidityRxx.subscribe((mofcomRegisterType) => {
// // no validation rule needs to be changed for general part
// });
// const subOnDismantlingReady_ = this.updateVehicleControlValidatorsOnIsDismantlingReadyRxx.subscribe(v => {
// if (v && this.fform.get('isSurveyNecessary').value) {
// this.fform.get('status2.isSurveyReady').setValue(true);
// }
// });
// this.subscriptions.push(subOnDismantlingReady_);
const onIsDismantlingReadyAndSurveyRoundsChange_ = Observable.combineLatest(
this.fform.get('status2.isDismantlingReady').valueChanges.startWith(this.fform.get('status2.isDismantlingReady').value),
this.fform.get('surveyRounds').valueChanges.startWith(this.fform.get('surveyRounds').value),
).subscribe(combo => {
const isDismantlingReady = combo[0];
const surveyRounds = combo[1];
const isSurveyReadyCtrl = this.fform.get('status2.isSurveyReady');
switch (true) {
case surveyRounds === 'zero':
isSurveyReadyCtrl.setValue(false);
break;
case isDismantlingReady:
isSurveyReadyCtrl.setValue(true);
break;
}
switch (true) {
case isDismantlingReady:
isSurveyReadyCtrl.disable(); break;
case !isDismantlingReady:
isSurveyReadyCtrl.enable(); break;
}
});<|fim▁hole|>
// Lifecycle teardown: unsubscribe every subscription collected in
// this.subscriptions so none of the RxJS observers outlive the component.
ngOnDestroy() {
this.subscriptions.forEach(sub_ => sub_.unsubscribe());
}
}<|fim▁end|>
|
this.subscriptions.push(onIsDismantlingReadyAndSurveyRoundsChange_);
}
|
<|file_name|>jqt.activityIndicator.js<|end_file_name|><|fim▁begin|>/*
_/ _/_/ _/_/_/_/_/ _/
_/ _/ _/ _/_/ _/ _/ _/_/_/ _/_/_/
_/ _/ _/_/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/ _/ _/ _/ _/ _/ _/ _/ _/ _/
_/ _/_/ _/ _/ _/_/ _/_/_/ _/_/_/ _/ _/
_/
_/
Created by David Kaneda <http://www.davidkaneda.com>
Documentation and issue tracking on Google Code <http://code.google.com/p/jqtouch/>
Special thanks to Jonathan Stark <http://jonathanstark.com/>
and pinch/zoom <http://www.pinchzoom.com/><|fim▁hole|>
(c) 2009 by jQTouch project members.
See LICENSE.txt for license.
activityIndicator - Daniel J. Pinter - DataZombies
Based on http://starkravingcoder.blogspot.com/2007/09/canvas-loading-indicator.html
Object Properties (all properties are set in the canvas tag):
animating
is the object in motion? Use object methods to change - true/false - Default = false
barHeight
height of the bars in px - Default = 5
barWidth
width of the bars in px - Default = 2
color
uses canvas's style attribute to set the bar color - in rgb() - Default = 0, 0, 0 (black)
direction
the direction the object rotates - counterclockwise/clockwise - Default = clockwise
innerRadius
radius of the hole in the middle in px - Default = 5
numberOfBars
how many bars the object has - Default = 12
speed
how fast the object rotates - larger numbers are slower - Default = 50
xPos
x-position on canvas in px - Default = center of canvas
yPos
y-position on canvas in px - Default = middle of canvas
Object Methods:
start()
begins the object's rotation
stop()
ends the object's rotation
Object Instantiation:
var aiGreenStar = new activityIndicator($('#GreenStar'));
Bind Object to Events via jQuery:
$('#page1').bind('pageAnimationStart', function (e, data) {if (data.direction === 'in'){aiGreenStar.start();}});
$('#page').bind('pageAnimationEnd', function (e, data) {if (data.direction === 'out'){aiGreenStar.stop();}});
Canvas tag with Object's ID:
This displays an green asterisk-like (*) activityIndicator in the top left corner of a 100 x 250 canvas.
<canvas id="GreenStar" height="100" width="250" barHeight="10" barWidth="3" style="color:rgb(0,255,0);"
direction="counterclockwise" innerRadius="5" numberOfBars="6" speed="50" xPos="30" yPos="45"></canvas>
*/
/**
 * Canvas "spinner" activity indicator (see the header comment for the full
 * list of canvas-tag attributes that configure it).
 *
 * @param canvas - the <canvas> element (or jQuery selector) whose attributes
 *   configure bar geometry, color, rotation direction, speed and position.
 * @returns {{start: Function, stop: Function}} control handle: start() begins
 *   the rotation, stop() halts it and clears the canvas.
 */
function activityIndicator(canvas) {
    var animating = false;
    // "- 0" coerces string attribute values to numbers (NaN when absent).
    var barHeight = $(canvas).attr('barHeight') - 0;
    var barWidth = $(canvas).attr('barWidth') - 0;
    var color = $(canvas).css('color');
    var context = $(canvas).get(0).getContext('2d');
    var direction = $(canvas).attr('direction');
    var innerRadius = $(canvas).attr('innerRadius') - 0;
    var numberOfBars = $(canvas).attr('numberOfBars') - 0;
    var speed = $(canvas).attr('speed') - 0;
    var xPos = $(canvas).attr('xPos') - 0;
    var yPos = $(canvas).attr('yPos') - 0;
    var offset = 0;

    // Defaults for missing/invalid numeric attributes.
    if (isNaN(barHeight)) { barHeight = 5; }
    if (isNaN(barWidth)) { barWidth = 2; }

    // Extract the "R, G, B[, A]" component list from an rgb()/rgba() color so
    // draw() can append a per-bar alpha; fall back to opaque black.
    var a = color.indexOf('(') + 1;
    var b = a;
    // BUG FIX: `a` is indexOf + 1, so "not found" yields 0, never -1; the
    // original `a !== -1` test could not fail. Compare against 0 instead.
    // (Behavior is unchanged for every input: when '(' is absent the inner
    // substr checks also fail and the default color is used either way.)
    if (a !== 0) {
        if (color.substr(0, 4) === 'rgb(') {
            b = color.lastIndexOf(')') - a;
        } else if (color.substr(0, 5) === 'rgba(') {
            b = color.lastIndexOf(',') - a;
        }
        color = b > a ? color.substr(a, b) + ', ' : '0, 0, 0, ';
    } else {
        color = '0, 0, 0, ';
    }

    // Map the direction attribute to a per-frame rotation step of +1 or -1.
    switch (direction) {
        case 'counterclockwise':
            direction = -1;
            break;
        case 'clockwise': default:
            direction = 1;
            break;
    }
    if (isNaN(innerRadius)) { innerRadius = 5; }
    if (isNaN(numberOfBars)) { numberOfBars = 12; }
    if (isNaN(speed)) { speed = 50; }
    // Default position: center of the canvas.
    if (isNaN(xPos)) { xPos = $(canvas).attr('width') / 2; }
    if (isNaN(yPos)) { yPos = $(canvas).attr('height') / 2; }

    // Wipe the whole canvas.
    function clear() {
        context.clearRect(0, 0, context.canvas.clientWidth, context.canvas.clientHeight);
    }

    // Draw one frame: numberOfBars bars fanned around (xPos, yPos), fading
    // with distance from the leading bar; `offset` selects the leading bar.
    function draw(offset) {
        clear();
        context.save();
        context.translate(xPos, yPos);
        for (var i = 0; i < numberOfBars; i++) {
            var angle = 2 * ((offset + i) % numberOfBars) * Math.PI / numberOfBars;
            context.save();
            context.translate((innerRadius * Math.sin(-angle)), (innerRadius * Math.cos(-angle)));
            context.rotate(angle);
            context.fillStyle = 'rgba(' + color + (numberOfBars + 1 - i) / (numberOfBars + 1) + ')';
            context.fillRect(-barWidth / 2, 0, barWidth, barHeight);
            context.restore();
        }
        context.restore();
    }

    // Advance one frame and reschedule while animating.
    function animate() {
        if (!animating) { return; }
        offset = (offset + direction) % numberOfBars;
        draw(offset);
        setTimeout(animate, speed);
    }

    // Begin the rotation.
    function start() {
        animating = true;
        animate();
    }

    // Halt the rotation and clear the canvas.
    function stop() {
        animating = false;
        clear();
    }

    return {
        start: start,
        stop: stop
    };
}
| |
<|file_name|>matching.py<|end_file_name|><|fim▁begin|>"""
********
Matching
********
"""
# Copyright (C) 2004-2015 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
# Copyright (C) 2011 by
# Nicholas Mancuso <[email protected]>
# All rights reserved.
# BSD license.
from itertools import repeat
__author__ = """\n""".join(['Joris van Rantwijk',
'Nicholas Mancuso ([email protected])'])
__all__ = ['max_weight_matching', 'maximal_matching']
def maximal_matching(G):
    r"""Find a maximal cardinality matching in the graph.

    A matching is a subset of edges in which no node occurs more than once.
    The cardinality of a matching is the number of matched edges.

    Parameters
    ----------
    G : NetworkX graph
      Undirected graph

    Returns
    -------
    matching : set
        A maximal matching of the graph.

    Notes
    -----
    The algorithm greedily selects a maximal matching M of the graph G
    (i.e. no superset of M exists). It runs in `O(|E|)` time.
    """
    matching = set()
    # Every edge incident to an already-matched node; an edge may appear in
    # either orientation depending on which endpoint contributed it, hence
    # the two membership checks below.
    covered = set()
    for u, v in G.edges():
        # If the edge isn't covered, add it to the matching, then remove the
        # neighborhoods of u and v from consideration.
        if (u, v) not in covered and (v, u) not in covered:
            matching.add((u, v))
            # update() consumes the edge iterables directly, avoiding the
            # throwaway set(...) constructions of the original.
            covered.update(G.edges(u))
            covered.update(G.edges(v))
    return matching
def max_weight_matching(G, maxcardinality=False):
"""Compute a maximum-weighted matching of G.
A matching is a subset of edges in which no node occurs more than once.
The cardinality of a matching is the number of matched edges.
The weight of a matching is the sum of the weights of its edges.
Parameters
----------
G : NetworkX graph
Undirected graph
maxcardinality: bool, optional
If maxcardinality is True, compute the maximum-cardinality matching
with maximum weight among all maximum-cardinality matchings.
Returns
-------
mate : dictionary
The matching is returned as a dictionary, mate, such that
mate[v] == w if node v is matched to node w. Unmatched nodes do not
occur as a key in mate.
Notes
------
If G has edges with 'weight' attribute the edge data are used as
weight values else the weights are assumed to be 1.
This function takes time O(number_of_nodes ** 3).
If all edge weights are integers, the algorithm uses only integer
computations. If floating point weights are used, the algorithm
could return a slightly suboptimal matching due to numeric
precision errors.
This method is based on the "blossom" method for finding augmenting
paths and the "primal-dual" method for finding a matching of maximum
weight, both methods invented by Jack Edmonds [1]_.
Bipartite graphs can also be matched using the functions present in
:mod:`networkx.algorithms.bipartite.matching`.
References
----------
.. [1] "Efficient Algorithms for Finding Maximum Matching in Graphs",
Zvi Galil, ACM Computing Surveys, 1986.
"""
#
# The algorithm is taken from "Efficient Algorithms for Finding Maximum
# Matching in Graphs" by Zvi Galil, ACM Computing Surveys, 1986.
# It is based on the "blossom" method for finding augmenting paths and
# the "primal-dual" method for finding a matching of maximum weight, both
# methods invented by Jack Edmonds.
#
# A C program for maximum weight matching by Ed Rothberg was used
# extensively to validate this new code.
#
# Many terms used in the code comments are explained in the paper
# by Galil. You will probably need the paper to make sense of this code.
#
class NoNode:
"""Dummy value which is different from any node."""
pass
class Blossom:
"""Representation of a non-trivial blossom or sub-blossom."""
__slots__ = [ 'childs', 'edges', 'mybestedges' ]
# b.childs is an ordered list of b's sub-blossoms, starting with
# the base and going round the blossom.
# b.edges is the list of b's connecting edges, such that
# b.edges[i] = (v, w) where v is a vertex in b.childs[i]
# and w is a vertex in b.childs[wrap(i+1)].
# If b is a top-level S-blossom,
# b.mybestedges is a list of least-slack edges to neighbouring
# S-blossoms, or None if no such list has been computed yet.
# This is used for efficient computation of delta3.
# Generate the blossom's leaf vertices.
def leaves(self):
for t in self.childs:
if isinstance(t, Blossom):
for v in t.leaves():
yield v
else:
yield t
# Get a list of vertices.
gnodes = list(G)
if not gnodes:
return { } # don't bother with empty graphs
# Find the maximum edge weight.
maxweight = 0
allinteger = True
for i,j,d in G.edges(data=True):
wt=d.get('weight',1)
if i != j and wt > maxweight:
maxweight = wt
allinteger = allinteger and (str(type(wt)).split("'")[1]
in ('int', 'long'))
# If v is a matched vertex, mate[v] is its partner vertex.
# If v is a single vertex, v does not occur as a key in mate.
# Initially all vertices are single; updated during augmentation.
mate = { }
# If b is a top-level blossom,
# label.get(b) is None if b is unlabeled (free),
# 1 if b is an S-blossom,
# 2 if b is a T-blossom.
# The label of a vertex is found by looking at the label of its top-level
# containing blossom.
# If v is a vertex inside a T-blossom, label[v] is 2 iff v is reachable
# from an S-vertex outside the blossom.
# Labels are assigned during a stage and reset after each augmentation.
label = { }
# If b is a labeled top-level blossom,
# labeledge[b] = (v, w) is the edge through which b obtained its label
# such that w is a vertex in b, or None if b's base vertex is single.
# If w is a vertex inside a T-blossom and label[w] == 2,
# labeledge[w] = (v, w) is an edge through which w is reachable from
# outside the blossom.
labeledge = { }
# If v is a vertex, inblossom[v] is the top-level blossom to which v
# belongs.
# If v is a top-level vertex, inblossom[v] == v since v is itself
# a (trivial) top-level blossom.
# Initially all vertices are top-level trivial blossoms.
inblossom = dict(zip(gnodes, gnodes))
# If b is a sub-blossom,
# blossomparent[b] is its immediate parent (sub-)blossom.
# If b is a top-level blossom, blossomparent[b] is None.
blossomparent = dict(zip(gnodes, repeat(None)))
# If b is a (sub-)blossom,
# blossombase[b] is its base VERTEX (i.e. recursive sub-blossom).
blossombase = dict(zip(gnodes, gnodes))
# If w is a free vertex (or an unreached vertex inside a T-blossom),
# bestedge[w] = (v, w) is the least-slack edge from an S-vertex,
# or None if there is no such edge.
# If b is a (possibly trivial) top-level S-blossom,
# bestedge[b] = (v, w) is the least-slack edge to a different S-blossom
# (v inside b), or None if there is no such edge.
# This is used for efficient computation of delta2 and delta3.
bestedge = { }
# If v is a vertex,
# dualvar[v] = 2 * u(v) where u(v) is the v's variable in the dual
# optimization problem (if all edge weights are integers, multiplication
# by two ensures that all values remain integers throughout the algorithm).
# Initially, u(v) = maxweight / 2.
dualvar = dict(zip(gnodes, repeat(maxweight)))
# If b is a non-trivial blossom,
# blossomdual[b] = z(b) where z(b) is b's variable in the dual
# optimization problem.
blossomdual = { }
# If (v, w) in allowedge or (w, v) in allowedg, then the edge
# (v, w) is known to have zero slack in the optimization problem;
# otherwise the edge may or may not have zero slack.
allowedge = { }
# Queue of newly discovered S-vertices.
queue = [ ]
# Return 2 * slack of edge (v, w) (does not work inside blossoms).
def slack(v, w):
return dualvar[v] + dualvar[w] - 2 * G[v][w].get('weight',1)
# Assign label t to the top-level blossom containing vertex w,
# coming through an edge from vertex v.
def assignLabel(w, t, v):
b = inblossom[w]
assert label.get(w) is None and label.get(b) is None
label[w] = label[b] = t
if v is not None:
labeledge[w] = labeledge[b] = (v, w)
else:
labeledge[w] = labeledge[b] = None
bestedge[w] = bestedge[b] = None
if t == 1:
# b became an S-vertex/blossom; add it(s vertices) to the queue.
if isinstance(b, Blossom):
queue.extend(b.leaves())
else:
queue.append(b)
elif t == 2:
# b became a T-vertex/blossom; assign label S to its mate.
# (If b is a non-trivial blossom, its base is the only vertex
# with an external mate.)
base = blossombase[b]
assignLabel(mate[base], 1, base)
# Trace back from vertices v and w to discover either a new blossom
# or an augmenting path. Return the base vertex of the new blossom,
# or NoNode if an augmenting path was found.
def scanBlossom(v, w):
# Trace back from v and w, placing breadcrumbs as we go.
path = [ ]
base = NoNode
while v is not NoNode:
# Look for a breadcrumb in v's blossom or put a new breadcrumb.
b = inblossom[v]
if label[b] & 4:
base = blossombase[b]
break
assert label[b] == 1
path.append(b)
label[b] = 5
# Trace one step back.
if labeledge[b] is None:
# The base of blossom b is single; stop tracing this path.
assert blossombase[b] not in mate
v = NoNode
else:
assert labeledge[b][0] == mate[blossombase[b]]
v = labeledge[b][0]
b = inblossom[v]
assert label[b] == 2
# b is a T-blossom; trace one more step back.
v = labeledge[b][0]
# Swap v and w so that we alternate between both paths.
if w is not NoNode:
v, w = w, v
# Remove breadcrumbs.
for b in path:
label[b] = 1
# Return base vertex, if we found one.
return base
# Construct a new blossom with given base, through S-vertices v and w.
# Label the new blossom as S; set its dual variable to zero;
# relabel its T-vertices to S and add them to the queue.
def addBlossom(base, v, w):
bb = inblossom[base]
bv = inblossom[v]
bw = inblossom[w]
# Create blossom.
b = Blossom()
blossombase[b] = base
blossomparent[b] = None
blossomparent[bb] = b
# Make list of sub-blossoms and their interconnecting edge endpoints.
b.childs = path = [ ]
b.edges = edgs = [ (v, w) ]
# Trace back from v to base.
while bv != bb:
# Add bv to the new blossom.
blossomparent[bv] = b
path.append(bv)
edgs.append(labeledge[bv])
assert label[bv] == 2 or (label[bv] == 1 and labeledge[bv][0] == mate[blossombase[bv]])
# Trace one step back.
v = labeledge[bv][0]
bv = inblossom[v]
# Add base sub-blossom; reverse lists.
path.append(bb)
path.reverse()
edgs.reverse()
# Trace back from w to base.
while bw != bb:
# Add bw to the new blossom.
blossomparent[bw] = b
path.append(bw)
edgs.append((labeledge[bw][1], labeledge[bw][0]))
assert label[bw] == 2 or (label[bw] == 1 and labeledge[bw][0] == mate[blossombase[bw]])
# Trace one step back.
w = labeledge[bw][0]
bw = inblossom[w]
# Set label to S.
assert label[bb] == 1
label[b] = 1
labeledge[b] = labeledge[bb]
# Set dual variable to zero.
blossomdual[b] = 0
# Relabel vertices.
for v in b.leaves():
if label[inblossom[v]] == 2:
# This T-vertex now turns into an S-vertex because it becomes
# part of an S-blossom; add it to the queue.
queue.append(v)
inblossom[v] = b
# Compute b.mybestedges.
bestedgeto = { }
for bv in path:
if isinstance(bv, Blossom):
if bv.mybestedges is not None:
# Walk this subblossom's least-slack edges.
nblist = bv.mybestedges
# The sub-blossom won't need this data again.
bv.mybestedges = None
else:
# This subblossom does not have a list of least-slack
# edges; get the information from the vertices.
nblist = [ (v, w)
for v in bv.leaves()
for w in G.neighbors(v)
if v != w ]
else:
nblist = [ (bv, w)
for w in G.neighbors(bv)
if bv != w ]
for k in nblist:
(i, j) = k
if inblossom[j] == b:
i, j = j, i
bj = inblossom[j]
if (bj != b and label.get(bj) == 1 and
((bj not in bestedgeto) or
slack(i, j) < slack(*bestedgeto[bj]))):
bestedgeto[bj] = k
# Forget about least-slack edge of the subblossom.
bestedge[bv] = None
b.mybestedges = list(bestedgeto.values())
# Select bestedge[b].
mybestedge = None
bestedge[b] = None
for k in b.mybestedges:
kslack = slack(*k)
if mybestedge is None or kslack < mybestslack:
mybestedge = k
mybestslack = kslack
bestedge[b] = mybestedge
# Expand the given top-level blossom.
def expandBlossom(b, endstage):
# Convert sub-blossoms into top-level blossoms.
for s in b.childs:
blossomparent[s] = None
if isinstance(s, Blossom):
if endstage and blossomdual[s] == 0:
# Recursively expand this sub-blossom.
expandBlossom(s, endstage)
else:
for v in s.leaves():
inblossom[v] = s
else:
inblossom[s] = s
# If we expand a T-blossom during a stage, its sub-blossoms must be
# relabeled.
if (not endstage) and label.get(b) == 2:
# Start at the sub-blossom through which the expanding
# blossom obtained its label, and relabel sub-blossoms untili
# we reach the base.
# Figure out through which sub-blossom the expanding blossom
# obtained its label initially.
entrychild = inblossom[labeledge[b][1]]
# Decide in which direction we will go round the blossom.
j = b.childs.index(entrychild)
if j & 1:
# Start index is odd; go forward and wrap.
j -= len(b.childs)
jstep = 1
else:
# Start index is even; go backward.
jstep = -1
# Move along the blossom until we get to the base.
v, w = labeledge[b]
while j != 0:
# Relabel the T-sub-blossom.
if jstep == 1:
p, q = b.edges[j]
else:
q, p = b.edges[j-1]
label[w] = None
label[q] = None
assignLabel(w, 2, v)
# Step to the next S-sub-blossom and note its forward edge.
allowedge[(p, q)] = allowedge[(q, p)] = True
j += jstep
if jstep == 1:
v, w = b.edges[j]
else:
w, v = b.edges[j-1]
# Step to the next T-sub-blossom.
allowedge[(v, w)] = allowedge[(w, v)] = True
j += jstep
# Relabel the base T-sub-blossom WITHOUT stepping through to
# its mate (so don't call assignLabel).
bw = b.childs[j]
label[w] = label[bw] = 2
labeledge[w] = labeledge[bw] = (v, w)
bestedge[bw] = None
# Continue along the blossom until we get back to entrychild.
j += jstep
while b.childs[j] != entrychild:
# Examine the vertices of the sub-blossom to see whether
# it is reachable from a neighbouring S-vertex outside the
# expanding blossom.
bv = b.childs[j]
if label.get(bv) == 1:
# This sub-blossom just got label S through one of its
# neighbours; leave it be.
j += jstep
continue
if isinstance(bv, Blossom):
for v in bv.leaves():
if label.get(v):
break
else:
v = bv
# If the sub-blossom contains a reachable vertex, assign
# label T to the sub-blossom.
if label.get(v):
assert label[v] == 2
assert inblossom[v] == bv
label[v] = None
label[mate[blossombase[bv]]] = None
assignLabel(v, 2, labeledge[v][0])
j += jstep
# Remove the expanded blossom entirely.
label.pop(b, None)
labeledge.pop(b, None)
bestedge.pop(b, None)
del blossomparent[b]
del blossombase[b]
del blossomdual[b]
# Swap matched/unmatched edges over an alternating path through blossom b
# between vertex v and the base vertex. Keep blossom bookkeeping consistent.
def augmentBlossom(b, v):
# Bubble up through the blossom tree from vertex v to an immediate
# sub-blossom of b.
t = v
while blossomparent[t] != b:
t = blossomparent[t]
# Recursively deal with the first sub-blossom.
if isinstance(t, Blossom):
augmentBlossom(t, v)
# Decide in which direction we will go round the blossom.
i = j = b.childs.index(t)
if i & 1:
# Start index is odd; go forward and wrap.
j -= len(b.childs)
jstep = 1
else:
# Start index is even; go backward.
jstep = -1
# Move along the blossom until we get to the base.
while j != 0:
# Step to the next sub-blossom and augment it recursively.
j += jstep
t = b.childs[j]
if jstep == 1:
w, x = b.edges[j]
else:
x, w = b.edges[j-1]
if isinstance(t, Blossom):
augmentBlossom(t, w)
# Step to the next sub-blossom and augment it recursively.
j += jstep
t = b.childs[j]
if isinstance(t, Blossom):
augmentBlossom(t, x)
# Match the edge connecting those sub-blossoms.
mate[w] = x
mate[x] = w
# Rotate the list of sub-blossoms to put the new base at the front.
b.childs = b.childs[i:] + b.childs[:i]
b.edges = b.edges[i:] + b.edges[:i]
blossombase[b] = blossombase[b.childs[0]]
assert blossombase[b] == v
# Swap matched/unmatched edges over an alternating path between two
# single vertices. The augmenting path runs through S-vertices v and w.
def augmentMatching(v, w):
for (s, j) in ((v, w), (w, v)):
# Match vertex s to vertex j. Then trace back from s
# until we find a single vertex, swapping matched and unmatched
# edges as we go.
while 1:
bs = inblossom[s]
assert label[bs] == 1
assert (labeledge[bs] is None and blossombase[bs] not in mate) or (labeledge[bs][0] == mate[blossombase[bs]])
# Augment through the S-blossom from s to base.
if isinstance(bs, Blossom):
augmentBlossom(bs, s)
# Update mate[s]
mate[s] = j
# Trace one step back.
if labeledge[bs] is None:
# Reached single vertex; stop.
break
t = labeledge[bs][0]
bt = inblossom[t]
assert label[bt] == 2
# Trace one more step back.
s, j = labeledge[bt]
# Augment through the T-blossom from j to base.
assert blossombase[bt] == t
if isinstance(bt, Blossom):
augmentBlossom(bt, j)
# Update mate[j]
mate[j] = s
# Verify that the optimum solution has been reached.
def verifyOptimum():
if maxcardinality:
# Vertices may have negative dual;
# find a constant non-negative number to add to all vertex duals.
vdualoffset = max(0, -min(dualvar.values()))
else:
vdualoffset = 0
# 0. all dual variables are non-negative
assert min(dualvar.values()) + vdualoffset >= 0
assert len(blossomdual) == 0 or min(blossomdual.values()) >= 0
# 0. all edges have non-negative slack and
# 1. all matched edges have zero slack;
for i,j,d in G.edges(data=True):
wt=d.get('weight',1)
if i == j:
continue # ignore self-loops
s = dualvar[i] + dualvar[j] - 2 * wt
iblossoms = [ i ]
jblossoms = [ j ]
while blossomparent[iblossoms[-1]] is not None:
iblossoms.append(blossomparent[iblossoms[-1]])
while blossomparent[jblossoms[-1]] is not None:
jblossoms.append(blossomparent[jblossoms[-1]])
iblossoms.reverse()
jblossoms.reverse()
for (bi, bj) in zip(iblossoms, jblossoms):
if bi != bj:
break
s += 2 * blossomdual[bi]
assert s >= 0
if mate.get(i) == j or mate.get(j) == i:
assert mate[i] == j and mate[j] == i
assert s == 0
# 2. all single vertices have zero dual value;
for v in gnodes:
assert (v in mate) or dualvar[v] + vdualoffset == 0
# 3. all blossoms with positive dual value are full.
for b in blossomdual:
if blossomdual[b] > 0:
assert len(b.edges) % 2 == 1
for (i, j) in b.edges[1::2]:
assert mate[i] == j and mate[j] == i
# Ok.
# Main loop: continue until no further improvement is possible.
while 1:
# Each iteration of this loop is a "stage".
# A stage finds an augmenting path and uses that to improve
# the matching.
# Remove labels from top-level blossoms/vertices.
label.clear()
labeledge.clear()
# Forget all about least-slack edges.
bestedge.clear()
for b in blossomdual:
b.mybestedges = None
# Loss of labeling means that we can not be sure that currently
# allowable edges remain allowable througout this stage.
allowedge.clear()
# Make queue empty.
queue[:] = [ ]
# Label single blossoms/vertices with S and put them in the queue.
for v in gnodes:
if (v not in mate) and label.get(inblossom[v]) is None:
assignLabel(v, 1, None)
# Loop until we succeed in augmenting the matching.
augmented = 0
while 1:
# Each iteration of this loop is a "substage".
# A substage tries to find an augmenting path;
# if found, the path is used to improve the matching and
# the stage ends. If there is no augmenting path, the
# primal-dual method is used to pump some slack out of
# the dual variables.
# Continue labeling until all vertices which are reachable
# through an alternating path have got a label.
while queue and not augmented:
# Take an S vertex from the queue.
v = queue.pop()
assert label[inblossom[v]] == 1
# Scan its neighbours:
for w in G.neighbors(v):
if w == v:
continue # ignore self-loops
# w is a neighbour to v
bv = inblossom[v]
bw = inblossom[w]
if bv == bw:
# this edge is internal to a blossom; ignore it
continue
if (v, w) not in allowedge:
kslack = slack(v, w)
if kslack <= 0:
# edge k has zero slack => it is allowable
allowedge[(v, w)] = allowedge[(w, v)] = True
if (v, w) in allowedge:
if label.get(bw) is None:
# (C1) w is a free vertex;
# label w with T and label its mate with S (R12).
assignLabel(w, 2, v)
elif label.get(bw) == 1:
# (C2) w is an S-vertex (not in the same blossom);
# follow back-links to discover either an
# augmenting path or a new blossom.
base = scanBlossom(v, w)
if base is not NoNode:
# Found a new blossom; add it to the blossom
# bookkeeping and turn it into an S-blossom.
addBlossom(base, v, w)
else:
# Found an augmenting path; augment the
# matching and end this stage.
augmentMatching(v, w)
augmented = 1
break
elif label.get(w) is None:
# w is inside a T-blossom, but w itself has not
# yet been reached from outside the blossom;
# mark it as reached (we need this to relabel
# during T-blossom expansion).
assert label[bw] == 2
label[w] = 2
labeledge[w] = (v, w)
elif label.get(bw) == 1:
# keep track of the least-slack non-allowable edge to
# a different S-blossom.
if bestedge.get(bv) is None or kslack < slack(*bestedge[bv]):
bestedge[bv] = (v, w)
elif label.get(w) is None:
# w is a free vertex (or an unreached vertex inside
# a T-blossom) but we can not reach it yet;
# keep track of the least-slack edge that reaches w.
if bestedge.get(w) is None or kslack < slack(*bestedge[w]):
bestedge[w] = (v, w)
if augmented:
break
# There is no augmenting path under these constraints;
# compute delta and reduce slack in the optimization problem.
# (Note that our vertex dual variables, edge slacks and delta's
# are pre-multiplied by two.)
deltatype = -1
delta = deltaedge = deltablossom = None<|fim▁hole|> delta = min(dualvar.values())
# Compute delta2: the minimum slack on any edge between
# an S-vertex and a free vertex.
for v in G.nodes():
if label.get(inblossom[v]) is None and bestedge.get(v) is not None:
d = slack(*bestedge[v])
if deltatype == -1 or d < delta:
delta = d
deltatype = 2
deltaedge = bestedge[v]
# Compute delta3: half the minimum slack on any edge between
# a pair of S-blossoms.
for b in blossomparent:
if ( blossomparent[b] is None and label.get(b) == 1 and
bestedge.get(b) is not None ):
kslack = slack(*bestedge[b])
if allinteger:
assert (kslack % 2) == 0
d = kslack // 2
else:
d = kslack / 2.0
if deltatype == -1 or d < delta:
delta = d
deltatype = 3
deltaedge = bestedge[b]
# Compute delta4: minimum z variable of any T-blossom.
for b in blossomdual:
if ( blossomparent[b] is None and label.get(b) == 2 and
(deltatype == -1 or blossomdual[b] < delta) ):
delta = blossomdual[b]
deltatype = 4
deltablossom = b
if deltatype == -1:
# No further improvement possible; max-cardinality optimum
# reached. Do a final delta update to make the optimum
# verifyable.
assert maxcardinality
deltatype = 1
delta = max(0, min(dualvar.values()))
# Update dual variables according to delta.
for v in gnodes:
if label.get(inblossom[v]) == 1:
# S-vertex: 2*u = 2*u - 2*delta
dualvar[v] -= delta
elif label.get(inblossom[v]) == 2:
# T-vertex: 2*u = 2*u + 2*delta
dualvar[v] += delta
for b in blossomdual:
if blossomparent[b] is None:
if label.get(b) == 1:
# top-level S-blossom: z = z + 2*delta
blossomdual[b] += delta
elif label.get(b) == 2:
# top-level T-blossom: z = z - 2*delta
blossomdual[b] -= delta
# Take action at the point where minimum delta occurred.
if deltatype == 1:
# No further improvement possible; optimum reached.
break
elif deltatype == 2:
# Use the least-slack edge to continue the search.
(v, w) = deltaedge
assert label[inblossom[v]] == 1
allowedge[(v, w)] = allowedge[(w, v)] = True
queue.append(v)
elif deltatype == 3:
# Use the least-slack edge to continue the search.
(v, w) = deltaedge
allowedge[(v, w)] = allowedge[(w, v)] = True
assert label[inblossom[v]] == 1
queue.append(v)
elif deltatype == 4:
# Expand the least-z blossom.
expandBlossom(deltablossom, False)
# End of a this substage.
# Paranoia check that the matching is symmetric.
for v in mate:
assert mate[mate[v]] == v
# Stop when no more augmenting path can be found.
if not augmented:
break
# End of a stage; expand all S-blossoms which have zero dual.
for b in list(blossomdual.keys()):
if b not in blossomdual:
continue # already expanded
if ( blossomparent[b] is None and label.get(b) == 1 and
blossomdual[b] == 0 ):
expandBlossom(b, True)
# Verify that we reached the optimum solution (only for integer weights).
if allinteger:
verifyOptimum()
return mate<|fim▁end|>
|
# Compute delta1: the minumum value of any vertex dual.
if not maxcardinality:
deltatype = 1
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os
# Flask application configuration, driven by environment variables.
# (Reassembled: the `if ENV == 'dev':` header had been displaced from its
# branch bodies by dataset corruption.)
CSRF_ENABLED = True
ENV = os.environ.get('ENVIRONMENT', 'dev')
MODEL_HASH = os.environ.get('MODEL_HASH')
PROJECT_PATH = os.path.realpath(os.path.dirname(__file__))
ROOT_PATH = BASE_DIR = os.path.join(os.path.dirname(__file__), '..')
SECRET_KEY = os.environ.get('SECRET_KEY')
STATIC_FOLDER = os.path.join(ROOT_PATH, 'static')
TEMPLATE_FOLDER = os.path.join(ROOT_PATH, 'templates')
SQLALCHEMY_MIGRATE_REPO = os.path.join(ROOT_PATH, 'db_repository')

if ENV == 'dev':
    # Local development: fixed port, debug on, local Postgres database.
    PORT = 7070
    APP_BASE_LINK = 'http://localhost:{}'.format(PORT)
    DEBUG = True
    SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/dev_db'
else:
    # Any other environment: everything comes from the container/host env
    # (linked-container style POSTGRES_* variables).
    APP_BASE_LINK = os.environ.get('APP_BASE_LINK')
    DEBUG = False
    SQLALCHEMY_DATABASE_URI = 'postgresql://{}:{}@{}:{}/{}'.format(
        os.environ.get('POSTGRES_ENV_POSTGRES_USER'),
        os.environ.get('POSTGRES_ENV_POSTGRES_PASSWORD'),
        os.environ.get('POSTGRES_PORT_5432_TCP_ADDR'),
        os.environ.get('POSTGRES_PORT_5432_TCP_PORT'),
        os.environ.get('POSTGRES_ENV_POSTGRESQL_DB'))
<|file_name|>search_filter.py<|end_file_name|><|fim▁begin|>from zou.app.models.search_filter import SearchFilter
from .base import BaseModelResource, BaseModelsResource
class SearchFiltersResource(BaseModelsResource):<|fim▁hole|>class SearchFilterResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, SearchFilter)<|fim▁end|>
|
def __init__(self):
BaseModelsResource.__init__(self, SearchFilter)
|
<|file_name|>django_wsgi_production.py<|end_file_name|><|fim▁begin|>import os, sys
PATH = os.path.join(os.path.dirname(__file__), '..')
sys.path += [
os.path.join(PATH, 'project/apps'),
os.path.join(PATH, 'project'),
os.path.join(PATH, '..'), <|fim▁hole|><|fim▁end|>
|
PATH]
os.environ['DJANGO_SETTINGS_MODULE'] = 'project.settings.production'
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>"""
Forms and validation code for user registration.
"""
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from registration.models import RegistrationProfile
# I put this on all required fields, because it's easier to pick up
# on them with CSS or JavaScript if they have a class of "required"
# in the HTML. Your mileage may vary. If/when Django ticket #3515
# lands in trunk, this will no longer be necessary.
attrs_dict = { 'class': 'required' }
class RegistrationForm(forms.Form):
"""
Form for registering a new user account.
Validates that the requested username is not already in use, and
requires the password to be entered twice to catch typos.
Subclasses should feel free to add any additional validation they
need, but should either preserve the base ``save()`` or implement
a ``save()`` which accepts the ``profile_callback`` keyword
argument and passes it through to
``RegistrationProfile.objects.create_inactive_user()``.
"""
username = forms.RegexField(regex=r'^\w+$',
max_length=30,
widget=forms.TextInput(attrs=attrs_dict),
label=_(u'username'))
email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict,
maxlength=75)),
label=_(u'email address'))
password1 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_(u'password'))
password2 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
label=_(u'password (again)'))
def clean_username(self):
"""
Validate that the username is alphanumeric and is not already
in use.
"""
try:
user = User.objects.get(username__iexact=self.cleaned_data['username'])
except User.DoesNotExist:
return self.cleaned_data['username']<|fim▁hole|> """
Verifiy that the values entered into the two password fields
match. Note that an error here will end up in
``non_field_errors()`` because it doesn't apply to a single
field.
"""
if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
if self.cleaned_data['password1'] != self.cleaned_data['password2']:
raise forms.ValidationError(_(u'You must type the same password each time'))
return self.cleaned_data
def save(self, request, profile_callback=None):
"""
Create the new ``User`` and ``RegistrationProfile``, and
returns the ``User``.
This is essentially a light wrapper around
``RegistrationProfile.objects.create_inactive_user()``,
feeding it the form data and a profile callback (see the
documentation on ``create_inactive_user()`` for details) if
supplied.
"""
new_user = RegistrationProfile.objects.create_inactive_user(request,
username=self.cleaned_data['username'],
password=self.cleaned_data['password1'],
email=self.cleaned_data['email'],
profile_callback=profile_callback)
return new_user
class RegistrationFormTermsOfService(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which adds a required checkbox
    for agreeing to a site's Terms of Service.

    """
    tos = forms.BooleanField(widget=forms.CheckboxInput(attrs=attrs_dict),
                             label=_(u'I have read and agree to the Terms of Service'))

    def clean_tos(self):
        """
        Validate that the user accepted the Terms of Service.

        """
        # Guard clause: reject first, then return the accepted value.
        accepted = self.cleaned_data.get('tos', False)
        if not accepted:
            raise forms.ValidationError(_(u'You must agree to the terms to register'))
        return accepted
class RegistrationFormUniqueEmail(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which enforces uniqueness of
    email addresses.

    """
    def clean_email(self):
        """
        Validate that the supplied email address is unique for the
        site.

        """
        email = self.cleaned_data['email']
        # Queryset truthiness triggers the lookup; non-empty means taken.
        existing = User.objects.filter(email__iexact=email)
        if existing:
            raise forms.ValidationError(_(u'This email address is already in use. Please supply a different email address.'))
        return email
class RegistrationFormNoFreeEmail(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which disallows registration with
    email addresses from popular free webmail services; moderately
    useful for preventing automated spam registrations.

    To change the list of banned domains, subclass this form and
    override the attribute ``bad_domains``.

    """
    bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
                   'googlemail.com', 'hotmail.com', 'hushmail.com',
                   'msn.com', 'mail.ru', 'mailinator.com', 'live.com']

    def clean_email(self):
        """
        Check the supplied email address against a list of known free
        webmail domains.

        The comparison is case-insensitive: mail domains are not
        case-sensitive, so ``user@GMAIL.COM`` must be rejected just
        like ``user@gmail.com``.
        """
        # EmailField validation has already run, so the value contains '@'.
        email_domain = self.cleaned_data['email'].split('@')[1].lower()
        if email_domain in self.bad_domains:
            raise forms.ValidationError(_(u'Registration using free email addresses is prohibited. Please supply a different email address.'))
        return self.cleaned_data['email']
|
raise forms.ValidationError(_(u'This username is already taken. Please choose another.'))
def clean(self):
|
<|file_name|>keys.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
__all__ = (
'Key',
'Keys',
)
class Key(object):
    """A single named key, e.g. ``<C-A>`` for Control-A."""
    def __init__(self, name):
        #: Descriptive way of writing keys in configuration files, e.g. <C-A>
        #: for ``Control-A``.
        self.name = name

    def __repr__(self):
        return '{0}({1!r})'.format(self.__class__.__name__, self.name)
class Keys(object):
Escape = Key('<Escape>')
ControlA = Key('<C-A>')<|fim▁hole|> ControlF = Key('<C-F>')
ControlG = Key('<C-G>')
ControlH = Key('<C-H>')
ControlI = Key('<C-I>') # Tab
ControlJ = Key('<C-J>') # Enter
ControlK = Key('<C-K>')
ControlL = Key('<C-L>')
ControlM = Key('<C-M>') # Enter
ControlN = Key('<C-N>')
ControlO = Key('<C-O>')
ControlP = Key('<C-P>')
ControlQ = Key('<C-Q>')
ControlR = Key('<C-R>')
ControlS = Key('<C-S>')
ControlT = Key('<C-T>')
ControlU = Key('<C-U>')
ControlV = Key('<C-V>')
ControlW = Key('<C-W>')
ControlX = Key('<C-X>')
ControlY = Key('<C-Y>')
ControlZ = Key('<C-Z>')
ControlSpace = Key('<C-Space>')
ControlBackslash = Key('<C-Backslash>')
ControlSquareClose = Key('<C-SquareClose>')
ControlCircumflex = Key('<C-Circumflex>')
ControlUnderscore = Key('<C-Underscore>')
ControlLeft = Key('<C-Left>')
ControlRight = Key('<C-Right>')
ControlUp = Key('<C-Up>')
ControlDown = Key('<C-Down>')
Up = Key('<Up>')
Down = Key('<Down>')
Right = Key('<Right>')
Left = Key('<Left>')
Home = Key('<Home>')
End = Key('<End>')
Delete = Key('<Delete>')
ShiftDelete = Key('<ShiftDelete>')
PageUp = Key('<PageUp>')
PageDown = Key('<PageDown>')
BackTab = Key('<BackTab>') # shift + tab
Tab = ControlI
Backspace = ControlH
F1 = Key('<F1>')
F2 = Key('<F2>')
F3 = Key('<F3>')
F4 = Key('<F4>')
F5 = Key('<F5>')
F6 = Key('<F6>')
F7 = Key('<F7>')
F8 = Key('<F8>')
F9 = Key('<F9>')
F10 = Key('<F10>')
F11 = Key('<F11>')
F12 = Key('<F12>')
F13 = Key('<F13>')
F14 = Key('<F14>')
F15 = Key('<F15>')
F16 = Key('<F16>')
F17 = Key('<F17>')
F18 = Key('<F18>')
F19 = Key('<F19>')
F20 = Key('<F20>')
# Matches any key.
Any = Key('<Any>')
# Special
CPRResponse = Key('<Cursor-Position-Response>')<|fim▁end|>
|
ControlB = Key('<C-B>')
ControlC = Key('<C-C>')
ControlD = Key('<C-D>')
ControlE = Key('<C-E>')
|
<|file_name|>DataFrame.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2013 Jad
*
* This file is part of Jad.
* Jad is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Jad is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Jad. If not, see <http://www.gnu.org/licenses/>.
*/
package de.fhffm.jad.demo.jad;
import java.util.ArrayList;
import de.fhffm.jad.data.DataWrapper;
import de.fhffm.jad.data.EInputFields;
import de.fhffm.jad.data.IDataFieldEnum;
/**
* This class synchronizes the access to our Data.Frame
* Added Observations are stored in a local ArrayList.
* Use 'write' to send everything to GNU R.
* @author Denis Hock
*/
public class DataFrame {
private static DataWrapper dw = null;
private static ArrayList<String[]> rows = new ArrayList<>();
/**
* @return Singleton Instance of the GNU R Data.Frame
*/
public static DataWrapper getDataFrame(){
if (dw == null){
//Create the data.frame for GNU R:
dw = new DataWrapper("data");
clear();
}
return dw;
}
/**
* Delete the old observations and send all new observations to Gnu R
* @return
*/
public synchronized static boolean write(){<|fim▁hole|>
//Clear the R-Data.Frame
clear();
//Send all new Observations to Gnu R
for(String[] row : rows)
dw.addObservation(row);
//Clear the local ArrayList
rows.clear();
return true;
}
	/**
	 * These Observations are locally stored and wait for the write() command
	 * @param row one observation; column order must match the fields set up in clear()
	 */
	public synchronized static void add(String[] row){
		//Store everything in an ArrayList
		rows.add(row);
	}
/**
* Clear local ArrayList and GNU R Data.Frame
*/
private static void clear(){
ArrayList<IDataFieldEnum> fields = new ArrayList<IDataFieldEnum>();
fields.add(EInputFields.ipsrc);
fields.add(EInputFields.tcpdstport);
fields.add(EInputFields.framelen);
dw.createEmptyDataFrame(fields);
}
}<|fim▁end|>
|
if (rows.size() < 1){
return false;
}
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
impl super::OBR {
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
}
#[doc = r" Value of the field"]
pub struct OPTERRR {
bits: bool,
}
impl OPTERRR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct RDPRTR {
bits: u8,
}
impl RDPRTR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct WDG_SWR {
bits: bool,
}
impl WDG_SWR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct NRST_STOPR {
bits: bool,
}
impl NRST_STOPR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct NRST_STDBYR {
bits: bool,
}
impl NRST_STDBYR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct NBOOT0R {
bits: bool,
}
impl NBOOT0R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct NBOOT1R {
bits: bool,
}
impl NBOOT1R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct VDDA_MONITORR {
bits: bool,
}
impl VDDA_MONITORR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct RAM_PARITY_CHECKR {
bits: bool,
}
impl RAM_PARITY_CHECKR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct BOOT_SELR {
bits: bool,
}
impl BOOT_SELR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline(always)]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline(always)]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct DATA0R {
bits: u8,
}
impl DATA0R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct DATA1R {
bits: u8,
}
impl DATA1R {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bit 0 - Option byte error"]
#[inline(always)]
pub fn opterr(&self) -> OPTERRR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) != 0
};
OPTERRR { bits }
}
#[doc = "Bits 1:2 - Read protection level status"]
#[inline(always)]
pub fn rdprt(&self) -> RDPRTR {
let bits = {
const MASK: u8 = 3;
const OFFSET: u8 = 1;
((self.bits >> OFFSET) & MASK as u32) as u8
};
RDPRTR { bits }
}
#[doc = "Bit 8 - WDG_SW"]
#[inline(always)]
pub fn wdg_sw(&self) -> WDG_SWR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 8;
((self.bits >> OFFSET) & MASK as u32) != 0<|fim▁hole|> WDG_SWR { bits }
}
#[doc = "Bit 9 - nRST_STOP"]
#[inline(always)]
pub fn n_rst_stop(&self) -> NRST_STOPR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 9;
((self.bits >> OFFSET) & MASK as u32) != 0
};
NRST_STOPR { bits }
}
#[doc = "Bit 10 - nRST_STDBY"]
#[inline(always)]
pub fn n_rst_stdby(&self) -> NRST_STDBYR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 10;
((self.bits >> OFFSET) & MASK as u32) != 0
};
NRST_STDBYR { bits }
}
#[doc = "Bit 11 - nBOOT0"]
#[inline(always)]
pub fn n_boot0(&self) -> NBOOT0R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 11;
((self.bits >> OFFSET) & MASK as u32) != 0
};
NBOOT0R { bits }
}
#[doc = "Bit 12 - BOOT1"]
#[inline(always)]
pub fn n_boot1(&self) -> NBOOT1R {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) != 0
};
NBOOT1R { bits }
}
#[doc = "Bit 13 - VDDA_MONITOR"]
#[inline(always)]
pub fn vdda_monitor(&self) -> VDDA_MONITORR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 13;
((self.bits >> OFFSET) & MASK as u32) != 0
};
VDDA_MONITORR { bits }
}
#[doc = "Bit 14 - RAM_PARITY_CHECK"]
#[inline(always)]
pub fn ram_parity_check(&self) -> RAM_PARITY_CHECKR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 14;
((self.bits >> OFFSET) & MASK as u32) != 0
};
RAM_PARITY_CHECKR { bits }
}
#[doc = "Bit 15 - BOOT_SEL"]
#[inline(always)]
pub fn boot_sel(&self) -> BOOT_SELR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 15;
((self.bits >> OFFSET) & MASK as u32) != 0
};
BOOT_SELR { bits }
}
#[doc = "Bits 16:23 - Data0"]
#[inline(always)]
pub fn data0(&self) -> DATA0R {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) as u8
};
DATA0R { bits }
}
#[doc = "Bits 24:31 - Data1"]
#[inline(always)]
pub fn data1(&self) -> DATA1R {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) as u8
};
DATA1R { bits }
}
}<|fim▁end|>
|
};
|
<|file_name|>ScrKeyboard.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2010 TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
import gettext
_ = gettext.translation('yali', fallback=True).ugettext
from PyQt5.Qt import QWidget, pyqtSignal, QVariant
import yali.util
import yali.localedata
import yali.postinstall
import yali.context as ctx
from yali.gui import ScreenWidget
from yali.gui.Ui.keyboardwidget import Ui_KeyboardWidget
##
# Keyboard setup screen
class Widget(QWidget, ScreenWidget):
name = "keyboardSetup"
def __init__(self):
QWidget.__init__(self)
self.ui = Ui_KeyboardWidget()
self.ui.setupUi(self)
index = 0 # comboBox.addItem doesn't increase the currentIndex
self.default_layout_index = None
locales = sorted([(country, data) for country, data in yali.localedata.locales.items()])
for country, data in locales:
if data["xkbvariant"]:
i = 0
for variant in data["xkbvariant"]:
_d = dict(data)
_d["xkbvariant"] = variant[0]
_d["name"] = variant[1]
_d["consolekeymap"] = data["consolekeymap"][i]
self.ui.keyboard_list.addItem(_d["name"], QVariant(_d))
i += 1
else:
self.ui.keyboard_list.addItem(data["name"], QVariant(data))
if ctx.consts.lang == country:
if ctx.consts.lang == "tr":
self.default_layout_index = index + 1
else:
self.default_layout_index = index
index += 1
self.ui.keyboard_list.setCurrentIndex(self.default_layout_index)
self.ui.keyboard_list.currentIndexChanged[int].connect(self.slotLayoutChanged)
def shown(self):
self.slotLayoutChanged()
def slotLayoutChanged(self):
index = self.ui.keyboard_list.currentIndex()
keymap = self.ui.keyboard_list.itemData(index)#.toMap()
# Gökmen's converter
keymap = dict(map(lambda x: (str(x[0]), unicode(x[1])), keymap.iteritems()))
ctx.installData.keyData = keymap
ctx.interface.informationWindow.hide()
if "," in keymap["xkblayout"]:
message = _("Use Alt-Shift to toggle between alternative keyboard layouts")
ctx.interface.informationWindow.update(message, type="warning")
else:
ctx.interface.informationWindow.hide()
yali.util.setKeymap(keymap["xkblayout"], keymap["xkbvariant"])<|fim▁hole|> ctx.logger.debug("Selected keymap is : %s" % ctx.installData.keyData["name"])
return True<|fim▁end|>
|
def execute(self):
ctx.interface.informationWindow.hide()
|
<|file_name|>path_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package helpers
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"runtime"
"strconv"
"strings"
"testing"
"time"
"github.com/gohugoio/hugo/langs"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/hugofs"
"github.com/spf13/afero"
)
func TestMakePath(t *testing.T) {
c := qt.New(t)
tests := []struct {
input string
expected string
removeAccents bool
}{
{"dot.slash/backslash\\underscore_pound#plus+hyphen-", "dot.slash/backslash\\underscore_pound#plus+hyphen-", true},
{"abcXYZ0123456789", "abcXYZ0123456789", true},
{"%20 %2", "%20-2", true},
{"foo- bar", "foo-bar", true},
{" Foo bar ", "Foo-bar", true},
{"Foo.Bar/foo_Bar-Foo", "Foo.Bar/foo_Bar-Foo", true},
{"fOO,bar:foobAR", "fOObarfoobAR", true},
{"FOo/BaR.html", "FOo/BaR.html", true},
{"трям/трям", "трям/трям", true},
{"은행", "은행", true},
{"Банковский кассир", "Банковскии-кассир", true},
// Issue #1488
{"संस्कृत", "संस्कृत", false},
{"a%C3%B1ame", "a%C3%B1ame", false}, // Issue #1292
{"this+is+a+test", "this+is+a+test", false}, // Issue #1290
{"~foo", "~foo", false}, // Issue #2177
{"foo--bar", "foo--bar", true}, // Issue #7288
}
for _, test := range tests {
v := newTestCfg()
v.Set("removePathAccents", test.removeAccents)
l := langs.NewDefaultLanguage(v)
p, err := NewPathSpec(hugofs.NewMem(v), l, nil)
c.Assert(err, qt.IsNil)
output := p.MakePath(test.input)
if output != test.expected {
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
}
}
}
func TestMakePathSanitized(t *testing.T) {
v := newTestCfg()
p, _ := NewPathSpec(hugofs.NewMem(v), v, nil)
tests := []struct {
input string
expected string
}{
{" FOO bar ", "foo-bar"},
{"Foo.Bar/fOO_bAr-Foo", "foo.bar/foo_bar-foo"},
{"FOO,bar:FooBar", "foobarfoobar"},
{"foo/BAR.HTML", "foo/bar.html"},
{"трям/трям", "трям/трям"},
{"은행", "은행"},
}<|fim▁hole|> output := p.MakePathSanitized(test.input)
if output != test.expected {
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
}
}
}
func TestMakePathSanitizedDisablePathToLower(t *testing.T) {
v := newTestCfg()
v.Set("disablePathToLower", true)
l := langs.NewDefaultLanguage(v)
p, _ := NewPathSpec(hugofs.NewMem(v), l, nil)
tests := []struct {
input string
expected string
}{
{" FOO bar ", "FOO-bar"},
{"Foo.Bar/fOO_bAr-Foo", "Foo.Bar/fOO_bAr-Foo"},
{"FOO,bar:FooBar", "FOObarFooBar"},
{"foo/BAR.HTML", "foo/BAR.HTML"},
{"трям/трям", "трям/трям"},
{"은행", "은행"},
}
for _, test := range tests {
output := p.MakePathSanitized(test.input)
if output != test.expected {
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
}
}
}
func TestMakePathRelative(t *testing.T) {
type test struct {
inPath, path1, path2, output string
}
data := []test{
{"/abc/bcd/ab.css", "/abc/bcd", "/bbc/bcd", "/ab.css"},
{"/abc/bcd/ab.css", "/abcd/bcd", "/abc/bcd", "/ab.css"},
}
for i, d := range data {
output, _ := makePathRelative(d.inPath, d.path1, d.path2)
if d.output != output {
t.Errorf("Test #%d failed. Expected %q got %q", i, d.output, output)
}
}
_, error := makePathRelative("a/b/c.ss", "/a/c", "/d/c", "/e/f")
if error == nil {
t.Errorf("Test failed, expected error")
}
}
func TestGetDottedRelativePath(t *testing.T) {
// on Windows this will receive both kinds, both country and western ...
for _, f := range []func(string) string{filepath.FromSlash, func(s string) string { return s }} {
doTestGetDottedRelativePath(f, t)
}
}
func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
type test struct {
input, expected string
}
data := []test{
{"", "./"},
{urlFixer("/"), "./"},
{urlFixer("post"), "../"},
{urlFixer("/post"), "../"},
{urlFixer("post/"), "../"},
{urlFixer("tags/foo.html"), "../"},
{urlFixer("/tags/foo.html"), "../"},
{urlFixer("/post/"), "../"},
{urlFixer("////post/////"), "../"},
{urlFixer("/foo/bar/index.html"), "../../"},
{urlFixer("/foo/bar/foo/"), "../../../"},
{urlFixer("/foo/bar/foo"), "../../../"},
{urlFixer("foo/bar/foo/"), "../../../"},
{urlFixer("foo/bar/foo/bar"), "../../../../"},
{"404.html", "./"},
{"404.xml", "./"},
{"/404.html", "./"},
}
for i, d := range data {
output := GetDottedRelativePath(d.input)
if d.expected != output {
t.Errorf("Test %d failed. Expected %q got %q", i, d.expected, output)
}
}
}
// TestMakeTitle checks MakeTitle's hyphen-to-space conversion on a
// small input/expected table.
func TestMakeTitle(t *testing.T) {
	cases := []struct {
		in   string
		want string
	}{
		{"Make-Title", "Make Title"},
		{"MakeTitle", "MakeTitle"},
		{"make_title", "make_title"},
	}
	for i, tc := range cases {
		got := MakeTitle(tc.in)
		if got != tc.want {
			t.Errorf("Test %d failed. Expected %q got %q", i, tc.want, got)
		}
	}
}
func TestDirExists(t *testing.T) {
type test struct {
input string
expected bool
}
data := []test{
{".", true},
{"./", true},
{"..", true},
{"../", true},
{"./..", true},
{"./../", true},
{os.TempDir(), true},
{os.TempDir() + FilePathSeparator, true},
{"/", true},
{"/some-really-random-directory-name", false},
{"/some/really/random/directory/name", false},
{"./some-really-random-local-directory-name", false},
{"./some/really/random/local/directory/name", false},
}
for i, d := range data {
exists, _ := DirExists(filepath.FromSlash(d.input), new(afero.OsFs))
if d.expected != exists {
t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
}
}
}
func TestIsDir(t *testing.T) {
type test struct {
input string
expected bool
}
data := []test{
{"./", true},
{"/", true},
{"./this-directory-does-not-existi", false},
{"/this-absolute-directory/does-not-exist", false},
}
for i, d := range data {
exists, _ := IsDir(d.input, new(afero.OsFs))
if d.expected != exists {
t.Errorf("Test %d failed. Expected %t got %t", i, d.expected, exists)
}
}
}
func TestIsEmpty(t *testing.T) {
zeroSizedFile, _ := createZeroSizedFileInTempDir()
defer deleteFileInTempDir(zeroSizedFile)
nonZeroSizedFile, _ := createNonZeroSizedFileInTempDir()
defer deleteFileInTempDir(nonZeroSizedFile)
emptyDirectory, _ := createEmptyTempDir()
defer deleteTempDir(emptyDirectory)
nonEmptyZeroLengthFilesDirectory, _ := createTempDirWithZeroLengthFiles()
defer deleteTempDir(nonEmptyZeroLengthFilesDirectory)
nonEmptyNonZeroLengthFilesDirectory, _ := createTempDirWithNonZeroLengthFiles()
defer deleteTempDir(nonEmptyNonZeroLengthFilesDirectory)
nonExistentFile := os.TempDir() + "/this-file-does-not-exist.txt"
nonExistentDir := os.TempDir() + "/this/directory/does/not/exist/"
fileDoesNotExist := fmt.Errorf("%q path does not exist", nonExistentFile)
dirDoesNotExist := fmt.Errorf("%q path does not exist", nonExistentDir)
type test struct {
input string
expectedResult bool
expectedErr error
}
data := []test{
{zeroSizedFile.Name(), true, nil},
{nonZeroSizedFile.Name(), false, nil},
{emptyDirectory, true, nil},
{nonEmptyZeroLengthFilesDirectory, false, nil},
{nonEmptyNonZeroLengthFilesDirectory, false, nil},
{nonExistentFile, false, fileDoesNotExist},
{nonExistentDir, false, dirDoesNotExist},
}
for i, d := range data {
exists, err := IsEmpty(d.input, new(afero.OsFs))
if d.expectedResult != exists {
t.Errorf("Test %d failed. Expected result %t got %t", i, d.expectedResult, exists)
}
if d.expectedErr != nil {
if d.expectedErr.Error() != err.Error() {
t.Errorf("Test %d failed. Expected %q(%#v) got %q(%#v)", i, d.expectedErr, d.expectedErr, err, err)
}
} else {
if d.expectedErr != err {
t.Errorf("Test %d failed. Expected %q(%#v) got %q(%#v)", i, d.expectedErr, d.expectedErr, err, err)
}
}
}
}
func createZeroSizedFileInTempDir() (*os.File, error) {
filePrefix := "_path_test_"
f, e := ioutil.TempFile("", filePrefix) // dir is os.TempDir()
if e != nil {
// if there was an error no file was created.
// => no requirement to delete the file
return nil, e
}
return f, nil
}
// createNonZeroSizedFileInTempDir creates a temp file containing a short
// fixed payload and returns it; callers only use f.Name() and must delete
// it with deleteFileInTempDir.
func createNonZeroSizedFileInTempDir() (*os.File, error) {
	f, err := createZeroSizedFileInTempDir()
	if err != nil {
		// Creation failed, so there is no file to clean up.
		return nil, err
	}
	byteString := []byte("byteString")
	err = ioutil.WriteFile(f.Name(), byteString, 0644)
	if err != nil {
		// Writing failed: remove the (empty) file before reporting the error.
		deleteFileInTempDir(f)
		return nil, err
	}
	return f, nil
}
// deleteFileInTempDir removes the given temp file by name; the removal
// error is deliberately ignored (best-effort test cleanup).
func deleteFileInTempDir(f *os.File) {
	_ = os.Remove(f.Name())
}
func createEmptyTempDir() (string, error) {
dirPrefix := "_dir_prefix_"
d, e := ioutil.TempDir("", dirPrefix) // will be in os.TempDir()
if e != nil {
// no directory to delete - it was never created
return "", e
}
return d, nil
}
func createTempDirWithZeroLengthFiles() (string, error) {
d, dirErr := createEmptyTempDir()
if dirErr != nil {
return "", dirErr
}
filePrefix := "_path_test_"
_, fileErr := ioutil.TempFile(d, filePrefix) // dir is os.TempDir()
if fileErr != nil {
// if there was an error no file was created.
// but we need to remove the directory to clean-up
deleteTempDir(d)
return "", fileErr
}
// the dir now has one, zero length file in it
return d, nil
}
func createTempDirWithNonZeroLengthFiles() (string, error) {
d, dirErr := createEmptyTempDir()
if dirErr != nil {
return "", dirErr
}
filePrefix := "_path_test_"
f, fileErr := ioutil.TempFile(d, filePrefix) // dir is os.TempDir()
if fileErr != nil {
// if there was an error no file was created.
// but we need to remove the directory to clean-up
deleteTempDir(d)
return "", fileErr
}
byteString := []byte("byteString")
fileErr = ioutil.WriteFile(f.Name(), byteString, 0644)
if fileErr != nil {
// delete the file
deleteFileInTempDir(f)
// also delete the directory
deleteTempDir(d)
return "", fileErr
}
// the dir now has one, zero length file in it
return d, nil
}
func deleteTempDir(d string) {
_ = os.RemoveAll(d)
}
func TestExists(t *testing.T) {
zeroSizedFile, _ := createZeroSizedFileInTempDir()
defer deleteFileInTempDir(zeroSizedFile)
nonZeroSizedFile, _ := createNonZeroSizedFileInTempDir()
defer deleteFileInTempDir(nonZeroSizedFile)
emptyDirectory, _ := createEmptyTempDir()
defer deleteTempDir(emptyDirectory)
nonExistentFile := os.TempDir() + "/this-file-does-not-exist.txt"
nonExistentDir := os.TempDir() + "/this/directory/does/not/exist/"
type test struct {
input string
expectedResult bool
expectedErr error
}
data := []test{
{zeroSizedFile.Name(), true, nil},
{nonZeroSizedFile.Name(), true, nil},
{emptyDirectory, true, nil},
{nonExistentFile, false, nil},
{nonExistentDir, false, nil},
}
for i, d := range data {
exists, err := Exists(d.input, new(afero.OsFs))
if d.expectedResult != exists {
t.Errorf("Test %d failed. Expected result %t got %t", i, d.expectedResult, exists)
}
if d.expectedErr != err {
t.Errorf("Test %d failed. Expected %q got %q", i, d.expectedErr, err)
}
}
}
func TestAbsPathify(t *testing.T) {
type test struct {
inPath, workingDir, expected string
}
data := []test{
{os.TempDir(), filepath.FromSlash("/work"), filepath.Clean(os.TempDir())}, // TempDir has trailing slash
{"dir", filepath.FromSlash("/work"), filepath.FromSlash("/work/dir")},
}
windowsData := []test{
{"c:\\banana\\..\\dir", "c:\\foo", "c:\\dir"},
{"\\dir", "c:\\foo", "c:\\foo\\dir"},
{"c:\\", "c:\\foo", "c:\\"},
}
unixData := []test{
{"/banana/../dir/", "/work", "/dir"},
}
for i, d := range data {
// todo see comment in AbsPathify
ps := newTestDefaultPathSpec("workingDir", d.workingDir)
expected := ps.AbsPathify(d.inPath)
if d.expected != expected {
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
}
}
t.Logf("Running platform specific path tests for %s", runtime.GOOS)
if runtime.GOOS == "windows" {
for i, d := range windowsData {
ps := newTestDefaultPathSpec("workingDir", d.workingDir)
expected := ps.AbsPathify(d.inPath)
if d.expected != expected {
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
}
}
} else {
for i, d := range unixData {
ps := newTestDefaultPathSpec("workingDir", d.workingDir)
expected := ps.AbsPathify(d.inPath)
if d.expected != expected {
t.Errorf("Test %d failed. Expected %q but got %q", i, d.expected, expected)
}
}
}
}
func TestExtractAndGroupRootPaths(t *testing.T) {
in := []string{
filepath.FromSlash("/a/b/c/d"),
filepath.FromSlash("/a/b/c/e"),
filepath.FromSlash("/a/b/e/f"),
filepath.FromSlash("/a/b"),
filepath.FromSlash("/a/b/c/b/g"),
filepath.FromSlash("/c/d/e"),
}
inCopy := make([]string, len(in))
copy(inCopy, in)
result := ExtractAndGroupRootPaths(in)
c := qt.New(t)
c.Assert(fmt.Sprint(result), qt.Equals, filepath.FromSlash("[/a/b/{c,e} /c/d/e]"))
// Make sure the original is preserved
c.Assert(in, qt.DeepEquals, inCopy)
}
// TestExtractRootPaths checks that the first path segment is extracted from
// each input, regardless of leading/trailing/doubled slashes.
func TestExtractRootPaths(t *testing.T) {
	cases := []struct {
		input    []string
		expected []string
	}{{
		[]string{
			filepath.FromSlash("a/b"), filepath.FromSlash("a/b/c/"), "b",
			filepath.FromSlash("/c/d"), filepath.FromSlash("d/"), filepath.FromSlash("//e//"),
		},
		[]string{"a", "a", "b", "c", "d", "e"},
	}}

	for _, tc := range cases {
		got := ExtractRootPaths(tc.input)
		if !reflect.DeepEqual(got, tc.expected) {
			t.Errorf("Expected %#v, got %#v\n", tc.expected, got)
		}
	}
}
// TestFindCWD is currently a no-op: the case table below is intentionally
// empty (see the inline comments), so the assertion loop never executes.
func TestFindCWD(t *testing.T) {
	type test struct {
		expectedDir string
		expectedErr error
	}

	// cwd, _ := os.Getwd()
	data := []test{
		//{cwd, nil},
		// Commenting this out. It doesn't work properly.
		// There's a good reason why we don't use os.Getwd(), it doesn't actually work the way we want it to.
		// I really don't know a better way to test this function. - SPF 2014.11.04
	}

	for i, d := range data {
		dir, err := FindCWD()

		if d.expectedDir != dir {
			t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedDir, dir)
		}
		if d.expectedErr != err {
			t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedErr, err)
		}
	}
}
// TestSafeWriteToDisk checks that SafeWriteToDisk refuses to overwrite an
// existing file (returning an "already exists" error) and writes the
// payload verbatim to a brand-new path.
func TestSafeWriteToDisk(t *testing.T) {
	emptyFile, _ := createZeroSizedFileInTempDir()
	defer deleteFileInTempDir(emptyFile)
	tmpDir, _ := createEmptyTempDir()
	defer deleteTempDir(tmpDir)

	randomString := "This is a random string!"
	reader := strings.NewReader(randomString)

	fileExists := fmt.Errorf("%v already exists", emptyFile.Name())

	type test struct {
		filename    string
		expectedErr error
	}

	now := time.Now().Unix()
	nowStr := strconv.FormatInt(now, 10)
	data := []test{
		{emptyFile.Name(), fileExists},
		{tmpDir + "/" + nowStr, nil},
	}

	for i, d := range data {
		e := SafeWriteToDisk(d.filename, reader, new(afero.OsFs))
		if d.expectedErr != nil {
			// BUG FIX: guard against a nil error before calling e.Error().
			// Previously, if SafeWriteToDisk unexpectedly succeeded here the
			// test would panic with a nil pointer dereference instead of
			// reporting a clean failure.
			if e == nil {
				t.Errorf("Test %d failed. Expected error %q but got nil", i, d.expectedErr.Error())
			} else if d.expectedErr.Error() != e.Error() {
				t.Errorf("Test %d failed. Expected error %q but got %q", i, d.expectedErr.Error(), e.Error())
			}
		} else {
			if d.expectedErr != e {
				t.Errorf("Test %d failed. Expected %q but got %q", i, d.expectedErr, e)
			}
			contents, _ := ioutil.ReadFile(d.filename)
			if randomString != string(contents) {
				t.Errorf("Test %d failed. Expected contents %q but got %q", i, randomString, string(contents))
			}
		}
		// Rewind so the next case writes the full payload again.
		reader.Seek(0, 0)
	}
}
// TestWriteToDisk exercises WriteToDisk against an already-existing file and
// a brand-new path, verifying the payload round-trips in both cases.
func TestWriteToDisk(t *testing.T) {
	existingFile, _ := createZeroSizedFileInTempDir()
	defer deleteFileInTempDir(existingFile)
	scratchDir, _ := createEmptyTempDir()
	defer deleteTempDir(scratchDir)

	payload := "This is a random string!"
	reader := strings.NewReader(payload)

	type test struct {
		filename    string
		expectedErr error
	}

	// Use a timestamp so the fresh-path case never collides between runs.
	suffix := strconv.FormatInt(time.Now().Unix(), 10)
	cases := []test{
		{existingFile.Name(), nil},
		{scratchDir + "/" + suffix, nil},
	}

	for i, tc := range cases {
		err := WriteToDisk(tc.filename, reader, new(afero.OsFs))
		if tc.expectedErr != err {
			t.Errorf("Test %d failed. WriteToDisk Error Expected %q but got %q", i, tc.expectedErr, err)
		}
		contents, err := ioutil.ReadFile(tc.filename)
		if err != nil {
			t.Errorf("Test %d failed. Could not read file %s. Reason: %s\n", i, tc.filename, err)
		}
		if payload != string(contents) {
			t.Errorf("Test %d failed. Expected contents %q but got %q", i, payload, string(contents))
		}
		// Rewind so the next case writes the full payload again.
		reader.Seek(0, 0)
	}
}
func TestGetTempDir(t *testing.T) {
dir := os.TempDir()
if FilePathSeparator != dir[len(dir)-1:] {
dir = dir + FilePathSeparator
}
testDir := "hugoTestFolder" + FilePathSeparator
tests := []struct {
input string
expected string
}{
{"", dir},
{testDir + " Foo bar ", dir + testDir + " Foo bar " + FilePathSeparator},
{testDir + "Foo.Bar/foo_Bar-Foo", dir + testDir + "Foo.Bar/foo_Bar-Foo" + FilePathSeparator},
{testDir + "fOO,bar:foo%bAR", dir + testDir + "fOObarfoo%bAR" + FilePathSeparator},
{testDir + "fOO,bar:foobAR", dir + testDir + "fOObarfoobAR" + FilePathSeparator},
{testDir + "FOo/BaR.html", dir + testDir + "FOo/BaR.html" + FilePathSeparator},
{testDir + "трям/трям", dir + testDir + "трям/трям" + FilePathSeparator},
{testDir + "은행", dir + testDir + "은행" + FilePathSeparator},
{testDir + "Банковский кассир", dir + testDir + "Банковский кассир" + FilePathSeparator},
}
for _, test := range tests {
output := GetTempDir(test.input, new(afero.MemMapFs))
if output != test.expected {
t.Errorf("Expected %#v, got %#v\n", test.expected, output)
}
}
}<|fim▁end|>
|
for _, test := range tests {
|
<|file_name|>ansible.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python
import bottle

import settings
from controller import admin as admin_controller
from controller import email as email_controller

# DEFECT FIXED: fill-in-the-middle markers had displaced the creation of the
# bottle app (and the first route) out of line; reassembled so the module is
# a valid, runnable script. `application` is the conventional WSGI entry
# point name and aliases the same Bottle instance.
app = application = bottle.Bottle()

# Base url for regular users
app.route(settings.BASEPATH, 'GET', admin_controller.index)
app.route(settings.BASEPATH + '/', 'GET', admin_controller.index)
app.route(
    settings.BASEPATH + '/tasks/<id>',
    'GET',
    admin_controller.read_user_tasks
)
app.route(
    settings.BASEPATH + '/update/<id>',
    'POST',
    admin_controller.update_self
)

# Email handler (mounted as a sub-application under EMAIL_PATH).
email = bottle.Bottle()
app.mount(settings.EMAIL_PATH, email)
email.route('/', 'POST', email_controller.receive_email)
email.route('/', 'GET', email_controller.test_form)
email.route('', 'GET', email_controller.test_form)

# Ansible admin (mounted under ADMIN_PATH).
admin = bottle.Bottle()
app.mount(settings.ADMIN_PATH, admin)
admin.route('/tasks', 'GET', admin_controller.read_tasks)
admin.route('/create', 'POST', admin_controller.create_person)
admin.route('/delete', 'POST', admin_controller.delete_people)
admin.route('/<id>', 'GET', admin_controller.read_person)
admin.route('/<id>', 'POST', admin_controller.update_person)
admin.route('/', 'GET', admin_controller.admin)

# Static files, served from static/<type>/<filename>.
app.route(
    settings.STATIC_PATH + '/<type>/<filename>',
    'GET',
    lambda **kwargs: bottle.static_file(
        filename=kwargs['filename'], root='static/' + kwargs['type']
    )
)

if __name__ == '__main__':
    bottle.run(app=app, reloader=True, **settings.SERVER)
|
<|file_name|>get_comments.py<|end_file_name|><|fim▁begin|>import click
from arrow.cli import pass_context, json_loads<|fim▁hole|>
@click.command('get_comments')
@click.argument("feature_id", type=str)
@click.option(
    "--organism",
    help="Organism Common Name",
    type=str
)
@click.option(
    "--sequence",
    help="Sequence Name",
    type=str
)
@pass_context
@custom_exception
@dict_output
def cli(ctx, feature_id, organism="", sequence=""):
    """Get a feature's comments

    Output:

        A standard apollo feature dictionary ({"features": [{...}]})
    """
    # Delegate to the Apollo annotations client. organism/sequence narrow
    # the lookup when provided; empty string means "not specified".
    return ctx.gi.annotations.get_comments(feature_id, organism=organism, sequence=sequence)
|
from arrow.decorators import custom_exception, dict_output
|
<|file_name|>regular_expression_matching.py<|end_file_name|><|fim▁begin|>class R:
def __init__(self, c):
self.c = c
self.is_star = False
def match(self, c):
return self.c == '.' or self.c == c
class Solution(object):
def isMatch(self, s, p):
"""
:type s: str
:type p: str
:rtype: bool
"""
rs = []
""":type: list[R]"""
for c in p:
if c == '*':<|fim▁hole|> rs[-1].is_star = True
else:
rs.append(R(c))
lr = len(rs)
ls = len(s)
s += '\0'
dp = [[False] * (ls + 1) for _ in range(lr + 1)]
dp[0][0] = True
for i, r in enumerate(rs):
for j in range(ls + 1):
c = s[j - 1]
if r.is_star:
dp[i + 1][j] = dp[i][j]
if j and r.match(c):
dp[i + 1][j] |= dp[i + 1][j - 1]
else:
if j and r.match(c):
dp[i + 1][j] = dp[i][j - 1]
return dp[-1][-1]<|fim▁end|>
| |
<|file_name|>libraries.processor.ts<|end_file_name|><|fim▁begin|>////////////////////////////////////////////////////////////////////////////////////////////////////
/// NOTE: This file is also used in the Script Lab Samples project. ///
/// Please be sure that any changes that you make here are also copied to there. ///
/// See "config/libraires.processor.ts" in https://github.com/OfficeDev/script-lab-samples ///
////////////////////////////////////////////////////////////////////////////////////////////////////
export function processLibraries(snippet: ISnippet) {
let linkReferences: string[] = [];
let scriptReferences: string[] = [];
let officeJS: string = null;
snippet.libraries.split('\n').forEach(processLibrary);
return { linkReferences, scriptReferences, officeJS };
function processLibrary(text: string) {
if (text == null || text.trim() === '') {
return null;
}
text = text.trim();
let isNotScriptOrStyle =
/^#.*|^\/\/.*|^\/\*.*|.*\*\/$.*/im.test(text) ||
/^@types/.test(text) ||
/^dt~/.test(text) ||
/\.d\.ts$/i.test(text);
if (isNotScriptOrStyle) {
return null;
}
let resolvedUrlPath = (/^https?:\/\/|^ftp? :\/\//i.test(text)) ? text : `https://unpkg.com/${text}`;
if (/\.css$/i.test(resolvedUrlPath)) {
return linkReferences.push(resolvedUrlPath);
}
if (/\.ts$|\.js$/i.test(resolvedUrlPath)) {
/*
* Don't add Office.js to the rest of the script references --
* it is special because of how it needs to be *outside* of the iframe,
* whereas the rest of the script references need to be inside the iframe.
*/
if (/(?:office|office.debug).js$/.test(resolvedUrlPath.toLowerCase())) {
officeJS = resolvedUrlPath;
return null;<|fim▁hole|> }
return scriptReferences.push(resolvedUrlPath);
}
return scriptReferences.push(resolvedUrlPath);
}
}<|fim▁end|>
| |
<|file_name|>lexer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Collection of raw lexer test cases and class constructor.
"""
from __future__ import unicode_literals
import textwrap
# str.translate() table that swaps single quotes (39) and double quotes (34);
# used to derive the double-quoted variants of the error-message test cases.
swapquotes = {
    39: 34, 34: 39,
    # NOTE: backtick (96) maps to a single quote -- the interim error
    # messages quote the offending text with backticks.
    96: 39,
}
<|fim▁hole|># from https://bitbucket.org/ned/jslex
es5_cases = [
(
# Identifiers
'identifiers_ascii',
('i my_variable_name c17 _dummy $str $ _ CamelCase class2type',
['ID i', 'ID my_variable_name', 'ID c17', 'ID _dummy',
'ID $str', 'ID $', 'ID _', 'ID CamelCase', 'ID class2type']
),
), (
'identifiers_unicode',
(u'\u03c0 \u03c0_tail var\ua67c',
[u'ID \u03c0', u'ID \u03c0_tail', u'ID var\ua67c']),
), (
# https://github.com/rspivak/slimit/issues/2
'slimit_issue_2',
('nullify truelie falsepositive',
['ID nullify', 'ID truelie', 'ID falsepositive']),
), (
'keywords_break',
('break Break BREAK', ['BREAK break', 'ID Break', 'ID BREAK']),
), (
# Literals
'literals',
('null true false Null True False',
['NULL null', 'TRUE true', 'FALSE false',
'ID Null', 'ID True', 'ID False']
),
), (
# Punctuators
'punctuators_simple',
('a /= b', ['ID a', 'DIVEQUAL /=', 'ID b']),
), (
'punctuators_various_equality',
(('= == != === !== < > <= >= || && ++ -- << >> '
'>>> += -= *= <<= >>= >>>= &= %= ^= |='),
['EQ =', 'EQEQ ==', 'NE !=', 'STREQ ===', 'STRNEQ !==', 'LT <',
'GT >', 'LE <=', 'GE >=', 'OR ||', 'AND &&', 'PLUSPLUS ++',
'MINUSMINUS --', 'LSHIFT <<', 'RSHIFT >>', 'URSHIFT >>>',
'PLUSEQUAL +=', 'MINUSEQUAL -=', 'MULTEQUAL *=', 'LSHIFTEQUAL <<=',
'RSHIFTEQUAL >>=', 'URSHIFTEQUAL >>>=', 'ANDEQUAL &=', 'MODEQUAL %=',
'XOREQUAL ^=', 'OREQUAL |=',
]
),
), (
'punctuators_various_others',
('. , ; : + - * % & | ^ ~ ? ! ( ) { } [ ]',
['PERIOD .', 'COMMA ,', 'SEMI ;', 'COLON :', 'PLUS +', 'MINUS -',
'MULT *', 'MOD %', 'BAND &', 'BOR |', 'BXOR ^', 'BNOT ~',
'CONDOP ?', 'NOT !', 'LPAREN (', 'RPAREN )', 'LBRACE {', 'RBRACE }',
'LBRACKET [', 'RBRACKET ]']
),
), (
'division_simple',
('a / b', ['ID a', 'DIV /', 'ID b']),
), (
'numbers',
(('3 3.3 0 0. 0.0 0.001 010 3.e2 3.e-2 3.e+2 3E2 3E+2 3E-2 '
'0.5e2 0.5e+2 0.5e-2 33 128.15 0x001 0X12ABCDEF 0xabcdef'),
['NUMBER 3', 'NUMBER 3.3', 'NUMBER 0', 'NUMBER 0.', 'NUMBER 0.0',
'NUMBER 0.001', 'NUMBER 010', 'NUMBER 3.e2', 'NUMBER 3.e-2',
'NUMBER 3.e+2', 'NUMBER 3E2', 'NUMBER 3E+2', 'NUMBER 3E-2',
'NUMBER 0.5e2', 'NUMBER 0.5e+2', 'NUMBER 0.5e-2', 'NUMBER 33',
'NUMBER 128.15', 'NUMBER 0x001', 'NUMBER 0X12ABCDEF',
'NUMBER 0xabcdef']
),
), (
'strings_simple_quote',
(""" '"' """, ["""STRING '"'"""]),
), (
'strings_escape_quote_tab',
(r'''"foo" 'foo' "x\";" 'x\';' "foo\tbar"''',
['STRING "foo"', """STRING 'foo'""", r'STRING "x\";"',
r"STRING 'x\';'", r'STRING "foo\tbar"']
),
), (
'strings_escape_ascii',
(r"""'\x55' "\x12ABCDEF" '!@#$%^&*()_+{}[]\";?'""",
[r"STRING '\x55'", r'STRING "\x12ABCDEF"',
r"STRING '!@#$%^&*()_+{}[]\";?'"]
),
), (
'strings_escape_unicode',
(r"""'\u0001' "\uFCEF" 'a\\\b\n'""",
[r"STRING '\u0001'", r'STRING "\uFCEF"', r"STRING 'a\\\b\n'"]
),
), (
'strings_unicode',
(u'"тест строки\\""', [u'STRING "тест строки\\""']),
), (
'strings_escape_octal',
(r"""'\251'""", [r"""STRING '\251'"""]),
), (
# Bug - https://github.com/rspivak/slimit/issues/5
'slimit_issue_5',
(r"var tagRegExp = new RegExp('<(\/*)(FooBar)', 'gi');",
['VAR var', 'ID tagRegExp', 'EQ =',
'NEW new', 'ID RegExp', 'LPAREN (',
r"STRING '<(\/*)(FooBar)'", 'COMMA ,', "STRING 'gi'",
'RPAREN )', 'SEMI ;']),
), (
# same as above but inside double quotes
'slimit_issue_5_double_quote',
(r'"<(\/*)(FooBar)"', [r'STRING "<(\/*)(FooBar)"']),
), (
# multiline string (string written across multiple lines
# of code) https://github.com/rspivak/slimit/issues/24
'slimit_issue_24_multi_line_code_double',
("var a = 'hello \\\n world'",
['VAR var', 'ID a', 'EQ =', "STRING 'hello \\\n world'"]),
), (
'slimit_issue_24_multi_line_code_single',
('var a = "hello \\\r world"',
['VAR var', 'ID a', 'EQ =', 'STRING "hello \\\r world"']),
), (
# regex
'regex_1',
(r'a=/a*/,1', ['ID a', 'EQ =', 'REGEX /a*/', 'COMMA ,', 'NUMBER 1']),
), (
'regex_2',
(r'a=/a*[^/]+/,1',
['ID a', 'EQ =', 'REGEX /a*[^/]+/', 'COMMA ,', 'NUMBER 1']
),
), (
'regex_3',
(r'a=/a*\[^/,1',
['ID a', 'EQ =', r'REGEX /a*\[^/', 'COMMA ,', 'NUMBER 1']
),
), (
'regex_4',
(r'a=/\//,1', ['ID a', 'EQ =', r'REGEX /\//', 'COMMA ,', 'NUMBER 1']),
), (
# not a regex, just a division
# https://github.com/rspivak/slimit/issues/6
'slimit_issue_6_not_regex_but_division',
(r'x = this / y;',
['ID x', 'EQ =', 'THIS this', r'DIV /', r'ID y', r'SEMI ;']),
), (
'regex_mozilla_example_1',
# next two are from
# http://www.mozilla.org/js/language/js20-2002-04/rationale/syntax.html#regular-expressions
('for (var x = a in foo && "</x>" || mot ? z:/x:3;x<5;y</g/i) '
'{xyz(x++);}',
["FOR for", "LPAREN (", "VAR var", "ID x", "EQ =", "ID a", "IN in",
"ID foo", "AND &&", 'STRING "</x>"', "OR ||", "ID mot", "CONDOP ?",
"ID z", "COLON :", "REGEX /x:3;x<5;y</g", "DIV /", "ID i",
"RPAREN )", "LBRACE {", "ID xyz", "LPAREN (", "ID x", "PLUSPLUS ++",
"RPAREN )", "SEMI ;", "RBRACE }"]
),
), (
'regex_mozilla_example_2',
('for (var x = a in foo && "</x>" || mot ? z/x:3;x<5;y</g/i) '
'{xyz(x++);}',
["FOR for", "LPAREN (", "VAR var", "ID x", "EQ =", "ID a", "IN in",
"ID foo", "AND &&", 'STRING "</x>"', "OR ||", "ID mot", "CONDOP ?",
"ID z", "DIV /", "ID x", "COLON :", "NUMBER 3", "SEMI ;", "ID x",
"LT <", "NUMBER 5", "SEMI ;", "ID y", "LT <", "REGEX /g/i",
"RPAREN )", "LBRACE {", "ID xyz", "LPAREN (", "ID x", "PLUSPLUS ++",
"RPAREN )", "SEMI ;", "RBRACE }"]
),
), (
'regex_illegal_1',
# Various "illegal" regexes that are valid according to the std.
(r"""/????/, /++++/, /[----]/ """,
['REGEX /????/', 'COMMA ,',
'REGEX /++++/', 'COMMA ,', 'REGEX /[----]/']
),
), (
'regex_stress_test_1',
# Stress cases from
# http://stackoverflow.com/questions/5533925/
# what-javascript-constructs-does-jslex-incorrectly-lex/5573409#5573409
(r"""/\[/""", [r"""REGEX /\[/"""]),
), (
'regex_stress_test_2',
(r"""/[i]/""", [r"""REGEX /[i]/"""]),
), (
'regex_stress_test_3',
(r"""/[\]]/""", [r"""REGEX /[\]]/"""]),
), (
'regex_stress_test_4',
(r"""/a[\]]/""", [r"""REGEX /a[\]]/"""]),
), (
'regex_stress_test_5',
(r"""/a[\]]b/""", [r"""REGEX /a[\]]b/"""]),
), (
'regex_stress_test_6',
(r"""/[\]/]/gi""", [r"""REGEX /[\]/]/gi"""]),
), (
'regex_stress_test_7',
(r"""/\[[^\]]+\]/gi""", [r"""REGEX /\[[^\]]+\]/gi"""]),
), (
'regex_stress_test_8',
(r"""
rexl.re = {
NAME: /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/,
UNQUOTED_LITERAL: /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
QUOTED_LITERAL: /^'(?:[^']|'')*'/,
NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
};
""", [
"ID rexl", "PERIOD .", "ID re", "EQ =", "LBRACE {",
"ID NAME", "COLON :",
r"""REGEX /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/""", "COMMA ,",
"ID UNQUOTED_LITERAL", "COLON :",
r"""REGEX /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
"COMMA ,", "ID QUOTED_LITERAL", "COLON :",
r"""REGEX /^'(?:[^']|'')*'/""", "COMMA ,", "ID NUMERIC_LITERAL",
"COLON :",
r"""REGEX /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "COMMA ,",
"ID SYMBOL", "COLON :",
r"""REGEX /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""",
"RBRACE }", "SEMI ;"]),
), (
'regex_stress_test_9',
(r"""
rexl.re = {
NAME: /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/,
UNQUOTED_LITERAL: /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
QUOTED_LITERAL: /^'(?:[^']|'')*'/,
NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
};
str = '"';
""", [
"ID rexl", "PERIOD .", "ID re", "EQ =", "LBRACE {",
"ID NAME", "COLON :", r"""REGEX /^(?!\d)(?:\w)+|^"(?:[^"]|"")+"/""",
"COMMA ,", "ID UNQUOTED_LITERAL", "COLON :",
r"""REGEX /^@(?:(?!\d)(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
"COMMA ,", "ID QUOTED_LITERAL", "COLON :",
r"""REGEX /^'(?:[^']|'')*'/""", "COMMA ,",
"ID NUMERIC_LITERAL", "COLON :",
r"""REGEX /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "COMMA ,",
"ID SYMBOL", "COLON :",
r"""REGEX /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""",
"RBRACE }", "SEMI ;",
"ID str", "EQ =", """STRING '"'""", "SEMI ;",
]),
), (
'regex_stress_test_10',
(r""" this._js = "e.str(\"" + this.value.replace(/\\/g, "\\\\").replace(/"/g, "\\\"") + "\")"; """,
["THIS this", "PERIOD .", "ID _js", "EQ =",
r'''STRING "e.str(\""''', "PLUS +", "THIS this", "PERIOD .",
"ID value", "PERIOD .", "ID replace", "LPAREN (", r"REGEX /\\/g",
"COMMA ,", r'STRING "\\\\"', "RPAREN )", "PERIOD .", "ID replace",
"LPAREN (", r'REGEX /"/g', "COMMA ,", r'STRING "\\\""', "RPAREN )",
"PLUS +", r'STRING "\")"', "SEMI ;"]),
), (
'regex_division_check',
('a = /a/ / /b/',
['ID a', 'EQ =', 'REGEX /a/', 'DIV /', 'REGEX /b/']),
), (
'regex_after_plus_brace',
('+{}/a/g',
['PLUS +', 'LBRACE {', 'RBRACE }', 'DIV /', 'ID a', 'DIV /', 'ID g']),
# The following pathological cases cannot be tested using the
# lexer alone, as the rules can only be addressed in conjunction
# with a parser
#
# 'regex_after_brace',
# ('{}/a/g',
# ['LBRACE {', 'RBRACE }', 'REGEX /a/g']),
# 'regex_after_if_brace',
# ('if (a) { } /a/.test(a)',
# ['IF if', 'LPAREN (', 'ID a', 'RPAREN )', 'LBRACE {', 'RBRACE }',
# 'REGEX /a/', "PERIOD .", "ID test", 'LPAREN (', 'ID a',
# 'RPAREN )']),
), (
'regex_case',
('switch(0){case /a/:}',
['SWITCH switch', 'LPAREN (', 'NUMBER 0', 'RPAREN )', 'LBRACE {',
'CASE case', 'REGEX /a/', 'COLON :', 'RBRACE }']),
), (
'div_after_valid_statement_function_call',
('if(){} f(a) / f(b)',
['IF if', 'LPAREN (', 'RPAREN )', 'LBRACE {', 'RBRACE }',
'ID f', 'LPAREN (', 'ID a', 'RPAREN )', 'DIV /',
'ID f', 'LPAREN (', 'ID b', 'RPAREN )']),
), (
'for_regex_slimit_issue_54',
('for (;;) /r/;',
['FOR for', 'LPAREN (', 'SEMI ;', 'SEMI ;', 'RPAREN )',
'REGEX /r/', 'SEMI ;']),
), (
'for_regex_slimit_issue_54_not_break_division',
('for (;;) { x / y }',
['FOR for', 'LPAREN (', 'SEMI ;', 'SEMI ;', 'RPAREN )',
'LBRACE {', 'ID x', 'DIV /', 'ID y', 'RBRACE }']),
), (
'for_regex_slimit_issue_54_bracket_accessor_check',
('s = {a:1} + s[2] / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'ID s', 'LBRACKET [', 'NUMBER 2', 'RBRACKET ]',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_function_parentheses_check',
('s = {a:1} + f(2) / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'ID f', 'LPAREN (', 'NUMBER 2', 'RPAREN )',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_math_parentheses_check',
('s = {a:1} + (2) / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'LPAREN (', 'NUMBER 2', 'RPAREN )',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_math_bracket_check',
('s = {a:1} + [2] / 1',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 1',
'RBRACE }', 'PLUS +', 'LBRACKET [', 'NUMBER 2', 'RBRACKET ]',
'DIV /', 'NUMBER 1'])
), (
'for_regex_slimit_issue_54_math_braces_check',
('s = {a:2} / 166 / 9',
['ID s', 'EQ =', 'LBRACE {', 'ID a', 'COLON :', 'NUMBER 2',
'RBRACE }', 'DIV /', 'NUMBER 166', 'DIV /', 'NUMBER 9'])
), (
'do_while_regex',
('do {} while (0) /s/',
['DO do', 'LBRACE {', 'RBRACE }', 'WHILE while', 'LPAREN (',
'NUMBER 0', 'RPAREN )', 'REGEX /s/'])
), (
'if_regex',
('if (thing) /s/',
['IF if', 'LPAREN (', 'ID thing', 'RPAREN )', 'REGEX /s/'])
), (
'identifier_math',
('f (v) /s/g',
['ID f', 'LPAREN (', 'ID v', 'RPAREN )', 'DIV /', 'ID s', 'DIV /',
'ID g'])
), (
'section_7',
("a = b\n/hi/s",
['ID a', 'EQ =', 'ID b', 'DIV /', 'ID hi', 'DIV /', 'ID s'])
), (
'section_7_extras',
("a = b\n\n\n/hi/s",
['ID a', 'EQ =', 'ID b', 'DIV /', 'ID hi', 'DIV /', 'ID s'])
), (
'slimit_issue_39_and_57',
(r"f(a, 'hi\01').split('\1').split('\0');",
['ID f', 'LPAREN (', 'ID a', 'COMMA ,', r"STRING 'hi\01'", 'RPAREN )',
'PERIOD .', 'ID split', 'LPAREN (', r"STRING '\1'", 'RPAREN )',
'PERIOD .', 'ID split', 'LPAREN (', r"STRING '\0'", 'RPAREN )',
'SEMI ;'])
), (
'section_7_8_4_string_literal_with_7_3_conformance',
("'<LF>\\\n<CR>\\\r<LS>\\\u2028<PS>\\\u2029<CR><LF>\\\r\n'",
["STRING '<LF>\\\n<CR>\\\r<LS>\\\u2028<PS>\\\u2029<CR><LF>\\\r\n'"])
), (
# okay this is getting ridiculous how bad ECMA is.
'section_7_comments',
("a = b\n/** **/\n\n/hi/s",
['ID a', 'EQ =', 'ID b', 'DIV /', 'ID hi', 'DIV /', 'ID s'])
),
]
# various string related syntax errors
# Each case is (name, JS source to lex, expected error message).
es5_error_cases_str = [
    (
        'unterminated_string_eof',
        "var foo = 'test",
        'Unterminated string literal "\'test" at 1:11',
    ), (
        'naked_line_separator_in_string',
        # BUG FIX: the source read "vaf foo = ..." (typo for "var"). The
        # lexer error only depends on the string literal, and "var" has the
        # same length, so the expected 1:11 position is unchanged.
        "var foo = 'test\u2028foo'",
        'Unterminated string literal "\'test" at 1:11',
    ), (
        # NOTE(review): this case is named "line_feed" but uses U+2029
        # (paragraph separator), while the case above uses U+2028 (line
        # separator) -- the names look swapped; confirm intent.
        'naked_line_feed_in_string',
        "var foo = 'test\u2029foo'",
        'Unterminated string literal "\'test" at 1:11',
    ), (
        'naked_crnl_in_string',
        "var foo = 'test\r\nfoo'",
        'Unterminated string literal "\'test" at 1:11',
    ), (
        'naked_cr_in_string',
        "var foo = 'test\\\n\rfoo'",
        # FIXME Note that the \\ is double escaped
        'Unterminated string literal "\'test\\\\" at 1:11',
    ), (
        'invalid_hex_sequence',
        "var foo = 'fail\\x1'",
        # backticks are converted to single quotes
        "Invalid hexadecimal escape sequence `\\x1` at 1:16",
    ), (
        'invalid_unicode_sequence',
        "var foo = 'fail\\u12'",
        "Invalid unicode escape sequence `\\u12` at 1:16",
    ), (
        'invalid_hex_sequence_multiline',
        "var foo = 'foobar\\\r\nfail\\x1'",
        # backticks are converted to single quotes
        "Invalid hexadecimal escape sequence `\\x1` at 2:5",
    ), (
        'invalid_unicode_sequence_multiline',
        "var foo = 'foobar\\\nfail\\u12'",
        "Invalid unicode escape sequence `\\u12` at 2:5",
    ), (
        'long_invalid_string_truncated',
        "var foo = '1234567890abcdetruncated",
        'Unterminated string literal "\'1234567890abcde..." at 1:11',
    )
]
es5_comment_cases = [
(
'line_comment_whole',
('//comment\na = 5;\n',
['LINE_COMMENT //comment', 'ID a', 'EQ =', 'NUMBER 5', 'SEMI ;']),
), (
'line_comment_trail',
('a//comment', ['ID a', 'LINE_COMMENT //comment']),
), (
'block_comment_single',
('/***/b/=3//line',
['BLOCK_COMMENT /***/', 'ID b', 'DIVEQUAL /=',
'NUMBER 3', 'LINE_COMMENT //line']),
), (
'block_comment_multiline',
('/*\n * Copyright LGPL 2011 \n*/\na = 1;',
['BLOCK_COMMENT /*\n * Copyright LGPL 2011 \n*/',
'ID a', 'EQ =', 'NUMBER 1', 'SEMI ;']),
), (
# this will replace the standard test cases
'section_7_comments',
("a = b\n/** **/\n\n/hi/s",
['ID a', 'EQ =', 'ID b', 'BLOCK_COMMENT /** **/', 'DIV /', 'ID hi',
'DIV /', 'ID s'])
)
]
# Full suite with comment handling: drop the final entry of es5_cases (the
# comment-stripping 'section_7_comments' case) and append the
# comment-preserving variants, which include their own replacement for it.
es5_all_cases = es5_cases[:-1] + es5_comment_cases
# Double quote version: swap ' and " in both the input source and the
# expected message via the swapquotes translation table.
es5_error_cases_str_dq = [
    (n, arg.translate(swapquotes), msg.translate(swapquotes))
    for n, arg, msg in es5_error_cases_str
]
# Single quote version: input unchanged; only normalize backticks (96) in
# the expected message to single quotes.
es5_error_cases_str_sq = [
    (n, arg, msg.translate({96: 39}))
    for n, arg, msg in es5_error_cases_str
]
es5_pos_cases = [
(
'single_line',
"""
var foo = bar; // line 1
""", ([
'var 1:0', 'foo 1:4', '= 1:8', 'bar 1:10', '; 1:13'
], [
'var 1:1', 'foo 1:5', '= 1:9', 'bar 1:11', '; 1:14',
])
), (
'multi_line',
"""
var foo = bar; // line 1
var bar = baz; // line 4
""", ([
'var 1:0', 'foo 1:4', '= 1:8', 'bar 1:10', '; 1:13',
'var 4:28', 'bar 4:32', '= 4:36', 'baz 4:38', '; 4:41',
], [
'var 1:1', 'foo 1:5', '= 1:9', 'bar 1:11', '; 1:14',
'var 4:1', 'bar 4:5', '= 4:9', 'baz 4:11', '; 4:14',
])
), (
'inline_comment',
"""
// this is a comment // line 1
var foo = bar; // line 2
// another one // line 4
var bar = baz; // line 5
""", ([
'var 2:32', 'foo 2:36', '= 2:40', 'bar 2:42', '; 2:45',
'var 5:85', 'bar 5:89', '= 5:93', 'baz 5:95', '; 5:98',
], [
'var 2:1', 'foo 2:5', '= 2:9', 'bar 2:11', '; 2:14',
'var 5:1', 'bar 5:5', '= 5:9', 'baz 5:11', '; 5:14',
])
), (
'block_comment',
"""
/*
This is a block comment
*/
var foo = bar; // line 4
/* block single line */ // line 6
var bar = baz; // line 7
/* oops */bar(); // line 9
foo();
""", ([
'var 4:30', 'foo 4:34', '= 4:38', 'bar 4:40', '; 4:43',
'var 7:91', 'bar 7:95', '= 7:99', 'baz 7:101', '; 7:104',
'bar 9:128', '( 9:131', ') 9:132', '; 9:133',
'foo 11:149', '( 11:152', ') 11:153', '; 11:154',
], [
'var 4:1', 'foo 4:5', '= 4:9', 'bar 4:11', '; 4:14',
'var 7:1', 'bar 7:5', '= 7:9', 'baz 7:11', '; 7:14',
'bar 9:11', '( 9:14', ') 9:15', '; 9:16',
'foo 11:3', '( 11:6', ') 11:7', '; 11:8',
])
), (
'syntax_error_heading_comma',
"""
var a;
, b;
""", ([
'var 1:0', 'a 1:4', '; 1:5',
', 2:7', 'b 2:9', '; 2:10'
], [
'var 1:1', 'a 1:5', '; 1:6',
', 2:1', 'b 2:3', '; 2:4'
])
)
]
def run_lexer(value, lexer_cls):
    """Tokenize *value* with a fresh *lexer_cls* instance.

    Returns one ``'<TYPE> <value>'`` string per token, in lexing order.
    """
    lexer = lexer_cls()
    lexer.input(value)
    rendered = []
    for token in lexer:
        rendered.append('%s %s' % (token.type, token.value))
    return rendered
def run_lexer_pos(value, lexer_cls):
    """Tokenize the dedented, stripped *value* and report token positions.

    Returns a 2-tuple of string lists: the first renders each token as
    ``'<value> <lineno>:<lexpos>'`` (absolute offset into the input), the
    second as ``'<value> <lineno>:<colno>'`` (column within the line).
    """
    lexer = lexer_cls()
    lexer.input(textwrap.dedent(value).strip())
    by_lexpos = []
    by_colno = []
    for token in lexer:
        by_lexpos.append('%s %d:%d' % (token.value, token.lineno, token.lexpos))
        by_colno.append('%s %d:%d' % (token.value, token.lineno, token.colno))
    return (by_lexpos, by_colno)
|
# The structure and some test cases are taken
|
<|file_name|>system_interceptors.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Implementations of the Asan system interceptors. These are simple
// pass-throughs to the original functions for the purpose of Asan
// compatibility.
#include "windows.h"
extern "C" {

// Every asan_* function below is a thin pass-through to the identically
// named Win32 API; no checking is performed here (see the file header:
// these exist purely for Asan interception compatibility).

// Pass-through for ::ReadFile.
BOOL WINAPI asan_ReadFile(HANDLE hFile,
                          LPVOID lpBuffer,
                          DWORD nNumberOfBytesToRead,
                          LPDWORD lpNumberOfBytesRead,
                          LPOVERLAPPED lpOverlapped) {
  return ::ReadFile(hFile, lpBuffer, nNumberOfBytesToRead, lpNumberOfBytesRead,
                    lpOverlapped);
}

// Pass-through for ::ReadFileEx (asynchronous read with completion routine).
BOOL WINAPI asan_ReadFileEx(
    HANDLE hFile,
    LPVOID lpBuffer,
    DWORD nNumberOfBytesToRead,
    LPOVERLAPPED lpOverlapped,
    LPOVERLAPPED_COMPLETION_ROUTINE lpCompletionRoutine) {
  return ::ReadFileEx(hFile, lpBuffer, nNumberOfBytesToRead, lpOverlapped,
                      lpCompletionRoutine);
}

// Pass-through for ::WriteFile.
BOOL WINAPI asan_WriteFile(HANDLE hFile,
                           LPCVOID lpBuffer,
                           DWORD nNumberOfBytesToWrite,
                           LPDWORD lpNumberOfBytesWritten,
                           LPOVERLAPPED lpOverlapped) {
  return ::WriteFile(hFile, lpBuffer, nNumberOfBytesToWrite,
                     lpNumberOfBytesWritten, lpOverlapped);
}

// Pass-through for ::WriteFileEx (asynchronous write with completion routine).
BOOL WINAPI asan_WriteFileEx(
    HANDLE hFile,
    LPCVOID lpBuffer,
    DWORD nNumberOfBytesToWrite,
    LPOVERLAPPED lpOverlapped,
    LPOVERLAPPED_COMPLETION_ROUTINE lpCompletionRoutine) {
  return ::WriteFileEx(hFile, lpBuffer, nNumberOfBytesToWrite, lpOverlapped,
                       lpCompletionRoutine);
}

// Pass-throughs for the Interlocked* atomic primitives.
long WINAPI asan_InterlockedCompareExchange(long volatile* Destination,
                                            long Exchange,
                                            long Comperand) {
  return ::InterlockedCompareExchange(Destination, Exchange, Comperand);
}

long WINAPI asan_InterlockedIncrement(long* lpAddend) {
  return ::InterlockedIncrement(lpAddend);
}

long WINAPI asan_InterlockedDecrement(long* lpAddend) {
  return ::InterlockedDecrement(lpAddend);
}

long WINAPI asan_InterlockedExchange(long volatile* Target, long Value) {
  return ::InterlockedExchange(Target, Value);
}

long WINAPI asan_InterlockedExchangeAdd(long volatile* Addend, long Value) {
  return ::InterlockedExchangeAdd(Addend, Value);
}

}  // extern "C"
| |
<|file_name|>test_unittest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
test.
"""
import torndb
import unittest<|fim▁hole|>
class PrimesTestCase(unittest.TestCase):
    """Tests for `primes.py`."""

    def test_is_five_prime(self):
        """Assert that ``case.is_prime`` recognizes a prime number."""
        # NOTE(review): the method name says "five" but the call checks 2;
        # presumably one of the two is stale -- confirm intent.
        self.assertTrue(case.is_prime(2), msg='断言出错')


class WidgetTestCase(unittest.TestCase):
    # Opens a torndb connection before each test and closes it afterwards.
    def setUp(self):
        self.widget = 'widget'
        self.db = torndb.Connection()

    def tearDown(self):
        self.db.close()


if __name__ == '__main__':
    unittest.main()
|
from ..views import case
|
<|file_name|>auth.go<|end_file_name|><|fim▁begin|>// Package auth handles user management: creation, deletion, and authentication.
package auth
import (
"encoding/json"
deis "github.com/deis/controller-sdk-go"
"github.com/deis/controller-sdk-go/api"
)
// Register a new user with the controller.
// If controller registration is set to administratiors only, a valid administrative
// user token is required in the client.
func Register(c *deis.Client, username, password, email string) error {
user := api.AuthRegisterRequest{Username: username, Password: password, Email: email}
body, err := json.Marshal(user)
<|fim▁hole|> res, err := c.Request("POST", "/v2/auth/register/", body)
if err == nil {
res.Body.Close()
}
return err
}
// Login to the controller and get a token
func Login(c *deis.Client, username, password string) (string, error) {
	user := api.AuthLoginRequest{Username: username, Password: password}
	reqBody, err := json.Marshal(user)
	if err != nil {
		return "", err
	}

	res, reqErr := c.Request("POST", "/v2/auth/login/", reqBody)
	// An API-version mismatch is tolerated: the response is still decoded
	// and the mismatch error is returned alongside the token so callers can
	// decide whether to warn.
	if reqErr != nil && !deis.IsErrAPIMismatch(reqErr) {
		return "", reqErr
	}
	defer res.Body.Close()

	token := api.AuthLoginResponse{}
	if err = json.NewDecoder(res.Body).Decode(&token); err != nil {
		return "", err
	}

	return token.Token, reqErr
}
// Delete deletes a user.
// When username is empty, no request body is sent; presumably the
// controller then cancels the authenticated user's own account --
// TODO(review): confirm against the controller API.
func Delete(c *deis.Client, username string) error {
	var body []byte
	var err error

	if username != "" {
		req := api.AuthCancelRequest{Username: username}
		body, err = json.Marshal(req)
		if err != nil {
			return err
		}
	}

	res, err := c.Request("DELETE", "/v2/auth/cancel/", body)
	if err == nil {
		// No useful payload on success; just release the connection.
		res.Body.Close()
	}
	return err
}
// Regenerate auth tokens. This invalidates existing tokens, and if targeting a specific user
// returns a new token.
//
// If username is an empty string and all is false, this regenerates the
// client user's token and will return a new token. Make sure to update the client token
// with this new token to avoid authentication errors.
//
// If username is set and all is false, this will regenerate that user's token
// and return a new token. If not targeting yourself, regenerate requires administrative privileges.
//
// If all is true, this will regenerate every user's token. This requires administrative privileges.
func Regenerate(c *deis.Client, username string, all bool) (string, error) {
	var reqBody []byte
	var err error

	// Empty username with all == false leaves reqBody nil: no body is sent
	// and the controller regenerates the calling user's own token.
	if all {
		reqBody, err = json.Marshal(api.AuthRegenerateRequest{All: all})
	} else if username != "" {
		reqBody, err = json.Marshal(api.AuthRegenerateRequest{Name: username})
	}

	if err != nil {
		return "", err
	}

	res, reqErr := c.Request("POST", "/v2/auth/tokens/", reqBody)
	// Tolerate API-version mismatch: still decode the response and hand the
	// mismatch error back with the token.
	if reqErr != nil && !deis.IsErrAPIMismatch(reqErr) {
		return "", reqErr
	}
	defer res.Body.Close()

	if all {
		// Bulk regeneration returns no token for the caller.
		return "", nil
	}

	token := api.AuthRegenerateResponse{}
	if err = json.NewDecoder(res.Body).Decode(&token); err != nil {
		return "", err
	}

	return token.Token, reqErr
}
// Passwd changes a user's password.
//
// With an empty username, the password of the client's own user is changed
// and the current password must be supplied.
//
// With a username set, the password of that user is changed without requiring
// their current password; this needs administrative privileges.
func Passwd(c *deis.Client, username, password, newPassword string) error {
	change := api.AuthPasswdRequest{Password: password, NewPassword: newPassword}
	if username != "" {
		change.Username = username
	}

	payload, err := json.Marshal(change)
	if err != nil {
		return err
	}

	res, err := c.Request("POST", "/v2/auth/passwd/", payload)
	if err == nil {
		res.Body.Close()
	}
	return err
}
// Whoami retrives the user object for the authenticated user.
func Whoami(c *deis.Client) (api.User, error) {
res, err := c.Request("GET", "/v2/auth/whoami/", nil)
if err != nil {
return api.User{}, err
}
defer res.Body.Close()
resUser := api.User{}
if err = json.NewDecoder(res.Body).Decode(&resUser); err != nil {
return api.User{}, err
}
return resUser, nil
}<|fim▁end|>
|
if err != nil {
return err
}
|
<|file_name|>httpd.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Taboot - Client utility for performing deployments with Func.
# Copyright © 2009, Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
<|fim▁hole|><|fim▁end|>
|
"""TODO: Decide what to do with this file"""
|
<|file_name|>FACEWrapper.java<|end_file_name|><|fim▁begin|>package com.openMap1.mapper.converters;
import java.util.Iterator;
import org.eclipse.emf.common.util.EList;
import org.w3c.dom.Attr;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.openMap1.mapper.ElementDef;
import com.openMap1.mapper.MappedStructure;
import com.openMap1.mapper.core.MapperException;
import com.openMap1.mapper.structures.MapperWrapper;
import com.openMap1.mapper.util.XMLUtil;
import com.openMap1.mapper.util.XSLOutputFile;
import com.openMap1.mapper.writer.TemplateFilter;
public class FACEWrapper extends AbstractMapperWrapper implements MapperWrapper{
public static String FACE_PREFIX = "face";
public static String FACE_URI = "http://schemas.facecode.com/webservices/2010/01/";
//----------------------------------------------------------------------------------------
// Constructor and initialisation from the Ecore model
//----------------------------------------------------------------------------------------
public FACEWrapper(MappedStructure ms, Object spare) throws MapperException
{
super(ms,spare);
}
/**
* @return the file extension of the outer document, with initial '*.'
*/
public String fileExtension() {return ("*.xml");}
/**
* @return the type of document transformed to and from;
* see static constants in class AbstractMapperWrapper.
*/
public int transformType() {return AbstractMapperWrapper.XML_TYPE;}
//----------------------------------------------------------------------------------------
// In-wrapper transform
//----------------------------------------------------------------------------------------
@Override
public Document transformIn(Object incoming) throws MapperException
{
if (!(incoming instanceof Element)) throw new MapperException("Document root is not an Element");
Element mappingRoot = (Element)incoming;
String mappingRootPath = "/GetIndicativeBudget";
inResultDoc = XMLUtil.makeOutDoc();
Element inRoot = scanDocument(mappingRoot, mappingRootPath, AbstractMapperWrapper.IN_TRANSFORM);
inResultDoc.appendChild(inRoot);
return inResultDoc;
}
/**
 * In-transform of a single node.
 *
 * default behaviour is a shallow copy - copying the element name, attributes,
 * and text content only if the element has no child elements.
 * to be overridden for specific paths in implementing classes
 */
protected Element inTransformNode(Element el, String path) throws MapperException
{
	// copy the element with namespaces, prefixed tag name, attributes but no text or child Elements
	Element copy = (Element)inResultDoc.importNode(el, false);
	// convert <FaceCompletedItem> elements to specific types of item,
	// named after the value of their <QuestionId> child element
	if (XMLUtil.getLocalName(el).equals("FaceCompletedItem"))
	{
		String questionCode = getPathValue(el,"QuestionId");
		String newName = "FaceCompletedItem_" + questionCode;
		copy = renameElement(el, newName, true);
	}
	// if the source element has no child elements but has text, copy the text
	String text = textOnly(el);
	if (!text.equals("")) copy.appendChild(inResultDoc.createTextNode(text));
	return copy;
}
//----------------------------------------------------------------------------------------
// Out-wrapper transform
//----------------------------------------------------------------------------------------
@Override
public Object transformOut(Element outgoing) throws MapperException
{
String mappingRootPath = "/Envelope";
outResultDoc = XMLUtil.makeOutDoc();
Element outRoot = scanDocument(outgoing, mappingRootPath, AbstractMapperWrapper.OUT_TRANSFORM);
outResultDoc.appendChild(outRoot);
return outResultDoc;
}
/**
 * Out-transform of a single node. The default behaviour is a shallow copy -
 * copying the element name, attributes, and text content only if the element
 * has no child elements; to be overridden for specific paths in implementing
 * classes. Specific <FaceCompletedItem_XX> elements are renamed back to the
 * generic <FaceCompletedItem> form.
 */
protected Element outTransformNode(Element el, String path) throws MapperException
{
	// shallow copy: namespaces, prefixed tag name and attributes, but no text or children
	Element copy = (Element)outResultDoc.importNode(el, false);
	// convert specific types of <FaceCompletedItem_XX> back to plain <FaceCompletedItem>
	if (XMLUtil.getLocalName(el).startsWith("FaceCompletedItem"))
		copy = renameElement(el, "FaceCompletedItem", false);
	// keep the text content of childless elements
	String text = textOnly(el);
	if (!text.equals("")) copy.appendChild(outResultDoc.createTextNode(text));
	return copy;
}
/**
 * Copy an element and all its attributes into the appropriate result
 * document, renaming it and placing it in the FACE namespace.
 * (Note: the element is created with createElementNS in FACE_URI, not in
 * "no namespace" as a previous comment claimed.)
 *
 * @param el the element to copy
 * @param newName the new tag name for the copy
 * @param isIn true for the in-transform (copy into inResultDoc),
 *        false for the out-transform (copy into outResultDoc)
 * @return the renamed element, with all attributes of el copied across
 * @throws MapperException
 */
protected Element renameElement(Element el, String newName, boolean isIn) throws MapperException
{
	// choose the destination document for this transform direction
	Document target = isIn ? inResultDoc : outResultDoc;
	Element newEl = target.createElementNS(FACE_URI, newName);
	// set all attributes of the original element, including namespace attributes
	for (int a = 0; a < el.getAttributes().getLength(); a++)
	{
		Attr at = (Attr)el.getAttributes().item(a);
		newEl.setAttribute(at.getName(), at.getValue());
	}
	return newEl;
}
//--------------------------------------------------------------------------------------------------------------
// XSLT Wrapper Transforms
//--------------------------------------------------------------------------------------------------------------
/**
* @param xout XSLT output being made
* @param templateFilter a filter on the templates, implemented by XSLGeneratorImpl
* append the templates and variables to be included in the XSL
* to do the full transformation, to apply the wrapper transform in the 'in' direction.
* Templates must have mode = "inWrapper"
*/
public void addWrapperInTemplates(XSLOutputFile xout, TemplateFilter templateFilter) throws MapperException
{
// see class AbstractMapperWrapper - adds a plain identity template
super.addWrapperInTemplates(xout, templateFilter);
// add the FACE namespace
xout.topOut().setAttribute("xmlns:" + FACE_PREFIX, FACE_URI);
for (Iterator<ElementDef> it = findFACEItemsElementDefs(ms()).iterator();it.hasNext();)
{
ElementDef FACEItem = it.next();
String tagName = FACEItem.getName();
if (tagName.startsWith("FaceCompletedItem_"))
{
String questionId = tagName.substring("FaceCompletedItem_".length());
addInTemplate(xout,tagName,questionId);
}
}
}
/**
* @param xout XSLT output being made
* @param templateFilter a filter on the templates to be included, implemented by XSLGeneratorImpl
* append the templates and variables to be included in the XSL
* to do the full transformation, to apply the wrapper transform in the 'out' direction.
* Templates must have mode = "outWrapper"
* @throws MapperException
*/
public void addWrapperOutTemplates(XSLOutputFile xout, TemplateFilter templateFilter) throws MapperException
{
// see class AbstractMapperWrapper - adds a plain identity template
super.addWrapperOutTemplates(xout, templateFilter);
// add the FACE namespace
xout.topOut().setAttribute("xmlns:" + FACE_PREFIX, FACE_URI);
for (Iterator<ElementDef> it = findFACEItemsElementDefs(ms()).iterator();it.hasNext();)
{
ElementDef FACEItem = it.next();
String tagName = FACEItem.getName();
if (tagName.startsWith("FaceCompletedItem_"))
{
String questionId = tagName.substring("FaceCompletedItem_".length());
addOutTemplate(xout,tagName,questionId);
}
}
}
/**
* add an in-wrapper template of the form
<xsl:template match="face:FaceCompletedItem[face:QuestionId='F14_14_46_11_15_33T61_38']" mode="inWrapper">
<face:FaceCompletedItem_F14_14_46_11_15_33T61_38>
<xsl:copy-of select="@*"/>
<xsl:apply-templates mode="inWrapper"/>
</face:FaceCompletedItem_F14_14_46_11_15_33T61_38>
</xsl:template>
* @param xout
* @param tagName
* @param questionId
*/
private void addInTemplate(XSLOutputFile xout,String tagName,String questionId) throws MapperException
{
<|fim▁hole|> tempEl.setAttribute("match", FACE_PREFIX + ":FaceCompletedItem[" + FACE_PREFIX + ":QuestionId='" + questionId + "']");
tempEl.setAttribute("mode", "inWrapper");
Element FACEEl = xout.NSElement(FACE_PREFIX, tagName, FACE_URI);
tempEl.appendChild(FACEEl);
addApplyChildren(xout,FACEEl,"inWrapper");
xout.topOut().appendChild(tempEl);
}
/**
 * Add an out-wrapper template of the form
 *   <xsl:template match="face:FaceCompletedItem_XX" mode="outWrapper">
 *     <face:FaceCompletedItem>
 *       <xsl:copy-of select="@*"/>
 *       <xsl:apply-templates mode="outWrapper"/>
 *     </face:FaceCompletedItem>
 *   </xsl:template>
 * which renames a specific completed-item element back to the generic form.
 * @param xout the XSL output file being written
 * @param tagName the specific element name, e.g. "FaceCompletedItem_XX"
 * @param questionId the question id suffix of tagName
 */
private void addOutTemplate(XSLOutputFile xout,String tagName,String questionId) throws MapperException
{
	Element template = xout.XSLElement("template");
	template.setAttribute("match", FACE_PREFIX + ":" + tagName);
	template.setAttribute("mode", "outWrapper");
	Element genericItem = xout.NSElement(FACE_PREFIX, "FaceCompletedItem", FACE_URI);
	template.appendChild(genericItem);
	addApplyChildren(xout, genericItem, "outWrapper");
	xout.topOut().appendChild(template);
}
/**
 * Add two child nodes to a template element to carry on copying down the
 * tree: an <xsl:copy-of select="@*"/> for the attributes, followed by an
 * <xsl:apply-templates mode="..."/> for the child nodes.
 * @param xout the XSL output file being written
 * @param FACEEl the element to append the two instructions to
 * @param mode the template mode ("inWrapper" or "outWrapper")
 * @throws MapperException
 */
private void addApplyChildren(XSLOutputFile xout,Element FACEEl, String mode) throws MapperException
{
	Element copyAttributes = xout.XSLElement("copy-of");
	copyAttributes.setAttribute("select", "@*");
	FACEEl.appendChild(copyAttributes);

	Element applyTemplates = xout.XSLElement("apply-templates");
	applyTemplates.setAttribute("mode", mode);
	FACEEl.appendChild(applyTemplates);
}
/**
 * @param mappedStructure the mapping set to search
 * @return a list of nodes in the mapping set which are children of the 'Items' node
 * @throws MapperException if the root is missing, is not named
 * 'GetIndicativeBudget', or the payload/Items chain below it is absent
 */
public static EList<ElementDef> findFACEItemsElementDefs(MappedStructure mappedStructure) throws MapperException
{
	ElementDef msRoot = mappedStructure.getRootElement();
	if (msRoot == null) throw new MapperException("No root element in mapping set");
	if (!msRoot.getName().equals("GetIndicativeBudget"))
		throw new MapperException("Root Element of mapping set must be called 'GetIndicativeBudget'");
	// there must be a chain of child ElementDefs with the names below; throw an exception if not
	ElementDef payload = findChildElementDef(msRoot,"payload");
	ElementDef items = findChildElementDef(payload,"Items");
	return items.getChildElements();
}
/**
 * @param parent the ElementDef whose children are searched
 * @param childName the name of the wanted child
 * @return the child ElementDef with the given name
 * @throws MapperException if no such child exists
 */
private static ElementDef findChildElementDef(ElementDef parent, String childName) throws MapperException
{
	ElementDef child = parent.getNamedChildElement(childName);
	if (child != null) return child;
	throw new MapperException("Mapping set node '" + parent.getName() + "' has no child '" + childName + "'");
}
}<|fim▁end|>
|
Element tempEl = xout.XSLElement("template");
|
<|file_name|>tabnanny.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3
"""The Tab Nanny despises ambiguous indentation. She knows no mercy.
tabnanny -- Detection of ambiguous indentation
For the time being this module is intended to be called as a script.
However it is possible to import it into an IDE and use the function
check() described below.
Warning: The API provided by this module is likely to change in future
releases; such changes may not be backward compatible.
"""
# Released to the public domain, by Tim Peters, 15 April 1998.
# XXX Note: this is now a standard library module.
# XXX The API needs to undergo changes however; the current code is too
# XXX script-like. This will be addressed later.
__version__ = "6"
import os
import sys
import getopt
import tokenize
if not hasattr(tokenize, 'NL'):
raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
__all__ = ["check", "NannyNag", "process_tokens"]
verbose = 0
filename_only = 0
def errprint(*args):
    """Write the arguments to stderr, space-separated, ending with a newline.

    Equivalent to the original hand-rolled loop (each argument passed
    through str(), joined by single spaces), but using the stdlib print().
    """
    print(*args, file=sys.stderr)
def main():
    """Command-line entry point: parse -q/-v flags, then check() each path.

    -q increments filename_only (report only the file name on problems);
    -v increments verbose (more diagnostic output).  Both are module globals
    read by check().
    """
    global verbose, filename_only
    try:
        opts, args = getopt.getopt(sys.argv[1:], "qv")
    except getopt.error as msg:
        errprint(msg)
        return
    for o, a in opts:
        if o == '-q':
            filename_only = filename_only + 1
        if o == '-v':
            verbose = verbose + 1
    if not args:
        errprint("Usage:", sys.argv[0], "[-v] file_or_directory ...")
        return
    for arg in args:
        check(arg)
class NannyNag(Exception):
    """
    Raised by tokeneater() if detecting an ambiguous indent.
    Captured and handled in check().
    """

    def __init__(self, lineno, msg, line):
        # Do not call Exception.__init__: the original preserved whatever
        # args the constructor received, and we keep that behavior.
        self.lineno = lineno
        self.msg = msg
        self.line = line

    def get_lineno(self):
        """Return the line number at which the ambiguity was detected."""
        return self.lineno

    def get_msg(self):
        """Return the diagnostic message."""
        return self.msg

    def get_line(self):
        """Return the offending source line."""
        return self.line
def check(file):
    """check(file_or_dir)

    If file_or_dir is a directory and not a symbolic link, then recursively
    descend the directory tree named by file_or_dir, checking all .py files
    along the way. If file_or_dir is an ordinary Python source file, it is
    checked for whitespace related problems. The diagnostic messages are
    written to standard output using the print statement.
    """
    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print("%r: listing directory" % (file,))
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            # recurse into subdirectories (but not symlinks) and .py files
            if (os.path.isdir(fullname) and
                not os.path.islink(fullname) or
                os.path.normcase(name[-3:]) == ".py"):
                check(fullname)
        return

    try:
        # tokenize.open() respects the source file's encoding declaration
        f = tokenize.open(file)
    except OSError as msg:
        errprint("%r: I/O Error: %s" % (file, msg))
        return

    if verbose > 1:
        print("checking %r ..." % file)

    try:
        process_tokens(tokenize.generate_tokens(f.readline))

    except tokenize.TokenError as msg:
        errprint("%r: Token Error: %s" % (file, msg))
        return

    except IndentationError as msg:
        errprint("%r: Indentation Error: %s" % (file, msg))
        return

    except NannyNag as nag:
        # ambiguous indentation found: report with the requested verbosity
        badline = nag.get_lineno()
        line = nag.get_line()
        if verbose:
            print("%r: *** Line %d: trouble in tab city! ***" % (file, badline))
            print("offending line: %r" % (line,))
            print(nag.get_msg())
        else:
            # terse mode: quote names containing spaces for shell use
            if ' ' in file: file = '"' + file + '"'
            if filename_only: print(file)
            else: print(file, badline, repr(line))
        return

    finally:
        f.close()

    if verbose:
        print("%r: Clean bill of health." % (file,))
class Whitespace:
# the characters used for space and tab
S, T = ' \t'
# members:
# raw
# the original string
# n
# the number of leading whitespace characters in raw
# nt
# the number of tabs in raw[:n]
# norm
# the normal form as a pair (count, trailing), where:
# count
# a tuple such that raw[:n] contains count[i]
# instances of S * i + T
# trailing
# the number of trailing spaces in raw[:n]
# It's A Theorem that m.indent_level(t) ==
# n.indent_level(t) for all t >= 1 iff m.norm == n.norm.
# is_simple
# true iff raw[:n] is of the form (T*)(S*)
def __init__(self, ws):
    """Analyze the leading whitespace of string ws.

    Sets the members described in the class comments:
      raw       -- the original string
      n         -- number of leading whitespace characters in raw
      nt        -- number of tabs in raw[:n]
      norm      -- the normal form (count, trailing)
      is_simple -- true iff raw[:n] is of the form (T*)(S*)
    """
    self.raw = ws
    S, T = Whitespace.S, Whitespace.T
    count = []
    b = n = nt = 0      # b counts spaces since the last tab
    for ch in self.raw:
        if ch == S:
            n = n + 1
            b = b + 1
        elif ch == T:
            n = n + 1
            nt = nt + 1
            # record one more occurrence of S*b followed by T
            if b >= len(count):
                count = count + [0] * (b - len(count) + 1)
            count[b] = count[b] + 1
            b = 0
        else:
            # first non-whitespace character ends the prefix
            break
    self.n = n
    self.nt = nt
    self.norm = tuple(count), b
    self.is_simple = len(count) <= 1
# return length of longest contiguous run of spaces (whether or not
# preceding a tab)
def longest_run_of_spaces(self):
    """Return the length of the longest contiguous run of spaces in the prefix."""
    count, trailing = self.norm
    return max(len(count)-1, trailing)
def indent_level(self, tabsize):
    """Return the indent level of the prefix when a tab advances to the
    next multiple of tabsize columns."""
    # count, il = self.norm
    # for i in range(len(count)):
    #    if count[i]:
    #        il = il + (i//tabsize + 1)*tabsize * count[i]
    # return il
    # quicker:
    # il = trailing + sum (i//ts + 1)*ts*count[i] =
    # trailing + ts * sum (i//ts + 1)*count[i] =
    # trailing + ts * sum i//ts*count[i] + count[i] =
    # trailing + ts * [(sum i//ts*count[i]) + (sum count[i])] =
    # trailing + ts * [(sum i//ts*count[i]) + num_tabs]
    # and note that i//ts*count[i] is 0 when i < ts
    count, trailing = self.norm
    il = 0
    for i in range(tabsize, len(count)):
        il = il + i//tabsize * count[i]
    return trailing + tabsize * (il + self.nt)
# return true iff self.indent_level(t) == other.indent_level(t)
# for all t >= 1
def equal(self, other):
    """True iff both prefixes produce identical indents at every tab size."""
    return self.norm == other.norm
# return a list of tuples (ts, i1, i2) such that
# i1 == self.indent_level(ts) != other.indent_level(ts) == i2.
# Intended to be used after not self.equal(other) is known, in which
# case it will return at least one witnessing tab size.
def not_equal_witness(self, other):
    """Return the tab sizes at which self and other disagree, as (ts, i1, i2)."""
    # searching up to one past the longest space run suffices (see equal())
    n = max(self.longest_run_of_spaces(),
            other.longest_run_of_spaces()) + 1
    a = []
    for ts in range(1, n+1):
        if self.indent_level(ts) != other.indent_level(ts):
            a.append( (ts,
                       self.indent_level(ts),
                       other.indent_level(ts)) )
    return a
# Return True iff self.indent_level(t) < other.indent_level(t)
# for all t >= 1.
# The algorithm is due to Vincent Broman.
# Easy to prove it's correct.
# XXXpost that.
# Trivial to prove n is sharp (consider T vs ST).
# Unknown whether there's a faster general way.  I suspected so at
# first, but no longer.
# For the special (but common!) case where M and N are both of the
# form (T*)(S*), M.less(N) iff M.len() < N.len() and
# M.num_tabs() <= N.num_tabs().  Proof is easy but kinda long-winded.
# XXXwrite that up.
# Note that M is of the form (T*)(S*) iff len(M.norm[0]) <= 1.
def less(self, other):
    """True iff self is strictly less indented than other at every tab size."""
    if self.n >= other.n:
        return False
    if self.is_simple and other.is_simple:
        return self.nt <= other.nt
    n = max(self.longest_run_of_spaces(),
            other.longest_run_of_spaces()) + 1
    # the self.n >= other.n test already did it for ts=1
    for ts in range(2, n+1):
        if self.indent_level(ts) >= other.indent_level(ts):
            return False
    return True
# return a list of tuples (ts, i1, i2) such that
# i1 == self.indent_level(ts) >= other.indent_level(ts) == i2.
# Intended to be used after not self.less(other) is known, in which
# case it will return at least one witnessing tab size.
def not_less_witness(self, other):
n = max(self.longest_run_of_spaces(),
other.longest_run_of_spaces()) + 1
a = []
for ts in range(1, n+1):
if self.indent_level(ts) >= other.indent_level(ts):
a.append( (ts,
<|fim▁hole|> self.indent_level(ts),
other.indent_level(ts)) )
return a
def format_witnesses(w):
    """Return a readable phrase naming the tab sizes in witness list w.

    w is a sequence of (tabsize, indent1, indent2) tuples; only the tab
    sizes are mentioned, e.g. "at tab sizes 4, 8".
    """
    sizes = ", ".join(str(ts) for ts, *_ in w)
    plural = "s" if len(w) > 1 else ""
    return "at tab size%s %s" % (plural, sizes)
def process_tokens(tokens):
    """Scan a token stream and raise NannyNag at the first ambiguous indent.

    Tracks a stack of Whitespace objects, one per open indent level, and
    requires each INDENT to be strictly greater (at every tab size) and each
    statement's leading whitespace to be exactly equal to the current level.
    """
    INDENT = tokenize.INDENT
    DEDENT = tokenize.DEDENT
    NEWLINE = tokenize.NEWLINE
    JUNK = tokenize.COMMENT, tokenize.NL
    indents = [Whitespace("")]
    check_equal = 0

    for (type, token, start, end, line) in tokens:
        if type == NEWLINE:
            # a program statement, or ENDMARKER, will eventually follow,
            # after some (possibly empty) run of tokens of the form
            #     (NL | COMMENT)* (INDENT | DEDENT+)?
            # If an INDENT appears, setting check_equal is wrong, and will
            # be undone when we see the INDENT.
            check_equal = 1

        elif type == INDENT:
            check_equal = 0
            thisguy = Whitespace(token)
            if not indents[-1].less(thisguy):
                witness = indents[-1].not_less_witness(thisguy)
                msg = "indent not greater e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)
            indents.append(thisguy)

        elif type == DEDENT:
            # there's nothing we need to check here!  what's important is
            # that when the run of DEDENTs ends, the indentation of the
            # program statement (or ENDMARKER) that triggered the run is
            # equal to what's left at the top of the indents stack

            # Ouch!  This assert triggers if the last line of the source
            # is indented *and* lacks a newline -- then DEDENTs pop out
            # of thin air.
            # assert check_equal  # else no earlier NEWLINE, or an earlier INDENT
            check_equal = 1

            del indents[-1]

        elif check_equal and type not in JUNK:
            # this is the first "real token" following a NEWLINE, so it
            # must be the first token of the next program statement, or an
            # ENDMARKER; the "line" argument exposes the leading whitespace
            # for this statement; in the case of ENDMARKER, line is an empty
            # string, so will properly match the empty string with which the
            # "indents" stack was seeded
            check_equal = 0
            thisguy = Whitespace(line)
            if not indents[-1].equal(thisguy):
                witness = indents[-1].not_equal_witness(thisguy)
                msg = "indent not equal e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)
if __name__ == '__main__':
main()<|fim▁end|>
| |
<|file_name|>representation.go<|end_file_name|><|fim▁begin|>package rest
import (
"fmt"
"io"
"net/http"
"github.com/tliron/kutil/ard"
"github.com/tliron/kutil/js"
"github.com/tliron/prudence/platform"
)
//
// RepresentationFunc
//
type RepresentationFunc func(context *Context) error
func NewRepresentationFunc(function interface{}, jsContext *js.Context) (RepresentationFunc, error) {
// Unbind if necessary
functionContext := jsContext
if bind, ok := function.(js.Bind); ok {
var err error
if function, functionContext, err = bind.Unbind(); err != nil {
return nil, err
}
}
if function_, ok := function.(js.JavaScriptFunc); ok {
return func(context *Context) error {
functionContext.Environment.Call(function_, context)
return nil
}, nil
} else {
return nil, fmt.Errorf("not a JavaScript function: %T", function)
}
}
//
// Represention
//
type Representation struct {
Construct RepresentationFunc
Describe RepresentationFunc
Present RepresentationFunc
Erase RepresentationFunc
Modify RepresentationFunc
Call RepresentationFunc
}
func CreateRepresentation(node *ard.Node, context *js.Context) (*Representation, error) {
//panic(fmt.Sprintf("%v", node.Data))
var self Representation
var functions *ard.Node
functionsContext := context
if functions = node.Get("functions"); functions.Data != nil {
// Unbind "functions" property if necessary
if bind, ok := functions.Data.(js.Bind); ok {
var err error
if functions.Data, functionsContext, err = bind.Unbind(); err != nil {
return nil, err
}
}
}
getFunction := func(name string) (RepresentationFunc, error) {
if functions.Data != nil {
// Try "functions" property
if function := functions.Get(name).Data; function != nil {
return NewRepresentationFunc(function, functionsContext)
}
}
// Try individual function properties
if function := node.Get(name).Data; function != nil {
return NewRepresentationFunc(function, context)
}
return nil, nil
}
var err error
if self.Construct, err = getFunction("construct"); err != nil {
return nil, err
}
if self.Describe, err = getFunction("describe"); err != nil {
return nil, err
}
if self.Present, err = getFunction("present"); err != nil {
return nil, err
}
if self.Erase, err = getFunction("erase"); err != nil {
return nil, err
}
if self.Modify, err = getFunction("modify"); err != nil {
return nil, err
}
if self.Call, err = getFunction("call"); err != nil {
return nil, err
}
return &self, nil
}
// Handler interface
// HandleFunc signature
func (self *Representation) Handle(context *Context) bool {
context.Response.CharSet = "utf-8"
switch context.Request.Method {
case "GET":
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/GET
if self.construct(context) {
if self.tryCache(context, true) {
if self.describe(context) {
self.present(context, true)
}
}
}
case "HEAD":
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/HEAD
// Avoid wasting resources on writing
context.writer = io.Discard
if self.construct(context) {
if self.tryCache(context, false) {
if self.describe(context) {
self.present(context, false)
}
}
}
case "DELETE":
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/DELETE
if self.construct(context) {
self.erase(context)
}
case "PUT":
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/PUT
if self.construct(context) {
self.modify(context)
}
case "POST":
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/POST
if self.construct(context) {
self.call(context)
}
}
return context.Response.Status != http.StatusNotFound
}
// construct runs the optional "construct" hook, which may adjust the cache
// key (defaulted here to the request path). Returns false if the hook failed
// (a 500 has then been set on the context).
func (self *Representation) construct(context *Context) bool {
	context.CacheKey = context.Path
	if self.Construct == nil {
		return true
	}
	if err := self.Construct(context); err != nil {
		context.InternalServerError(err)
		return false
	}
	return true
}
// tryCache attempts to serve a previously cached representation. Returns
// true when handling should continue normally, false when a cached
// representation was presented and no further work is needed.
func (self *Representation) tryCache(context *Context, withBody bool) bool {
	if context.CacheKey != "" {
		if key, cached, ok := context.LoadCachedRepresentation(); ok {
			if withBody && (len(cached.Body) == 0) {
				// The cache entry was likely created by a previous HEAD request
				context.Log.Debugf("ignoring cached representation because it has no body: %s", context.Path)
			} else {
				// present the cached entry; refresh it in the cache if presenting changed it
				if changed := context.PresentCachedRepresentation(cached, withBody); changed {
					cached.Update(key)
				}
				return false
			}
		}
	}
	return true
}
// describe runs the optional "describe" hook. Returns false when handling
// should stop: either the hook failed (500 set on the context) or the
// client's validators matched and a not-modified response applies.
func (self *Representation) describe(context *Context) bool {
	if self.Describe == nil {
		return true
	}
	if err := self.Describe(context); err != nil {
		context.InternalServerError(err)
		return false
	}
	return !context.isNotModified(false)
}
func (self *Representation) present(context *Context, withBody bool) {
if withBody {
// Encoding
SetBestEncodeWriter(context)
// Present
if self.Present != nil {
if err := self.Present(context); err != nil {
context.InternalServerError(err)
return
}
}
if context.isNotModified(false) {
return
}
}
context.flushWriters()
// Headers
context.Response.setContentType()
context.Response.setETag()
context.Response.setLastModified()
context.setCacheControl()
if (context.CacheDuration > 0.0) && (context.CacheKey != "") {
context.StoreCachedRepresentation(withBody)
}
}
// erase handles DELETE: delegate to the "erase" hook and map the outcome
// onto an HTTP status, invalidating any cached representation on success.
// Without a hook, DELETE is not allowed on this representation.
func (self *Representation) erase(context *Context) {
	if self.Erase != nil {
		if err := self.Erase(context); err != nil {
			context.InternalServerError(err)
			return
		}

		if context.Done {
			if context.Async {
				// Will be erased later
				context.Response.Status = http.StatusAccepted // 202
			} else if context.Response.Buffer.Len() > 0 {
				// Erased, has response
				context.Response.Status = http.StatusOK // 200
			} else {
				// Erased, no response
				context.Response.Status = http.StatusNoContent // 204
			}

			if context.CacheKey != "" {
				// the resource is gone, so its cached representation must go too
				context.DeleteCachedRepresentation()
			}
		} else {
			context.Response.Status = http.StatusNotFound // 404
		}
	} else {
		context.Response.Status = http.StatusMethodNotAllowed // 405
	}
}
// modify handles PUT: delegate to the "modify" hook and map the outcome
// onto an HTTP status, refreshing the cached representation when caching
// is enabled. Without a hook, PUT is not allowed on this representation.
func (self *Representation) modify(context *Context) {
	if self.Modify != nil {
		if err := self.Modify(context); err != nil {
			context.InternalServerError(err)
			return
		}

		if context.Done {
			if context.Created {
				// Created
				context.Response.Status = http.StatusCreated // 201
			} else if context.Response.Buffer.Len() > 0 {
				// Changed, has response
				context.Response.Status = http.StatusOK // 200
			} else {
				// Changed, no response
				context.Response.Status = http.StatusNoContent // 204
			}

			if (context.CacheDuration > 0.0) && (context.CacheKey != "") {
				// cache the new state of the resource
				context.StoreCachedRepresentation(true)
			}
		} else {
			context.Response.Status = http.StatusNotFound // 404
		}
	} else {
		context.Response.Status = http.StatusMethodNotAllowed // 405
	}
}
// call handles POST by delegating to the optional "call" hook; a hook error
// yields a 500 via the context.
func (self *Representation) call(context *Context) {
	if self.Call == nil {
		return
	}
	if err := self.Call(context); err != nil {
		context.InternalServerError(err)
	}
}
//
// Representations
//
type RepresentationEntry struct {
Representation *Representation
ContentType ContentType
Language Language
}
type Representations struct {
Entries []*RepresentationEntry
}
func CreateRepresentations(config ard.Value, context *js.Context) (*Representations, error) {
var self Representations
for _, representation := range platform.AsConfigList(config) {
representation_ := ard.NewNode(representation)
if representation__, err := CreateRepresentation(representation_, context); err == nil {
contentTypes := platform.AsStringList(representation_.Get("contentTypes").Data)
languages := platform.AsStringList(representation_.Get("languages").Data)
self.Add(contentTypes, languages, representation__)
} else {
return nil, err
}
}
return &self, nil
}
func (self *Representations) Add(contentTypes []string, languages []string, representation *Representation) {
if len(contentTypes) == 0 {
contentTypes = []string{""}
}
if len(languages) == 0 {
languages = []string{""}
}
// The order signifies the *server* matching preferences
for _, contentType := range contentTypes {
contentType_ := NewContentType(contentType)
for _, language := range languages {
self.Entries = append(self.Entries, &RepresentationEntry{
Representation: representation,<|fim▁hole|> })
}
}
}
// NegotiateBest picks the representation that best satisfies the request's
// Accept and Accept-Language headers.  The order of self.Entries encodes the
// server's own preference (see the comment where entries are appended).
// It returns the chosen representation plus the negotiated content type and
// language names and true on success; (nil, "", "", false) when there are
// no entries at all.
func (self *Representations) NegotiateBest(context *Context) (*Representation, string, string, bool) {
	// https://developer.mozilla.org/en-US/docs/Web/HTTP/Content_negotiation
	contentTypePreferences := ParseContentTypePreferences(context.Request.Header.Get(HeaderAccept))
	languagePreferences := ParseLanguagePreferences(context.Request.Header.Get(HeaderAcceptLanguage))
	if len(languagePreferences) > 0 {
		// Pass 1: exact match of contentType and language
		for _, contentTypePreference := range contentTypePreferences {
			for _, languagePreference := range languagePreferences {
				for _, entry := range self.Entries {
					if contentTypePreference.Matches(entry.ContentType) && languagePreference.Matches(entry.Language, false) {
						return entry.Representation, entry.ContentType.Name, entry.Language.Name, true
					}
				}
			}
		}
		// Pass 2: exact match of contentType, soft match of language
		for _, contentTypePreference := range contentTypePreferences {
			for _, languagePreference := range languagePreferences {
				for _, entry := range self.Entries {
					if contentTypePreference.Matches(entry.ContentType) && languagePreference.Matches(entry.Language, true) {
						return entry.Representation, entry.ContentType.Name, entry.Language.Name, true
					}
				}
			}
		}
	}
	// Pass 3: exact match of contentType, any language
	for _, contentTypePreference := range contentTypePreferences {
		for _, entry := range self.Entries {
			if contentTypePreference.Matches(entry.ContentType) {
				return entry.Representation, entry.ContentType.Name, entry.Language.Name, true
			}
		}
	}
	// Pass 4: fall back to the default representation (empty contentType)
	for _, entry := range self.Entries {
		if entry.ContentType.Name == "" {
			return entry.Representation, "", "", true
		}
	}
	// Last resort: just pick the first entry, if any
	for _, entry := range self.Entries {
		return entry.Representation, entry.ContentType.Name, entry.Language.Name, true
	}
	return nil, "", "", false
}
|
ContentType: contentType_,
Language: NewLanguage(language),
|
<|file_name|>test_character_sets.py<|end_file_name|><|fim▁begin|># Copyright 2015 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
<|fim▁hole|>from f5.sdk_exception import UnsupportedOperation
import mock
import pytest
@pytest.fixture
def FakeChar():
    # Build a Character_Sets endpoint backed by a mocked policy so no real
    # BIG-IP connection is needed; pin the TMOS version the tests assume.
    fake_policy = mock.MagicMock()
    fake_e = Character_Sets(fake_policy)
    fake_e._meta_data['bigip'].tmos_version = '11.6.0'
    return fake_e
class TestCharacterSets(object):
    """Character_Sets is a read-only ASM endpoint: create/delete must raise."""
    def test_create_raises(self, FakeChar):
        with pytest.raises(UnsupportedOperation):
            FakeChar.create()
    def test_delete_raises(self, FakeChar):
        with pytest.raises(UnsupportedOperation):
            FakeChar.delete()
|
from f5.bigip.tm.asm.policies.character_sets import Character_Sets
|
<|file_name|>ComplexLine.py<|end_file_name|><|fim▁begin|>#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
# This file is part of the E-Cell System
#
# Copyright (C) 1996-2016 Keio University
# Copyright (C) 2008-2016 RIKEN
# Copyright (C) 2005-2009 The Molecular Sciences Institute
#
#::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
#
#
# E-Cell System is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# E-Cell System is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with E-Cell System -- see the file COPYING.
# If not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#END_HEADER
try:
import gnomecanvas
except:
import gnome.canvas as gnomecanvas
from ecell.ui.model_editor.Constants import *
from ecell.ui.model_editor.Utils import *
from ecell.ui.model_editor.ResizeableText import *
class ComplexLine:
    def __init__( self, anObject, aCanvas ):
        # anObject: the model object this line visualizes;
        # aCanvas: the canvas the shapes are drawn on.
        self.theCanvas = aCanvas
        self.parentObject = anObject
        self.graphUtils = self.parentObject.getGraphUtils()
        # Maps descriptor names to live canvas items.
        self.shapeMap = {}
        # Last pointer position, used to compute drag deltas in rect_event.
        self.lastmousex = 0
        self.lastmousey = 0
        self.buttonpressed = False
        self.firstdrag=False
    def show ( self ):
        # Create a canvas item for every shape descriptor of the parent.
        self.theRoot = self.parentObject.theCanvas.getRoot()
        self.shapeDescriptorList = self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).getDescriptorList()
        self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).reCalculate()
        self.__sortByZOrder( self.shapeDescriptorList )
        self.isSelected = False
        for aKey in self.shapeDescriptorList.keys():
            aDescriptor = self.shapeDescriptorList[aKey]
            if aDescriptor[SD_TYPE] == CV_TEXT:
                self.createText( aDescriptor )
            elif aDescriptor[SD_TYPE] == CV_LINE:
                self.createLine( aDescriptor )
            elif aDescriptor[SD_TYPE] == CV_BPATH:
                self.createBpath( aDescriptor )
        # NOTE(review): isSelected was already reset above; this second
        # assignment is redundant but harmless.
        self.isSelected = False
    def repaint ( self ):
        # Recalculate descriptor geometry and push it onto the existing
        # canvas items (created earlier by show()).
        self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).reCalculate()
        self.shapeDescriptorList = self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).getDescriptorList()
        self.__sortByZOrder( self.shapeDescriptorList )
        for aKey in self.shapeDescriptorList.keys():
            aDescriptor = self.shapeDescriptorList[aKey]
            if aDescriptor[SD_TYPE] == CV_TEXT:
                self.redrawText( aDescriptor )
            elif aDescriptor[SD_TYPE] == CV_LINE:
                self.redrawLine( aDescriptor )
            elif aDescriptor[SD_TYPE] == CV_BPATH:
                self.redrawBpath( aDescriptor )
    def reName( self ):
        # Refresh the "textbox" shape after the parent object was renamed.
        self.shapeDescriptorList = self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).getDescriptorList()
        self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).renameLabel( self.parentObject.getProperty( CO_NAME ) )
        aDescriptor = self.shapeDescriptorList["textbox"]
        self.renameText( aDescriptor )
    def delete( self ):
        # Destroy every canvas item belonging to this line.
        for aShapeName in self.shapeMap.keys():
            self.shapeMap[ aShapeName ].destroy()
    def selected( self ):
        # Mark as selected; fillColorChanged() lightens selected shapes.
        self.isSelected = True
    def unselected( self ):
        self.isSelected = False
    def outlineColorChanged( self ):
        # Lines use the fill colour for their outline as well.
        self.fillColorChanged()
def fillColorChanged( self ):
# find shapes with outline color
anRGB = copyValue( self.parentObject.getProperty( OB_FILL_COLOR ) )
if self.isSelected:
for i in range(0,3):
anRGB[i] = 32768 + anRGB[i]
for aKey in self.shapeDescriptorList.keys():
aDescriptor = self.shapeDescriptorList[aKey]
if aDescriptor[ SD_COLOR ] == SD_FILL:
aColor = self.graphUtils.getGdkColorByRGB( anRGB )
if aDescriptor[SD_TYPE] in CV_LINE:
self.changeLineColor( aDescriptor[ SD_NAME ] , aColor )
elif aDescriptor[SD_TYPE] in CV_BPATH:
self.changeLineColorB( aDescriptor[ SD_NAME ] , aColor )
    def createBpath(self, aDescriptor):
        # Create a Bezier-path canvas item from the descriptor's path def.
        aSpecific= aDescriptor[SD_SPECIFIC]
        # get pathdef
        pathdef= aSpecific[BPATH_PATHDEF]
        pd = gnomecanvas.path_def_new(pathdef)
        aGdkColor = self.getGdkColor( aDescriptor )
        # CanvasBpath args: width_units = stroke width, outline_color_gdk = stroke colour.
        bpath = self.theRoot.add(gnomecanvas.CanvasBpath, width_units=3,
                        outline_color_gdk = aGdkColor)
        bpath.set_bpath(pd)
        self.addHandlers( bpath, aDescriptor[ SD_NAME ] )
        self.shapeMap[ aDescriptor[ SD_NAME ] ] = bpath
    # createLine draws a plain canvas line for the descriptor: width,
    # colour and optional arrowheads.  It is used both for arrowhead
    # segments and for the middle section; when the middle section is a
    # bpath (e.g. a curved line) the bpath item presumably draws over this
    # line -- TODO confirm (original comment was uncertain too).
    def createLine( self, aDescriptor ):
        lineSpec = aDescriptor[SD_SPECIFIC]
        # lineSpec layout: [x1, y1, x2, y2, firstArrow, secondArrow, width]
        ( X1, X2, Y1, Y2 ) = [lineSpec[0], lineSpec[2], lineSpec[1], lineSpec[3] ]
        aGdkColor = self.getGdkColor( aDescriptor )
        firstArrow = lineSpec[4]
        secondArrow = lineSpec[5]
        aLine = self.theRoot.add( gnomecanvas.CanvasLine,points=[X1,Y1,X2,Y2], width_units=lineSpec[ 6 ], fill_color_gdk = aGdkColor, first_arrowhead = firstArrow, last_arrowhead = secondArrow,arrow_shape_a=5, arrow_shape_b=5, arrow_shape_c=5 )
        self.addHandlers( aLine, aDescriptor[ SD_NAME ] )
        self.shapeMap[ aDescriptor[ SD_NAME ] ] = aLine
    def changeLineColor ( self, shapeName, aColor ):
        # Recolour a CanvasLine item (lines use 'fill_color_gdk').
        aShape = self.shapeMap[ shapeName ]
        aShape.set_property('fill_color_gdk', aColor )
    def changeLineColorB ( self, shapeName, aColor ):
        # Recolour a CanvasBpath item (bpaths use 'outline_color_gdk').
        aShape = self.shapeMap[ shapeName ]
        aShape.set_property('outline_color_gdk', aColor )
    def createText( self, aDescriptor ):
        # Create a resizeable text item at the descriptor's absolute position.
        textSpec = aDescriptor[SD_SPECIFIC]
        (X1, Y1) = ( textSpec[TEXT_ABSX], textSpec[TEXT_ABSY] )
        aGdkColor = self.getGdkColor( aDescriptor )
        aText = ResizeableText( self.theRoot, self.theCanvas, X1, Y1, aGdkColor, textSpec[TEXT_TEXT], gtk.ANCHOR_NW )
        self.addHandlers( aText, aDescriptor[ SD_NAME ] )
        self.shapeMap[ aDescriptor[ SD_NAME ] ] = aText
def redrawLine( self, aDescriptor ):
aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]
aSpecific = aDescriptor[ SD_SPECIFIC ]
x1 = aSpecific[0]
y1 = aSpecific[1]
x2 = aSpecific[2]
y2 = aSpecific[3]
hasFirstArrow = aSpecific[4]
hasLastArrow = aSpecific[5]
aShape.set_property( 'points', (x1, y1, x2, y2) )
aShape.set_property('first_arrowhead', hasFirstArrow )
aShape.set_property('last_arrowhead', hasLastArrow )
    def redrawBpath( self, aDescriptor ):
        # Rebuild the path definition and push it onto the canvas item.
        aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]
        pathdef = aDescriptor[ SD_SPECIFIC ][BPATH_PATHDEF]
        pd=gnomecanvas.path_def_new(pathdef)
        aShape.set_bpath(pd)
def redrawText( self, aDescriptor ):
aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]<|fim▁hole|> x = aSpecific[TEXT_ABSX]
y = aSpecific[TEXT_ABSY]
aShape.set_property( 'x', x )
aShape.set_property( 'y', y )
    def renameText (self, aDescriptor ):
        # Update the displayed label from the descriptor's text field.
        aShape = self.shapeMap[ aDescriptor[ SD_NAME ] ]
        aSpecific = aDescriptor[ SD_SPECIFIC ]
        label = aSpecific[ TEXT_TEXT ]
        aShape.set_property( 'text', label )
    def getGdkColor( self, aDescriptor ):
        # Map the descriptor's colour role to the matching object property
        # and convert its RGB value to a GdkColor.
        aColorType = aDescriptor[ SD_COLOR ]
        if aColorType == SD_FILL:
            queryProp = OB_FILL_COLOR
        elif aColorType == CV_TEXT:
            queryProp = OB_TEXT_COLOR
        # NOTE(review): any other colour role leaves queryProp unbound and
        # raises NameError below -- confirm all descriptors use these two.
        anRGBColor = self.parentObject.getProperty( queryProp )
        return self.graphUtils.getGdkColorByRGB( anRGBColor )
    def __sortByZOrder ( self, desclist ):
        # NOTE(review): the sorted key list built here is discarded and the
        # dict itself is not reordered, so this currently has no visible
        # effect -- callers iterate desclist.keys() directly afterwards.
        keys = desclist.keys()
        fn = lambda x, y: ( x[SD_Z] < y[SD_Z] ) - ( y[SD_Z] < x[SD_Z] )
        keys.sort(fn)
    def leftClick( self, shapeName, x, y, shift_pressed = False ):
        # A left click selects the object; clicking an arrowhead also
        # switches to the pressed-drag cursor.
        self.parentObject.doSelect( shift_pressed )
        if self.getShapeDescriptor(shapeName)[SD_FUNCTION] == SD_ARROWHEAD:
            self.changeCursor( shapeName, x, y, True )
    def rightClick ( self, shapeName, x, y, anEvent, shift ):
        # A right click selects the object (if not yet selected) and pops
        # up its context menu.
        if not self.parentObject.isSelected:
            self.parentObject.doSelect( shift )
        self.parentObject.showMenu( anEvent)
    def getFirstDrag(self):
        # True once an arrowhead drag has started (reset via setFirstDrag).
        return self.firstdrag
    def setFirstDrag(self,aValue):
        self.firstdrag=aValue
    def mouseDrag( self, shapeName, deltax, deltay, origx, origy ):
        # decide whether resize or move or draw arrow
        if self.getShapeDescriptor(shapeName)[SD_FUNCTION] == SD_MOVINGLINE:
            # Moving-line handling is disabled; the triple-quoted string
            # below is dead code kept for reference (it is an expression
            # statement, not executed logic).
            '''
            if shapeName == SHAPE_TYPE_MULTIBCURVE_LINE:
                self.parentObject.getArrowType(SHAPE_TYPE_MULTIBCURVE_LINE)
                #Accessing BPATH_DEF now, the coords like above
                bpathDefcheCk = self.parentObject.theSD.theDescriptorList[SHAPE_TYPE_MULTIBCURVE_LINE][SD_SPECIFIC][BPATH_PATHDEF]
                self.parentObject.thePropertyMap[CO_CONTROL_POINTS] = bpathDefcheCk
                bpathDefcheCk[1][1] = deltax
                bpathDefcheCk[1][2] = deltay
                bpathDefcheCk[1][3] = deltax
                bpathDefcheCk[1][4] = deltay
                bpathDefcheCk[2][1] = deltax
                bpathDefcheCk[2][2] = deltay
                bpathDefcheCk[2][3] = deltax
                bpathDefcheCk[2][4] = deltay
                #bpathDefcheCk[2][1,2,3,4] = [deltax,deltay,deltax,deltay]
            '''
        elif self.getShapeDescriptor(shapeName)[SD_FUNCTION] == SD_ARROWHEAD:
            # Mark drag start, then forward the delta to the parent.
            if not self.firstdrag:
                self.firstdrag=True
            self.parentObject.arrowheadDragged( shapeName,deltax, deltay, origx, origy)
    def checkConnection( self ):
        # Delegate connection validation to the parent object.
        self.parentObject.checkConnection()
    def doubleClick( self, shapeName ):
        # A double click opens the property editor.
        self.parentObject.popupEditor()
    def getShapeDescriptor( self, shapeName ):
        # Look up the descriptor for a named shape.
        return self.parentObject.getProperty( OB_SHAPEDESCRIPTORLIST ).getDescriptor( shapeName )
    def addHandlers( self, canvasObject, aName ):
        # Route all gtk events for this canvas item through rect_event.
        canvasObject.connect('event', self.rect_event, aName )
    def releaseButton( self, shapeName, x, y ):
        # Restore the idle cursor and notify the parent of the release.
        self.changeCursor( shapeName, x, y, False )
        self.parentObject.mouseReleased( shapeName,x, y)
    def mouseEntered( self, shapeName, x, y ):
        self.changeCursor( shapeName, x, y )
    def changeCursor( self, shapeName, x, y, buttonpressed = False):
        # Pick the cursor matching the shape's role (move/resize/arrowhead).
        aFunction = self.getShapeDescriptor(shapeName)[SD_FUNCTION]
        aCursorType = self.parentObject.getCursorType( aFunction, x, y , buttonpressed)
        self.theCanvas.setCursor( aCursorType )
def rect_event( self, *args ):
event = args[1]
item = args[0]
shapeName = args[2]
if event.type == gtk.gdk.BUTTON_PRESS:
if event.state>k.gdk.SHIFT_MASK == gtk.gdk.SHIFT_MASK:
shift_press = True
else:
shift_press = False
if event.button == 1:
self.lastmousex = event.x
self.lastmousey = event.y
self.buttonpressed = True
self.leftClick( shapeName, event.x, event.y, shift_press )
elif event.button == 3:
self.rightClick(shapeName, event.x, event.y, event, shift_press )
elif event.type == gtk.gdk.BUTTON_RELEASE:
if event.button == 1:
self.buttonpressed = False
self.releaseButton(shapeName, event.x, event.y )
elif event.type == gtk.gdk.MOTION_NOTIFY:
self.buttonpressed=(event.state>k.gdk.BUTTON1_MASK)>0
if not self.buttonpressed:
return
oldx = self.lastmousex
oldy = self.lastmousey
deltax = event.x - oldx
deltay = event.y - oldy
self.lastmousex = event.x
self.lastmousey = event.y
self.mouseDrag( shapeName, deltax, deltay, oldx, oldy )
elif event.type == gtk.gdk._2BUTTON_PRESS:
if event.button == 1:
self.doubleClick( shapeName )
elif event.type == gtk.gdk.ENTER_NOTIFY:
self.mouseEntered( shapeName, event.x, event.y )<|fim▁end|>
|
aSpecific = aDescriptor[ SD_SPECIFIC ]
|
<|file_name|>AirshipsBlock.java<|end_file_name|><|fim▁begin|>package com.octagon.airships.block;
import com.octagon.airships.reference.Reference;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.block.Block;
import net.minecraft.block.BlockContainer;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
public abstract class AirshipsBlock extends Block {
    // Delegates to Block's constructor with the given material.
    public AirshipsBlock(Material material) {
        super(material);
    }
    // Default material is rock.
    public AirshipsBlock() {
        this(Material.rock);
    }
    /**
     * Prefixes the unwrapped block name with the lower-cased mod id,
     * producing "tile.&lt;modid&gt;:&lt;name&gt;" so names cannot collide
     * with blocks from other mods.
     */
    @Override
    public String getUnlocalizedName()
    {
        return String.format("tile.%s%s", Reference.MOD_ID.toLowerCase() + ":", getUnwrappedUnlocalizedName(super.getUnlocalizedName()));
    }
@Override
@SideOnly(Side.CLIENT)
public void registerBlockIcons(IIconRegister iconRegister)<|fim▁hole|> {
blockIcon = iconRegister.registerIcon(String.format("%s", getUnwrappedUnlocalizedName(this.getUnlocalizedName())));
}
    /** Strips everything up to and including the first '.' (the "tile." prefix). */
    protected String getUnwrappedUnlocalizedName(String unlocalizedName)
    {
        return unlocalizedName.substring(unlocalizedName.indexOf(".") + 1);
    }
}<|fim▁end|>
| |
<|file_name|>Version.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.service;
import java.util.Objects;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ComparisonChain;
import org.onosproject.store.Timestamp;
import static com.google.common.base.Preconditions.checkArgument;
/**
* Logical timestamp for versions.
* <p>
* The version is a logical timestamp that represents a point in logical time at which an event occurs.
* This is used in both pessimistic and optimistic locking protocols to ensure that the state of a shared resource
* has not changed at the end of a transaction.
*/
public class Version implements Timestamp {
private final long version;
    /**
     * Creates a new logical timestamp.
     *
     * @param version the logical version counter value
     */
    public Version(long version) {
        this.version = version;
    }
    /**
     * Orders versions by their counter value.
     *
     * @throws IllegalArgumentException if {@code o} is not a {@link Version}
     */
    @Override
    public int compareTo(Timestamp o) {
        checkArgument(o instanceof Version,
                "Must be LockVersion", o);
        Version that = (Version) o;
        return ComparisonChain.start()
                .compare(this.version, that.version)
                .result();
    }
@Override<|fim▁hole|> return Long.hashCode(version);
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Version)) {
return false;
}
Version that = (Version) obj;
return Objects.equals(this.version, that.version);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(getClass())
.add("version", version)
.toString();
}
/**
* Returns the lock version.
*
* @return the lock version
*/
public long value() {
return this.version;
}
}<|fim▁end|>
|
public int hashCode() {
|
<|file_name|>test_cron.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from cron_status import *
class TestChangeDetection(unittest.TestCase):<|fim▁hole|> """Test if the change detection is operational."""
    # Please note that status_history_list is backwards,
    # i.e., newest entry first.
    def test_all_okay(self):
        # Uniformly OKAY history: nothing changed, overall stays OKAY.
        status_history_list = [
            {'foo': (ContainerStatus.OKAY, 'no msg')}
        ] * (STATUS_HISTORY_LENGTH + 1)
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertFalse(changed)
        self.assertEqual(changed, status[0].changed) # because there is only 1 container
        self.assertEqual(status[0].overall_status, ContainerStatus.OKAY)
        self.assertEqual(status[0].current_status, ContainerStatus.OKAY)
        self.assertTrue(status[0].container_name in status_history_list[0])
        self.assertEqual(status[0].current_msg, status_history_list[0][status[0].container_name][1])
    def test_all_failed(self):
        # Uniformly FAILED history: nothing changed, overall stays FAILED.
        status_history_list = [
            {'foo': (ContainerStatus.FAILED, 'no msg')}
        ] * (STATUS_HISTORY_LENGTH + 1)
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertFalse(changed)
        self.assertEqual(changed, status[0].changed) # because there is only 1 container
        self.assertEqual(status[0].overall_status, ContainerStatus.FAILED)
        self.assertEqual(status[0].current_status, ContainerStatus.FAILED)
    def test_failed_after_starting_short(self):
        # OKAY -> STARTING... -> FAILED (newest first): reported as a change.
        status_history_list = [{'foo': (ContainerStatus.FAILED, 'no msg')}]
        status_history_list += [
            {'foo': (ContainerStatus.STARTING, 'no msg')}
        ] * (STATUS_HISTORY_LENGTH - 1)
        status_history_list += [{'foo': (ContainerStatus.OKAY, 'no msg')}]
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertTrue(changed)
        self.assertEqual(status[0].overall_status, ContainerStatus.FAILED)
    def test_failed_after_starting_very_long(self):
        # A full window of STARTING followed by FAILED is still a change.
        status_history_list = [{'foo': (ContainerStatus.FAILED, 'no msg')}]
        status_history_list += [
            {'foo': (ContainerStatus.STARTING, 'no msg')}
        ] * STATUS_HISTORY_LENGTH
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertTrue(changed)
        self.assertEqual(status[0].overall_status, ContainerStatus.FAILED)
    def test_okay_after_failed(self):
        # Recovery (FAILED history, newest OKAY) must be reported.
        status_history_list = [
            {'foo': (ContainerStatus.OKAY, 'no msg')}
        ]
        status_history_list += [
            {'foo': (ContainerStatus.FAILED, 'no msg')}
        ] * STATUS_HISTORY_LENGTH
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertTrue(changed)
        self.assertEqual(status[0].overall_status, ContainerStatus.OKAY)
    def test_failed_after_okay(self):
        # Regression (OKAY history, newest FAILED) must be reported.
        status_history_list = [
            {'foo': (ContainerStatus.FAILED, 'no msg')}
        ]
        status_history_list += [
            {'foo': (ContainerStatus.OKAY, 'no msg')}
        ] * STATUS_HISTORY_LENGTH
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertTrue(changed)
        self.assertEqual(status[0].overall_status, ContainerStatus.FAILED)
    def test_missing_data(self):
        # History shorter than the window: no change signal yet.
        status_history_list = [
            {'foo': (ContainerStatus.FAILED, 'no msg')}
        ] * (STATUS_HISTORY_LENGTH - 1)
        status_history_list += [{'foo': (ContainerStatus.OKAY, 'no msg')}]
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertFalse(changed)
        self.assertEqual(status[0].overall_status, ContainerStatus.FAILED)
    def test_too_much_data(self):
        # Entries beyond the window are ignored entirely.
        status_history_list = [
            {'foo': (ContainerStatus.OKAY, 'no msg')}
        ] * (STATUS_HISTORY_LENGTH + 1)
        status_history_list += [{'foo': (ContainerStatus.FAILED, 'no msg')}]
        changed, status = detect_flapping_and_changes(status_history_list)
        self.assertFalse(changed)
        self.assertEqual(status[0].overall_status, ContainerStatus.OKAY)
if __name__ == '__main__':
unittest.main()<|fim▁end|>
| |
<|file_name|>cleanup.py<|end_file_name|><|fim▁begin|># Copyright 2010 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
import time
from datetime import date, datetime, timedelta
from optparse import make_option
import openid.store.nonce
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection, transaction
from django.utils.translation import ugettext as _
from identityprovider.models import Account, EmailAddress
SESSION_SQL = """DELETE FROM django_session
WHERE session_key = ANY(SELECT session_key FROM django_session
WHERE expire_date < CURRENT_TIMESTAMP LIMIT %s)"""
NONCES_SQL = """DELETE FROM openidnonce
WHERE timestamp = ANY(SELECT timestamp FROM openidnonce
WHERE timestamp < %s LIMIT %s)"""
NO_ITEMS = """No items selected to clean up. Please select at least one of:
--sessions
--nonces
--testdata
"""
class Command(BaseCommand):
    # One flag per cleanup task; --limit bounds the per-batch row count and
    # --date-created restricts test-data removal to older records.
    option_list = BaseCommand.option_list + (
        make_option('-s', '--sessions', dest='sessions', default=False,
                    action='store_true', help='Cleanup sessions.'),
        make_option('-n', '--nonces', dest='nonces', default=False,
                    action='store_true', help='Cleanup nonces.'),
        make_option('-t', '--testdata', dest='testdata', default=False,
                    action='store_true', help='Cleanup test data.'),
        make_option('-l', '--limit', dest='limit', default=10000,
                    action='store',
                    help='Number of rows to process per batch.'),
        make_option('-d', '--date-created', dest='date_created',
                    default=None, action='store',
                    help='Cleanup records created before this date.'),
    )
    help = _("""Clean unnecessary/stalled data from database.""")
    def handle(self, *args, **options):
        # Entry point for the management command.  Each selected task runs
        # its bounded DELETE repeatedly, --limit rows at a time.
        limit = int(options['limit'])
        # Nonces older than now minus the allowed clock skew are stale.
        nonce_expire_stamp = int(time.time()) - openid.store.nonce.SKEW
        # Escape the address pattern for regex use; the %s slot matches the
        # local part of test account e-mail addresses.
        test_email_pattern = settings.EMAIL_ADDRESS_PATTERN.replace(
            '+', '\+').replace('.', '\.') % "[^@]+"
        if options['date_created'] is None:
            # Default cutoff is tomorrow, i.e. include every existing record.
            date_created = date.today() + timedelta(days=1)
        else:
            parsed = datetime.strptime(options['date_created'], '%Y-%m-%d')
            date_created = parsed.date()
        queries = {
            'sessions': SESSION_SQL % limit,
            'nonces': NONCES_SQL % (nonce_expire_stamp, limit),
        }
        verbosity = int(options['verbosity'])
        testdata = options.get('testdata')
        if testdata:
            self.clean_testdata(test_email_pattern, date_created, limit,
                                verbosity)
        selected_queries = [query for query in queries
                            if options.get(query)]
        if not selected_queries and not testdata:
            self.stdout.write(NO_ITEMS)
        for item in selected_queries:
            if verbosity >= 1:
                self.stdout.write("\nCleaning %s..." % item)
            cursor = connection.cursor()
            cursor.execute(queries[item])
            # Repeat the batched DELETE until no rows remain.
            while cursor.rowcount > 0:
                if verbosity >= 2:
                    self.stdout.write(".")
                cursor.execute(queries[item])
            transaction.commit_unless_managed()
def clean_testdata(self, email_pattern, date_created, limit, verbosity=0):
kwargs = {'email__iregex': email_pattern,
'date_created__lt': date_created}
if verbosity >= 1:
self.stdout.write("\nCleaning accounts...\n")
<|fim▁hole|> accounts = Account.objects.filter(emailaddress__in=email_ids)
if not accounts:
break
if verbosity >= 2:
self.stdout.write("\tDeleting %d accounts..." % (
accounts.count(),))
accounts.delete()
if verbosity >= 2:
self.stdout.write('\t [OK]\n')<|fim▁end|>
|
while True:
email_ids = EmailAddress.objects.filter(**kwargs).values_list(
'pk')[:limit]
|
<|file_name|>option.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Optional values
//!
//! Type `Option` represents an optional value: every `Option`
//! is either `Some` and contains a value, or `None`, and
//! does not. `Option` types are very common in Rust code, as
//! they have a number of uses:
//!
//! * Initial values
//! * Return values for functions that are not defined
//! over their entire input range (partial functions)
//! * Return value for otherwise reporting simple errors, where `None` is
//! returned on error
//! * Optional struct fields
//! * Struct fields that can be loaned or "taken"
//! * Optional function arguments
//! * Nullable pointers
//! * Swapping things out of difficult situations
//!
//! Options are commonly paired with pattern matching to query the presence
//! of a value and take action, always accounting for the `None` case.
//!
//! ```
//! fn divide(numerator: f64, denominator: f64) -> Option<f64> {
//! if denominator == 0.0 {
//! None
//! } else {
//! Some(numerator / denominator)
//! }
//! }
//!
//! // The return value of the function is an option
//! let result = divide(2.0, 3.0);
//!
//! // Pattern match to retrieve the value
//! match result {
//! // The division was valid
//! Some(x) => println!("Result: {}", x),
//! // The division was invalid
//! None => println!("Cannot divide by 0")
//! }
//! ```
//!
//
// FIXME: Show how `Option` is used in practice, with lots of methods
//
//! # Options and pointers ("nullable" pointers)
//!
//! Rust's pointer types must always point to a valid location; there are
//! no "null" pointers. Instead, Rust has *optional* pointers, like
//! the optional owned box, `Option<Box<T>>`.
//!
//! The following example uses `Option` to create an optional box of
//! `int`. Notice that in order to use the inner `int` value first the
//! `check_optional` function needs to use pattern matching to
//! determine whether the box has a value (i.e. it is `Some(...)`) or
//! not (`None`).
//!
//! ```
//! let optional: Option<Box<int>> = None;
//! check_optional(&optional);
//!
//! let optional: Option<Box<int>> = Some(box 9000);
//! check_optional(&optional);
//!
//! fn check_optional(optional: &Option<Box<int>>) {
//! match *optional {
//! Some(ref p) => println!("have value {}", p),
//! None => println!("have no value")
//! }
//! }
//! ```
//!
//! This usage of `Option` to create safe nullable pointers is so
//! common that Rust does special optimizations to make the
//! representation of `Option<Box<T>>` a single pointer. Optional pointers
//! in Rust are stored as efficiently as any other pointer type.
//!
//! # Examples
//!
//! Basic pattern matching on `Option`:
//!
//! ```
//! let msg = Some("howdy");
//!
//! // Take a reference to the contained string
//! match msg {
//! Some(ref m) => println!("{}", *m),
//! None => ()
//! }
//!
//! // Remove the contained string, destroying the Option
//! let unwrapped_msg = match msg {
//! Some(m) => m,
//! None => "default message"
//! };
//! ```
//!
//! Initialize a result to `None` before a loop:
//!
//! ```
//! enum Kingdom { Plant(uint, &'static str), Animal(uint, &'static str) }
//!
//! // A list of data to search through.
//! let all_the_big_things = [
//! Plant(250, "redwood"),
//! Plant(230, "noble fir"),
//! Plant(229, "sugar pine"),
//! Animal(25, "blue whale"),
//! Animal(19, "fin whale"),
//! Animal(15, "north pacific right whale"),
//! ];
//!
//! // We're going to search for the name of the biggest animal,
//! // but to start with we've just got `None`.
//! let mut name_of_biggest_animal = None;
//! let mut size_of_biggest_animal = 0;
//! for big_thing in all_the_big_things.iter() {
//! match *big_thing {
//! Animal(size, name) if size > size_of_biggest_animal => {
//! // Now we've found the name of some big animal
//! size_of_biggest_animal = size;
//! name_of_biggest_animal = Some(name);
//! }
//! Animal(..) | Plant(..) => ()
//! }
//! }
//!
//! match name_of_biggest_animal {
//! Some(name) => println!("the biggest animal is {}", name),
//! None => println!("there are no animals :(")
//! }
//! ```
#![stable]
use cmp::{PartialEq, Eq, Ord};
use default::Default;
use iter::{Iterator, DoubleEndedIterator, FromIterator, ExactSize};
use mem;
use result::{Result, Ok, Err};
use slice;
use slice::AsSlice;
// Note that this is not a lang item per se, but it has a hidden dependency on
// `Iterator`, which is one. The compiler assumes that the `next` method of
// `Iterator` is an enumeration with one type parameter and two variants,
// which basically means it must be `Option`.
/// The `Option` type.
#[deriving(Clone, PartialEq, PartialOrd, Eq, Ord, Show)]
#[stable]
pub enum Option<T> {
/// No value
None,
/// Some value `T`
Some(T)
}
/////////////////////////////////////////////////////////////////////////////
// Type implementation
/////////////////////////////////////////////////////////////////////////////
impl<T> Option<T> {
/////////////////////////////////////////////////////////////////////////
// Querying the contained values
/////////////////////////////////////////////////////////////////////////
/// Returns `true` if the option is a `Some` value
///
/// # Example
///
/// ```
/// let x: Option<uint> = Some(2);
/// assert_eq!(x.is_some(), true);
///
/// let x: Option<uint> = None;
/// assert_eq!(x.is_some(), false);
/// ```
#[inline]
#[stable]
pub fn is_some(&self) -> bool {
match *self {
Some(_) => true,
None => false
}
}
/// Returns `true` if the option is a `None` value
///
/// # Example
///
/// ```
/// let x: Option<uint> = Some(2);
/// assert_eq!(x.is_none(), false);
///
/// let x: Option<uint> = None;
/// assert_eq!(x.is_none(), true);
/// ```
#[inline]
#[stable]
pub fn is_none(&self) -> bool {
!self.is_some()
}
/////////////////////////////////////////////////////////////////////////
// Adapter for working with references
/////////////////////////////////////////////////////////////////////////
/// Convert from `Option<T>` to `Option<&T>`
///
/// # Example
///
/// Convert an `Option<String>` into an `Option<int>`, preserving the original.
/// The `map` method takes the `self` argument by value, consuming the original,
/// so this technique uses `as_ref` to first take an `Option` to a reference
/// to the value inside the original.
///
/// ```
/// let num_as_str: Option<String> = Some("10".to_string());
/// // First, cast `Option<String>` to `Option<&String>` with `as_ref`,
/// // then consume *that* with `map`, leaving `num_as_str` on the stack.
/// let num_as_int: Option<uint> = num_as_str.as_ref().map(|n| n.len());
/// println!("still can print num_as_str: {}", num_as_str);
/// ```
#[inline]
#[stable]
pub fn as_ref<'r>(&'r self) -> Option<&'r T> {
match *self { Some(ref x) => Some(x), None => None }
}
/// Convert from `Option<T>` to `Option<&mut T>`
///
/// # Example
///
/// ```
/// let mut x = Some(2u);
/// match x.as_mut() {
/// Some(&ref mut v) => *v = 42,
/// None => {},
/// }
/// assert_eq!(x, Some(42u));
/// ```
#[inline]
#[unstable = "waiting for mut conventions"]
pub fn as_mut<'r>(&'r mut self) -> Option<&'r mut T> {
match *self { Some(ref mut x) => Some(x), None => None }
}
/// Convert from `Option<T>` to `&mut [T]` (without copying)
///
/// # Example
///
/// ```
/// let mut x = Some("Diamonds");
/// {
/// let v = x.as_mut_slice();
/// assert!(v == ["Diamonds"]);
/// v[0] = "Dirt";
/// assert!(v == ["Dirt"]);
/// }
/// assert_eq!(x, Some("Dirt"));
/// ```
#[inline]
#[unstable = "waiting for mut conventions"]
pub fn as_mut_slice<'r>(&'r mut self) -> &'r mut [T] {
match *self {
Some(ref mut x) => {
let result: &mut [T] = slice::mut_ref_slice(x);
result
}
None => {
let result: &mut [T] = &mut [];
result
}
}
}
/////////////////////////////////////////////////////////////////////////
// Getting to contained values
/////////////////////////////////////////////////////////////////////////
/// Unwraps an option, yielding the content of a `Some`
///
/// # Failure
///
/// Fails if the value is a `None` with a custom failure message provided by
/// `msg`.
///
/// # Example
///
/// ```
/// let x = Some("value");
/// assert_eq!(x.expect("the world is ending"), "value");
/// ```
///
/// ```{.should_fail}
/// let x: Option<&str> = None;
/// x.expect("the world is ending"); // fails with `world is ending`
/// ```
#[inline]
#[unstable = "waiting for conventions"]
pub fn expect(self, msg: &str) -> T {
match self {
Some(val) => val,
None => fail!("{}", msg),
}
}
/// Returns the inner `T` of a `Some(T)`.
///
/// # Failure
///
/// Fails if the self value equals `None`.
///
/// # Safety note
///
/// In general, because this function may fail, its use is discouraged.
/// Instead, prefer to use pattern matching and handle the `None`
/// case explicitly.
///
/// # Example
///
/// ```
/// let x = Some("air");
/// assert_eq!(x.unwrap(), "air");
/// ```
///
/// ```{.should_fail}
/// let x: Option<&str> = None;
/// assert_eq!(x.unwrap(), "air"); // fails
/// ```
#[inline]
#[unstable = "waiting for conventions"]
pub fn unwrap(self) -> T {
match self {
Some(val) => val,
None => fail!("called `Option::unwrap()` on a `None` value"),
}
}
/// Returns the contained value or a default.
///
/// # Example
///
/// ```
/// assert_eq!(Some("car").unwrap_or("bike"), "car");
/// assert_eq!(None.unwrap_or("bike"), "bike");
/// ```
#[inline]
#[unstable = "waiting for conventions"]
pub fn unwrap_or(self, def: T) -> T {
match self {
Some(x) => x,
None => def
}
}
/// Returns the contained value or computes it from a closure.
///
/// # Example
///
/// ```
/// let k = 10u;
/// assert_eq!(Some(4u).unwrap_or_else(|| 2 * k), 4u);
/// assert_eq!(None.unwrap_or_else(|| 2 * k), 20u);
/// ```
#[inline]
#[unstable = "waiting for conventions"]
pub fn unwrap_or_else(self, f: || -> T) -> T {
match self {
Some(x) => x,
None => f()
}
}
/////////////////////////////////////////////////////////////////////////
// Transforming contained values
/////////////////////////////////////////////////////////////////////////
/// Maps an `Option<T>` to `Option<U>` by applying a function to a contained value
///
/// # Example
///
/// Convert an `Option<String>` into an `Option<uint>`, consuming the original:
///
/// ```
/// let num_as_str: Option<String> = Some("10".to_string());
/// // `Option::map` takes self *by value*, consuming `num_as_str`
/// let num_as_int: Option<uint> = num_as_str.map(|n| n.len());
/// ```
#[inline]
#[unstable = "waiting for unboxed closures"]
pub fn map<U>(self, f: |T| -> U) -> Option<U> {
match self { Some(x) => Some(f(x)), None => None }
}
/// Applies a function to the contained value or returns a default.
///
/// # Example
///
/// ```
/// let x = Some("foo");
/// assert_eq!(x.map_or(42u, |v| v.len()), 3u);
///
/// let x: Option<&str> = None;
/// assert_eq!(x.map_or(42u, |v| v.len()), 42u);
/// ```
#[inline]
#[unstable = "waiting for unboxed closures"]
pub fn map_or<U>(self, def: U, f: |T| -> U) -> U {
match self { None => def, Some(t) => f(t) }
}
/// Applies a function to the contained value or computes a default.
///
/// # Example
///
/// ```
/// let k = 21u;
///
/// let x = Some("foo");
/// assert_eq!(x.map_or_else(|| 2 * k, |v| v.len()), 3u);
///
/// let x: Option<&str> = None;
/// assert_eq!(x.map_or_else(|| 2 * k, |v| v.len()), 42u);
/// ```
#[inline]
#[unstable = "waiting for unboxed closures"]
pub fn map_or_else<U>(self, def: || -> U, f: |T| -> U) -> U {
match self { None => def(), Some(t) => f(t) }
}
/// Transforms the `Option<T>` into a `Result<T, E>`, mapping `Some(v)` to
/// `Ok(v)` and `None` to `Err(err)`.
///
/// # Example
///
/// ```
/// let x = Some("foo");
/// assert_eq!(x.ok_or(0i), Ok("foo"));
///
/// let x: Option<&str> = None;
/// assert_eq!(x.ok_or(0i), Err(0i));
/// ```
#[inline]
#[experimental]
pub fn ok_or<E>(self, err: E) -> Result<T, E> {
match self {
Some(v) => Ok(v),
None => Err(err),
}
}
/// Transforms the `Option<T>` into a `Result<T, E>`, mapping `Some(v)` to
/// `Ok(v)` and `None` to `Err(err())`.
///
/// # Example
///
/// ```
/// let x = Some("foo");
/// assert_eq!(x.ok_or_else(|| 0i), Ok("foo"));
///
/// let x: Option<&str> = None;
/// assert_eq!(x.ok_or_else(|| 0i), Err(0i));
/// ```
#[inline]
#[experimental]
pub fn ok_or_else<E>(self, err: || -> E) -> Result<T, E> {
match self {
Some(v) => Ok(v),
None => Err(err()),
}
}
/// Deprecated.
///
/// Applies a function to the contained value or does nothing.
/// Returns true if the contained value was mutated.
#[deprecated = "removed due to lack of use"]
pub fn mutate(&mut self, f: |T| -> T) -> bool {
if self.is_some() {
*self = Some(f(self.take().unwrap()));
true
} else { false }
}
/// Deprecated.
///
/// Applies a function to the contained value or sets it to a default.
/// Returns true if the contained value was mutated, or false if set to the default.
#[deprecated = "removed due to lack of use"]
pub fn mutate_or_set(&mut self, def: T, f: |T| -> T) -> bool {
if self.is_some() {
*self = Some(f(self.take().unwrap()));
true
} else {
*self = Some(def);
false
}
}
/////////////////////////////////////////////////////////////////////////
// Iterator constructors
/////////////////////////////////////////////////////////////////////////
/// Returns an iterator over the possibly contained value.
///
/// # Example
///
/// ```
/// let x = Some(4u);
/// assert_eq!(x.iter().next(), Some(&4));
///
/// let x: Option<uint> = None;
/// assert_eq!(x.iter().next(), None);
/// ```
#[inline]
#[unstable = "waiting for iterator conventions"]
pub fn iter<'r>(&'r self) -> Item<&'r T> {
Item{opt: self.as_ref()}
}
/// Deprecated: use `iter_mut`
#[deprecated = "use iter_mut"]
pub fn mut_iter<'r>(&'r mut self) -> Item<&'r mut T> {
self.iter_mut()
}
/// Returns a mutable iterator over the possibly contained value.
///
/// # Example
///
/// ```
/// let mut x = Some(4u);
/// match x.iter_mut().next() {
/// Some(&ref mut v) => *v = 42u,
/// None => {},
/// }
/// assert_eq!(x, Some(42));
///
/// let mut x: Option<uint> = None;
/// assert_eq!(x.iter_mut().next(), None);
/// ```
#[inline]
#[unstable = "waiting for iterator conventions"]
pub fn iter_mut<'r>(&'r mut self) -> Item<&'r mut T> {
Item{opt: self.as_mut()}
}
/// Deprecated: use `into_iter`.
#[deprecated = "use into_iter"]
pub fn move_iter(self) -> Item<T> {
self.into_iter()
}
/// Returns a consuming iterator over the possibly contained value.<|fim▁hole|> /// # Example
///
/// ```
/// let x = Some("string");
/// let v: Vec<&str> = x.into_iter().collect();
/// assert_eq!(v, vec!["string"]);
///
/// let x = None;
/// let v: Vec<&str> = x.into_iter().collect();
/// assert_eq!(v, vec![]);
/// ```
#[inline]
#[unstable = "waiting for iterator conventions"]
pub fn into_iter(self) -> Item<T> {
Item{opt: self}
}
/////////////////////////////////////////////////////////////////////////
// Boolean operations on the values, eager and lazy
/////////////////////////////////////////////////////////////////////////
/// Returns `None` if the option is `None`, otherwise returns `optb`.
///
/// # Example
///
/// ```
/// let x = Some(2u);
/// let y: Option<&str> = None;
/// assert_eq!(x.and(y), None);
///
/// let x: Option<uint> = None;
/// let y = Some("foo");
/// assert_eq!(x.and(y), None);
///
/// let x = Some(2u);
/// let y = Some("foo");
/// assert_eq!(x.and(y), Some("foo"));
///
/// let x: Option<uint> = None;
/// let y: Option<&str> = None;
/// assert_eq!(x.and(y), None);
/// ```
#[inline]
#[stable]
pub fn and<U>(self, optb: Option<U>) -> Option<U> {
match self {
Some(_) => optb,
None => None,
}
}
/// Returns `None` if the option is `None`, otherwise calls `f` with the
/// wrapped value and returns the result.
///
/// # Example
///
/// ```
/// fn sq(x: uint) -> Option<uint> { Some(x * x) }
/// fn nope(_: uint) -> Option<uint> { None }
///
/// assert_eq!(Some(2).and_then(sq).and_then(sq), Some(16));
/// assert_eq!(Some(2).and_then(sq).and_then(nope), None);
/// assert_eq!(Some(2).and_then(nope).and_then(sq), None);
/// assert_eq!(None.and_then(sq).and_then(sq), None);
/// ```
#[inline]
#[unstable = "waiting for unboxed closures"]
pub fn and_then<U>(self, f: |T| -> Option<U>) -> Option<U> {
match self {
Some(x) => f(x),
None => None,
}
}
/// Returns the option if it contains a value, otherwise returns `optb`.
///
/// # Example
///
/// ```
/// let x = Some(2u);
/// let y = None;
/// assert_eq!(x.or(y), Some(2u));
///
/// let x = None;
/// let y = Some(100u);
/// assert_eq!(x.or(y), Some(100u));
///
/// let x = Some(2u);
/// let y = Some(100u);
/// assert_eq!(x.or(y), Some(2u));
///
/// let x: Option<uint> = None;
/// let y = None;
/// assert_eq!(x.or(y), None);
/// ```
#[inline]
#[stable]
pub fn or(self, optb: Option<T>) -> Option<T> {
match self {
Some(_) => self,
None => optb
}
}
/// Returns the option if it contains a value, otherwise calls `f` and
/// returns the result.
///
/// # Example
///
/// ```
/// fn nobody() -> Option<&'static str> { None }
/// fn vikings() -> Option<&'static str> { Some("vikings") }
///
/// assert_eq!(Some("barbarians").or_else(vikings), Some("barbarians"));
/// assert_eq!(None.or_else(vikings), Some("vikings"));
/// assert_eq!(None.or_else(nobody), None);
/// ```
#[inline]
#[unstable = "waiting for unboxed closures"]
pub fn or_else(self, f: || -> Option<T>) -> Option<T> {
match self {
Some(_) => self,
None => f()
}
}
/////////////////////////////////////////////////////////////////////////
// Misc
/////////////////////////////////////////////////////////////////////////
/// Takes the value out of the option, leaving a `None` in its place.
///
/// # Example
///
/// ```
/// let mut x = Some(2u);
/// x.take();
/// assert_eq!(x, None);
///
/// let mut x: Option<uint> = None;
/// x.take();
/// assert_eq!(x, None);
/// ```
#[inline]
#[stable]
pub fn take(&mut self) -> Option<T> {
mem::replace(self, None)
}
/// Deprecated.
///
/// Filters an optional value using a given function.
#[inline(always)]
#[deprecated = "removed due to lack of use"]
pub fn filtered(self, f: |t: &T| -> bool) -> Option<T> {
match self {
Some(x) => if f(&x) { Some(x) } else { None },
None => None
}
}
/// Deprecated.
///
/// Applies a function zero or more times until the result is `None`.
#[inline]
#[deprecated = "removed due to lack of use"]
pub fn while_some(self, f: |v: T| -> Option<T>) {
let mut opt = self;
loop {
match opt {
Some(x) => opt = f(x),
None => break
}
}
}
/////////////////////////////////////////////////////////////////////////
// Common special cases
/////////////////////////////////////////////////////////////////////////
/// Deprecated: use `take().unwrap()` instead.
///
/// The option dance. Moves a value out of an option type and returns it,
/// replacing the original with `None`.
///
/// # Failure
///
/// Fails if the value equals `None`.
#[inline]
#[deprecated = "use take().unwrap() instead"]
pub fn take_unwrap(&mut self) -> T {
match self.take() {
Some(x) => x,
None => fail!("called `Option::take_unwrap()` on a `None` value")
}
}
/// Deprecated: use `as_ref().unwrap()` instead.
///
/// Gets an immutable reference to the value inside an option.
///
/// # Failure
///
/// Fails if the value equals `None`
///
/// # Safety note
///
/// In general, because this function may fail, its use is discouraged
/// (calling `get` on `None` is akin to dereferencing a null pointer).
/// Instead, prefer to use pattern matching and handle the `None`
/// case explicitly.
#[inline]
#[deprecated = "use .as_ref().unwrap() instead"]
pub fn get_ref<'a>(&'a self) -> &'a T {
match *self {
Some(ref x) => x,
None => fail!("called `Option::get_ref()` on a `None` value"),
}
}
/// Deprecated: use `as_mut().unwrap()` instead.
///
/// Gets a mutable reference to the value inside an option.
///
/// # Failure
///
/// Fails if the value equals `None`
///
/// # Safety note
///
/// In general, because this function may fail, its use is discouraged
/// (calling `get` on `None` is akin to dereferencing a null pointer).
/// Instead, prefer to use pattern matching and handle the `None`
/// case explicitly.
#[inline]
#[deprecated = "use .as_mut().unwrap() instead"]
pub fn get_mut_ref<'a>(&'a mut self) -> &'a mut T {
match *self {
Some(ref mut x) => x,
None => fail!("called `Option::get_mut_ref()` on a `None` value"),
}
}
}
impl<T: Default> Option<T> {
/// Returns the contained value or a default
///
/// Consumes the `self` argument then, if `Some`, returns the contained
/// value, otherwise if `None`, returns the default value for that
/// type.
///
/// # Example
///
/// Convert a string to an integer, turning poorly-formed strings
/// into 0 (the default value for integers). `from_str` converts
/// a string to any other type that implements `FromStr`, returning
/// `None` on error.
///
/// ```
/// let good_year_from_input = "1909";
/// let bad_year_from_input = "190blarg";
/// let good_year = from_str(good_year_from_input).unwrap_or_default();
/// let bad_year = from_str(bad_year_from_input).unwrap_or_default();
///
/// assert_eq!(1909i, good_year);
/// assert_eq!(0i, bad_year);
/// ```
#[inline]
#[unstable = "waiting for conventions"]
pub fn unwrap_or_default(self) -> T {
match self {
Some(x) => x,
None => Default::default()
}
}
}
/////////////////////////////////////////////////////////////////////////////
// Trait implementations
/////////////////////////////////////////////////////////////////////////////
impl<T> AsSlice<T> for Option<T> {
/// Convert from `Option<T>` to `&[T]` (without copying)
#[inline]
#[stable]
fn as_slice<'a>(&'a self) -> &'a [T] {
match *self {
Some(ref x) => slice::ref_slice(x),
None => {
let result: &[_] = &[];
result
}
}
}
}
impl<T> Default for Option<T> {
#[inline]
fn default() -> Option<T> { None }
}
/////////////////////////////////////////////////////////////////////////////
// The Option Iterator
/////////////////////////////////////////////////////////////////////////////
/// An `Option` iterator that yields either one or zero elements
///
/// The `Item` iterator is returned by the `iter`, `iter_mut` and `into_iter`
/// methods on `Option`.
#[deriving(Clone)]
#[unstable = "waiting for iterator conventions"]
pub struct Item<A> {
opt: Option<A>
}
impl<A> Iterator<A> for Item<A> {
#[inline]
fn next(&mut self) -> Option<A> {
self.opt.take()
}
#[inline]
fn size_hint(&self) -> (uint, Option<uint>) {
match self.opt {
Some(_) => (1, Some(1)),
None => (0, Some(0)),
}
}
}
impl<A> DoubleEndedIterator<A> for Item<A> {
#[inline]
fn next_back(&mut self) -> Option<A> {
self.opt.take()
}
}
impl<A> ExactSize<A> for Item<A> {}
/////////////////////////////////////////////////////////////////////////////
// Free functions
/////////////////////////////////////////////////////////////////////////////
/// Deprecated: use `Iterator::collect` instead.
#[inline]
#[deprecated = "use Iterator::collect instead"]
pub fn collect<T, Iter: Iterator<Option<T>>, V: FromIterator<T>>(mut iter: Iter) -> Option<V> {
iter.collect()
}
impl<A, V: FromIterator<A>> FromIterator<Option<A>> for Option<V> {
/// Takes each element in the `Iterator`: if it is `None`, no further
/// elements are taken, and the `None` is returned. Should no `None` occur, a
/// container with the values of each `Option` is returned.
///
/// Here is an example which increments every integer in a vector,
/// checking for overflow:
///
/// ```rust
/// use std::uint;
///
/// let v = vec!(1u, 2u);
/// let res: Option<Vec<uint>> = v.iter().map(|x: &uint|
/// if *x == uint::MAX { None }
/// else { Some(x + 1) }
/// ).collect();
/// assert!(res == Some(vec!(2u, 3u)));
/// ```
#[inline]
fn from_iter<I: Iterator<Option<A>>>(iter: I) -> Option<V> {
// FIXME(#11084): This could be replaced with Iterator::scan when this
// performance bug is closed.
struct Adapter<Iter> {
iter: Iter,
found_none: bool,
}
impl<T, Iter: Iterator<Option<T>>> Iterator<T> for Adapter<Iter> {
#[inline]
fn next(&mut self) -> Option<T> {
match self.iter.next() {
Some(Some(value)) => Some(value),
Some(None) => {
self.found_none = true;
None
}
None => None,
}
}
}
let mut adapter = Adapter { iter: iter, found_none: false };
let v: V = FromIterator::from_iter(adapter.by_ref());
if adapter.found_none {
None
} else {
Some(v)
}
}
}<|fim▁end|>
|
///
|
<|file_name|>suppliers.py<|end_file_name|><|fim▁begin|>from flask.ext.wtf import Form
from wtforms import IntegerField, StringField, FieldList
from wtforms.validators import DataRequired, Email, ValidationError
def word_length(limit=None, message=None):
message = message or 'Must not be more than %d words'
message = message % limit
def _length(form, field):
if not field.data or not limit:
return field
if len(field.data.split()) > limit:
raise ValidationError(message)
return _length
class EditSupplierForm(Form):
description = StringField('Supplier summary', validators=[
word_length(50, 'Your summary must not be more than %d words')
])
clients = FieldList(StringField())
def validate_clients(form, field):<|fim▁hole|> if len(field.data) > 10:
raise ValidationError('You must have 10 or fewer clients')
class EditContactInformationForm(Form):
id = IntegerField()
address1 = StringField('Business address')
address2 = StringField('Business address')
city = StringField('Town or city')
country = StringField()
postcode = StringField(validators=[
DataRequired(message="Postcode can not be empty"),
])
website = StringField()
phoneNumber = StringField('Phone number')
email = StringField('Email address', validators=[
DataRequired(message="Email can not be empty"),
Email(message="Please enter a valid email address")
])
contactName = StringField('Contact name', validators=[
DataRequired(message="Contact name can not be empty"),
])<|fim▁end|>
| |
<|file_name|>manage.js<|end_file_name|><|fim▁begin|>module.exports = require('../crud/list')({
view: 'users/manage',
model: require('../../models/user'),
sort: {
name: 1<|fim▁hole|>});<|fim▁end|>
|
}
|
<|file_name|>strings.js<|end_file_name|><|fim▁begin|>define({
root:({
mode: "Mode",
saveToPortal: "Save to Portal",
saveToDisk: "Save To Disk",
title: "Title",
description: "Description",
tags: "Tags",
defineExtent: "Define Extent",
submit: "Submit",
pixelSize: "Pixel Size",
outputSpatialReference: "Output Spatial Reference",
currentRenderer: "Current Renderer",
note: "Note",
currentRendererChecked: "If Current Renderer is checked, the rendering",
originalDataValues: "is exported, else the original data values",
exported: "will be exported.",
export: "Export",
exportImage: "Export Image",
layerSaved: "Layer saved.",
error: "Error! ",
errorNotification: "Error! No Imagery Layer visible on the map.",
utmZone: "WGS84 UTM Zone ",
WKID: "WKID : ",
webMercatorAs: "WebMercatorAS",
default: "Default",
pixelSizeRestricted: "PixelSize of export is restricted to ",
thisExtent: " for this extent.",<|fim▁hole|> exportLayerMsg: "No visible imagery layers on the map."
}),
"ar": 1,
"fr": 1
});<|fim▁end|>
|
errorPixelSize: "Error! No Imagery Layer visible on the map.",
layer: "Layer",
|
<|file_name|>ln-CF.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js<|fim▁hole|>const u = undefined;
function plural(n: number): number {
if (n === Math.floor(n) && n >= 0 && n <= 1) return 1;
return 5;
}
export default [
'ln-CF', [['ntɔ́ngɔ́', 'mpókwa'], u, u], u,
[
['e', 'y', 'm', 'm', 'm', 'm', 'p'], ['eye', 'ybo', 'mbl', 'mst', 'min', 'mtn', 'mps'],
[
'eyenga', 'mokɔlɔ mwa yambo', 'mokɔlɔ mwa míbalé', 'mokɔlɔ mwa mísáto',
'mokɔlɔ ya mínéi', 'mokɔlɔ ya mítáno', 'mpɔ́sɔ'
],
['eye', 'ybo', 'mbl', 'mst', 'min', 'mtn', 'mps']
],
u,
[
['y', 'f', 'm', 'a', 'm', 'y', 'y', 'a', 's', 'ɔ', 'n', 'd'],
['yan', 'fbl', 'msi', 'apl', 'mai', 'yun', 'yul', 'agt', 'stb', 'ɔtb', 'nvb', 'dsb'],
[
'sánzá ya yambo', 'sánzá ya míbalé', 'sánzá ya mísáto', 'sánzá ya mínei',
'sánzá ya mítáno', 'sánzá ya motóbá', 'sánzá ya nsambo', 'sánzá ya mwambe',
'sánzá ya libwa', 'sánzá ya zómi', 'sánzá ya zómi na mɔ̌kɔ́',
'sánzá ya zómi na míbalé'
]
],
u, [['libóso ya', 'nsima ya Y'], u, ['Yambo ya Yézu Krís', 'Nsima ya Yézu Krís']], 1, [6, 0],
['d/M/y', 'd MMM y', 'd MMMM y', 'EEEE d MMMM y'],
['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'], ['{1} {0}', u, u, u],
[',', '.', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0%', '#,##0.00 ¤', '#E0'], 'FCFA', 'Falánga CFA BEAC',
{'CDF': ['FC'], 'JPY': ['JP¥', '¥'], 'USD': ['US$', '$']}, plural
];<|fim▁end|>
| |
<|file_name|>producertask.py<|end_file_name|><|fim▁begin|>"""
Copyright 2015-2018 IBM
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Licensed Materials - Property of IBM
© Copyright IBM Corp. 2015-2018<|fim▁hole|>"""
import asyncio
from confluent_kafka import Producer
class ProducerTask(object):
def __init__(self, conf, topic_name):
self.topic_name = topic_name
self.producer = Producer(conf)
self.counter = 0
self.running = True
def stop(self):
self.running = False
def on_delivery(self, err, msg):
if err:
print('Delivery report: Failed sending message {0}'.format(msg.value()))
print(err)
# We could retry sending the message
else:
print('Message produced, offset: {0}'.format(msg.offset()))
@asyncio.coroutine
def run(self):
print('The producer has started')
while self.running:
message = 'This is a test message #{0}'.format(self.counter)
key = 'key'
sleep = 2 # Short sleep for flow control
try:
self.producer.produce(self.topic_name, message, key, -1, self.on_delivery)
self.producer.poll(0)
self.counter += 1
except Exception as err:
print('Failed sending message {0}'.format(message))
print(err)
sleep = 5 # Longer sleep before retrying
yield from asyncio.sleep(sleep)
self.producer.flush()<|fim▁end|>
| |
<|file_name|>student.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = function(sequelize, DataTypes) {
var Student = sequelize.define('Student', {
name: DataTypes.STRING,
timeReq: DataTypes.INTEGER,
}, {
classMethods: {
associate: function() {
}
}
});
return Student;<|fim▁hole|>};<|fim▁end|>
| |
<|file_name|>pass.hpp<|end_file_name|><|fim▁begin|>// Copyright © 2014-2016 Ryan Leckey, All Rights Reserved.
// Distributed under the MIT License
// See accompanying file LICENSE
#ifndef ARROW_PASS_H
#define ARROW_PASS_H
#include <memory>
#include <string>
#include "arrow/generator.hpp"
namespace arrow {
class Pass {
public:
explicit Pass(GContext& ctx) : _ctx(ctx) { }<|fim▁hole|> Pass(const Pass& other) = delete;
Pass(Pass&& other) = delete;
Pass& operator=(const Pass& other) = delete;
Pass& operator=(Pass&& other) = delete;
protected:
GContext& _ctx;
};
} // namespace arrow
#endif // ARROW_PASS_H<|fim▁end|>
| |
<|file_name|>overrides.py<|end_file_name|><|fim▁begin|>"""Implementation of __array_function__ overrides from NEP-18."""
import collections
import functools
import os
import textwrap
from numpy.core._multiarray_umath import (
add_docstring, implement_array_function, _get_implementing_args)
from numpy.compat._inspect import getargspec
ARRAY_FUNCTION_ENABLED = bool(
int(os.environ.get('NUMPY_EXPERIMENTAL_ARRAY_FUNCTION', 1)))
array_function_like_doc = (
"""like : array_like
Reference object to allow the creation of arrays which are not
NumPy arrays. If an array-like passed in as ``like`` supports
the ``__array_function__`` protocol, the result will be defined
by it. In this case, it ensures the creation of an array object
compatible with that passed in via this argument.
.. note::
The ``like`` keyword is an experimental feature pending on
acceptance of :ref:`NEP 35 <NEP35>`."""
)
def set_array_function_like_doc(public_api):
if public_api.__doc__ is not None:
public_api.__doc__ = public_api.__doc__.replace(
"${ARRAY_FUNCTION_LIKE}",
array_function_like_doc,
)
return public_api
add_docstring(
implement_array_function,
"""
Implement a function with checks for __array_function__ overrides.
All arguments are required, and can only be passed by position.
Parameters
----------<|fim▁hole|> Function that implements the operation on NumPy array without
overrides when called like ``implementation(*args, **kwargs)``.
public_api : function
Function exposed by NumPy's public API originally called like
``public_api(*args, **kwargs)`` on which arguments are now being
checked.
relevant_args : iterable
Iterable of arguments to check for __array_function__ methods.
args : tuple
Arbitrary positional arguments originally passed into ``public_api``.
kwargs : dict
Arbitrary keyword arguments originally passed into ``public_api``.
Returns
-------
Result from calling ``implementation()`` or an ``__array_function__``
method, as appropriate.
Raises
------
TypeError : if no implementation is found.
""")
# exposed for testing purposes; used internally by implement_array_function
add_docstring(
_get_implementing_args,
"""
Collect arguments on which to call __array_function__.
Parameters
----------
relevant_args : iterable of array-like
Iterable of possibly array-like arguments to check for
__array_function__ methods.
Returns
-------
Sequence of arguments with __array_function__ methods, in the order in
which they should be called.
""")
ArgSpec = collections.namedtuple('ArgSpec', 'args varargs keywords defaults')
def verify_matching_signatures(implementation, dispatcher):
"""Verify that a dispatcher function has the right signature."""
implementation_spec = ArgSpec(*getargspec(implementation))
dispatcher_spec = ArgSpec(*getargspec(dispatcher))
if (implementation_spec.args != dispatcher_spec.args or
implementation_spec.varargs != dispatcher_spec.varargs or
implementation_spec.keywords != dispatcher_spec.keywords or
(bool(implementation_spec.defaults) !=
bool(dispatcher_spec.defaults)) or
(implementation_spec.defaults is not None and
len(implementation_spec.defaults) !=
len(dispatcher_spec.defaults))):
raise RuntimeError('implementation and dispatcher for %s have '
'different function signatures' % implementation)
if implementation_spec.defaults is not None:
if dispatcher_spec.defaults != (None,) * len(dispatcher_spec.defaults):
raise RuntimeError('dispatcher functions can only use None for '
'default argument values')
def set_module(module):
"""Decorator for overriding __module__ on a function or class.
Example usage::
@set_module('numpy')
def example():
pass
assert example.__module__ == 'numpy'
"""
def decorator(func):
if module is not None:
func.__module__ = module
return func
return decorator
# Call textwrap.dedent here instead of in the function so as to avoid
# calling dedent multiple times on the same text
_wrapped_func_source = textwrap.dedent("""
@functools.wraps(implementation)
def {name}(*args, **kwargs):
relevant_args = dispatcher(*args, **kwargs)
return implement_array_function(
implementation, {name}, relevant_args, args, kwargs)
""")
def array_function_dispatch(dispatcher, module=None, verify=True,
docs_from_dispatcher=False):
"""Decorator for adding dispatch with the __array_function__ protocol.
See NEP-18 for example usage.
Parameters
----------
dispatcher : callable
Function that when called like ``dispatcher(*args, **kwargs)`` with
arguments from the NumPy function call returns an iterable of
array-like arguments to check for ``__array_function__``.
module : str, optional
__module__ attribute to set on new function, e.g., ``module='numpy'``.
By default, module is copied from the decorated function.
verify : bool, optional
If True, verify the that the signature of the dispatcher and decorated
function signatures match exactly: all required and optional arguments
should appear in order with the same names, but the default values for
all optional arguments should be ``None``. Only disable verification
if the dispatcher's signature needs to deviate for some particular
reason, e.g., because the function has a signature like
``func(*args, **kwargs)``.
docs_from_dispatcher : bool, optional
If True, copy docs from the dispatcher function onto the dispatched
function, rather than from the implementation. This is useful for
functions defined in C, which otherwise don't have docstrings.
Returns
-------
Function suitable for decorating the implementation of a NumPy function.
"""
if not ARRAY_FUNCTION_ENABLED:
def decorator(implementation):
if docs_from_dispatcher:
add_docstring(implementation, dispatcher.__doc__)
if module is not None:
implementation.__module__ = module
return implementation
return decorator
def decorator(implementation):
if verify:
verify_matching_signatures(implementation, dispatcher)
if docs_from_dispatcher:
add_docstring(implementation, dispatcher.__doc__)
# Equivalently, we could define this function directly instead of using
# exec. This version has the advantage of giving the helper function a
# more interpettable name. Otherwise, the original function does not
# show up at all in many cases, e.g., if it's written in C or if the
# dispatcher gets an invalid keyword argument.
source = _wrapped_func_source.format(name=implementation.__name__)
source_object = compile(
source, filename='<__array_function__ internals>', mode='exec')
scope = {
'implementation': implementation,
'dispatcher': dispatcher,
'functools': functools,
'implement_array_function': implement_array_function,
}
exec(source_object, scope)
public_api = scope[implementation.__name__]
if module is not None:
public_api.__module__ = module
public_api._implementation = implementation
return public_api
return decorator
def array_function_from_dispatcher(
implementation, module=None, verify=True, docs_from_dispatcher=True):
"""Like array_function_dispatcher, but with function arguments flipped."""
def decorator(dispatcher):
return array_function_dispatch(
dispatcher, module, verify=verify,
docs_from_dispatcher=docs_from_dispatcher)(implementation)
return decorator<|fim▁end|>
|
implementation : function
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import { combineReducers } from 'redux'
import auth from './auth'
import watt from './watt'
import locations from './locations'<|fim▁hole|> auth,
watt,
locations
})<|fim▁end|>
|
export default combineReducers({
|
<|file_name|>model.rs<|end_file_name|><|fim▁begin|>// Copyright 2019-2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::format::Format;
use crate::{usize_to_nat, StoreError, StoreRatio, StoreResult, StoreUpdate};
use std::collections::HashMap;
/// Models the mutable operations of a store.
///
/// The model doesn't model the storage and read-only operations. This is done by the driver.
#[derive(Clone, Debug)]
pub struct StoreModel {
/// Represents the content of the store.
content: HashMap<usize, Box<[u8]>>,
/// The modeled storage configuration.
format: Format,
}
/// Mutable operations on a store.
#[derive(Clone, Debug)]
pub enum StoreOperation {
/// Applies a transaction.
Transaction {
/// The list of updates to be applied.
updates: Vec<StoreUpdate>,
},
/// Deletes all keys above a threshold.
Clear {
/// The minimum key to be deleted.
min_key: usize,
},
/// Compacts the store until a given capacity is immediately available.
Prepare {
/// How much capacity should be immediately available after compaction.
length: usize,
},
}
impl StoreModel {
/// Creates an empty model for a given storage configuration.
pub fn new(format: Format) -> StoreModel {
let content = HashMap::new();
StoreModel { content, format }
}
/// Returns the modeled content.
pub fn content(&self) -> &HashMap<usize, Box<[u8]>> {
&self.content
}
/// Returns the storage configuration.
pub fn format(&self) -> &Format {
&self.format
}
/// Simulates a store operation.
pub fn apply(&mut self, operation: StoreOperation) -> StoreResult<()> {
match operation {
StoreOperation::Transaction { updates } => self.transaction(updates),
StoreOperation::Clear { min_key } => self.clear(min_key),
StoreOperation::Prepare { length } => self.prepare(length),
}
}
/// Returns the capacity according to the model.
pub fn capacity(&self) -> StoreRatio {
let total = self.format.total_capacity();
let used = usize_to_nat(
self.content
.values()
.map(|x| self.format.entry_size(x) as usize)
.sum(),
);
StoreRatio { used, total }
}
/// Applies a transaction.
fn transaction(&mut self, updates: Vec<StoreUpdate>) -> StoreResult<()> {
// Fail if the transaction is invalid.
if self.format.transaction_valid(&updates).is_none() {
return Err(StoreError::InvalidArgument);
}
// Fail if there is not enough capacity.
let capacity = self.format.transaction_capacity(&updates) as usize;
if self.capacity().remaining() < capacity {<|fim▁hole|> }
// Apply the updates.
for update in updates {
match update {
StoreUpdate::Insert { key, value } => {
self.content.insert(key, value.into_boxed_slice());
}
StoreUpdate::Remove { key } => {
self.content.remove(&key);
}
}
}
Ok(())
}
/// Applies a clear operation.
fn clear(&mut self, min_key: usize) -> StoreResult<()> {
if min_key > self.format.max_key() as usize {
return Err(StoreError::InvalidArgument);
}
self.content.retain(|&k, _| k < min_key);
Ok(())
}
/// Applies a prepare operation.
fn prepare(&self, length: usize) -> StoreResult<()> {
if self.capacity().remaining() < length {
return Err(StoreError::NoCapacity);
}
Ok(())
}
}<|fim▁end|>
|
return Err(StoreError::NoCapacity);
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Stackdriver Logging API"""
import traceback
import google.cloud.logging.client
import six
class HTTPContext(object):
"""HTTPContext defines an object that captures the parameter for the
httpRequest part of Error Reporting API
:type method: str
:param method: The type of HTTP request, such as GET, POST, etc.
:type url: str
:param url: The URL of the request
:type user_agent: str
:param user_agent: The user agent information that is provided with the
request.
:type referrer: str
:param referrer: The referrer information that is provided with the
request.
:type response_status_code: int
:param response_status_code: The HTTP response status code for the request.
:type remote_ip: str
:param remote_ip: The IP address from which the request originated. This
can be IPv4, IPv6, or a token which is derived from
the IP address, depending on the data that has been
provided in the error report.
"""
def __init__(self, method=None, url=None,
user_agent=None, referrer=None,
response_status_code=None, remote_ip=None):
self.method = method
self.url = url
# intentionally camel case for mapping to JSON API expects
# pylint: disable=invalid-name
self.userAgent = user_agent
self.referrer = referrer
self.responseStatusCode = response_status_code
self.remoteIp = remote_ip
class Client(object):
"""Error Reporting client. Currently Error Reporting is done by creating
a Logging client.
:type project: str
:param project: the project which the client acts on behalf of. If not
passed falls back to the default inferred from the
environment.
:type credentials: :class:`oauth2client.client.OAuth2Credentials` or
:class:`NoneType`
:param credentials: The OAuth2 Credentials to use for the connection
owned by this client. If not passed (and if no ``http``
object is passed), falls back to the default inferred
from the environment.
:type http: :class:`httplib2.Http` or class that defines ``request()``.
:param http: An optional HTTP object to make requests. If not passed, an
``http`` object is created that is bound to the
``credentials`` for the current object.
:type service: str
:param service: An identifier of the service, such as the name of the
executable, job, or Google App Engine service name. This
field is expected to have a low number of values that are
relatively stable over time, as opposed to version,
which can be changed whenever new code is deployed.
:type version: str
:param version: Represents the source code version that the developer
provided, which could represent a version label or a Git
SHA-1 hash, for example. If the developer did not provide
a version, the value is set to default.
:raises: :class:`ValueError` if the project is neither passed in nor
set in the environment.
"""
def __init__(self, project=None,
credentials=None,
http=None,
service=None,
version=None):
self.logging_client = google.cloud.logging.client.Client(
project, credentials, http)
self.service = service if service else self.DEFAULT_SERVICE
self.version = version
DEFAULT_SERVICE = 'python'
def _send_error_report(self, message,
report_location=None, http_context=None, user=None):
"""Makes the call to the Error Reporting API via the log stream.
This is the lower-level interface to build the payload, generally
users will use either report() or report_exception() to automatically
gather the parameters for this method.
Currently this method sends the Error Report by formatting a structured
log message according to
https://cloud.google.com/error-reporting/docs/formatting-error-messages
:type message: str
:param message: The stack trace that was reported or logged by the
service.
:type report_location: dict
:param report_location: The location in the source code where the
decision was made to report the error, usually the place
where it was logged. For a logged exception this would be the
source line where the exception is logged, usually close to
the place where it was caught.
This should be a Python dict that contains the keys 'filePath',
'lineNumber', and 'functionName'
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This can
be a user ID, an email address, or an arbitrary token that
uniquely identifies the user. When sending an error
report, leave this field empty if the user was not
logged in. In this case the Error Reporting system will
use other data, such as remote IP address,
to distinguish affected users.
"""
payload = {
'serviceContext': {
'service': self.service,
},
'message': '{0}'.format(message)
}
if self.version:
payload['serviceContext']['version'] = self.version
if report_location or http_context or user:
payload['context'] = {}
if report_location:
payload['context']['reportLocation'] = report_location
if http_context:
http_context_dict = http_context.__dict__
# strip out None values
payload['context']['httpContext'] = {
key: value for key, value in six.iteritems(http_context_dict)
if value is not None
}
if user:
payload['context']['user'] = user
logger = self.logging_client.logger('errors')
logger.log_struct(payload)
def report(self, message, http_context=None, user=None):
""" Reports a message to Stackdriver Error Reporting
https://cloud.google.com/error-reporting/docs/formatting-error-messages
:type message: str
:param message: A user-supplied message to report
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This
can be a user ID, an email address, or an arbitrary
token that uniquely identifies the user. When sending
an error report, leave this field empty if the user
was not logged in. In this case the Error Reporting
system will use other data, such as remote IP address,
to distinguish affected users.
Example:
.. code-block:: python
>>> client.report("Something went wrong!")
"""
stack = traceback.extract_stack()
last_call = stack[-2]
file_path = last_call[0]
line_number = last_call[1]
function_name = last_call[2]
report_location = {
'filePath': file_path,
'lineNumber': line_number,
'functionName': function_name
}
self._send_error_report(message,
http_context=http_context,
user=user,
report_location=report_location)
def report_exception(self, http_context=None, user=None):
""" Reports the details of the latest exceptions to Stackdriver Error
Reporting.
:type http_context: :class`google.cloud.error_reporting.HTTPContext`
:param http_context: The HTTP request which was processed when the
error was triggered.
:type user: str
:param user: The user who caused or was affected by the crash. This
can be a user ID, an email address, or an arbitrary
token that uniquely identifies the user. When sending an
error report, leave this field empty if the user was
not logged in. In this case the Error Reporting system
will use other data, such as remote IP address,
to distinguish affected users.
Example::
>>> try:
>>> raise NameError
>>> except Exception:
>>> client.report_exception()
"""<|fim▁hole|><|fim▁end|>
|
self._send_error_report(traceback.format_exc(),
http_context=http_context,
user=user)
|
<|file_name|>CylinderMCNPX.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
MCNPX Model for Cylindrical RPM8
"""
import sys
sys.path.append('../MCNPTools/')
sys.path.append('../')
from MCNPMaterial import Materials
import subprocess
import math
import mctal
import numpy as np
import itertools
import os
class CylinderRPM(object):
# Material Dictionaries
cellForStr = '{:5d} {:d} -{:4.3f} {:d} -{:d} u={:d}\n'
surfForStr = '{:5d} cz {:5.3f}\n'
tranForStr = '*tr{:d} {:4.3f} {:4.3f} 0.000\n'
geoParam={'RPM8Size':12.7,'DetectorThickness':0.01,'DetectorSpacing':0.8,
'CylinderLightGuideRadius':0.5,'CylinderRadius':2.5}
def __init__(self,inp='INP.mcnp'):
""" Wrapped Cylinder MCNPX Model of RPM8
Keywords:
inp -- desired name of the input deck
"""
# Material dictionary for the moderator, light guide, and detector
self.material = {'Moderator':None,'Detector':None,'LightGuide':None}
self.material['Detector'] = {'name':'Detector','mt': 3, 'rho': 1.1,'matString':None} # detector
self.material['LightGuide'] = {'name': 'PMMA','mt':10, 'rho':0.93} # PMMA
self.material['Moderator'] = {'name':'HDPE','mt':456, 'rho': 0.93} # HPDE
# Cell and Surface Inital Numbering
self.CellStartNum = 600
self.SurfaceStartNum = 600
self.ZeroSurfaceNum = 500
self.UniverseNum = 200
self.surfGeo = None
self.inp = inp
self.name = 'OUT_'+self.inp.strip('.mcnp')+'.'
self.setMaterial(0.1,'PS')
def __str__(self):
s = '\tMCNPX Model of Wrapped Cylinder\n'
s += '\t Cell Number Starts: {0:d}\n'.format(self.CellStartNum)
s += '\t Surface Number Starts: {0:d}\n'.format(self.SurfaceStartNum)
return s
def getInteractionRate(self):
""" Returns the interaction rate """
m = mctal.MCTAL(self.name+'.m')
t = m.tallies[4]
# Returing the total
return t.data[-1],t.errors[-1]
def setMaterial(self,massFraction,polymer):
"""
Sets the detector material
"""
M = Materials()
num = self.material['Detector']['mt']
if polymer == 'PS':
self.material['Detector']['matString'] = M.GetPSLiF(massFraction,num)
elif polymer == 'PEN':
self.material['Detector']['matString'] = M.GetPENLiF(massFraction,num)
else:
raise ValueError('Polymer {} is not in the material database'.format(polymer))
def createSurfaceGeo(self):
"""
Creates a dictionary of surface positions and cylinders
"""
self.surfGeo = dict()
r = self.geoParam['CylinderLightGuideRadius']
self.surfGeo[r] = 'LightGuide'
#self.material = {'Moderator':None,'Detector':None,'LightGuide':None}
while(r + self.geoParam['DetectorThickness'] < self.geoParam['CylinderRadius']):
r += self.geoParam['DetectorThickness']
self.surfGeo[r] = 'Detector'
r += self.geoParam['DetectorSpacing']
if (r < self.geoParam['CylinderRadius']):
self.surfGeo[r] = 'LightGuide'
return self.surfGeo
def calculateDetectorArea(self):
"""
Calculates the area used in a detector
"""
area = 0.0
r = self.geoParam['CylinderLightGuideRadius']
while(r + self.geoParam['DetectorThickness'] < self.geoParam['CylinderRadius']):
area -= math.pow(r,2)
r += self.geoParam['DetectorThickness']
area += math.pow(r,2)
r += self.geoParam['DetectorSpacing']
return math.pi*area
def createDetectorCylinder(self,uNum=1):
"""
Creates a detector cylinder
Returns an ntuple of s,c,detectorCells
s - the surface string
c - the cell string
detectorCells - a list of the numbers corresponding to the detectors cells
"""
cellsCreated = 0
sNum = self.SurfaceStartNum
cNum = self.CellStartNum
detectorCells = list()
s = '{:5d} rcc 0 0 0 0 0 217.7 {}\n'.format(self.SurfaceStartNum,self.geoParam['CylinderRadius'])
c = ''
keyList = sorted(self.surfGeo.keys(), key = lambda x: float(x))
for key in keyList:
sPrev = sNum
sNum += 1
cNum += 1
s += self.surfForStr.format(sNum,key)
m = self.material[self.surfGeo[key]]
if cNum == self.CellStartNum+1:
c+= '{:5d} {:d} -{:4.3f} -{:d} u={:d}\n'.format(cNum,m['mt'],m['rho'],sNum,uNum)
else:
c += self.cellForStr.format(cNum,m['mt'],m['rho'],sPrev,sNum,uNum)
# List of cells for the detector
if self.surfGeo[key] is 'Detector':
detectorCells.append(cNum)
cellsCreated += 1
# Last cell up to universe boundary
m = self.material['Moderator']
c += '{:5d} {:d} -{:4.3f} {:d} u={:d}\n'.format(cNum+1,m['mt'],m['rho'],sNum,uNum)
cellsCreated += 1
return s,c,detectorCells,cellsCreated
def runModel(self):
"""
Runs the Model by submission to Tourqe / Maui
"""
qsub= subprocess.check_output('which qsub',shell=True).strip()
cmd = '#!/bin/bash\n'
cmd += '#PBS -N {0}\n#PBS -V\n#PBS -q gen1\n#PBS -l nodes=1:ppn=1\n'
cmd += 'cd $PBS_O_WORKDIR\nmpirun mcnpx inp={1} name={2}\n'
job = cmd.format('Job_RPMCylinder',self.inp,self.name)
with open('qsub','w') as o:
o.write(job)
subprocess.call(qsub+' qsub',shell=True)
subprocess.call('rm qsub',shell=True)
def createInputDeck(self,cylinderPositions,inp=None,name=None):
""" createInputDeck
Creates an input deck of the given geometry
"""
self.inp = inp
self.name = name
if not inp:
self.inp = 'INP_Cylinder.mcnp'
if not name:
self.name = 'OUT_Cylinder.'
oFile = self.inp
# Problem Constants
cellString = 'c ------------------------- Source ----------------------------------------\n'
cellString += '70 5 -15.1 -70 $ 252Cf source \n'
cellString += '71 406 -11.34 -71 70 $ Lead around source\n'
cellString += '72 456 -0.93 -72 71 $ Poly around source\n'
surfString = 'c ########################### Surface Cards ##############################\n'
surfString += 'c ------------------- Encasing Bounds (Size of RPM8) ---------------------\n'
surfString += '500 rpp 0 12.7 -15.25 15.25 0 217.7 \n'
# Add in other cells here
numCells = 4 # 3 Source, 1 RPM8 Encasing
##################################################################
# Add in Detector Cells and Surfaces #
##################################################################
universeNum = 1
(s,c,detectorCells,cellsCreated) = self.createDetectorCylinder(universeNum)
surfString += s
cellString += 'c ------------------- Detector Cylinder Universe ------------------------\n'
cellString += c
transNum = 1
uCellNum = self.UniverseNum
transString = ''
cellString += 'c ----------------------- Detector Universe ----------------------------\n'
for pos in cylinderPositions:
transString += self.tranForStr.format(transNum,pos[0],pos[1])
cellString += '{:5d} 0 -{:d} trcl={:d} fill={:d}\n'.format(uCellNum,self.SurfaceStartNum,transNum,universeNum)
transNum +=1
uCellNum +=1
# Adding the PMMA Moderator Block
m = self.material['Moderator']
cellString += 'c ------------------------- HDPE Moderator -----------------------------\n'
cellString += '{:5d} {:d} -{:4.3f} -{:d} '.format(500,m['mt'],m['rho'],self.ZeroSurfaceNum)
cellString += ''.join('#{:d} '.format(i) for i in range(self.UniverseNum,uCellNum))
cellString += '\n'
# Getting total number of cells
numCells += cellsCreated + uCellNum-self.UniverseNum +1
##################################################################
# Write the Tallies #
##################################################################
univCells = range(self.UniverseNum,uCellNum)
tallyString = 'c ------------------------- Tallies Yo! -----------------------------------\n'
tallies = {'F54:n':{'cells':detectorCells,'comments':'FC54 6Li Reaction Rates\n',
'options':' T\nSD54 1 {0:d}R\nFM54 -1 3 105'}}
for t in tallies:
# Getting a list of cells
tallyString += tallies[t]['comments']
tallyString += str(t)+' '
j = 0
for u in univCells:
cell = list('('+str(c)+'<'+str(u)+') ' for c in tallies[t]['cells'])
cell = [cell[i:i+6] for i in range(0,len(cell),6)]
if j > 0:
tallyString += ' '+''.join(''.join(i)+'\n' for i in cell)
else:
tallyString += ' '.join(''.join(i)+'\n' for i in cell)
j +=1
tallyString = tallyString.rstrip()
tallyString += tallies[t]['options'].format(len(univCells)*len(tallies[t]['cells']))
tallyString+='\n'
# Finish up the problem data
cellString += 'c ---------------------- Detector Encasing ------------------------------\n'
cellString += '700 488 -7.92 701 -700 $ SS-316 Encasing \n'
cellString += 'c -------------------------- Outside World -------------------------------\n'
cellString += '1000 204 -0.001225 -1000 700 #70 #71 #72 $ Atmosphere \n'
cellString += '1001 0 1000 \n'<|fim▁hole|> surfString += '700 rpp -0.3175 13.018 -15.5675 15.5675 -0.3175 218.018 \n'
surfString += '701 rpp 0.0 12.7 -15.25 15.25 0.0 217.7 \n'
surfString += 'c -------------- Source --------------------------------------------------\n'
surfString += '70 s -200 0 108.85 2.510E-04 $ Source \n'
surfString += '71 s -200 0 108.85 5.0025E-01 $ 0.5 cm lead surrounding source \n'
surfString += '72 s -200 0 108.85 3.00025 $ 2.5 cm poly surrounding source \n'
surfString += 'c -------------- Outside World -------------------------------------------\n'
surfString += '1000 so 250 \n'
matString = 'c -------------------------- Material Cards -----------------------------\n'
matString += self.material['Detector']['matString']
matString += self.getMaterialString()
with open(oFile,'w') as o:
o.write('MCNPX Simulation of RPM8 Cylinder\n')
o.write(cellString)
o.write('\n')
o.write(surfString)
o.write('\n')
o.write(self.getRunString().format(numCells))
o.write(self.getSrcString())
o.write(tallyString)
o.write(matString)
o.write(transString)
o.write('\n')
def getRunString(self):
runString ='c ------------------------------ Run Info ---------------------------------\n'
runString +='nps 1E6 \n'
runString +='IMP:N 1 {0:d}R 0 $ Particle Importances within cells \n'
runString +='c -------------- Output --------------------------------------------------\n'
runString +='PRDMP j j 1 $ Write a MCTAL File \n'
runString +='PRINT 40 \n'
runString +='c ------------------------------ Physics ---------------------------------\n'
runString +='MODE N \n'
runString +='PHYS:N 100 4j -1 2 \n'
runString +='CUT:N 2j 0 0 \n'
return runString
def getSrcString(self):
"""
Returns the MCNPX formated source string
"""
srcString = 'c -------------------------- Source Defination ----------------------------\n'
srcString += 'c 1 nanogram Cf-252 source = 1E-9 grams = 6.623E-11 cc \n'
srcString += 'sdef pos=-200 0 108.85 cel=70 par=SF rad=d1 \n'
srcString += 'si1 0 2.510E-04 \n'
srcString += 'sp1 -21 1 \n'
return srcString
def getMaterialString(self):
"""
Returns the MCNXP material string
"""
matString = 'm10 1001.70c -0.080538 $Lucite (PMMA / Plexiglass) rho = 1.19 g/cc\n'
matString += ' 6012.70c -0.599848 8016.70c -0.319614 \n'
matString += 'm204 7014.70c -0.755636 $air (US S. Atm at sea level) rho = 0.001225 \n'
matString += ' 8016.70c -0.231475 18036.70c -3.9e-005 18038.70c -8e-006\n'
matString += ' 18040.70c -0.012842 \n'
matString += 'm5 98252.66c 1 $ Cf-252, rho =15.1 g/cc wiki \n'
matString += 'm406 82204.70c -0.013781 $Lead, \n'
matString += ' 82206.70c -0.239557 82207.70c -0.220743 82208.70c -0.525919\n'
matString += 'm456 1001.70c -0.143716 $Polyethylene - rho = 0.93 g/cc \n'
matString += ' 6000.70c -0.856284 \n'
matString += 'm488 14028.70c -0.009187 $Steel, Stainless 316 rho = 7.92 \n'
matString += ' 14029.70c -0.000482 14030.70c -0.000331 24050.70c -0.007095\n'
matString += ' 24052.70c -0.142291 24053.70c -0.016443 24054.70c -0.004171\n'
matString += ' 25055.70c -0.02 26054.70c -0.037326 26056.70c -0.601748\n'
matString += ' 26057.70c -0.014024 26058.70c -0.001903 28058.70c -0.080873\n'
matString += ' 28060.70c -0.031984 28061.70c -0.001408 28062.70c -0.004546\n'
matString += ' 28064.70c -0.001189 42092.70c -0.003554 42094.70c -0.002264\n'
matString += ' 42095.70c -0.003937 42096.70c -0.004169 42097.70c -0.002412\n'
matString += ' 42098.70c -0.006157 42100.70c -0.002507 \n'
matString += 'mt3 poly.01t \n'
matString += 'mt456 poly.01t \n'
matString += 'mt10 poly.01t \n'
return matString
def run(loading,polymers):
"""
Runs a matrix of loading and polymers
"""
cylinderPositions = ((4.23,10.16),(4.23,-10.16))
cylinderPositions = ((4.23,7.625),(4.23,0),(4.23,-7.625))
cylinderPositions = ((4.23,9.15),(4.23,3.05),(4.23,-3.05),(4.23,-9.15))
cylinderPositions = ((4.23,10.16),(4.23,5.08),(4.23,0.0),(4.23,-5.08),(4.23,-10.16))
for l in loading:
for p in polymers:
RunCylinder(l,p,cylinderPositions)
def RunCylinder(l,p,cylinderPositions):
"""
Runs an mcnpx model of the cylinder of loading l, polymer p, with
cylinder positions cylinderPositions.
Keywords:
l - loading of the films
p - polymer
cylinderPositions - the cylinder positons
"""
# Creating input and output deck names
posString = ''
for pos in cylinderPositions:
posString += '{:2.1f}-'.format(pos[0])
posString = posString.rstrip('-')
inp='Cyl_{}LiF_{}_{}.mcnp'.format(int(l*100),p,posString)
name='OUTCyl_{}LiF_{}_{}.'.format(int(l*100),p,posString)
print inp
# Creating and running the model
m = CylinderRPM()
m.createSurfaceGeo()
m.setMaterial(l,p)
m.createDetectorCylinder()
m.createInputDeck(cylinderPositions,inp,name)
m.runModel()
def CreatePositions(yPos,numXPertubations):
"""
Creates and returns an array of positions, using a set array of y
positions, with equally spaced number of numXPertubations.
Keywords:
yPos - the number of y positions (or spacing of the cylinders). The
number of elements in this array corresponds to the number of
cylinders that are simulated.
numXPertubations - the number of pertubations in x. The arrays
positions returned are spaced linerly in the x from 2.54 to
10.16 cm
"""
pos = list()
xVals = np.linspace(2.54,10,numXPertubations)
xPos = [i for i in itertools.product(xVals,repeat=len(yPos))]
for x in xPos:
pos.append(zip(x,yPos))
return pos
def PositionOptimization(loading,polymers,positions):
"""
Runs a matrix of loading, polymers and positions
"""
for l in loading:
for p in polymers:
for pos in positions:
RunCylinder(l,p,pos)
def createInputPlotDecks():
positions = list()
positions.append(((4.23,10.16),(4.23,-10.16)))
positions.append(((4.23,7.625),(4.23,0),(4.23,-7.625)))
#positions.append(((4.23,9.15),(4.23,3.05),(4.23,-3.05),(4.23,-9.15)))
for pos in positions:
m = CylinderRPM()
m.createSurfaceGeo()
m.createDetectorCylinder()
inp='Cylinder_{}.mcnp'.format(len(pos))
name='OUTCylinder_{}.'.format(len(pos))
m.createInputDeck(pos,inp,name)
def computeMassLi(polymer,loading,density=1.1):
"""
Computes the mass of Li for a given polymer and loading
"""
M = Materials()
m = CylinderRPM()
area = m.calculateDetectorArea()
massLi = area*217.0*M.GetLiMassFraction(loading,polymer)*density
return massLi
def extractRunInfo(filename):
"""
Extracts the loading and polymer from the file name
"""
tokens = filename.split('_')
loading = tokens[1].strip('LiF')
polymer = tokens[2].strip('.m')
return (float(loading)/100, polymer)
###########################################################################
# #
# Summerizes / Analysis #
# #
###########################################################################
def GetInteractionRate(f,tallyNum=54,src=2.3E3):
"""
Returns the interaction rate of the mctal file
"""
m = mctal.MCTAL(f)
t = m.tallies[tallyNum]
return (t.data[-1]*src,t.errors[-1]*t.data[-1]*src)
import glob
def summerize():
files = glob.glob('OUTCylinder*.m')
s = 'Polymer, loading, mass Li, count rate, error, count rate per mass\n'
for f in files:
runParam = extractRunInfo(f)
massLi = computeMassLi(runParam[1],runParam[0])
countRate = GetInteractionRate(f)
s += '{}, {:5.2f} , {:5.3f} , {:5.3f} , {:4.2f} , {:5.3f}\n'.format(runParam[1].ljust(7),runParam[0],massLi,countRate[0],countRate[1],countRate[0]/massLi)
print s
def OptimizationSummary(path):
"""
Summerizes the Optimization Output
"""
# Getting the files
if not os.path.isdir(path):
raise IOError('Path {} is not found'.format(path))
files = glob.glob(path+'/*.m')
if not files:
print 'No files matched the pattern'
return
# Parsing the files
data = dict()
for f in files:
name = os.path.splitext(os.path.split(f)[1])[0]
data[name] = GetInteractionRate(f)
# Max value
sortedKeys = sorted(data, key=data.get,reverse=True)
#sortedKeys = sorted(data.items(), key=lambda x : float(x[1][0]),reverse=True)
for key in sortedKeys[0:9]:
print '{} -> {:5.2f} +/- {:5.2f}'.format(key,data[key][0],data[key][1])
for key in sortedKeys[-6:-1]:
print '{} -> {:5.2f} +/- {:5.2f}'.format(key,data[key][0],data[key][1])
def cleanup(path):
files = glob.glob(path+'/OUTCyl_*.m')
for f in files:
head,tail = os.path.split(f)
numCylinders = tail.count('-')+1
if numCylinders == 3:
newdir = 'ThreeCylPosOpt'
elif numCylinders == 4:
newdir = 'FourCylPosOpt'
elif numCylinders == 5:
newdir = 'FiveCylPosOpt'
os.rename(f,os.path.join(newdir,tail))
###########################################################################
# #
# MAIN #
# #
###########################################################################
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-r','--run',action="store_true",
default=False,help='Runs the cylinders for multiple polymers and precent loadings')
parser.add_argument('-p','--plot',action="store_true",
default=False,help='Creates input decks for plotting')
parser.add_argument('-c','--clean',action="store_true",
default=False,help='Cleans up the files')
parser.add_argument('-a','--analysis',action="store_true",default=False,help="Analyze the results")
parser.add_argument('path', nargs='?', default='CylPosOpt',help='Specifiy the output directory to summerize')
parser.add_argument('-o','--optimize',action='store',type=int,default=-1,help='Run a number of optimizations on the positions. If 0 is entered a summary is preformed on the directory provided with path')
parser.add_argument('loading',metavar='loading',type=float,nargs='*',action="store",default=(0.1,0.2,0.3),help='Precent Loading of LiF')
args = parser.parse_args()
if args.run:
run(args.loading,('PS','PEN'))
if args.plot:
createInputPlotDecks()
if args.optimize > 0:
yPos = (7.625,0,-7.625)
yPos = (9.15,3.05,-3.05,-9.15)
#yPos = (10.16,5.08,0.0,-5.08,-10.16)
pos = CreatePositions(yPos,args.optimize)
loading = (0.3,)
polymers = ('PS',)
PositionOptimization(loading,polymers,pos)
if args.optimize == 0:
OptimizationSummary(args.path)
if args.analysis:
summerize()
if args.clean:
cleanup(os.getcwd())<|fim▁end|>
|
surfString += 'c ------------------------ Encasing Material -----------------------------\n'
|
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from rest_framework import serializers
from . import models
class Invoice(serializers.ModelSerializer):
class Meta:
model = models.Invoice<|fim▁hole|><|fim▁end|>
|
fields = (
'id', 'name', 'additional_infos', 'owner',
'creation_date', 'update_date',
)
|
<|file_name|>GlobalSystems.cpp<|end_file_name|><|fim▁begin|>#include "GlobalSystems.h"
namespace globalSystem {
WindowManagerGL window;
TimeData time;
MouseData mouse;
KeyPressData keys;
TextureManager textures;
ModelManager models;
DynamicFloatMap runtimeData;
RandomNumberGenerator rng;
Font gameFont;
Font gameFontLarge;
Font gameFontHuge;
<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>switch_subdesign.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
import sys
import popupcad
import qt.QtCore as qc
import qt.QtGui as qg
if __name__=='__main__':
app = qg.QApplication(sys.argv[0])
filename_from = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R08.cad'
filename_to = 'C:/Users/danaukes/Dropbox/zhis sentinal 11 files/modified/sentinal 11 manufacturing_R09.cad'
d = popupcad.filetypes.design.Design.load_yaml(filename_from)
widget = qg.QDialog()
layout = qg.QVBoxLayout()
layout1 = qg.QHBoxLayout()
layout2 = qg.QHBoxLayout()
list1 = qg.QListWidget()
list2 = qg.QListWidget()
button_ok = qg.QPushButton('Ok')
button_cancel = qg.QPushButton('Cancel')<|fim▁hole|>
subdesign_list = list(d.subdesigns.values())
for item in subdesign_list:
list1.addItem(str(item))
list2.addItem(str(item))
layout1.addWidget(list1)
layout1.addWidget(list2)
layout2.addWidget(button_ok)
layout2.addWidget(button_cancel)
layout.addLayout(layout1)
layout.addLayout(layout2)
widget.setLayout(layout)
button_ok.pressed.connect(widget.accept)
button_cancel.pressed.connect(widget.reject)
if widget.exec_():
if len(list1.selectedIndexes())==1 and len(list2.selectedIndexes())==1:
ii_from = list1.selectedIndexes()[0].row()
ii_to = list2.selectedIndexes()[0].row()
print(ii_from,ii_to)
d.replace_subdesign_refs(subdesign_list[ii_from].id,subdesign_list[ii_to].id)
d.subdesigns.pop(subdesign_list[ii_from].id)
d.save_yaml(filename_to)
sys.exit(app.exec_())<|fim▁end|>
| |
<|file_name|>Tracer.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012 - 2020 Splice Machine, Inc.
*
* This file is part of Splice Machine.
* Splice Machine is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either
* version 3, or (at your option) any later version.
* Splice Machine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License along with Splice Machine.
* If not, see <http://www.gnu.org/licenses/>.
*/
package com.splicemachine.si.impl;
import splice.com.google.common.base.Function;
import com.splicemachine.si.api.txn.TransactionStatus;
/**
* Provides hooks for tests to provide callbacks. Mainly used to provide thread coordination in tests. It allows tests
* to "trace" the internals of the SI execution.
*/
public class Tracer {
private static transient Function<byte[],byte[]> fRowRollForward = null;
private static transient Function<Long, Object> fTransactionRollForward = null;
private static transient Function<Object[], Object> fStatus = null;
private static transient Runnable fCompact = null;
private static transient Function<Long, Object> fCommitting = null;
private static transient Function<Long, Object> fWaiting = null;
private static transient Function<Object[], Object> fRegion = null;
private static transient Function<Object, String> bestAccess = null;
public static Integer rollForwardDelayOverride = null;
public static void registerRowRollForward(Function<byte[],byte[]> f) {
Tracer.fRowRollForward = f;
}
public static boolean isTracingRowRollForward() {
return Tracer.fRowRollForward != null;
}
public static void registerTransactionRollForward(Function<Long, Object> f) {
Tracer.fTransactionRollForward = f;
}
public static boolean isTracingTransactionRollForward() {
return Tracer.fTransactionRollForward != null;
}
public static void registerStatus(Function<Object[], Object> f) {
Tracer.fStatus = f;
}
public static void registerCompact(Runnable f) {
Tracer.fCompact = f;
}
public static void registerCommitting(Function<Long, Object> f) {
Tracer.fCommitting = f;
}
public static void registerBestAccess(Function<Object, String> f) {
Tracer.bestAccess = f;
}
public static void registerWaiting(Function<Long, Object> f) {
Tracer.fWaiting = f;
}
public static void registerRegion(Function<Object[], Object> f) {
Tracer.fRegion = f;
}
public static void traceRowRollForward(byte[] key) {
if (fRowRollForward != null) {
fRowRollForward.apply(key);
}
}
public static void traceTransactionRollForward(long transactionId) {
if (fTransactionRollForward != null) {
fTransactionRollForward.apply(transactionId);
}
}
public static void traceStatus(long transactionId, TransactionStatus newStatus, boolean beforeChange) {
if (fStatus != null) {
fStatus.apply(new Object[] {transactionId, newStatus, beforeChange});
}
}
public static void compact() {
if (fCompact != null) {
fCompact.run();
}
}
public static void traceCommitting(long transactionId) {
if (fCommitting != null) {
fCommitting.apply(transactionId);
}<|fim▁hole|> public static void traceWaiting(long transactionId) {
if (fWaiting != null) {
fWaiting.apply(transactionId);
}
}
public static void traceRegion(String tableName, Object region) {
if (fRegion != null) {
fRegion.apply(new Object[] {tableName, region});
}
}
public static void traceBestAccess(Object objectParam) {
if (bestAccess != null) {
bestAccess.apply(objectParam);
}
}
}<|fim▁end|>
|
}
|
<|file_name|>BenchmarkTest05767.java<|end_file_name|><|fim▁begin|>/**
* OWASP Benchmark Project v1.1
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://www.owasp.org/index.php/Benchmark">https://www.owasp.org/index.php/Benchmark</a>.
*
* The Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details
*
* @author Nick Sanidas <a href="https://www.aspectsecurity.com">Aspect Security</a>
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/BenchmarkTest05767")
public class BenchmarkTest05767 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {<|fim▁hole|> @Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String[] values = request.getParameterValues("foo");
String param;
if (values.length != 0)
param = request.getParameterValues("foo")[0];
else param = null;
String bar;
// Simple if statement that assigns constant to bar on true condition
int i = 86;
if ( (7*42) - i > 200 )
bar = "This_should_always_happen";
else bar = param;
try {
java.util.Random numGen = java.security.SecureRandom.getInstance("SHA1PRNG");
boolean randNumber = getNextNumber(numGen);
} catch (java.security.NoSuchAlgorithmException e) {
System.out.println("Problem executing SecureRandom.nextBoolean() - TestCase");
throw new ServletException(e);
}
response.getWriter().println("Weak Randomness Test java.security.SecureRandom.nextBoolean() executed");
}
boolean getNextNumber(java.util.Random generator) {
return generator.nextBoolean();
}
}<|fim▁end|>
|
doPost(request, response);
}
|
<|file_name|>guestimateIBUTests.rs<|end_file_name|><|fim▁begin|>use functions::commonFunctions::imperialOrMetric;
use functions::guestimateIBU::{guestimateIBUData, totalIBUMaths, totalIBUFormatting};
#[test]
fn totalBUMathsImperialGBTest() {
let allInputs: guestimateIBUData = guestimateIBUData {
preBoilBrix: 20.0,
wortVolume: 20.0,
firstHopAlpha: 7.0,
firstHopAmount: 3.0,
firstHopBoilTime: 60.0,
secondHopAlpha: 0.0,
secondHopAmount: 0.0,
secondHopBoilTime: 0.0,
thirdHopAlpha: 0.0,
thirdHopAmount: 0.0,
thirdHopBoilTime: 0.0,
fourthHopAlpha: 0.0,
fourthHopAmount: 0.0,
fourthHopBoilTime: 0.0,
fifthHopAlpha: 0.0,
fifthHopAmount: 0.0,
fifthHopBoilTime: 0.0,
sixthHopAlpha: 0.0,
sixthHopAmount: 0.0,
sixthHopBoilTime: 0.0,
seventhHopAlpha: 0.0,
seventhHopAmount: 0.0,
seventhHopBoilTime: 0.0,
totalIBU1: 0.0,
totalIBU2: 0.0,
totalIBU3: 0.0,
totalIBU4: 0.0,
totalIBU5: 0.0,
totalIBU6: 0.0,
totalIBU7: 0.0,
imperialOrMetric: imperialOrMetric::imperialGB,
};
let output: f64 = totalIBUMaths(allInputs);
assert_eq!(output, 11.230131642840979);
}
#[test]<|fim▁hole|> wortVolume: 20.0,
firstHopAlpha: 7.0,
firstHopAmount: 3.0,
firstHopBoilTime: 60.0,
secondHopAlpha: 0.0,
secondHopAmount: 0.0,
secondHopBoilTime: 0.0,
thirdHopAlpha: 0.0,
thirdHopAmount: 0.0,
thirdHopBoilTime: 0.0,
fourthHopAlpha: 0.0,
fourthHopAmount: 0.0,
fourthHopBoilTime: 0.0,
fifthHopAlpha: 0.0,
fifthHopAmount: 0.0,
fifthHopBoilTime: 0.0,
sixthHopAlpha: 0.0,
sixthHopAmount: 0.0,
sixthHopBoilTime: 0.0,
seventhHopAlpha: 0.0,
seventhHopAmount: 0.0,
seventhHopBoilTime: 0.0,
totalIBU1: 0.0,
totalIBU2: 0.0,
totalIBU3: 0.0,
totalIBU4: 0.0,
totalIBU5: 0.0,
totalIBU6: 0.0,
totalIBU7: 0.0,
imperialOrMetric: imperialOrMetric::imperialUS,
};
let output: f64 = totalIBUMaths(allInputs);
assert_eq!(output, 13.486826596469873);
}
#[test]
fn totalBUMathsMetricTest() {
let allInputs: guestimateIBUData = guestimateIBUData {
preBoilBrix: 20.0,
wortVolume: 20.0,
firstHopAlpha: 7.0,
firstHopAmount: 3.0,
firstHopBoilTime: 60.0,
secondHopAlpha: 0.0,
secondHopAmount: 0.0,
secondHopBoilTime: 0.0,
thirdHopAlpha: 0.0,
thirdHopAmount: 0.0,
thirdHopBoilTime: 0.0,
fourthHopAlpha: 0.0,
fourthHopAmount: 0.0,
fourthHopBoilTime: 0.0,
fifthHopAlpha: 0.0,
fifthHopAmount: 0.0,
fifthHopBoilTime: 0.0,
sixthHopAlpha: 0.0,
sixthHopAmount: 0.0,
sixthHopBoilTime: 0.0,
seventhHopAlpha: 0.0,
seventhHopAmount: 0.0,
seventhHopBoilTime: 0.0,
totalIBU1: 0.0,
totalIBU2: 0.0,
totalIBU3: 0.0,
totalIBU4: 0.0,
totalIBU5: 0.0,
totalIBU6: 0.0,
totalIBU7: 0.0,
imperialOrMetric: imperialOrMetric::metric,
};
let output: f64 = totalIBUMaths(allInputs);
assert_eq!(output, 1.8008483685642402);
}
#[test]
fn totalIBUFormattingTest() {
let finalOutputFloatSingle: f64 = 0.9957383389985669;
let finalOutputFloatMultiple: f64 = 2.9984451486385413;
let finalOutputFloatDecimal: f64 = 11.230131642840979;
assert_eq!(totalIBUFormatting(finalOutputFloatSingle), "1 IBU");
assert_eq!(totalIBUFormatting(finalOutputFloatMultiple), "3 IBUs");
assert_eq!(totalIBUFormatting(finalOutputFloatDecimal), "11.23 IBUs");
}<|fim▁end|>
|
fn totalBUMathsImperialUSTest() {
let allInputs: guestimateIBUData = guestimateIBUData {
preBoilBrix: 20.0,
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from cl.api import views
from cl.audio import api_views as audio_views
from cl.people_db import api_views as judge_views
from cl.search import api_views as search_views
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
router = DefaultRouter()
# Search & Audio
router.register(r'dockets', search_views.DocketViewSet)
router.register(r'courts', search_views.CourtViewSet)
router.register(r'audio', audio_views.AudioViewSet)
router.register(r'clusters', search_views.OpinionClusterViewSet)
router.register(r'opinions', search_views.OpinionViewSet)
router.register(r'opinions-cited', search_views.OpinionsCitedViewSet)
router.register(r'search', search_views.SearchViewSet, base_name='search')
# Judges
router.register(r'people', judge_views.PersonViewSet)
router.register(r'positions', judge_views.PositionViewSet)
router.register(r'retention-events', judge_views.RetentionEventViewSet)
router.register(r'educations', judge_views.EducationViewSet)
router.register(r'schools', judge_views.SchoolViewSet)
router.register(r'political-affiliations',
judge_views.PoliticalAffiliationViewSet)
router.register(r'sources', judge_views.SourceViewSet)
router.register(r'aba-ratings', judge_views.ABARatingViewSet)
urlpatterns = [
url(r'^api-auth/',
include('rest_framework.urls', namespace='rest_framework')),
url(r'^api/rest/(?P<version>[v3]+)/', include(router.urls)),
# Documentation
url(r'^api/$',
views.api_index,
name='api_index'),
url(r'^api/jurisdictions/$',
views.court_index,
name='court_index'),
url(r'^api/rest-info/(?P<version>v[123])?/?$',
views.rest_docs,
name='rest_docs'),
url(r'^api/bulk-info/$',
views.bulk_data_index,
name='bulk_data_index'),
url(r'^api/rest/v(?P<version>[123])/coverage/(?P<court>.+)/$',
views.coverage_data,
name='coverage_data'),
# Pagerank file
url(r'^api/bulk/external_pagerank/$',
views.serve_pagerank_file,
name='pagerank_file'),
# Deprecation Dates:
# v1: 2016-04-01
# v2: 2016-04-01
url(r'^api/rest/v(?P<v>[12])/.*',
views.deprecated_api,<|fim▁hole|>]<|fim▁end|>
|
name='deprecated_api'),
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var ErrorHandler = require('./error').errorHandler;
module.exports = exports = function(app) {
// The main page of the blog
app.get('/', function(req, res, next){
return res.render('index', {
title: 'Editor homepage'
});
});
/* The main page of the blog, filtered by tag
app.get('/tag/:tag', contentHandler.displayMainPageByTag);
app.get("/post/:permalink", contentHandler.displayPostByPermalink);<|fim▁hole|> app.get('/newpost', contentHandler.displayNewPostPage);
app.post('/newpost', contentHandler.handleNewPost);
app.get('/login', sessionHandler.displayLoginPage);
app.post('/login', sessionHandler.handleLoginRequest);
app.get('/logout', sessionHandler.displayLogoutPage);
app.get("/welcome", sessionHandler.displayWelcomePage);
app.get('/signup', sessionHandler.displaySignupPage);
app.post('/signup', sessionHandler.handleSignup);
app.use(ErrorHandler);
*/
}<|fim▁end|>
|
app.post('/newcomment', contentHandler.handleNewComment);
app.get("/post_not_found", contentHandler.displayPostNotFound);
|
<|file_name|>background.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
var numPings = 0;
chrome.extension.onRequest.addListener(function(data) {
if (data != "ping")
chrome.test.fail("Unexpected request: " + JSON.stringify(data));
if (++numPings == 2)
chrome.test.notifyPass();
});
<|fim▁hole|> // Add a window.
var w = window.open(test_file_url);
// Add an iframe.
var iframe = document.createElement("iframe");
iframe.src = test_file_url;
document.getElementById("iframeContainer").appendChild(iframe);
});<|fim▁end|>
|
chrome.test.getConfig(function(config) {
var test_file_url = "http://localhost:PORT/files/extensions/test_file.html"
.replace(/PORT/, config.testServer.port);
|
<|file_name|>agents.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Image Grounded Conversations (IGC) Task.
See https://www.aclweb.org/anthology/I17-1047/ for more details. One must download
the data from https://www.microsoft.com/en-us/download/details.aspx?id=55324
prior to using this teacher.
The images are then downloaded from the urls specified in the data. Unfortunately,
not all links are live, and thus some examples do not have valid images.
As there is no training set, we manually split 90% of the validation set
into train.
"""
from typing import Optional
from parlai.core.params import ParlaiParser
import csv
import os
from abc import ABC, abstractmethod
from PIL import Image
from typing import List, Dict, Any
from parlai.core.build_data import download_multiprocess
from parlai.core.params import Opt
from parlai.core.teachers import AbstractImageTeacher
from parlai.utils.io import PathManager
import parlai.utils.typing as PT
class IGCTeacher(AbstractImageTeacher):
"""
Teacher for Image Grounded Conversations (IGC) Task.
See https://arxiv.org/abs/1701.08251 for more details
"""
def __init__(self, opt: Opt, shared: PT.TShared = None):
self.blank_image_id = '0000'
super().__init__(opt, shared)
if shared is not None:
self.valid_image_ids = shared['valid_image_ids']
if self.image_features_dict is not None:
self.image_features_dict[self.blank_image_id] = self.blank_image_features
self.multi_ref = opt.get('igc_multi_ref', False)
@classmethod
def add_cmdline_args(
cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
) -> ParlaiParser:
"""
Include arg.
for multi-reference labels.
"""
super().add_cmdline_args(parser, partial_opt=partial_opt)
agent = parser.add_argument_group('IGC Arguments')
agent.add_argument(
'--igc-multi-ref',
type='bool',
default=False,
help='specify to evaluate on multi-reference labels',
)
return parser
def image_id_to_image_path(self, image_id: str) -> str:
"""
Return image path given image id.
As this function is used in building the image features, and some of the
:param image_id:
image_id key, for IGC this is a str
:return:
the image path associated with the given image key
"""
if image_id not in self.valid_image_ids:
image_id = self.blank_image_id
return os.path.join(self.get_image_path(self.opt), image_id)
def get_data_path(self, opt: Opt) -> str:
"""
Determines path to the data file.
:param opt:
Opt with all options
:return:
the path to the dataset
"""
data_path = os.path.join(opt['datapath'], 'igc')
return data_path
def get_image_features_path(self, task, image_model_name, dt):
"""
Override so that subclasses can see same image features.
"""
# In default implementation, self.data_path already has task name added
image_features_path = os.path.join(self.data_path, 'image_features')
if not os.path.isdir(image_features_path):
PathManager.mkdirs(image_features_path)
return os.path.join(
image_features_path, f'{image_model_name}_{dt}_features_dict'
)
def num_episodes(self) -> int:
"""
Number of episodes.
Iterate through each episode twice, playing each side of the conversation once.
"""
return 2 * len(self.data)
def num_examples(self) -> int:
"""
Number of examples.
There are three turns of dialogue in the IGC task -
Context, Question, Response.
Thus, return 3 * number of data examples.
"""
return 3 * len(self.data)
def get(self, episode_idx: int, entry_idx: int = 0) -> dict:
"""
Override to handle corrupt images and multi-reference labels.
"""
entry_idx *= 2
if episode_idx >= len(self.data):
data = self.data[episode_idx % len(self.data)]
entry_idx += 1
else:
data = self.data[episode_idx]
image_id = data[self.image_id_key]
if data[self.image_id_key] not in self.valid_image_ids:
data[self.image_id_key] = self.blank_image_id
image_features = self.get_image_features(data)
conversation = [data['context'], data['question'], data['response']]
labels = [conversation[entry_idx]]
if self.multi_ref and entry_idx != 0:
key = 'questions' if entry_idx == 1 else 'responses'
labels = data[f'multiref_{key}'].split('***')
text = '' if entry_idx == 0 else conversation[entry_idx - 1]
episode_done = entry_idx >= len(conversation) - 2
action = {
'text': text,
'image_id': image_id,
'episode_done': episode_done,
'image': image_features,
'labels': labels,
}
return action
def load_data(self, data_path: str, opt: Opt) -> List[Dict[str, Any]]:
"""
Override to load CSV files.
"""
dt = opt['datatype'].split(':')[0]
dt_str = 'test' if dt == 'test' else 'val'
dp = os.path.join(self.get_data_path(opt), f'IGC_crowd_{dt_str}.csv')
if not os.path.exists(dp):
raise RuntimeError(
'Please download the IGC Dataset from '
'https://www.microsoft.com/en-us/download/details.aspx?id=55324. '
'Then, make sure to put the two .csv files in {}'.format(
self.get_data_path(opt)
)
)
if (
not os.path.exists(self.get_image_path(opt))
or len(os.listdir(self.get_image_path(opt))) <= 1
):
self._download_images(opt)
self.data = []
with PathManager.open(dp, newline='\n') as csv_file:
reader = csv.reader(csv_file, delimiter=',')
fields = []
for i, row in enumerate(reader):
if i == 0:
fields = row
else:
ep = dict(zip(fields, row))
ep['image_id'] = f'{ep["id"]}'
self.data.append(ep)
if dt == 'train':
# Take first 90% of valid set as train
self.data = self.data[: int(len(self.data) * 0.9)]
elif dt == 'valid':
self.data = self.data[int(len(self.data) * 0.9) :]
self.valid_image_ids = []
for d in self.data:
img_path = os.path.join(self.get_image_path(opt), d['image_id'])
if PathManager.exists(img_path):
self.valid_image_ids.append(d['image_id'])
self.valid_image_ids = set(self.valid_image_ids)
return self.data
def _download_images(self, opt: Opt):
"""
Download available IGC images.
"""
urls = []
ids = []
for dt in ['test', 'val']:
df = os.path.join(self.get_data_path(opt), f'IGC_crowd_{dt}.csv')
with PathManager.open(df, newline='\n') as csv_file:
reader = csv.reader(csv_file, delimiter=',')
fields = []
for i, row in enumerate(reader):
if i == 0:
fields = row
else:
data = dict(zip(fields, row))
urls.append(data['url'])
ids.append(data['id'])
PathManager.mkdirs(self.get_image_path(opt))
# Make one blank image
image = Image.new('RGB', (100, 100), color=0)
image.save(os.path.join(self.get_image_path(opt), self.blank_image_id), 'JPEG')
# Download the rest
download_multiprocess(urls, self.get_image_path(opt), dest_filenames=ids)
# Remove bad images
for fp in os.listdir(self.get_image_path(opt)):
img_path = os.path.join(self.get_image_path(opt), fp)
if PathManager.exists(img_path):
try:
Image.open(img_path).convert('RGB')
except OSError:
PathManager.rm(img_path)
def share(self) -> PT.TShared:
shared = super().share()
shared['valid_image_ids'] = self.valid_image_ids
return shared
class IGCOneSideTeacher(ABC, IGCTeacher):
"""
Override to only return one side of the conversation.
"""
@classmethod
def add_cmdline_args(
cls, parser: ParlaiParser, partial_opt: Optional[Opt] = None
) -> ParlaiParser:
super().add_cmdline_args(parser, partial_opt=partial_opt)
agent = parser.add_argument_group('IGCResponseOnly Arguments')
agent.add_argument(
'--igc-multi-ref',
type='bool',
default=False,
help='specify true to evaluate on multi-reference labels',
)
return parser
def num_episodes(self) -> int:
return len(self.data)
def num_examples(self) -> int:
return len(self.data)
@abstractmethod
def get_label_key(self) -> str:
"""
Return key into data dictionary for the label.
"""
pass
@abstractmethod
def get_text(self, data) -> str:
"""
Return text for an example.
"""
pass
def get(self, episode_idx: int, entry_idx: int = 0) -> Dict[str, Any]:
"""
Override to handle one-sided conversation.
"""
data = self.data[episode_idx]
image_id = data[self.image_id_key]
if data[self.image_id_key] not in self.valid_image_ids:
data[self.image_id_key] = self.blank_image_id
image_features = self.get_image_features(data)
labels = [data[self.get_label_key()]]
if self.multi_ref:
labels = data[f'multiref_{self.get_label_key()}s'].split('***')
text = self.get_text(data)
action = {<|fim▁hole|> 'episode_done': True,
'image': image_features,
'labels': labels,
}
return action
class ResponseOnlyTeacher(IGCOneSideTeacher):
"""
Responses Only.
"""
def get_label_key(self) -> str:
return 'response'
def get_text(self, data) -> str:
return '\n'.join([data['context'], data['question']])
class QuestionOnlyTeacher(IGCOneSideTeacher):
"""
Questions Only.
"""
def get_label_key(self) -> str:
return 'question'
def get_text(self, data) -> str:
return data['context']
class DefaultTeacher(IGCTeacher):
pass<|fim▁end|>
|
'text': text,
'image_id': image_id,
|
<|file_name|>OpenstackSubnetWebResource.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.openstacknetworking.web;
/**
* Handles Rest API call from Neutron ML2 plugin.
*/
import org.onosproject.rest.AbstractWebResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;<|fim▁hole|>import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.InputStream;
@Path("subnets")
public class OpenstackSubnetWebResource extends AbstractWebResource {
protected static final Logger log = LoggerFactory
.getLogger(OpenstackSubnetWebResource.class);
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createSubnet(InputStream input) {
return Response.status(Response.Status.OK).build();
}
@PUT
@Path("{subnetId}")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public Response updateSubnet(@PathParam("subnetId") String id,
final InputStream input) {
return Response.status(Response.Status.OK).build();
}
@DELETE
@Path("{subnetId}")
@Produces(MediaType.APPLICATION_JSON)
public Response deleteSubnet(@PathParam("subnetId") String id) {
return Response.status(Response.Status.OK).build();
}
}<|fim▁end|>
|
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.POST;
|
<|file_name|>dependency-plugin.js<|end_file_name|><|fim▁begin|>/**
* @plugin.js
* @App Options Dependency Manager Plugin JS
*
* @License: http://www.siteeditor.org/license
* @Contributing: http://www.siteeditor.org/contributing
*/
/*global siteEditor:true */
(function( exports, $ ){
var api = sedApp.editor;
api.pageConditions = api.pageConditions || {};
api.OptionDependency = api.Class.extend({
initialize: function( id , options ){
this.queries = {};
this.operators = [];
this.fieldType = "control";
$.extend( this, options || {} );
this.id = id;
},
changeActiveRender : function(){
if( _.isEmpty( this.id ) || !_.isString( this.id ) || !this.id ){
return ;
}
var isShow = this.isActive();
if( this.fieldType == "control" ){
var control = api.control.instance( this.id );
control.active.set( isShow );
}else{
var selector = '#sed-app-panel-' + this.id;
if( isShow ) {
$(selector).parents(".row_settings:first").removeClass("sed-hide-dependency").fadeIn("slow");
}else{
$( selector ).parents(".row_settings:first").addClass("sed-hide-dependency").fadeOut( 200 );
}
}
},
isActive : function( ){
return this.checkQueries( this.queries );
},
checkQueries : function( queries ){
var self = this ,
isShowArr = [] ,
relation = "AND";
$.each( queries , function( key , query ){
if ( key === 'relation' ) {
relation = query;
}else if( $.isArray( query ) || _.isObject( query ) ){
var isShow;
if( self.isFirstOrderClause( query ) ) {
isShow = self.checkConditionLogic( query , key ) ? 1 : 0;
isShowArr.push( isShow );
}else{
isShow = self.checkQueries( query ) ? 1 : 0;
isShowArr.push( isShow );
}
}
});
if( isShowArr.length > 0 ) {
if (relation == "AND") {
return $.inArray(0, isShowArr) == -1;
} else if (relation == "OR") {
return $.inArray(1, isShowArr) > -1;
}
}
return true;
},
isFirstOrderClause : function( query ){
return !_.isUndefined( query.key ) || ( !_.isUndefined( query.key ) && !_.isUndefined( query.value ) );
},
checkConditionLogic : function( query , key ){
if( ! _.isUndefined( query['compare'] ) && _.isString( query['compare'] ) && $.inArray( query['compare'] , this.operators ) > -1 ){
query['compare'] = query['compare'].toUpperCase();
}else{
query['compare'] = ! _.isUndefined( query['value'] ) && $.isArray( query['value'] ) ? 'IN' : '=' ;
}
if( ! _.isUndefined( query['type'] ) && _.isString( query['type'] ) ){
query['type'] = query['type'].toLowerCase();
}else{
query['type'] = 'control';
}
var compare = query['compare'] ,
isShow ,
type = query['type'] ,
id = query['key'] ,
currentValue ,
pattern;
switch ( type ){
case "control" :
if( $.inArray(id , _.keys( api.settings.controls ) ) == -1 ){
return true;
}
var thisControl = api.control.instance( id );
currentValue = thisControl.currentValue ;
break;
case "setting" :
if ( ! api.has( id ) ) {
return true;
}
currentValue = api.setting( id ).get();
break;
case "page_condition" :
if ( _.isUndefined( api.pageConditions[id] ) ) {
return true;
}
currentValue = api.pageConditions[id];
return currentValue;
break;
}
if( _.isUndefined( query['value'] ) ){
return true;
}
switch ( compare ){
case "=" :
case "==" :
isShow = ( currentValue == query['value'] );
break;
case "===" :
isShow = ( currentValue === query['value'] );
break;
case "!=" :
isShow = ( currentValue != query['value'] );
break;
case "!==" :
isShow = ( currentValue !== query['value'] );
break;
case ">" :
isShow = ( currentValue > query['value'] );
break;
case ">=" :
isShow = ( currentValue >= query['value'] );
break;
case "<" :
isShow = ( currentValue < query['value'] );
break;
case "<=" :
isShow = ( currentValue <= query['value'] );
break;
case "IN" :
if( ! $.isArray( query['value'] ) ){
return true;
}
isShow = $.inArray( currentValue , query['value'] ) > -1;
break;
case "NOT IN" :
if( ! $.isArray( query['value'] ) ){
return true;
}
isShow = $.inArray( currentValue , query['value'] ) == -1;
break;
case "BETWEEN" :
if( ! $.isArray( query['value'] ) || query['value'].length != 2 ){
return true;
}
isShow = currentValue > query['value'][0] && currentValue < query['value'][1];
break;
case "NOT BETWEEN" :
if( ! $.isArray( query['value'] ) || query['value'].length != 2 ){
return true;
}
isShow = currentValue < query['value'][0] && currentValue > query['value'][1];
break;
case "DEFINED" :
isShow = ! _.isUndefined( currentValue );
break;
case "UNDEFINED" :
isShow = _.isUndefined( currentValue );
break;
case "EMPTY" :
isShow = _.isEmpty( currentValue );
break;
case "NOT EMPTY" :
isShow = !_.isEmpty( currentValue );
break;
case "REGEXP" :
pattern = new RegExp( query['value'] );
isShow = pattern.test( currentValue );
break;
case "NOT REGEXP" :
pattern = new RegExp( query['value'] );
isShow = !pattern.test( currentValue );
break;
}
return isShow;
}
});
api.fn._executeFunctionByName = function(functionName, context , args ) {
args = $.isArray( args ) ? args : [];
var namespaces = functionName.split(".");
var func = namespaces.pop();
for(var i = 0; i < namespaces.length; i++) {
context = context[namespaces[i]];
}
if (typeof context[func] === "function") {
return context[func].apply(context, args);
}else{
return true;
}
};
api.OptionCallbackDependency = api.OptionDependency.extend({
isActive : function( ){
if( ! _.isUndefined( this.callback ) ){
var args = [];
if( !_.isUndefined( this.callback_args ) ){
args = this.callback_args;
}
return api.fn._executeFunctionByName( this.callback , window , args );
}
return true;
}
});
api.AppOptionsDependency = api.Class.extend({
initialize: function (options) {
this.operators = [];
$.extend(this, options || {});
this.dependencies = {};
this.dialogSelector = "#sed-dialog-settings";
this.updatedGroups = [];
//this.updatedResetGroups = [];
this.ready();
},
ready: function () {
var self = this;
this.initUpdateOptions();
api.previewer.bind( 'sedCurrentPageConditions' , function( conditions ){
api.pageConditions = conditions;
//self.updatedResetGroups = [];
api.Events.trigger( "afterResetPageConditions" );
});
api.Events.bind( "afterResetPageConditions" , function(){
var isOpen = $( self.dialogSelector ).dialog( "isOpen" );
if( isOpen ){
var optionsGroup = api.sedDialogSettings.optionsGroup;
self.update( optionsGroup );
//self.updatedResetGroups.push( optionsGroup );
}
});
},
initUpdateOptions: function () {
var self = this;
/*
* @param : group === sub_category in controls data (api.settings.controls)
*/
api.Events.bind( "after_group_settings_update" , function( group ){
self.update( group );
if( $.inArray( group , self.updatedGroups ) == -1 ){
self.updatedGroups.push( group );
//self.updatedResetGroups.push( group );
}
});
api.Events.bind( 'afterOpenInitDialogAppSettings' , function( optionsGroup ){
if( $.inArray( optionsGroup , self.updatedGroups ) > -1 ) { //&& $.inArray( optionsGroup , self.updatedResetGroups ) == -1
self.update( optionsGroup );
//self.updatedResetGroups.push( optionsGroup );
}
});
},
update : function( group ){
var self = this;
if( !_.isUndefined( api.settingsPanels[group] ) ) {
_.each(api.settingsPanels[group], function (data, panelId) {
if ( !_.isUndefined( api.settingsRelations[group] ) && !_.isUndefined( api.settingsRelations[group][panelId] ) ) {
self.dependencyRender( panelId , "panel" , group );
api.Events.trigger("after_apply_single_panel_relations_update", group, data, panelId);
}
});
}
if( !_.isUndefined( api.sedGroupControls[group] ) ) {
_.each(api.sedGroupControls[group], function (data) {
var control = api.control.instance(data.control_id);
if ( !_.isUndefined(control) ) {
if (!_.isUndefined(api.settingsRelations[group]) && !_.isUndefined(api.settingsRelations[group][control.id])) {
var id = control.id;
self.dependencyRender(id, "control" , group);
api.Events.trigger("after_apply_single_control_relations_update", group, control, control.currentValue);
}
}
});
}
api.Events.trigger( "after_apply_settings_relations_update" , group );
},
<|fim▁hole|> var dependencyArgs = api.settingsRelations[group][id] ,
dependency ,
type = !_.isUndefined( dependencyArgs.type ) && !_.isEmpty( dependencyArgs.type ) ? dependencyArgs.type : "query";
if( ! this.has( id ) ) {
var constructor = api.dependencyConstructor[type] || api.OptionDependency,
params = $.extend( {} , {
operators : this.operators ,
fieldType : fieldType
} , dependencyArgs );
dependency = this.add( id , new constructor( id , params) );
this.initControlRefresh( dependency , dependencyArgs );
this.initSettingRefresh( dependency , dependencyArgs );
}else{
dependency = this.get( id );
}
dependency.changeActiveRender();
},
initControlRefresh : function( dependency , dependencyArgs ){
var self = this;
if( !_.isUndefined( dependencyArgs.controls ) ) {
var controls = dependencyArgs.controls;
_.each( controls , function ( controlId ) {
self.controlRefresh( controlId , dependency );
});
}
},
//support nested relations
controlRefresh : function( controlId , dependency ){
var self = this;
api.Events.bind("afterControlValueRefresh", function (group, control, value) {
if ( controlId == control.id ) {
dependency.changeActiveRender();
if ( !_.isUndefined( api.settingsRelations[group] ) && !_.isUndefined( api.settingsRelations[group][control.id] ) ) {
var dependencyArgs = api.settingsRelations[group][control.id];
if( !_.isUndefined( dependencyArgs.controls ) && !_.isEmpty( dependencyArgs.controls ) ){
$.each( dependencyArgs.controls , function( cId ){
self.controlRefresh( cId , dependency );
});
}
}
}
api.Events.trigger("after_apply_settings_relations_refresh", group, control, value);
});
},
initSettingRefresh : function( dependency , dependencyArgs ){
var self = this;
if( !_.isUndefined( dependencyArgs.settings ) ) {
var settings = dependencyArgs.settings;
_.each( settings , function ( settingId ) {
self.settingRefresh( settingId , dependency );
});
}
},
// Bind to the customizer setting identified by `settingId` so that any value
// change re-evaluates the dependency's active state.
settingRefresh : function( settingId , dependency ){
    api( settingId , function( setting ) {
        setting.bind(function( value ) {
            dependency.changeActiveRender();
        });
    });
},
add : function( id , dependencyObject ){
if( ! this.has( id ) && _.isObject( dependencyObject ) ){
this.dependencies[id] = dependencyObject;
}
return dependencyObject;
},
// True when a dependency object has been registered under `id`.
has : function( id ){
    return !_.isUndefined( this.dependencies[id] );
},
get : function( id ){
if( this.has( id ) ){
return this.dependencies[id];
}else{
return null;
}
}
});
api.dependencyConstructor = {
query : api.OptionDependency,
callback : api.OptionCallbackDependency
};
api.Events.bind( "after_apply_single_control_relations_update" , function(group, control, currentValue){
if( control.params.active === false ) {
control.active.set(control.params.active);
}
});
$( function() {
api.settingsRelations = window._sedAppModulesSettingsRelations;
api.appOptionsDependency = new api.AppOptionsDependency({
operators : window._sedAppDependenciesOperators
});
});
})( sedApp, jQuery );<|fim▁end|>
|
dependencyRender : function( id , fieldType , group ){
fieldType = _.isUndefined( fieldType ) || _.isEmpty( fieldType ) ? "control" : fieldType;
|
<|file_name|>firewalld.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Adam Miller ([email protected])
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: firewalld
short_description: Manage arbitrary ports/services with firewalld
description:
- This module allows for addition or deletion of services and ports either tcp or udp in either running or permanent firewalld rules.
version_added: "1.4"
options:
service:
description:
- "Name of a service to add/remove to/from firewalld - service must be listed in /etc/services."
required: false
default: null
port:
description:
- "Name of a port or port range to add/remove to/from firewalld. Must be in the form PORT/PROTOCOL or PORT-PORT/PROTOCOL for port ranges."
required: false
default: null
rich_rule:
description:
- "Rich rule to add/remove to/from firewalld."
required: false
default: null
source:
description:
- 'The source/network you would like to add/remove to/from firewalld'
required: false
default: null
version_added: "2.0"
interface:
description:
- 'The interface you would like to add/remove to/from a zone in firewalld'
required: false
default: null
version_added: "2.1"
zone:
description:
- 'The firewalld zone to add/remove to/from (NOTE: default zone can be configured per system but "public" is default from upstream. Available choices can be extended based on per-system configs, listed here are "out of the box" defaults).'
required: false
default: system-default(public)
choices: [ "work", "drop", "internal", "external", "trusted", "home", "dmz", "public", "block" ]
permanent:
description:
- "Should this configuration be in the running firewalld configuration or persist across reboots."
required: false
default: null
immediate:
description:
- "Should this configuration be applied immediately, if set as permanent"
required: false
default: false
version_added: "1.9"
state:
description:
- "Should this port accept(enabled) or reject(disabled) connections."
required: true
choices: [ "enabled", "disabled" ]
timeout:
description:
- "The amount of time the rule should be in effect for when non-permanent."
required: false
default: 0
masquerade:
description:
- 'The masquerade setting you would like to enable/disable to/from zones within firewalld'
required: false
default: null
version_added: "2.1"
notes:
- Not tested on any Debian based system.
- Requires the python2 bindings of firewalld, who may not be installed by default if the distribution switched to python 3
requirements: [ 'firewalld >= 0.2.11' ]
author: "Adam Miller (@maxamillion)"
'''
EXAMPLES = '''
- firewalld: service=https permanent=true state=enabled
- firewalld: port=8081/tcp permanent=true state=disabled
- firewalld: port=161-162/udp permanent=true state=enabled
- firewalld: zone=dmz service=http permanent=true state=enabled
- firewalld: rich_rule='rule service name="ftp" audit limit value="1/m" accept' permanent=true state=enabled
- firewalld: source='192.168.1.0/24' zone=internal state=enabled
- firewalld: zone=trusted interface=eth2 permanent=true state=enabled
- firewalld: masquerade=yes state=enabled permanent=true zone=dmz
'''
import os
import re
try:
import firewall.config
FW_VERSION = firewall.config.VERSION
from firewall.client import Rich_Rule
from firewall.client import FirewallClient
fw = FirewallClient()
if not fw.connected:
HAS_FIREWALLD = False
else:
HAS_FIREWALLD = True
except ImportError:
HAS_FIREWALLD = False
#####################
# masquerade handling
#
def get_masquerade_enabled(zone):
    """Return True if masquerading is enabled in the runtime config of `zone`."""
    # queryMasquerade() returns a D-Bus boolean; the previous
    # `if ... == True: return True else: return False` branching was redundant.
    return fw.queryMasquerade(zone)
def get_masquerade_enabled_permanent(zone):
    """Return True if masquerading is enabled in the permanent config of `zone`."""
    fw_zone = fw.config().getZoneByName(zone)
    fw_settings = fw_zone.getSettings()
    # getMasquerade() returns a boolean; no explicit if/else needed.
    return fw_settings.getMasquerade()
def set_masquerade_enabled(zone):
fw.addMasquerade(zone)
def set_masquerade_disabled(zone):
fw.removeMasquerade(zone)
def set_masquerade_permanent(zone, masquerade):
    """Persist the desired masquerade flag into the permanent config of `zone`."""
    zone_obj = fw.config().getZoneByName(zone)
    settings = zone_obj.getSettings()
    settings.setMasquerade(masquerade)
    zone_obj.update(settings)
################
# port handling
#
def get_port_enabled(zone, port_proto):
    """Return True if the [port, protocol] pair is open in the runtime config of `zone`."""
    # `in` already yields a bool; the if/else returning True/False was redundant.
    return port_proto in fw.getPorts(zone)
def set_port_enabled(zone, port, protocol, timeout):
fw.addPort(zone, port, protocol, timeout)
def set_port_disabled(zone, port, protocol):
fw.removePort(zone, port, protocol)
def get_port_enabled_permanent(zone, port_proto):
    """Return True if the [port, protocol] pair is open in the permanent config of `zone`."""
    fw_zone = fw.config().getZoneByName(zone)
    fw_settings = fw_zone.getSettings()
    # getPorts() yields (port, protocol) tuples, so the [port, protocol] list
    # must be converted before the membership test.
    return tuple(port_proto) in fw_settings.getPorts()
def set_port_enabled_permanent(zone, port, protocol):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.addPort(port, protocol)
fw_zone.update(fw_settings)
def set_port_disabled_permanent(zone, port, protocol):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.removePort(port, protocol)
fw_zone.update(fw_settings)
####################
# source handling
#
def get_source(zone, source):
    """Return True if `source` (address/network) is bound to `zone` in the permanent config."""
    fw_zone = fw.config().getZoneByName(zone)
    fw_settings = fw_zone.getSettings()
    return source in fw_settings.getSources()
def add_source(zone, source):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.addSource(source)
fw_zone.update(fw_settings)
def remove_source(zone, source):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.removeSource(source)
fw_zone.update(fw_settings)
####################
# interface handling
#
def get_interface(zone, interface):
    """Return True if `interface` is bound to `zone` in the permanent config."""
    fw_zone = fw.config().getZoneByName(zone)
    fw_settings = fw_zone.getSettings()
    return interface in fw_settings.getInterfaces()
def add_interface(zone, interface):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.addInterface(interface)
fw_zone.update(fw_settings)
def remove_interface(zone, interface):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.removeInterface(interface)
fw_zone.update(fw_settings)
####################
# service handling
#
def get_service_enabled(zone, service):
    """Return True if `service` is enabled in the runtime config of `zone`."""
    return service in fw.getServices(zone)
def set_service_enabled(zone, service, timeout):
fw.addService(zone, service, timeout)
def set_service_disabled(zone, service):
fw.removeService(zone, service)
def get_service_enabled_permanent(zone, service):
    """Return True if `service` is enabled in the permanent config of `zone`."""
    fw_zone = fw.config().getZoneByName(zone)
    fw_settings = fw_zone.getSettings()
    return service in fw_settings.getServices()
def set_service_enabled_permanent(zone, service):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.addService(service)
fw_zone.update(fw_settings)
def set_service_disabled_permanent(zone, service):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.removeService(service)
fw_zone.update(fw_settings)
####################
# rich rule handling
#
def get_rich_rule_enabled(zone, rule):
    """Return True if the rich rule is present in the runtime config of `zone`."""
    # Normalize the rule string to firewalld's canonical form first; otherwise
    # equivalent rules spelled differently would fail the membership test.
    rule = str(Rich_Rule(rule_str=rule))
    return rule in fw.getRichRules(zone)
def set_rich_rule_enabled(zone, rule, timeout):
fw.addRichRule(zone, rule, timeout)
def set_rich_rule_disabled(zone, rule):
fw.removeRichRule(zone, rule)
def get_rich_rule_enabled_permanent(zone, rule):
    """Return True if the rich rule is present in the permanent config of `zone`."""
    fw_zone = fw.config().getZoneByName(zone)
    fw_settings = fw_zone.getSettings()
    # Normalize the rule string to firewalld's canonical form first; otherwise
    # equivalent rules spelled differently would fail the membership test.
    rule = str(Rich_Rule(rule_str=rule))
    return rule in fw_settings.getRichRules()
def set_rich_rule_enabled_permanent(zone, rule):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.addRichRule(rule)
fw_zone.update(fw_settings)
def set_rich_rule_disabled_permanent(zone, rule):
fw_zone = fw.config().getZoneByName(zone)
fw_settings = fw_zone.getSettings()
fw_settings.removeRichRule(rule)
fw_zone.update(fw_settings)
def main():
module = AnsibleModule(
argument_spec = dict(
service=dict(required=False,default=None),
port=dict(required=False,default=None),
rich_rule=dict(required=False,default=None),
zone=dict(required=False,default=None),
immediate=dict(type='bool',default=False),
source=dict(required=False,default=None),
permanent=dict(type='bool',required=False,default=None),
state=dict(choices=['enabled', 'disabled'], required=True),
timeout=dict(type='int',required=False,default=0),
interface=dict(required=False,default=None),
masquerade=dict(required=False,default=None),
),
supports_check_mode=True
)
if module.params['source'] == None and module.params['permanent'] == None:
module.fail_json(msg='permanent is a required parameter')
if module.params['interface'] != None and module.params['zone'] == None:
module.fail(msg='zone is a required parameter')
if not HAS_FIREWALLD:
module.fail_json(msg='firewalld and its python 2 module are required for this module')
## Pre-run version checking
if FW_VERSION < "0.2.11":
module.fail_json(msg='unsupported version of firewalld, requires >= 2.0.11')
## Global Vars
changed=False
msgs = []
service = module.params['service']
rich_rule = module.params['rich_rule']
source = module.params['source']
if module.params['port'] != None:
port, protocol = module.params['port'].split('/')
if protocol == None:
module.fail_json(msg='improper port format (missing protocol?)')
else:
port = None
if module.params['zone'] != None:
zone = module.params['zone']
else:
zone = fw.getDefaultZone()
permanent = module.params['permanent']
desired_state = module.params['state']
immediate = module.params['immediate']
timeout = module.params['timeout']
interface = module.params['interface']
masquerade = module.params['masquerade']
## Check for firewalld running
try:
if fw.connected == False:
module.fail_json(msg='firewalld service must be running')
except AttributeError:
module.fail_json(msg="firewalld connection can't be established,\
version likely too old. Requires firewalld >= 2.0.11")
modification_count = 0
if service != None:
modification_count += 1
if port != None:
modification_count += 1
if rich_rule != None:
modification_count += 1
if interface != None:
modification_count += 1
if masquerade != None:
modification_count += 1
if modification_count > 1:
module.fail_json(msg='can only operate on port, service, rich_rule or interface at once')
if service != None:
if permanent:
is_enabled = get_service_enabled_permanent(zone, service)
msgs.append('Permanent operation')
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_service_enabled_permanent(zone, service)
changed=True
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_service_disabled_permanent(zone, service)
changed=True
if immediate or not permanent:
is_enabled = get_service_enabled(zone, service)
msgs.append('Non-permanent operation')
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_service_enabled(zone, service, timeout)
changed=True
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_service_disabled(zone, service)
changed=True
if changed == True:
msgs.append("Changed service %s to %s" % (service, desired_state))
if source != None:
is_enabled = get_source(zone, source)
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
add_source(zone, source)
changed=True
msgs.append("Added %s to zone %s" % (source, zone))
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
remove_source(zone, source)
changed=True
msgs.append("Removed %s from zone %s" % (source, zone))
if port != None:
if permanent:
is_enabled = get_port_enabled_permanent(zone, [port, protocol])
msgs.append('Permanent operation')
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_port_enabled_permanent(zone, port, protocol)
changed=True
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_port_disabled_permanent(zone, port, protocol)
changed=True
if immediate or not permanent:
is_enabled = get_port_enabled(zone, [port,protocol])
msgs.append('Non-permanent operation')
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_port_enabled(zone, port, protocol, timeout)
changed=True
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_port_disabled(zone, port, protocol)
changed=True
if changed == True:
msgs.append("Changed port %s to %s" % ("%s/%s" % (port, protocol), \
desired_state))
if rich_rule != None:
if permanent:
is_enabled = get_rich_rule_enabled_permanent(zone, rich_rule)
msgs.append('Permanent operation')
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_rich_rule_enabled_permanent(zone, rich_rule)
changed=True
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_rich_rule_disabled_permanent(zone, rich_rule)
changed=True
if immediate or not permanent:<|fim▁hole|> if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_rich_rule_enabled(zone, rich_rule, timeout)
changed=True
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_rich_rule_disabled(zone, rich_rule)
changed=True
if changed == True:
msgs.append("Changed rich_rule %s to %s" % (rich_rule, desired_state))
if interface != None:
is_enabled = get_interface(zone, interface)
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
add_interface(zone, interface)
changed=True
msgs.append("Added %s to zone %s" % (interface, zone))
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
remove_interface(zone, interface)
changed=True
msgs.append("Removed %s from zone %s" % (interface, zone))
if masquerade != None:
if permanent:
is_enabled = get_masquerade_enabled_permanent(zone)
msgs.append('Permanent operation')
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_masquerade_permanent(zone, True)
changed=True
msgs.append("Added masquerade to zone %s" % (zone))
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_masquerade_permanent(zone, False)
changed=True
msgs.append("Removed masquerade from zone %s" % (zone))
if immediate or not permanent:
is_enabled = get_masquerade_enabled(zone)
msgs.append('Non-permanent operation')
if desired_state == "enabled":
if is_enabled == False:
if module.check_mode:
module.exit_json(changed=True)
set_masquerade_enabled(zone)
changed=True
msgs.append("Added masquerade to zone %s" % (zone))
elif desired_state == "disabled":
if is_enabled == True:
if module.check_mode:
module.exit_json(changed=True)
set_masquerade_disabled(zone)
changed=True
msgs.append("Removed masquerade from zone %s" % (zone))
module.exit_json(changed=changed, msg=', '.join(msgs))
#################################################
# import module snippets
from ansible.module_utils.basic import *
main()<|fim▁end|>
|
is_enabled = get_rich_rule_enabled(zone, rich_rule)
msgs.append('Non-permanent operation')
if desired_state == "enabled":
|
<|file_name|>OffsetDateTimeConverterTest.java<|end_file_name|><|fim▁begin|>/*
* MIT License
*
* Copyright 2017-2018 Sabre GLBL Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.sabre.oss.conf4j.converter;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.lang.reflect.Type;
import java.time.*;
import java.util.Map;
import static com.sabre.oss.conf4j.converter.AbstractNumberConverter.FORMAT;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
public class OffsetDateTimeConverterTest {
private OffsetDateTimeConverter offsetDateTimeConverter;
@BeforeEach
public void setUp() {
offsetDateTimeConverter = new OffsetDateTimeConverter();
}
@Test
public void shouldBeApplicableWhenOffsetDateTimeType() {
// given
Type type = OffsetDateTime.class;
// when<|fim▁hole|>
// then
assertThat(applicable).isTrue();
}
@Test
public void shouldNotBeApplicableWhenNotOffsetDateTimeType() {
// given
Type type = Boolean.class;
// when
boolean applicable = offsetDateTimeConverter.isApplicable(type, emptyMap());
// then
assertThat(applicable).isFalse();
}
@Test
public void shouldThrowExceptionWhenCheckingIfApplicableAndTypeIsNull() {
// then
assertThatThrownBy(() -> offsetDateTimeConverter.isApplicable(null, emptyMap()))
.isExactlyInstanceOf(NullPointerException.class)
.hasMessage("type cannot be null");
}
@Test
public void shouldConvertToStringWhenFormatNotSpecified() {
// given
Clock clock = Clock.fixed(Instant.EPOCH, ZoneId.of("Z"));
OffsetDateTime toConvert = OffsetDateTime.now(clock);
// when
String converted = offsetDateTimeConverter.toString(OffsetDateTime.class, toConvert, emptyMap());
// then
assertThat(converted).isEqualTo("1970-01-01T00:00Z");
}
// toString() must honor a custom pattern supplied via the FORMAT attribute
// (here "yyyy-MM-dd HH:mm x", applied to the epoch instant in UTC).
@Test
public void shouldConvertToStringWhenFormatSpecified() {
    // given
    Clock clock = Clock.fixed(Instant.EPOCH, ZoneId.of("Z"));
    OffsetDateTime toConvert = OffsetDateTime.now(clock);
    String format = "yyyy-MM-dd HH:mm x";
    Map<String, String> attributes = singletonMap(FORMAT, format);

    // when
    String converted = offsetDateTimeConverter.toString(OffsetDateTime.class, toConvert, attributes);

    // then
    assertThat(converted).isEqualTo("1970-01-01 00:00 +00");
}
@Test
public void shouldReturnNullWhenConvertingToStringAndValueToConvertIsNull() {
// when
String converted = offsetDateTimeConverter.toString(OffsetDateTime.class, null, emptyMap());
// then
assertThat(converted).isNull();
}
@Test
public void shouldThrowExceptionWhenConvertingToStringAndWrongFormat() {
// given
Clock clock = Clock.fixed(Instant.EPOCH, ZoneId.of("Z"));
OffsetDateTime toConvert = OffsetDateTime.now(clock);
String format = "invalid format";
Map<String, String> attributes = singletonMap(FORMAT, format);
// then
assertThatThrownBy(() -> offsetDateTimeConverter.toString(OffsetDateTime.class, toConvert, attributes))
.isExactlyInstanceOf(IllegalArgumentException.class)
.hasMessage("Unable to convert OffsetDateTime to String. Invalid format: 'invalid format'");
}
@Test
public void shouldThrowExceptionWhenConvertingToStringAndTypeIsNull() {
// given
Clock clock = Clock.fixed(Instant.EPOCH, ZoneId.of("Z"));
OffsetDateTime toConvert = OffsetDateTime.now(clock);
// then
assertThatThrownBy(() -> offsetDateTimeConverter.toString(null, toConvert, emptyMap()))
.isExactlyInstanceOf(NullPointerException.class)
.hasMessage("type cannot be null");
}
@Test
public void shouldConvertFromStringWhenFormatNotSpecified() {
// given
String dateInString = "1970-01-01T00:00Z";
// when
OffsetDateTime fromConversion = offsetDateTimeConverter.fromString(OffsetDateTime.class, dateInString, emptyMap());
// then
Clock clock = Clock.fixed(Instant.EPOCH, ZoneId.of("Z"));
OffsetDateTime expected = OffsetDateTime.now(clock);
assertThat(fromConversion).isEqualTo(expected);
}
// fromString() must parse with a custom pattern supplied via the FORMAT
// attribute and reconstruct the matching OffsetDateTime (epoch at offset +00).
@Test
public void shouldConvertFromStringWhenFormatSpecified() {
    // given
    String dateInString = "1970 01 01 00:00 +00";
    String format = "yyyy MM dd HH:mm x";
    Map<String, String> attributes = singletonMap(FORMAT, format);

    // when
    OffsetDateTime fromConversion = offsetDateTimeConverter.fromString(OffsetDateTime.class, dateInString, attributes);

    // then
    LocalDateTime localDateTime = LocalDateTime.of(1970, 1, 1, 0, 0);
    ZoneOffset zoneOffset = ZoneOffset.of("+00");
    OffsetDateTime expected = OffsetDateTime.of(localDateTime, zoneOffset);
    assertThat(fromConversion).isEqualTo(expected);
}
@Test
public void shouldReturnNullWhenConvertingFromStringAndValueToConvertIsNull() {
// when
OffsetDateTime fromConversion = offsetDateTimeConverter.fromString(OffsetDateTime.class, null, emptyMap());
// then
assertThat(fromConversion).isNull();
}
@Test
public void shouldThrowExceptionWhenConvertingFromStringAndWrongValueString() {
// given
String dateInString = "invalid value string";
// then
assertThatThrownBy(() -> offsetDateTimeConverter.fromString(OffsetDateTime.class, dateInString, emptyMap()))
.isExactlyInstanceOf(IllegalArgumentException.class)
.hasMessage("Unable to convert to OffsetDateTime: invalid value string. The value doesn't match specified format null.");
}
@Test
public void shouldThrowExceptionWhenConvertingFromStringAndWrongFormat() {
// given
String dateInString = "1970 01 01 00:00 +00";
String format = "invalid format";
Map<String, String> attributes = singletonMap(FORMAT, format);
// then
assertThatThrownBy(() -> offsetDateTimeConverter.fromString(OffsetDateTime.class, dateInString, attributes))
.isExactlyInstanceOf(IllegalArgumentException.class)
.hasMessage("Unable to convert to OffsetDateTime: 1970 01 01 00:00 +00. Invalid format: 'invalid format'");
}
@Test
public void shouldThrowExceptionWhenConvertingFromStringAndTypeIsNull() {
// given
Clock clock = Clock.fixed(Instant.EPOCH, ZoneId.of("Z"));
OffsetDateTime toConvert = OffsetDateTime.now(clock);
// then
assertThatThrownBy(() -> offsetDateTimeConverter.toString(null, toConvert, emptyMap()))
.isExactlyInstanceOf(NullPointerException.class)
.hasMessage("type cannot be null");
}
}<|fim▁end|>
|
boolean applicable = offsetDateTimeConverter.isApplicable(type, emptyMap());
|
<|file_name|>graphml.cpp<|end_file_name|><|fim▁begin|>// Copyright (C) 2006 Tiago de Paula Peixoto <[email protected]>
// Copyright (C) 2004,2009 The Trustees of Indiana University.
//
// Use, modification and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// Authors: Douglas Gregor
// Jeremiah Willcock
// Andrew Lumsdaine
// Tiago de Paula Peixoto
#define BOOST_GRAPH_SOURCE
#include <boost/foreach.hpp>
#include <boost/optional.hpp>
#include <boost/throw_exception.hpp>
#include <boost/graph/graphml.hpp>
#include <boost/graph/dll_import_export.hpp>
#include <boost/property_tree/ptree.hpp>
#include <boost/property_tree/xml_parser.hpp>
using namespace boost;
namespace {
class graphml_reader
{
public:
graphml_reader(mutate_graph& g)
: m_g(g) { }
static boost::property_tree::ptree::path_type path(const std::string& str) {
return boost::property_tree::ptree::path_type(str, '/');
}
static void get_graphs(const boost::property_tree::ptree& top,
size_t desired_idx /* or -1 for all */,
std::vector<const boost::property_tree::ptree*>& result) {
using boost::property_tree::ptree;
size_t current_idx = 0;
BOOST_FOREACH(const ptree::value_type& n, top) {
if (n.first == "graph") {
if (current_idx == desired_idx || desired_idx == (size_t)(-1)) {
result.push_back(&n.second);
get_graphs(n.second, (size_t)(-1), result);
if (desired_idx != (size_t)(-1)) break;
}
++current_idx;
}
}
}
void run(std::istream& in, size_t desired_idx)
{
using boost::property_tree::ptree;
ptree pt;
read_xml(in, pt, boost::property_tree::xml_parser::no_comments | boost::property_tree::xml_parser::trim_whitespace);
ptree gml = pt.get_child(path("graphml"));
// Search for attributes
BOOST_FOREACH(const ptree::value_type& child, gml) {
if (child.first != "key") continue;
std::string id = child.second.get(path("<xmlattr>/id"), "");
std::string for_ = child.second.get(path("<xmlattr>/for"), "");
std::string name = child.second.get(path("<xmlattr>/attr.name"), "");
std::string type = child.second.get(path("<xmlattr>/attr.type"), "");
key_kind kind = all_key;
if (for_ == "graph") kind = graph_key;
else if (for_ == "node") kind = node_key;
else if (for_ == "edge") kind = edge_key;
else if (for_ == "hyperedge") kind = hyperedge_key;
else if (for_ == "port") kind = port_key;
else if (for_ == "endpoint") kind = endpoint_key;
else if (for_ == "all") kind = all_key;
else {BOOST_THROW_EXCEPTION(parse_error("Attribute for is not valid: " + for_));}
m_keys[id] = kind;
m_key_name[id] = name;
m_key_type[id] = type;
boost::optional<std::string> default_ = child.second.get_optional<std::string>(path("default"));
if (default_) m_key_default[id] = default_.get();
}
// Search for graphs
std::vector<const ptree*> graphs;
get_graphs(gml, desired_idx, graphs);
BOOST_FOREACH(const ptree* gr, graphs) {
// Search for nodes
BOOST_FOREACH(const ptree::value_type& node, *gr) {
if (node.first != "node") continue;
std::string id = node.second.get<std::string>(path("<xmlattr>/id"));
handle_vertex(id);<|fim▁hole|> std::string key = attr.second.get<std::string>(path("<xmlattr>/key"));
std::string value = attr.second.get_value("");
handle_node_property(key, id, value);
}
}
}
BOOST_FOREACH(const ptree* gr, graphs) {
bool default_directed = gr->get<std::string>(path("<xmlattr>/edgedefault")) == "directed";
// Search for edges
BOOST_FOREACH(const ptree::value_type& edge, *gr) {
if (edge.first != "edge") continue;
std::string source = edge.second.get<std::string>(path("<xmlattr>/source"));
std::string target = edge.second.get<std::string>(path("<xmlattr>/target"));
std::string local_directed = edge.second.get(path("<xmlattr>/directed"), "");
bool is_directed = (local_directed == "" ? default_directed : local_directed == "true");
if (is_directed != m_g.is_directed()) {
if (is_directed) {
BOOST_THROW_EXCEPTION(directed_graph_error());
} else {
BOOST_THROW_EXCEPTION(undirected_graph_error());
}
}
size_t old_edges_size = m_edge.size();
handle_edge(source, target);
BOOST_FOREACH(const ptree::value_type& attr, edge.second) {
if (attr.first != "data") continue;
std::string key = attr.second.get<std::string>(path("<xmlattr>/key"));
std::string value = attr.second.get_value("");
handle_edge_property(key, old_edges_size, value);
}
}
}
}
private:
/// The kinds of keys. Not all of these are supported
enum key_kind {
graph_key,
node_key,
edge_key,
hyperedge_key,
port_key,
endpoint_key,
all_key
};
void
handle_vertex(const std::string& v)
{
bool is_new = false;
if (m_vertex.find(v) == m_vertex.end())
{
m_vertex[v] = m_g.do_add_vertex();
is_new = true;
}
if (is_new)
{
std::map<std::string, std::string>::iterator iter;
for (iter = m_key_default.begin(); iter != m_key_default.end(); ++iter)
{
if (m_keys[iter->first] == node_key)
handle_node_property(iter->first, v, iter->second);
}
}
}
any
get_vertex_descriptor(const std::string& v)
{
return m_vertex[v];
}
void
handle_edge(const std::string& u, const std::string& v)
{
handle_vertex(u);
handle_vertex(v);
any source, target;
source = get_vertex_descriptor(u);
target = get_vertex_descriptor(v);
any edge;
bool added;
boost::tie(edge, added) = m_g.do_add_edge(source, target);
if (!added) {
BOOST_THROW_EXCEPTION(bad_parallel_edge(u, v));
}
size_t e = m_edge.size();
m_edge.push_back(edge);
std::map<std::string, std::string>::iterator iter;
for (iter = m_key_default.begin(); iter != m_key_default.end(); ++iter)
{
if (m_keys[iter->first] == edge_key)
handle_edge_property(iter->first, e, iter->second);
}
}
void handle_node_property(const std::string& key_id, const std::string& descriptor, const std::string& value)
{
m_g.set_vertex_property(m_key_name[key_id], m_vertex[descriptor], value, m_key_type[key_id]);
}
void handle_edge_property(const std::string& key_id, size_t descriptor, const std::string& value)
{
m_g.set_edge_property(m_key_name[key_id], m_edge[descriptor], value, m_key_type[key_id]);
}
mutate_graph& m_g;
std::map<std::string, key_kind> m_keys;
std::map<std::string, std::string> m_key_name;
std::map<std::string, std::string> m_key_type;
std::map<std::string, std::string> m_key_default;
std::map<std::string, any> m_vertex;
std::vector<any> m_edge;
};
}
namespace boost
{
void BOOST_GRAPH_DECL
read_graphml(std::istream& in, mutate_graph& g, size_t desired_idx)
{
graphml_reader reader(g);
reader.run(in, desired_idx);
}
}<|fim▁end|>
|
BOOST_FOREACH(const ptree::value_type& attr, node.second) {
if (attr.first != "data") continue;
|
<|file_name|>boss_moam.cpp<|end_file_name|><|fim▁begin|>/*
* This file is part of the OregonCore Project. See AUTHORS file for Copyright information
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <https://www.gnu.org/licenses/>.
*/
/* ScriptData
SDName: Boss_Moam
SD%Complete: 100
SDComment: VERIFY SCRIPT AND SQL
SDCategory: Ruins of Ahn'Qiraj<|fim▁hole|>EndScriptData */
#include "ScriptMgr.h"
#include "ScriptedCreature.h"
#define EMOTE_AGGRO -1509000
#define EMOTE_MANA_FULL -1509001
#define SPELL_TRAMPLE 15550
#define SPELL_DRAINMANA 27256
#define SPELL_ARCANEERUPTION 25672
#define SPELL_SUMMONMANA 25681
#define SPELL_GRDRSLEEP 24360 //Greater Dreamless Sleep
// AI for Moam (Ruins of Ahn'Qiraj): trample/drain-mana rotation plus a
// mana-fiend summon phase.
struct boss_moamAI : public ScriptedAI
{
    boss_moamAI(Creature* c) : ScriptedAI(c) {}

    Unit* pTarget;           // unit that triggered combat (stored, not otherwise used)
    uint32 TRAMPLE_Timer;    // ms until next Trample cast
    uint32 DRAINMANA_Timer;  // ms until next Drain Mana cast
    uint32 SUMMONMANA_Timer; // ms until next Summon Mana cast (active once i == 1)
    uint32 i;                // 1 once the mana-fiend phase has been entered
    uint32 j;                // 1 once Trample has been cast at least once

    void Reset()
    {
        i = 0;
        j = 0;
        pTarget = NULL;
        TRAMPLE_Timer = 30000;
        DRAINMANA_Timer = 30000;
        // Bug fix: SUMMONMANA_Timer was never initialized, so the first
        // `SUMMONMANA_Timer <= diff` comparison in UpdateAI read garbage.
        SUMMONMANA_Timer = 90000;
    }

    void EnterCombat(Unit* who)
    {
        DoScriptText(EMOTE_AGGRO, me);
        pTarget = who;
    }

    void UpdateAI(const uint32 diff)
    {
        if (!UpdateVictim())
            return;
        // NOTE(review): both gating conditions below are commented out, so
        // the two blocks execute on *every* AI tick. Restoring them needs
        // this core's current mana/health accessors — confirm and re-enable.
        //If we are 100%MANA cast Arcane Erruption
        //if (j == 1 && me->GetMana()*100 / me->GetMaxMana() == 100 && !me->IsNonMeleeSpellCast(false))
        {
            DoCastVictim(SPELL_ARCANEERUPTION);
            DoScriptText(EMOTE_MANA_FULL, me);
        }
        //If we are <50%HP cast MANA FIEND (Summon Mana) and Sleep
        //if (i == 0 && me->GetHealth()*100 / me->GetMaxHealth() <= 50 && !me->IsNonMeleeSpellCast(false))
        {
            i = 1;
            DoCastVictim(SPELL_SUMMONMANA);
            DoCastVictim(SPELL_GRDRSLEEP);
        }
        //SUMMONMANA_Timer: re-summon mana fiends every 90s during the phase
        if (i == 1 && SUMMONMANA_Timer <= diff)
        {
            DoCastVictim(SPELL_SUMMONMANA);
            SUMMONMANA_Timer = 90000;
        }
        else SUMMONMANA_Timer -= diff;
        //TRAMPLE_Timer
        if (TRAMPLE_Timer <= diff)
        {
            DoCastVictim(SPELL_TRAMPLE);
            j = 1;
            TRAMPLE_Timer = 30000;
        }
        else TRAMPLE_Timer -= diff;
        //DRAINMANA_Timer
        if (DRAINMANA_Timer <= diff)
        {
            DoCastVictim(SPELL_DRAINMANA);
            DRAINMANA_Timer = 30000;
        }
        else DRAINMANA_Timer -= diff;
        DoMeleeAttackIfReady();
    }
};
// Factory used by the script registry: attaches a fresh boss_moamAI to the
// creature. Ownership of the AI object passes to the core.
CreatureAI* GetAI_boss_moam(Creature* pCreature)
{
    return new boss_moamAI (pCreature);
}
// Registers the "boss_moam" script with the core's script system.
// The Script object is owned by the registry after RegisterSelf().
void AddSC_boss_moam()
{
    Script* newscript;
    newscript = new Script;
    newscript->Name = "boss_moam";
    newscript->GetAI = &GetAI_boss_moam;
    newscript->RegisterSelf();
}
| |
<|file_name|>flag.go<|end_file_name|><|fim▁begin|>package cli
import (
"flag"
"fmt"
"io/ioutil"
"reflect"
"runtime"
"strconv"
"strings"
"syscall"
)
const defaultPlaceholder = "value"
// BashCompletionFlag enables bash-completion for all commands and subcommands
var BashCompletionFlag Flag = BoolFlag{
Name: "generate-bash-completion",<|fim▁hole|>var VersionFlag Flag = BoolFlag{
Name: "version, v",
Usage: "print the version",
}
// HelpFlag prints the help for all commands and subcommands
// Set to the zero value (BoolFlag{}) to disable flag -- keeps subcommand
// unless HideHelp is set to true)
var HelpFlag Flag = BoolFlag{
Name: "help, h",
Usage: "show help",
}
// FlagStringer converts a flag definition to a string. This is used by help
// to display a flag.
var FlagStringer FlagStringFunc = stringifyFlag
// FlagNamePrefixer converts a full flag name and its placeholder into the help
// message flag prefix. This is used by the default FlagStringer.
var FlagNamePrefixer FlagNamePrefixFunc = prefixedNames
// FlagEnvHinter annotates flag help message with the environment variable
// details. This is used by the default FlagStringer.
var FlagEnvHinter FlagEnvHintFunc = withEnvHint
// FlagFileHinter annotates flag help message with the environment variable
// details. This is used by the default FlagStringer.
var FlagFileHinter FlagFileHintFunc = withFileHint
// FlagsByName is a slice of Flag.
type FlagsByName []Flag
// Len reports the number of flags (sort.Interface).
func (f FlagsByName) Len() int {
	return len(f)
}

// Less orders two flags by name via lexicographicLess (sort.Interface).
func (f FlagsByName) Less(i, j int) bool {
	return lexicographicLess(f[i].GetName(), f[j].GetName())
}

// Swap exchanges the flags at positions i and j (sort.Interface).
func (f FlagsByName) Swap(i, j int) {
	f[i], f[j] = f[j], f[i]
}
// Flag is a common interface related to parsing flags in cli.
// For more advanced flag parsing techniques, it is recommended that
// this interface be implemented.
type Flag interface {
fmt.Stringer
// Apply Flag settings to the given flag set
Apply(*flag.FlagSet)
GetName() string
}
// RequiredFlag is an interface that allows us to mark flags as required
// it allows flags required flags to be backwards compatible with the Flag interface
type RequiredFlag interface {
Flag
IsRequired() bool
}
// DocGenerationFlag is an interface that allows documentation generation for the flag
type DocGenerationFlag interface {
Flag
// TakesValue returns true if the flag takes a value, otherwise false
TakesValue() bool
// GetUsage returns the usage string for the flag
GetUsage() string
// GetValue returns the flags value as string representation and an empty
// string if the flag takes no value at all.
GetValue() string
}
// errorableFlag is an interface that allows us to return errors during apply
// it allows flags defined in this library to return errors in a fashion backwards compatible
// TODO remove in v2 and modify the existing Flag interface to return errors
type errorableFlag interface {
Flag
ApplyWithError(*flag.FlagSet) error
}
// flagSet creates a flag.FlagSet named `name` and applies each cli Flag to
// it. Flags implementing errorableFlag can abort with an error; legacy
// flags are applied without error reporting. The set's output is discarded
// so parse errors are not printed directly by the stdlib.
func flagSet(name string, flags []Flag) (*flag.FlagSet, error) {
	set := flag.NewFlagSet(name, flag.ContinueOnError)
	for _, f := range flags {
		//TODO remove in v2 when errorableFlag is removed
		if ef, ok := f.(errorableFlag); ok {
			if err := ef.ApplyWithError(set); err != nil {
				return nil, err
			}
		} else {
			f.Apply(set)
		}
	}
	set.SetOutput(ioutil.Discard)
	return set, nil
}
// eachName splits a comma-separated flag name (e.g. "help, h") and invokes
// fn once for each space-trimmed component.
func eachName(longName string, fn func(string)) {
	for _, raw := range strings.Split(longName, ",") {
		fn(strings.Trim(raw, " "))
	}
}
// visibleFlags filters out flags whose "Hidden" struct field is set to true;
// flags without such a field are always considered visible.
func visibleFlags(fl []Flag) []Flag {
	var visible []Flag
	for _, f := range fl {
		field := flagValue(f).FieldByName("Hidden")
		if !field.IsValid() || !field.Bool() {
			visible = append(visible, f)
		}
	}
	return visible
}
// prefixFor returns the command-line prefix for a flag name: "-" for a
// single-character name, "--" for anything else.
func prefixFor(name string) (prefix string) {
	if len(name) == 1 {
		return "-"
	}
	return "--"
}
// unquoteUsage extracts a back-quoted placeholder from a usage string.
// It returns the placeholder (or "" if none) and the usage text with the
// back-quotes removed. Only the first back-quoted span is recognized; an
// unmatched back-quote leaves the string untouched.
func unquoteUsage(usage string) (string, string) {
	start := strings.IndexByte(usage, '`')
	if start >= 0 {
		if rel := strings.IndexByte(usage[start+1:], '`'); rel >= 0 {
			end := start + 1 + rel
			name := usage[start+1 : end]
			return name, usage[:start] + name + usage[end+1:]
		}
	}
	return "", usage
}
// prefixedNames renders a comma-separated flag name as its help-text form,
// e.g. ("help, h", "value") -> "--help value, -h value".
func prefixedNames(fullName, placeholder string) string {
	parts := strings.Split(fullName, ",")
	rendered := make([]string, 0, len(parts))
	for _, raw := range parts {
		name := strings.Trim(raw, " ")
		piece := prefixFor(name) + name
		if placeholder != "" {
			piece += " " + placeholder
		}
		rendered = append(rendered, piece)
	}
	return strings.Join(rendered, ", ")
}
func withEnvHint(envVar, str string) string {
envText := ""
if envVar != "" {
prefix := "$"
suffix := ""
sep := ", $"
if runtime.GOOS == "windows" {
prefix = "%"
suffix = "%"
sep = "%, %"
}
envText = " [" + prefix + strings.Join(strings.Split(envVar, ","), sep) + suffix + "]"
}
return str + envText
}
// withFileHint appends a file-path hint such as " [/etc/app.conf]" to str;
// an empty filePath leaves str unchanged.
func withFileHint(filePath, str string) string {
	if filePath == "" {
		return str
	}
	return str + fmt.Sprintf(" [%s]", filePath)
}
// flagValue dereferences f down to its underlying struct value so callers
// can reflect on its fields regardless of pointer depth.
func flagValue(f Flag) reflect.Value {
	fv := reflect.ValueOf(f)
	for fv.Kind() == reflect.Ptr {
		fv = reflect.Indirect(fv)
	}
	return fv
}
// stringifyFlag is the default FlagStringer: it renders a flag definition
// into a help line of the form "<names> <placeholder>\t<usage> (default: …)",
// decorated with environment-variable and file-path hints.
func stringifyFlag(f Flag) string {
	fv := flagValue(f)
	// Slice-valued flags need custom default formatting; delegate to the
	// stringify*SliceFlag helpers but keep the same hint decoration.
	switch f.(type) {
	case IntSliceFlag:
		return FlagFileHinter(
			fv.FieldByName("FilePath").String(),
			FlagEnvHinter(
				fv.FieldByName("EnvVar").String(),
				stringifyIntSliceFlag(f.(IntSliceFlag)),
			),
		)
	case Int64SliceFlag:
		return FlagFileHinter(
			fv.FieldByName("FilePath").String(),
			FlagEnvHinter(
				fv.FieldByName("EnvVar").String(),
				stringifyInt64SliceFlag(f.(Int64SliceFlag)),
			),
		)
	case StringSliceFlag:
		return FlagFileHinter(
			fv.FieldByName("FilePath").String(),
			FlagEnvHinter(
				fv.FieldByName("EnvVar").String(),
				stringifyStringSliceFlag(f.(StringSliceFlag)),
			),
		)
	}
	placeholder, usage := unquoteUsage(fv.FieldByName("Usage").String())
	needsPlaceholder := false
	defaultValueString := ""
	// A flag with a Value field shows its default and needs a placeholder.
	if val := fv.FieldByName("Value"); val.IsValid() {
		needsPlaceholder = true
		defaultValueString = fmt.Sprintf(" (default: %v)", val.Interface())
		if val.Kind() == reflect.String && val.String() != "" {
			defaultValueString = fmt.Sprintf(" (default: %q)", val.String())
		}
	}
	// Suppress the hint for an empty string default.
	if defaultValueString == " (default: )" {
		defaultValueString = ""
	}
	if needsPlaceholder && placeholder == "" {
		placeholder = defaultPlaceholder
	}
	usageWithDefault := strings.TrimSpace(usage + defaultValueString)
	return FlagFileHinter(
		fv.FieldByName("FilePath").String(),
		FlagEnvHinter(
			fv.FieldByName("EnvVar").String(),
			FlagNamePrefixer(fv.FieldByName("Name").String(), placeholder)+"\t"+usageWithDefault,
		),
	)
}
// stringifyIntSliceFlag renders an IntSliceFlag's help line, formatting its
// default values as decimal integers.
func stringifyIntSliceFlag(f IntSliceFlag) string {
	var defaultVals []string
	if f.Value != nil && len(f.Value.Value()) > 0 {
		for _, i := range f.Value.Value() {
			defaultVals = append(defaultVals, strconv.Itoa(i))
		}
	}
	return stringifySliceFlag(f.Usage, f.Name, defaultVals)
}

// stringifyInt64SliceFlag renders an Int64SliceFlag's help line, formatting
// its default values as base-10 int64s.
func stringifyInt64SliceFlag(f Int64SliceFlag) string {
	var defaultVals []string
	if f.Value != nil && len(f.Value.Value()) > 0 {
		for _, i := range f.Value.Value() {
			defaultVals = append(defaultVals, strconv.FormatInt(i, 10))
		}
	}
	return stringifySliceFlag(f.Usage, f.Name, defaultVals)
}

// stringifyStringSliceFlag renders a StringSliceFlag's help line, quoting
// each non-empty default value (empty strings are skipped).
func stringifyStringSliceFlag(f StringSliceFlag) string {
	var defaultVals []string
	if f.Value != nil && len(f.Value.Value()) > 0 {
		for _, s := range f.Value.Value() {
			if len(s) > 0 {
				defaultVals = append(defaultVals, strconv.Quote(s))
			}
		}
	}
	return stringifySliceFlag(f.Usage, f.Name, defaultVals)
}

// stringifySliceFlag is the shared tail for the slice-flag stringifiers:
// it resolves the placeholder, appends the "(default: …)" hint, and joins
// name and usage with a tab for help-table alignment.
func stringifySliceFlag(usage, name string, defaultVals []string) string {
	placeholder, usage := unquoteUsage(usage)
	if placeholder == "" {
		placeholder = defaultPlaceholder
	}
	defaultVal := ""
	if len(defaultVals) > 0 {
		defaultVal = fmt.Sprintf(" (default: %s)", strings.Join(defaultVals, ", "))
	}
	usageWithDefault := strings.TrimSpace(usage + defaultVal)
	return FlagNamePrefixer(name, placeholder) + "\t" + usageWithDefault
}
// flagFromFileEnv resolves a flag's value from, in order of precedence:
// the first set environment variable in the comma-separated envName list,
// then the contents of the first readable file in the comma-separated
// filePath list. ok reports whether either source yielded a value.
// NOTE(review): file contents are returned verbatim — a trailing newline is
// not stripped; confirm callers expect that.
func flagFromFileEnv(filePath, envName string) (val string, ok bool) {
	for _, envVar := range strings.Split(envName, ",") {
		envVar = strings.TrimSpace(envVar)
		if envVal, ok := syscall.Getenv(envVar); ok {
			return envVal, true
		}
	}
	for _, fileVar := range strings.Split(filePath, ",") {
		if data, err := ioutil.ReadFile(fileVar); err == nil {
			return string(data), true
		}
	}
	return "", false
}
|
Hidden: true,
}
// VersionFlag prints the version for the application
|
<|file_name|>clean_geoindex.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from ..extensions import celery, redis_store
from ..models.taxis import Taxi
import time
from flask import current_app
@celery.task
def clean_geoindex():
    """Remove stale taxis from the Redis geo-index.

    Scans every ``taxi:*`` hash; a taxi whose stored positions are all older
    than one hour is removed from the geo-index sorted set.  The ``taxi:*``
    hash itself is kept, so the taxi's last-known data survives.

    Fixes over the previous version: dropped the unused ``keys_to_clean``
    and ``taxi_id`` locals, removed the redundant initial ``cursor = 0``
    assignment, and replaced ``== None`` with ``is None``.
    """
    cursor = None
    # SCAN loop: starts at cursor 0 and ends when Redis returns cursor 0.
    while cursor != 0:
        if cursor is None:
            cursor = 0
        cursor, result = redis_store.scan(cursor, 'taxi:*')
        # Fetch every matched hash's values in a single round trip.
        pipe = redis_store.pipeline()
        for key in result:
            pipe.hvals(key)
        values = pipe.execute()
        # A taxi is fresh if any of its entries was updated within the hour.
        lower_bound = int(time.time()) - 60 * 60
        pipe = redis_store.pipeline()
        for (key, l) in zip(result, values):
            if any(map(lambda v: Taxi.parse_redis(v)['timestamp'] >= lower_bound, l)):
                continue
            pipe.zrem(current_app.config['REDIS_GEOINDEX'], key)
        pipe.execute()
    # NOTE(review): removing a taxi from the geo-index loses the record of
    # when it was last active; consider logging that to the database first
    # (see the original trailing comment).
| |
<|file_name|>simplelevel.py<|end_file_name|><|fim▁begin|>from levels import AbstractLevel, LevelsFactory, check
import bomblevel
def allBombed(self, player):
    """Return True when every cell of the field is occupied by a default unit.

    Intended to be bound as a method (hence the explicit ``self``, the level
    instance) and used as a completion predicate via the ``@check`` decorator.
    """
    return player.units.count('default_unit') == self.fieldSize()
class Level(AbstractLevel):
def fieldSize(self):
return 5
def units(self):
return ["default_unit" for i in range(self.fieldSize())]
@check(allBombed)<|fim▁hole|>
def nextLevel(self):
return "bomblevel"
LevelsFactory.levels["default"] = Level()<|fim▁end|>
|
def isGameOver(self, player):
return False
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
import argparse
import numpy as np
parser = argparse.ArgumentParser(description='Configuration file')
arg_lists = []
def add_argument_group(name):
    """Create a named argparse group on the module-level parser.

    The group is also recorded in the module-level ``arg_lists`` so all
    groups can be inspected later; the group object is returned so callers
    can add arguments to it.
    """
    arg = parser.add_argument_group(name)
    arg_lists.append(arg)
    return arg
def str2bool(v):
    """Interpret a command-line string as a boolean: 'true'/'1' (any case) is True."""
    return v.lower() in {'true', '1'}
# Network
net_arg = add_argument_group('Network')
net_arg.add_argument('--hidden_dim', type=int, default=128, help='actor LSTM num_neurons')
net_arg.add_argument('--num_heads', type=int, default=16, help='actor input embedding') ###
net_arg.add_argument('--num_stacks', type=int, default=3, help='actor LSTM num_neurons')
# Data
data_arg = add_argument_group('Data')
data_arg.add_argument('--batch_size', type=int, default=128, help='batch size')
data_arg.add_argument('--input_dimension', type=int, default=2, help='city dimension')
data_arg.add_argument('--max_length', type=int, default=20, help='number of deliveries')
# Training / test parameters
train_arg = add_argument_group('Training')
train_arg.add_argument('--nb_epoch', type=int, default=100000, help='nb epoch')
train_arg.add_argument('--lr1_start', type=float, default=0.001, help='actor learning rate')
train_arg.add_argument('--lr1_decay_step', type=int, default=5000, help='lr1 decay step')
train_arg.add_argument('--lr1_decay_rate', type=float, default=0.96, help='lr1 decay rate')
train_arg.add_argument('--alpha', type=float, default=0.99, help='update factor moving average baseline')
train_arg.add_argument('--init_baseline', type=float, default=7.0, help='initial baseline - REINFORCE')
train_arg.add_argument('--temperature', type=float, default=3.0, help='pointer_net initial temperature')
train_arg.add_argument('--C', type=float, default=10.0, help='pointer_net tan clipping')
# Misc
misc_arg = add_argument_group('User options') #####################################################
misc_arg.add_argument('--inference_mode', type=str2bool, default=True, help='switch to inference mode when model is trained')
misc_arg.add_argument('--restore_model', type=str2bool, default=True, help='whether or not model is retrieved')
misc_arg.add_argument('--save_to', type=str, default='20/model', help='saver sub directory')
misc_arg.add_argument('--restore_from', type=str, default='20/model', help='loader sub directory') ###
misc_arg.add_argument('--log_dir', type=str, default='summary/20/repo', help='summary writer log directory')
def get_config():
    """Parse known command-line args against the module-level parser.

    Returns a ``(config, unparsed)`` tuple: the parsed namespace and the
    list of arguments argparse did not recognize.
    """
    config, unparsed = parser.parse_known_args()
    return config, unparsed
def print_config():
config, _ = get_config()
print('\n')
print('Data Config:')
print('* Batch size:',config.batch_size)
print('* Sequence length:',config.max_length)
print('* City coordinates:',config.input_dimension)
print('\n')
print('Network Config:')<|fim▁hole|> print('\n')
if config.inference_mode==False:
print('Training Config:')
print('* Nb epoch:',config.nb_epoch)
print('* Temperature:',config.temperature)
print('* Actor learning rate (init,decay_step,decay_rate):',config.lr1_start,config.lr1_decay_step,config.lr1_decay_rate)
else:
print('Testing Config:')
print('* Summary writer log dir:',config.log_dir)
print('\n')<|fim▁end|>
|
print('* Restored model:',config.restore_model)
print('* Actor hidden_dim (embed / num neurons):',config.hidden_dim)
print('* Actor tan clipping:',config.C)
|
<|file_name|>test_provider_config.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import ddt
import fixtures
import microversion_parse
import os
from unittest import mock
from oslo_utils.fixture import uuidsentinel
from oslotest import base
from nova.compute import provider_config
from nova import exception as nova_exc
class SchemaValidationMixin(base.BaseTestCase):
    """This class provides the basic methods for running schema validation test
    cases. It can be used along with ddt.file_data to test a specific schema
    version using tests defined in yaml files. See SchemaValidationTestCasesV1
    for an example of how this was done for schema version 1.

    Because decorators can only access class properties of the class they are
    defined in (even when overriding values in the subclass), the decorators
    need to be placed in the subclass. This is why there are test_ functions in
    the subclass that call the run_test_ methods in this class. This should
    keep things simple as more schema versions are added.
    """

    def setUp(self):
        super(SchemaValidationMixin, self).setUp()
        # Stub out YAML loading so tests can inject configs directly, and
        # the module logger so warning calls can be asserted on.
        self.mock_load_yaml = self.useFixture(
            fixtures.MockPatchObject(
                provider_config, '_load_yaml_file')).mock
        self.mock_LOG = self.useFixture(
            fixtures.MockPatchObject(
                provider_config, 'LOG')).mock

    def set_config(self, config=None):
        # Make _load_yaml_file return `config` (or {} when None) and hand
        # the dict back so tests can compare it with the parser's output.
        data = config or {}
        self.mock_load_yaml.return_value = data
        return data

    def run_test_validation_errors(self, config, expected_messages):
        # Parsing an invalid config must raise ProviderConfigException whose
        # message contains every expected fragment.
        self.set_config(config=config)

        actual_msg = self.assertRaises(
            nova_exc.ProviderConfigException,
            provider_config._parse_provider_yaml, 'test_path').message

        for msg in expected_messages:
            self.assertIn(msg, actual_msg)

    def run_test_validation_success(self, config):
        # A valid config must be returned unchanged by the parser.
        reference = self.set_config(config=config)

        actual = provider_config._parse_provider_yaml('test_path')

        self.assertEqual(reference, actual)

    def run_schema_version_matching(
            self, min_schema_version, max_schema_version):
        # note _load_yaml_file is mocked so the value is not important
        # however it may appear in logs messages so changing it could
        # result in tests failing unless the expected_messages field
        # is updated in the test data.
        path = 'test_path'

        # test exactly min and max versions are supported
        self.set_config(config={
            'meta': {'schema_version': str(min_schema_version)}})
        provider_config._parse_provider_yaml(path)
        self.set_config(config={
            'meta': {'schema_version': str(max_schema_version)}})
        provider_config._parse_provider_yaml(path)

        self.mock_LOG.warning.assert_not_called()

        # test max major+1 raises
        higher_major = microversion_parse.Version(
            major=max_schema_version.major + 1, minor=max_schema_version.minor)
        self.set_config(config={'meta': {'schema_version': str(higher_major)}})

        self.assertRaises(nova_exc.ProviderConfigException,
                          provider_config._parse_provider_yaml, path)

        # test max major with max minor+1 is logged (supported with warning)
        higher_minor = microversion_parse.Version(
            major=max_schema_version.major, minor=max_schema_version.minor + 1)
        expected_log_call = (
            "Provider config file [%(path)s] is at schema version "
            "%(schema_version)s. Nova supports the major version, but "
            "not the minor. Some fields may be ignored." % {
                "path": path, "schema_version": higher_minor})
        self.set_config(config={'meta': {'schema_version': str(higher_minor)}})

        provider_config._parse_provider_yaml(path)

        self.mock_LOG.warning.assert_called_once_with(expected_log_call)
@ddt.ddt
class SchemaValidationTestCasesV1(SchemaValidationMixin):
    """Schema-validation tests for provider config schema version 1.x.

    Test inputs and expected messages are loaded from the yaml files named
    in the ddt.file_data decorators; the heavy lifting is done by the
    run_test_* helpers on SchemaValidationMixin.
    """

    MIN_SCHEMA_VERSION = microversion_parse.Version(1, 0)
    MAX_SCHEMA_VERSION = microversion_parse.Version(1, 0)

    @ddt.unpack
    @ddt.file_data('provider_config_data/v1/validation_error_test_data.yaml')
    def test_validation_errors(self, config, expected_messages):
        # Each yaml entry supplies an invalid config plus message fragments.
        self.run_test_validation_errors(config, expected_messages)

    @ddt.unpack
    @ddt.file_data('provider_config_data/v1/validation_success_test_data.yaml')
    def test_validation_success(self, config):
        # Each yaml entry supplies a config that must parse unchanged.
        self.run_test_validation_success(config)

    def test_schema_version_matching(self):
        self.run_schema_version_matching(self.MIN_SCHEMA_VERSION,
                                         self.MAX_SCHEMA_VERSION)
@ddt.ddt
class ValidateProviderConfigTestCases(base.BaseTestCase):
    """Tests for provider_config._validate_provider_config."""

    @ddt.unpack
    @ddt.file_data('provider_config_data/validate_provider_good_config.yaml')
    def test__validate_provider_good_config(self, sample):
        # Valid configs must pass validation without raising.
        provider_config._validate_provider_config(sample, "fake_path")

    @ddt.unpack
    @ddt.file_data('provider_config_data/validate_provider_bad_config.yaml')
    def test__validate_provider_bad_config(self, sample, expected_messages):
        # Invalid configs must raise with one of the expected messages.
        actual_msg = self.assertRaises(
            nova_exc.ProviderConfigException,
            provider_config._validate_provider_config,
            sample, 'fake_path').message

        self.assertIn(actual_msg, expected_messages)

    @mock.patch.object(provider_config, 'LOG')
    def test__validate_provider_config_one_noop_provider(self, mock_log):
        # A provider with empty additional inventories/traits is a no-op:
        # it should be dropped from the valid set and a warning logged,
        # while providers with real additions are kept.
        expected = {
            "providers": [
                {
                    "identification": {"name": "NAME1"},
                    "inventories": {
                        "additional": [
                            {"CUSTOM_RESOURCE_CLASS": {}}
                        ]
                    }
                },
                {
                    "identification": {"name": "NAME_453764"},
                    "inventories": {
                        "additional": []
                    },
                    "traits": {
                        "additional": []
                    }
                }
            ]
        }
        data = copy.deepcopy(expected)

        valid = provider_config._validate_provider_config(data, "fake_path")

        mock_log.warning.assert_called_once_with(
            "Provider NAME_453764 defined in "
            "fake_path has no additional "
            "inventories or traits and will be ignored."
        )
        # assert that _validate_provider_config does not mutate inputs
        self.assertEqual(expected, data)
        # assert that the first entry in the returned tuple is the full set
        # of providers not a copy and is equal to the expected providers.
        self.assertIs(data['providers'][0], valid[0])
        self.assertEqual(expected['providers'][0], valid[0])
class GetProviderConfigsTestCases(base.BaseTestCase):
    @mock.patch.object(provider_config, 'glob')
    def test_get_provider_configs_one_file(self, mock_glob):
        """A single real yaml file on disk is parsed into the expected dict.

        Uses the checked-in example_provider.yaml fixture; only glob is
        mocked, so the actual yaml loading/validation path is exercised.
        """
        expected = {
            "$COMPUTE_NODE": {
                "__source_file": "example_provider.yaml",
                "identification": {
                    "name": "$COMPUTE_NODE"
                },
                "inventories": {
                    "additional": [
                        {
                            "CUSTOM_EXAMPLE_RESOURCE_CLASS": {
                                "total": 100,
                                "reserved": 0,
                                "min_unit": 1,
                                "max_unit": 10,
                                "step_size": 1,
                                "allocation_ratio": 1.0
                            }
                        }
                    ]
                },
                "traits": {
                    "additional": [
                        "CUSTOM_TRAIT_ONE",
                        "CUSTOM_TRAIT2"
                    ]
                }
            }
        }
        # Resolve the fixture path relative to this test module.
        example_file = os.path.join(
            os.path.dirname(os.path.realpath(__file__)),
            'provider_config_data/v1/example_provider.yaml')
        mock_glob.glob.return_value = [example_file]

        actual = provider_config.get_provider_configs('path')

        self.assertEqual(expected, actual)
        mock_glob.glob.assert_called_with('path/*.yaml')
@mock.patch.object(provider_config, 'glob')
@mock.patch.object(provider_config, '_parse_provider_yaml')
def test_get_provider_configs_one_file_uuid_conflict(
self, mock_parser, mock_glob):
# one config file with conflicting identification
providers = [
{"__source_file": "file1.yaml",
"identification": {
"uuid": uuidsentinel.uuid1
},
"inventories": {
"additional": [
{
"CUSTOM_EXAMPLE_RESOURCE_CLASS1": {
"total": 100,
"reserved": 0,
"min_unit": 1,
"max_unit": 10,
"step_size": 1,
"allocation_ratio": 1
}
}
]
},
"traits": {
"additional": [
"CUSTOM_TRAIT1"
]
}
},
{"__source_file": "file1.yaml",
"identification": {
"uuid": uuidsentinel.uuid1
},
"inventories": {
"additional": [
{
"CUSTOM_EXAMPLE_RESOURCE_CLASS2": {
"total": 100,
"reserved": 0,
"min_unit": 1,
"max_unit": 10,
"step_size": 1,
"allocation_ratio": 1
}
}
]
},
"traits": {
"additional": [
"CUSTOM_TRAIT2"
]
}
}
]
mock_parser.side_effect = [{"providers": providers}]
mock_glob.glob.return_value = ['file1.yaml']
# test that correct error is raised and message matches
error = self.assertRaises(nova_exc.ProviderConfigException,
provider_config.get_provider_configs,
'dummy_path').kwargs['error']
self.assertEqual("Provider %s has multiple definitions in source "
"file(s): ['file1.yaml']." % uuidsentinel.uuid1,
error)<|fim▁hole|> @mock.patch.object(provider_config, 'glob')
@mock.patch.object(provider_config, '_parse_provider_yaml')
def test_get_provider_configs_two_files(self, mock_parser, mock_glob):
expected = {
"EXAMPLE_RESOURCE_PROVIDER1": {
"__source_file": "file1.yaml",
"identification": {
"name": "EXAMPLE_RESOURCE_PROVIDER1"
},
"inventories": {
"additional": [
{
"CUSTOM_EXAMPLE_RESOURCE_CLASS1": {
"total": 100,
"reserved": 0,
"min_unit": 1,
"max_unit": 10,
"step_size": 1,
"allocation_ratio": 1
}
}
]
},
"traits": {
"additional": [
"CUSTOM_TRAIT1"
]
}
},
"EXAMPLE_RESOURCE_PROVIDER2": {
"__source_file": "file2.yaml",
"identification": {
"name": "EXAMPLE_RESOURCE_PROVIDER2"
},
"inventories": {
"additional": [
{
"CUSTOM_EXAMPLE_RESOURCE_CLASS2": {
"total": 100,
"reserved": 0,
"min_unit": 1,
"max_unit": 10,
"step_size": 1,
"allocation_ratio": 1
}
}
]
},
"traits": {
"additional": [
"CUSTOM_TRAIT2"
]
}
}
}
mock_parser.side_effect = [
{"providers": [provider]} for provider in expected.values()]
mock_glob_return = ['file1.yaml', 'file2.yaml']
mock_glob.glob.return_value = mock_glob_return
dummy_path = 'dummy_path'
actual = provider_config.get_provider_configs(dummy_path)
mock_glob.glob.assert_called_once_with(os.path.join(dummy_path,
'*.yaml'))
mock_parser.assert_has_calls([mock.call(param)
for param in mock_glob_return])
self.assertEqual(expected, actual)
@mock.patch.object(provider_config, 'glob')
@mock.patch.object(provider_config, '_parse_provider_yaml')
def test_get_provider_configs_two_files_name_conflict(self, mock_parser,
mock_glob):
# two config files with conflicting identification
configs = {
"EXAMPLE_RESOURCE_PROVIDER1": {
"__source_file": "file1.yaml",
"identification": {
"name": "EXAMPLE_RESOURCE_PROVIDER1"
},
"inventories": {
"additional": [
{
"CUSTOM_EXAMPLE_RESOURCE_CLASS1": {
"total": 100,
"reserved": 0,
"min_unit": 1,
"max_unit": 10,
"step_size": 1,
"allocation_ratio": 1
}
}
]
},
"traits": {
"additional": [
"CUSTOM_TRAIT1"
]
}
},
"EXAMPLE_RESOURCE_PROVIDER2": {
"__source_file": "file2.yaml",
"identification": {
"name": "EXAMPLE_RESOURCE_PROVIDER1"
},
"inventories": {
"additional": [
{
"CUSTOM_EXAMPLE_RESOURCE_CLASS1": {
"total": 100,
"reserved": 0,
"min_unit": 1,
"max_unit": 10,
"step_size": 1,
"allocation_ratio": 1
}
}
]
},
"traits": {
"additional": [
"CUSTOM_TRAIT1"
]
}
}
}
mock_parser.side_effect = [{"providers": [configs[provider]]}
for provider in configs]
mock_glob.glob.return_value = ['file1.yaml', 'file2.yaml']
# test that correct error is raised and message matches
error = self.assertRaises(nova_exc.ProviderConfigException,
provider_config.get_provider_configs,
'dummy_path').kwargs['error']
self.assertEqual("Provider EXAMPLE_RESOURCE_PROVIDER1 has multiple "
"definitions in source file(s): "
"['file1.yaml', 'file2.yaml'].", error)
    @mock.patch.object(provider_config, 'LOG')
    def test_get_provider_configs_no_configs(self, mock_log):
        """A path with no yaml files yields an empty dict and an info log."""
        path = "invalid_path!@#"

        actual = provider_config.get_provider_configs(path)

        self.assertEqual({}, actual)
        mock_log.info.assert_called_once_with(
            "No provider configs found in %s. If files are present, "
            "ensure the Nova process has access.", path)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.